@danalexilewis/taskgraph 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/LICENSE +21 -0
  2. package/README.md +47 -0
  3. package/dist/cli/block.js +114 -0
  4. package/dist/cli/context.js +139 -0
  5. package/dist/cli/done.js +98 -0
  6. package/dist/cli/edge.js +99 -0
  7. package/dist/cli/export.js +97 -0
  8. package/dist/cli/import.js +123 -0
  9. package/dist/cli/index.js +78 -0
  10. package/dist/cli/init.js +106 -0
  11. package/dist/cli/next.js +97 -0
  12. package/dist/cli/note.js +72 -0
  13. package/dist/cli/plan.js +108 -0
  14. package/dist/cli/portfolio.js +159 -0
  15. package/dist/cli/setup.js +142 -0
  16. package/dist/cli/show.js +142 -0
  17. package/dist/cli/split.js +191 -0
  18. package/dist/cli/start.js +94 -0
  19. package/dist/cli/status.js +149 -0
  20. package/dist/cli/task.js +92 -0
  21. package/dist/cli/utils.js +74 -0
  22. package/dist/db/commit.js +18 -0
  23. package/dist/db/connection.js +22 -0
  24. package/dist/db/escape.js +6 -0
  25. package/dist/db/migrate.js +159 -0
  26. package/dist/db/query.js +102 -0
  27. package/dist/domain/errors.js +33 -0
  28. package/dist/domain/invariants.js +103 -0
  29. package/dist/domain/types.js +120 -0
  30. package/dist/export/dot.js +21 -0
  31. package/dist/export/graph-data.js +41 -0
  32. package/dist/export/markdown.js +108 -0
  33. package/dist/export/mermaid.js +27 -0
  34. package/dist/plan-import/importer.js +155 -0
  35. package/dist/plan-import/parser.js +213 -0
  36. package/dist/template/.cursor/memory.md +14 -0
  37. package/dist/template/.cursor/rules/memory.mdc +11 -0
  38. package/dist/template/.cursor/rules/plan-authoring.mdc +42 -0
  39. package/dist/template/.cursor/rules/session-start.mdc +18 -0
  40. package/dist/template/.cursor/rules/taskgraph-workflow.mdc +35 -0
  41. package/dist/template/AGENT.md +73 -0
  42. package/dist/template/docs/backend.md +33 -0
  43. package/dist/template/docs/frontend.md +31 -0
  44. package/dist/template/docs/infra.md +26 -0
  45. package/dist/template/docs/skills/README.md +14 -0
  46. package/dist/template/docs/skills/plan-authoring.md +38 -0
  47. package/dist/template/docs/skills/refactoring-safely.md +21 -0
  48. package/dist/template/docs/skills/taskgraph-lifecycle-execution.md +23 -0
  49. package/package.json +47 -0
@@ -0,0 +1,94 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.startCommand = startCommand;
4
+ const uuid_1 = require("uuid");
5
+ const commit_1 = require("../db/commit");
6
+ const utils_1 = require("./utils"); // Import Config
7
+ const invariants_1 = require("../domain/invariants");
8
+ const neverthrow_1 = require("neverthrow");
9
+ const errors_1 = require("../domain/errors");
10
+ const query_1 = require("../db/query");
11
+ const connection_1 = require("../db/connection");
12
+ const escape_1 = require("../db/escape");
13
/**
 * Registers `start <taskId>`: claims a task and moves it to "doing".
 *
 * Flow: read config -> look up the task's current status -> guard against
 * double-claims and invalid transitions -> update status -> append a
 * "started" event -> create a Dolt commit. On failure the error is printed
 * to stderr (plus a JSON payload on stdout when --json) and the process
 * exits with code 1.
 */
function startCommand(program) {
    program
        .command("start")
        .description("Start a task")
        .argument("<taskId>", "ID of the task to start")
        .option("--agent <name>", "Agent identifier for multi-agent visibility")
        .option("--force", "Override claim when task is already being worked")
        .action(async (taskId, options, cmd) => {
        const agentName = options.agent ?? "default";
        const force = options.force ?? false;
        const result = await (0, utils_1.readConfig)().asyncAndThen((config) => {
            const currentTimestamp = (0, query_1.now)();
            const q = (0, query_1.query)(config.doltRepoPath);
            return q
                .select("task", {
                columns: ["status"],
                where: { task_id: taskId },
            })
                .andThen((currentStatusResult) => {
                if (currentStatusResult.length === 0) {
                    return (0, neverthrow_1.err)((0, errors_1.buildError)(errors_1.ErrorCode.TASK_NOT_FOUND, `Task with ID ${taskId} not found.`));
                }
                const currentStatus = currentStatusResult[0].status;
                // Claim guard: if the task is already "doing" and --force was not
                // given, report who holds the claim (agent from the most recent
                // "started" event) instead of silently re-claiming.
                if (currentStatus === "doing" && !force) {
                    const sql = `SELECT body FROM \`event\` WHERE task_id = '${(0, escape_1.sqlEscape)(taskId)}' AND kind = 'started' ORDER BY created_at DESC LIMIT 1`;
                    return (0, connection_1.doltSql)(sql, config.doltRepoPath).andThen((rows) => {
                        const row = rows[0];
                        const raw = row?.body;
                        // The event body may come back as a JSON string or as an
                        // already-parsed object, depending on the driver output.
                        const parsed = raw != null
                            ? typeof raw === "string"
                                ? JSON.parse(raw)
                                : raw
                            : null;
                        const claimant = parsed?.agent ?? "unknown";
                        return (0, neverthrow_1.err)((0, errors_1.buildError)(errors_1.ErrorCode.TASK_ALREADY_CLAIMED, `Task is being worked by ${claimant}. Use --force to override.`));
                    });
                }
                if (currentStatus === "todo") {
                    // todo -> doing: verify no incomplete blocking dependency remains.
                    return (0, invariants_1.checkRunnable)(taskId, config.doltRepoPath);
                }
                if (currentStatus === "doing" && force) {
                    return (0, neverthrow_1.okAsync)(undefined); // Bypass claim guard
                }
                // Any other status: defer to the generic status-transition rules.
                const tr = (0, invariants_1.checkValidTransition)(currentStatus, "doing");
                return tr.isOk() ? (0, neverthrow_1.okAsync)(undefined) : (0, neverthrow_1.errAsync)(tr.error);
            })
                .andThen(() => q.update("task", { status: "doing", updated_at: currentTimestamp }, { task_id: taskId }))
                .andThen(() => q.insert("event", {
                event_id: (0, uuid_1.v4)(),
                task_id: taskId,
                kind: "started",
                body: (0, query_1.jsonObj)({
                    agent: agentName,
                    timestamp: currentTimestamp,
                }),
                created_at: currentTimestamp,
            }))
                .andThen(() => (0, commit_1.doltCommit)(`task: start ${taskId}`, config.doltRepoPath, cmd.parent?.opts().noCommit))
                .map(() => ({ task_id: taskId, status: "doing" }));
        });
        result.match((data) => {
            const resultData = data;
            if (!cmd.parent?.opts().json) {
                console.log(`Task ${resultData.task_id} started.`);
            }
            else {
                console.log(JSON.stringify(resultData, null, 2));
            }
        }, (error) => {
            console.error(`Error starting task: ${error.message}`);
            if (cmd.parent?.opts().json) {
                console.log(JSON.stringify({
                    status: "error",
                    code: error.code,
                    message: error.message,
                    cause: error.cause,
                }));
            }
            process.exit(1);
        });
    });
}
@@ -0,0 +1,149 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.statusCommand = statusCommand;
4
+ const utils_1 = require("./utils");
5
+ const query_1 = require("../db/query");
6
+ const escape_1 = require("../db/escape");
7
// Wrap an identifier in backticks so it can be used safely as a
// quoted name inside a SQL statement.
function backtickWrap(name) {
    return "`" + name + "`";
}
10
/**
 * Registers `status`: prints a quick overview — plan count, task counts by
 * status, currently-active ("doing") tasks, and the next two runnable tasks.
 *
 * Optional filters: --plan (UUID or title), --domain, --skill, --change-type.
 * With the root --json flag, the aggregated object is printed as JSON instead.
 * On failure prints to stderr (plus JSON on stdout when --json) and exits 1.
 */
function statusCommand(program) {
    program
        .command("status")
        .description("Quick overview: plans count, tasks by status, next runnable tasks")
        .option("--plan <planId>", "Filter by plan ID or title")
        .option("--domain <domain>", "Filter by task domain")
        .option("--skill <skill>", "Filter by task skill")
        .option("--change-type <type>", "Filter by change type: create, modify, refactor, fix, investigate, test, document")
        .action(async (options, cmd) => {
        const result = await (0, utils_1.readConfig)().asyncAndThen((config) => {
            const q = (0, query_1.query)(config.doltRepoPath);
            // --plan accepts either a UUID (match plan_id) or a title string.
            const isUUID = options.plan &&
                /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/.test(options.plan);
            // WHERE clause for queries on `plan` alone (no alias).
            let planWhere = "";
            if (options.plan) {
                if (isUUID) {
                    planWhere = `WHERE ${backtickWrap("plan_id")} = '${(0, escape_1.sqlEscape)(options.plan)}'`;
                }
                else {
                    planWhere = `WHERE ${backtickWrap("title")} = '${(0, escape_1.sqlEscape)(options.plan)}'`;
                }
            }
            // Extra AND-clauses for --domain / --skill (junction tables) and
            // --change-type (column on task); empty string when no filter given.
            const dimFilter = (options.domain
                ? ` AND EXISTS (SELECT 1 FROM \`task_domain\` td WHERE td.task_id = t.task_id AND td.domain = '${(0, escape_1.sqlEscape)(options.domain)}')`
                : "") +
                (options.skill
                    ? ` AND EXISTS (SELECT 1 FROM \`task_skill\` ts WHERE ts.task_id = t.task_id AND ts.skill = '${(0, escape_1.sqlEscape)(options.skill)}')`
                    : "") +
                (options.changeType
                    ? ` AND t.\`change_type\` = '${(0, escape_1.sqlEscape)(options.changeType)}'`
                    : "");
            // WHERE clause for queries that join plan (aliased as p).
            const planFilter = options.plan
                ? isUUID
                    ? `WHERE p.plan_id = '${(0, escape_1.sqlEscape)(options.plan)}'`
                    : `WHERE p.title = '${(0, escape_1.sqlEscape)(options.plan)}'`
                : "";
            // With dimension filters we must join task, so count distinct plans;
            // otherwise a plain COUNT(*) on plan suffices. "WHERE 1=1" keeps the
            // appended AND-clauses syntactically valid when no plan filter exists.
            const plansCountSql = dimFilter
                ? `SELECT COUNT(DISTINCT p.plan_id) AS count FROM \`plan\` p JOIN \`task\` t ON t.plan_id = p.plan_id ${planFilter || "WHERE 1=1"} ${dimFilter}`
                : `SELECT COUNT(*) AS count FROM \`plan\` ${planWhere}`;
            const statusCountsSql = `SELECT t.status, COUNT(*) AS count FROM \`task\` t JOIN \`plan\` p ON t.plan_id = p.plan_id ${planFilter || "WHERE 1=1"} ${dimFilter} GROUP BY t.status`;
            // Next runnable: todo tasks with zero unfinished "blocks" predecessors,
            // highest-priority plans first, oldest tasks first, capped at 2.
            const nextSql = `
      SELECT t.task_id, t.title, p.title as plan_title
      FROM \`task\` t
      JOIN \`plan\` p ON t.plan_id = p.plan_id
      WHERE t.status = 'todo'
      AND (SELECT COUNT(*) FROM \`edge\` e
           JOIN \`task\` bt ON e.from_task_id = bt.task_id
           WHERE e.to_task_id = t.task_id AND e.type = 'blocks'
           AND bt.status NOT IN ('done','canceled')) = 0
      ${options.plan ? (isUUID ? `AND p.plan_id = '${(0, escape_1.sqlEscape)(options.plan)}'` : `AND p.title = '${(0, escape_1.sqlEscape)(options.plan)}'`) : ""}
      ${dimFilter}
      ORDER BY p.priority DESC, t.created_at ASC
      LIMIT 2
    `;
            // Active work: "doing" tasks joined to their latest "started" event
            // so the claiming agent and claim time can be displayed.
            const activeWorkSql = `
      SELECT t.task_id, t.title, p.title as plan_title, e.body, e.created_at
      FROM \`task\` t
      JOIN \`plan\` p ON t.plan_id = p.plan_id
      JOIN \`event\` e ON e.task_id = t.task_id AND e.kind = 'started'
      WHERE t.status = 'doing'
      AND e.created_at = (
        SELECT MAX(e2.created_at) FROM \`event\` e2
        WHERE e2.task_id = t.task_id AND e2.kind = 'started'
      )
      ${options.plan ? (isUUID ? `AND p.plan_id = '${(0, escape_1.sqlEscape)(options.plan)}'` : `AND p.title = '${(0, escape_1.sqlEscape)(options.plan)}'`) : ""}
      ${dimFilter}
      ORDER BY e.created_at DESC
    `;
            // Run the four queries sequentially and aggregate into one object.
            return q.raw(plansCountSql).andThen((plansRes) => {
                const plansCount = plansRes[0]?.count ?? 0;
                return q
                    .raw(statusCountsSql)
                    .andThen((statusRows) => {
                    const statusCounts = {};
                    statusRows.forEach((r) => {
                        statusCounts[r.status] = r.count;
                    });
                    return q
                        .raw(nextSql)
                        .andThen((nextTasks) => q
                        .raw(activeWorkSql)
                        .map((activeWork) => ({
                        plansCount,
                        statusCounts,
                        nextTasks,
                        activeWork,
                    })));
                });
            });
        });
        result.match((data) => {
            const d = data;
            if (!(0, utils_1.rootOpts)(cmd).json) {
                console.log(`Plans: ${d.plansCount}`);
                // Fixed display order; statuses with zero tasks are omitted.
                const statusOrder = [
                    "todo",
                    "doing",
                    "blocked",
                    "done",
                    "canceled",
                ];
                statusOrder.forEach((s) => {
                    const count = d.statusCounts[s] ?? 0;
                    if (count > 0)
                        console.log(`  ${s}: ${count}`);
                });
                if (d.activeWork.length > 0) {
                    console.log("Active work:");
                    d.activeWork.forEach((w) => {
                        // Event body may be a JSON string or an already-parsed object.
                        const body = typeof w.body === "string"
                            ? JSON.parse(w.body)
                            : w.body;
                        const agent = body?.agent ?? "unknown";
                        console.log(`  ${w.task_id} ${w.title} (${w.plan_title}) [${agent}] ${w.created_at}`);
                    });
                }
                if (d.nextTasks.length > 0) {
                    console.log("Next runnable:");
                    d.nextTasks.forEach((t) => {
                        console.log(`  ${t.task_id} ${t.title} (${t.plan_title})`);
                    });
                }
            }
            else {
                console.log(JSON.stringify(d, null, 2));
            }
        }, (error) => {
            console.error(`Error fetching status: ${error.message}`);
            if ((0, utils_1.rootOpts)(cmd).json) {
                console.log(JSON.stringify({
                    status: "error",
                    code: error.code,
                    message: error.message,
                    cause: error.cause,
                }));
            }
            process.exit(1);
        });
    });
}
@@ -0,0 +1,92 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.taskCommand = taskCommand;
4
+ const commander_1 = require("commander");
5
+ const uuid_1 = require("uuid");
6
+ const commit_1 = require("../db/commit");
7
+ const utils_1 = require("./utils");
8
+ const neverthrow_1 = require("neverthrow"); // Import errAsync
9
+ const errors_1 = require("../domain/errors");
10
+ const query_1 = require("../db/query");
11
/**
 * Registers the `task` command group on the root program.
 * Currently the only subcommand is `task new` (see taskNewCommand below).
 */
function taskCommand(program) {
    program
        .command("task")
        .description("Manage tasks")
        .addCommand(taskNewCommand());
}
17
/**
 * Builds the `task new <title>` subcommand: inserts a task row, appends a
 * "created" event, then makes a Dolt commit.
 *
 * --plan is required; --feature, --area, --acceptance are optional.
 * --acceptance must be valid JSON; it is validated here and the RAW string
 * (wrapped via jsonObj under key "val") is what gets stored.
 * On failure prints to stderr (plus JSON on stdout when --json) and exits 1.
 */
function taskNewCommand() {
    return new commander_1.Command("new")
        .description("Create a new task")
        .argument("<title>", "Title of the task")
        .requiredOption("--plan <planId>", "ID of the parent plan")
        .option("--feature <featureKey>", "Feature key for portfolio analysis")
        .option("--area <area>", "Area of the task (e.g., frontend, backend)")
        .option("--acceptance <json>", "JSON array of acceptance checks")
        .action(async (title, options, cmd) => {
        const result = await (0, utils_1.readConfig)().asyncAndThen((config) => {
            const task_id = (0, uuid_1.v4)();
            const currentTimestamp = (0, query_1.now)();
            const q = (0, query_1.query)(config.doltRepoPath);
            let acceptanceJson = null;
            if (options.acceptance) {
                try {
                    // Validate only: the parsed value is intentionally discarded
                    // (previously it was bound to an unused local). The raw string
                    // is what gets persisted below.
                    JSON.parse(options.acceptance);
                    acceptanceJson = (0, query_1.jsonObj)({ val: options.acceptance });
                }
                catch (e) {
                    return (0, neverthrow_1.errAsync)((0, errors_1.buildError)(errors_1.ErrorCode.VALIDATION_FAILED, `Invalid JSON for acceptance criteria: ${options.acceptance}`, e));
                }
            }
            return q
                .insert("task", {
                task_id,
                plan_id: options.plan,
                feature_key: options.feature ?? null,
                title,
                area: options.area ?? null,
                acceptance: acceptanceJson,
                created_at: currentTimestamp,
                updated_at: currentTimestamp,
            })
                .andThen(() => {
                // Audit trail: record a "created" event alongside the task row.
                return q.insert("event", {
                    event_id: (0, uuid_1.v4)(),
                    task_id,
                    kind: "created",
                    body: (0, query_1.jsonObj)({ title }),
                    created_at: currentTimestamp,
                });
            })
                .andThen(() => (0, commit_1.doltCommit)(`task: create ${task_id} - ${title}`, config.doltRepoPath, (0, utils_1.rootOpts)(cmd).noCommit))
                .map(() => ({
                task_id,
                plan_id: options.plan,
                title,
                feature_key: options.feature,
                area: options.area,
            }));
        });
        result.match((data) => {
            const resultData = data;
            if (!(0, utils_1.rootOpts)(cmd).json) {
                console.log(`Task created with ID: ${resultData.task_id} for Plan ID: ${resultData.plan_id}`);
            }
            else {
                console.log(JSON.stringify(resultData, null, 2));
            }
        }, (error) => {
            console.error(`Error creating task: ${error.message}`);
            if ((0, utils_1.rootOpts)(cmd).json) {
                console.log(JSON.stringify({
                    status: "error",
                    code: error.code,
                    message: error.message,
                    cause: error.cause,
                }));
            }
            process.exit(1);
        });
    });
}
@@ -0,0 +1,74 @@
1
+ "use strict";
2
// TypeScript-compiler-emitted CommonJS interop helpers (createBinding /
// setModuleDefault / importStar). Generated code — do not edit by hand.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
35
+ Object.defineProperty(exports, "__esModule", { value: true });
36
+ exports.rootOpts = rootOpts;
37
+ exports.readConfig = readConfig;
38
+ exports.writeConfig = writeConfig;
39
+ const fs_1 = require("fs");
40
+ const path = __importStar(require("path"));
41
+ const neverthrow_1 = require("neverthrow");
42
+ const errors_1 = require("../domain/errors");
43
+ const TASKGRAPH_DIR = ".taskgraph";
44
/**
 * Walk up the commander parent chain to the root command and return its
 * global options (e.g. --json, --no-commit). Returns {} when no options
 * are available.
 */
function rootOpts(cmd) {
    if (cmd?.parent) {
        return rootOpts(cmd.parent);
    }
    return cmd?.opts?.() ?? {};
}
51
+ const CONFIG_FILE = path.join(TASKGRAPH_DIR, "config.json");
52
/**
 * Loads and parses .taskgraph/config.json from basePath (default: cwd).
 * Returns ok(config), err(CONFIG_NOT_FOUND) when the file is missing, or
 * err(CONFIG_PARSE_FAILED) when it cannot be read/parsed.
 */
function readConfig(basePath) {
    const root = basePath ?? process.cwd();
    const configPath = path.join(root, CONFIG_FILE);
    if (!(0, fs_1.existsSync)(configPath)) {
        return (0, neverthrow_1.err)((0, errors_1.buildError)(errors_1.ErrorCode.CONFIG_NOT_FOUND, `Config file not found at ${configPath}. Please run 'tg init' first.`));
    }
    try {
        const raw = (0, fs_1.readFileSync)(configPath, "utf-8");
        const parsed = JSON.parse(raw);
        return (0, neverthrow_1.ok)(parsed);
    }
    catch (e) {
        return (0, neverthrow_1.err)((0, errors_1.buildError)(errors_1.ErrorCode.CONFIG_PARSE_FAILED, `Failed to parse config file at ${configPath}`, e));
    }
}
65
/**
 * Serializes the config (pretty-printed JSON) to .taskgraph/config.json
 * under basePath (default: cwd). Returns ok(undefined) or an err result.
 */
function writeConfig(config, basePath) {
    const root = basePath ?? process.cwd();
    const configPath = path.join(root, CONFIG_FILE);
    try {
        const serialized = JSON.stringify(config, null, 2);
        (0, fs_1.writeFileSync)(configPath, serialized);
        return (0, neverthrow_1.ok)(undefined);
    }
    catch (e) {
        // NOTE(review): this reuses CONFIG_PARSE_FAILED for a *write* failure;
        // a dedicated write-error code may be clearer — confirm against errors.js.
        return (0, neverthrow_1.err)((0, errors_1.buildError)(errors_1.ErrorCode.CONFIG_PARSE_FAILED, `Failed to write config file to ${configPath}`, e));
    }
}
@@ -0,0 +1,18 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.doltCommit = doltCommit;
4
+ const execa_1 = require("execa");
5
+ const neverthrow_1 = require("neverthrow");
6
+ const errors_1 = require("../domain/errors");
7
/**
 * Stages all changes (`dolt add -A`) and creates a Dolt commit with the
 * given message (`--allow-empty` so no-op commits still succeed).
 *
 * When noCommit is true (dry run / --no-commit), resolves immediately with
 * ok(undefined) and never touches the repository. Previously this path
 * wrapped Promise.resolve() in fromPromise with an unreachable
 * "Dry run commit failed" error mapper; okAsync expresses the intent directly.
 */
function doltCommit(msg, repoPath, noCommit = false) {
    if (noCommit) {
        return (0, neverthrow_1.okAsync)(undefined);
    }
    return neverthrow_1.ResultAsync.fromPromise((0, execa_1.execa)("dolt", ["add", "-A"], { cwd: repoPath }), (e) => (0, errors_1.buildError)(errors_1.ErrorCode.DB_COMMIT_FAILED, `Dolt add failed before commit: ${msg}`, e))
        .andThen(() => {
        return neverthrow_1.ResultAsync.fromPromise((0, execa_1.execa)("dolt", ["commit", "-m", msg, "--allow-empty"], {
            cwd: repoPath,
        }), (e) => (0, errors_1.buildError)(errors_1.ErrorCode.DB_COMMIT_FAILED, `Dolt commit failed: ${msg}`, e));
    })
        .map(() => undefined);
}
@@ -0,0 +1,22 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.doltSql = doltSql;
4
+ const execa_1 = require("execa");
5
+ const neverthrow_1 = require("neverthrow");
6
+ const errors_1 = require("../domain/errors");
7
/**
 * Runs a single SQL statement through the `dolt sql` CLI (JSON output mode)
 * and resolves with the result rows.
 *
 * - The dolt binary can be overridden with the DOLT_PATH env var.
 * - Empty stdout (DML such as INSERT/UPDATE/DELETE) maps to ok([]).
 * - Process failure maps to DB_QUERY_FAILED; unparseable stdout maps to
 *   DB_PARSE_FAILED.
 */
function doltSql(query, repoPath) {
    return neverthrow_1.ResultAsync.fromPromise((0, execa_1.execa)(process.env.DOLT_PATH || "dolt", ["sql", "-q", query, "-r", "json"], {
        cwd: repoPath,
    }), (e) => (0, errors_1.buildError)(errors_1.ErrorCode.DB_QUERY_FAILED, `Dolt SQL query failed: ${query}`, e)).andThen((result) => {
        const out = (result.stdout || "").trim();
        if (!out)
            return (0, neverthrow_1.ok)([]); // DML (INSERT/UPDATE/DELETE) returns no JSON
        try {
            const parsed = JSON.parse(out);
            // Dolt's JSON output wraps rows in a {"rows": [...]} envelope.
            return (0, neverthrow_1.ok)(parsed?.rows ?? []);
        }
        catch (e) {
            return (0, neverthrow_1.err)((0, errors_1.buildError)(errors_1.ErrorCode.DB_PARSE_FAILED, `Failed to parse Dolt SQL output: ${result.stdout}`, e));
        }
    });
}
@@ -0,0 +1,6 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.sqlEscape = sqlEscape;
4
// Escape a string for embedding inside a single-quoted SQL literal:
// double every backslash, double every single quote, and strip NUL bytes.
function sqlEscape(value) {
    return value
        .split("\\").join("\\\\")
        .split("'").join("''")
        .split("\0").join("");
}
@@ -0,0 +1,159 @@
1
+ "use strict";
2
// TypeScript-compiler-emitted CommonJS interop helpers (createBinding /
// setModuleDefault / importStar). Generated code — do not edit by hand.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
35
+ Object.defineProperty(exports, "__esModule", { value: true });
36
+ exports.applyPlanRichFieldsMigration = applyPlanRichFieldsMigration;
37
+ exports.applyTaskDimensionsMigration = applyTaskDimensionsMigration;
38
+ exports.applyTaskSuggestedChangesMigration = applyTaskSuggestedChangesMigration;
39
+ exports.applyTaskDomainSkillJunctionMigration = applyTaskDomainSkillJunctionMigration;
40
+ exports.ensureMigrations = ensureMigrations;
41
+ exports.applyMigrations = applyMigrations;
42
+ const connection_1 = require("./connection");
43
+ const commit_1 = require("./commit");
44
+ const fs = __importStar(require("fs"));
45
+ const execa_1 = require("execa");
46
+ const neverthrow_1 = require("neverthrow");
47
+ const errors_1 = require("../domain/errors");
48
// Base schema (v1): plan, task, edge, event, decision. Every statement uses
// IF NOT EXISTS, so applyMigrations is idempotent. Columns/tables added after
// v1 live in the apply*Migration functions below, not here.
const SCHEMA = [
    "CREATE TABLE IF NOT EXISTS `plan` (plan_id CHAR(36) PRIMARY KEY, title VARCHAR(255) NOT NULL, intent TEXT NOT NULL, status ENUM(\'draft\',\'active\',\'paused\',\'done\',\'abandoned\') DEFAULT \'draft\', priority INT DEFAULT 0, source_path VARCHAR(512) NULL, source_commit VARCHAR(64) NULL, created_at DATETIME NOT NULL, updated_at DATETIME NOT NULL);",
    "CREATE TABLE IF NOT EXISTS `task` (task_id CHAR(36) PRIMARY KEY, plan_id CHAR(36) NOT NULL, feature_key VARCHAR(64) NULL, title VARCHAR(255) NOT NULL, intent TEXT NULL, scope_in TEXT NULL, scope_out TEXT NULL, acceptance JSON NULL, status ENUM(\'todo\',\'doing\',\'blocked\',\'done\',\'canceled\') DEFAULT \'todo\', owner ENUM('human','agent') DEFAULT 'agent', area VARCHAR(64) NULL, risk ENUM('low','medium','high') DEFAULT 'low', estimate_mins INT NULL, created_at DATETIME NOT NULL, updated_at DATETIME NOT NULL, external_key VARCHAR(128) NULL UNIQUE, FOREIGN KEY (plan_id) REFERENCES `plan`(plan_id));",
    "CREATE TABLE IF NOT EXISTS `edge` (from_task_id CHAR(36) NOT NULL, to_task_id CHAR(36) NOT NULL, type ENUM(\'blocks\',\'relates\') DEFAULT \'blocks\', reason TEXT NULL, PRIMARY KEY (from_task_id, to_task_id, type), FOREIGN KEY (from_task_id) REFERENCES `task`(task_id), FOREIGN KEY (to_task_id) REFERENCES `task`(task_id));",
    "CREATE TABLE IF NOT EXISTS `event` (event_id CHAR(36) PRIMARY KEY, task_id CHAR(36) NOT NULL, kind ENUM(\'created\',\'started\',\'progress\',\'blocked\',\'unblocked\',\'done\',\'split\',\'decision_needed\',\'note\') NOT NULL, body JSON NOT NULL, actor ENUM(\'human\',\'agent\') DEFAULT \'agent\', created_at DATETIME NOT NULL, FOREIGN KEY (task_id) REFERENCES `task`(task_id));",
    "CREATE TABLE IF NOT EXISTS `decision` (decision_id CHAR(36) PRIMARY KEY, plan_id CHAR(36) NOT NULL, task_id CHAR(36) NULL, summary VARCHAR(255) NOT NULL, context TEXT NOT NULL, options JSON NULL, decision TEXT NOT NULL, consequences TEXT NULL, source_ref VARCHAR(512) NULL, created_at DATETIME NOT NULL, FOREIGN KEY (plan_id) REFERENCES `plan`(plan_id), FOREIGN KEY (task_id) REFERENCES `task`(task_id));",
];
55
/**
 * Returns true if the task table has the given column.
 * columnName is only ever called with internal literals, so it is
 * interpolated without escaping.
 */
function taskColumnExists(repoPath, columnName) {
    const probe = "SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = 'task' AND COLUMN_NAME = '" + columnName + "' LIMIT 1";
    return (0, connection_1.doltSql)(probe, repoPath).map((rows) => rows.length > 0);
}
60
/**
 * Returns true if the plan table has the given column.
 * columnName is only ever called with internal literals, so it is
 * interpolated without escaping.
 */
function planColumnExists(repoPath, columnName) {
    const probe = "SELECT 1 FROM information_schema.COLUMNS WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = 'plan' AND COLUMN_NAME = '" + columnName + "' LIMIT 1";
    return (0, connection_1.doltSql)(probe, repoPath).map((rows) => rows.length > 0);
}
64
+ }
65
/** Add file_tree, risks, tests columns to plan table if missing (idempotent). */
function applyPlanRichFieldsMigration(repoPath, noCommit = false) {
    return planColumnExists(repoPath, "file_tree").andThen((hasFileTree) => {
        // file_tree is the sentinel: if it exists, risks/tests were added in
        // the same ALTER, so the whole migration has already run.
        if (hasFileTree)
            return neverthrow_1.ResultAsync.fromSafePromise(Promise.resolve());
        const alter = "ALTER TABLE `plan` ADD COLUMN `file_tree` TEXT NULL, ADD COLUMN `risks` JSON NULL, ADD COLUMN `tests` JSON NULL";
        return (0, connection_1.doltSql)(alter, repoPath)
            .map(() => undefined)
            .andThen(() => (0, commit_1.doltCommit)("db: add plan rich fields (file_tree, risks, tests)", repoPath, noCommit))
            .map(() => undefined);
    });
}
77
/** Add domain, skill, change_type columns to task table if missing (idempotent). */
function applyTaskDimensionsMigration(repoPath, noCommit = false) {
    return tableExists(repoPath, "task_domain").andThen((hasJunction) => {
        // If the junction tables already exist, the later migration has run and
        // these single-valued columns were intentionally dropped — do nothing.
        if (hasJunction)
            return neverthrow_1.ResultAsync.fromSafePromise(Promise.resolve()); // Already migrated to junction tables
        return taskColumnExists(repoPath, "domain").andThen((hasDomain) => {
            if (hasDomain)
                return neverthrow_1.ResultAsync.fromSafePromise(Promise.resolve());
            const alter = "ALTER TABLE `task` ADD COLUMN `domain` VARCHAR(64) NULL, ADD COLUMN `skill` VARCHAR(64) NULL, ADD COLUMN `change_type` ENUM('create','modify','refactor','fix','investigate','test','document') NULL";
            return (0, connection_1.doltSql)(alter, repoPath)
                .map(() => undefined)
                .andThen(() => (0, commit_1.doltCommit)("db: add task dimensions (domain, skill, change_type)", repoPath, noCommit))
                .map(() => undefined);
        });
    });
}
93
/** Add suggested_changes column to task table if missing (idempotent). */
function applyTaskSuggestedChangesMigration(repoPath, noCommit = false) {
    return taskColumnExists(repoPath, "suggested_changes").andThen((hasCol) => {
        if (hasCol)
            return neverthrow_1.ResultAsync.fromSafePromise(Promise.resolve());
        const alter = "ALTER TABLE `task` ADD COLUMN `suggested_changes` TEXT NULL";
        return (0, connection_1.doltSql)(alter, repoPath)
            .map(() => undefined)
            .andThen(() => (0, commit_1.doltCommit)("db: add task suggested_changes column", repoPath, noCommit))
            .map(() => undefined);
    });
}
105
/** Returns true if the table exists in the current database. */
function tableExists(repoPath, tableName) {
    const probe = "SELECT 1 FROM information_schema.TABLES WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = '" + tableName + "' LIMIT 1";
    return (0, connection_1.doltSql)(probe, repoPath).map((rows) => rows.length > 0);
}
110
/** Replace task.domain/task.skill with task_domain and task_skill junction tables; migrate data and drop columns. */
function applyTaskDomainSkillJunctionMigration(repoPath, noCommit = false) {
    return tableExists(repoPath, "task_domain").andThen((exists) => {
        // task_domain existing means the whole migration already ran.
        if (exists)
            return neverthrow_1.ResultAsync.fromSafePromise(Promise.resolve());
        // 1) Create both junction tables.
        return (0, connection_1.doltSql)(`CREATE TABLE \`task_domain\` (task_id CHAR(36) NOT NULL, domain VARCHAR(64) NOT NULL, PRIMARY KEY (task_id, domain), FOREIGN KEY (task_id) REFERENCES \`task\`(task_id))`, repoPath)
            .andThen(() => (0, connection_1.doltSql)(`CREATE TABLE \`task_skill\` (task_id CHAR(36) NOT NULL, skill VARCHAR(64) NOT NULL, PRIMARY KEY (task_id, skill), FOREIGN KEY (task_id) REFERENCES \`task\`(task_id))`, repoPath))
            // 2) Copy data from the legacy single-valued columns, if they exist.
            .andThen(() => taskColumnExists(repoPath, "domain").andThen((hasDomain) => {
            if (!hasDomain)
                return neverthrow_1.ResultAsync.fromSafePromise(Promise.resolve());
            return (0, connection_1.doltSql)("INSERT INTO `task_domain` (task_id, domain) SELECT task_id, domain FROM `task` WHERE domain IS NOT NULL", repoPath).andThen(() => (0, connection_1.doltSql)("INSERT INTO `task_skill` (task_id, skill) SELECT task_id, skill FROM `task` WHERE skill IS NOT NULL", repoPath));
        }))
            // 3) Drop the legacy columns (re-checked in case step 2 was skipped).
            .andThen(() => taskColumnExists(repoPath, "domain").andThen((hasDomain) => {
            if (!hasDomain)
                return neverthrow_1.ResultAsync.fromSafePromise(Promise.resolve());
            return (0, connection_1.doltSql)("ALTER TABLE `task` DROP COLUMN `domain`, DROP COLUMN `skill`", repoPath);
        }))
            .andThen(() => (0, commit_1.doltCommit)("db: task_domain/task_skill junction tables; drop task.domain/task.skill", repoPath, noCommit))
            .map(() => undefined);
    });
}
131
/** Chains all idempotent migrations. Safe to run on every command. */
function ensureMigrations(repoPath, noCommit = false) {
    // Order matters: the dimension columns must exist before the junction
    // migration copies their data and drops them.
    return applyPlanRichFieldsMigration(repoPath, noCommit)
        .andThen(() => applyTaskDimensionsMigration(repoPath, noCommit))
        .andThen(() => applyTaskSuggestedChangesMigration(repoPath, noCommit))
        .andThen(() => applyTaskDomainSkillJunctionMigration(repoPath, noCommit))
        .map(() => undefined);
}
139
/**
 * Creates the base schema by running each SCHEMA statement through
 * `dolt sql`, then makes one Dolt commit. Statements use IF NOT EXISTS,
 * so this is idempotent.
 *
 * Fix: the statement is now fed to dolt directly over stdin. Previously it
 * was written to temp_migration.sql, immediately read back, and unlinked —
 * a pointless round-trip that also raced concurrent invocations sharing
 * the same repoPath.
 */
function applyMigrations(repoPath, noCommit = false) {
    return neverthrow_1.ResultAsync.fromPromise((async () => {
        for (const statement of SCHEMA) {
            const res = await neverthrow_1.ResultAsync.fromPromise((0, execa_1.execa)(process.env.DOLT_PATH || "dolt", ["sql"], {
                cwd: repoPath,
                shell: true,
                input: statement,
            }), (e) => (0, errors_1.buildError)(errors_1.ErrorCode.DB_QUERY_FAILED, `Dolt SQL query failed for statement: ${statement}`, e));
            if (res.isErr()) {
                console.error("Migration failed:", statement, res.error);
                throw res.error;
            }
        }
        return undefined;
    })(), (e) => (0, errors_1.buildError)(errors_1.ErrorCode.DB_QUERY_FAILED, "Failed to apply schema migrations", e))
        .andThen(() => (0, commit_1.doltCommit)("db: apply schema migrations", repoPath, noCommit))
        .map(() => undefined);
}