trekoon 0.1.2 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.agents/skills/trekoon/SKILL.md +160 -6
- package/README.md +3 -1
- package/package.json +3 -2
- package/src/commands/events.ts +88 -0
- package/src/commands/help.ts +6 -1
- package/src/commands/migrate.ts +123 -0
- package/src/commands/subtask.ts +121 -2
- package/src/domain/tracker-domain.ts +18 -37
- package/src/runtime/cli-shell.ts +8 -0
- package/src/storage/database.ts +11 -2
- package/src/storage/events-retention.ts +138 -0
- package/src/storage/migrations.ts +340 -19
- package/src/storage/schema.ts +1 -0
- package/src/storage/types.ts +1 -0
- package/src/sync/service.ts +9 -1
|
@@ -7,9 +7,154 @@ description: Use Trekoon to create issues/tasks, plan backlog and sprints, creat
|
|
|
7
7
|
|
|
8
8
|
Trekoon is a local-first issue tracker for epics, tasks, and subtasks.
|
|
9
9
|
|
|
10
|
-
|
|
10
|
+
## CRITICAL: Always Use --toon Flag
|
|
11
|
+
|
|
12
|
+
**Every trekoon command MUST include `--toon` for machine-readable output.**
|
|
13
|
+
|
|
14
|
+
The `--toon` flag outputs structured YAML-like data that is easy to parse. Never run trekoon commands without it.
|
|
15
|
+
|
|
16
|
+
### TOON Output Format
|
|
17
|
+
|
|
18
|
+
All `--toon` output follows this structure:
|
|
19
|
+
|
|
20
|
+
```yaml
|
|
21
|
+
ok: true
|
|
22
|
+
command: task.list
|
|
23
|
+
data:
|
|
24
|
+
tasks[0]:
|
|
25
|
+
id: abc-123
|
|
26
|
+
epicId: epic-456
|
|
27
|
+
title: Implement feature X
|
|
28
|
+
status: todo
|
|
29
|
+
createdAt: 1700000000000
|
|
30
|
+
updatedAt: 1700000000000
|
|
31
|
+
tasks[1]:
|
|
32
|
+
id: def-789
|
|
33
|
+
epicId: epic-456
|
|
34
|
+
title: Write tests
|
|
35
|
+
status: in_progress
|
|
36
|
+
createdAt: 1700000001000
|
|
37
|
+
updatedAt: 1700000001000
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
On error:
|
|
41
|
+
|
|
42
|
+
```yaml
|
|
43
|
+
ok: false
|
|
44
|
+
command: task.show
|
|
45
|
+
data: {}
|
|
46
|
+
error:
|
|
47
|
+
code: not_found
|
|
48
|
+
message: task not found: invalid-id
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
### Key Fields
|
|
52
|
+
|
|
53
|
+
| Field | Meaning |
|
|
54
|
+
|-------|---------|
|
|
55
|
+
| `ok` | `true` if command succeeded, `false` on error |
|
|
56
|
+
| `command` | The command that was executed (e.g., `task.list`, `epic.create`) |
|
|
57
|
+
| `data` | The response payload (tasks, epics, dependencies, etc.) |
|
|
58
|
+
| `error` | Present only on failure, contains `code` and `message` |
|
|
59
|
+
|
|
60
|
+
Use long flags (`--status`, `--description`, etc.) and ALWAYS append `--toon` to every command.
|
|
61
|
+
|
|
62
|
+
## 1) Status Management
|
|
63
|
+
|
|
64
|
+
### Valid Statuses
|
|
65
|
+
|
|
66
|
+
| Status | Meaning |
|
|
67
|
+
|--------|---------|
|
|
68
|
+
| `todo` | Work not started (default for new items) |
|
|
69
|
+
| `in_progress` | Actively being worked on |
|
|
70
|
+
| `done` | Completed successfully |
|
|
71
|
+
|
|
72
|
+
Note: `in-progress` (hyphenated) is equivalent to `in_progress`.
|
|
73
|
+
|
|
74
|
+
### When to Change Status
|
|
75
|
+
|
|
76
|
+
| Transition | When to apply |
|
|
77
|
+
|------------|---------------|
|
|
78
|
+
| `todo → in_progress` | When you START working on a task/subtask/epic |
|
|
79
|
+
| `in_progress → done` | When you COMPLETE the work and it is ready |
|
|
80
|
+
|
|
81
|
+
### Status Change Commands
|
|
82
|
+
|
|
83
|
+
```bash
|
|
84
|
+
trekoon task update <task-id> --status in_progress --toon
|
|
85
|
+
trekoon task update <task-id> --status done --toon
|
|
86
|
+
trekoon subtask update <subtask-id> --status done --toon
|
|
87
|
+
trekoon epic update <epic-id> --status done --toon
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
## 2) Dependency Management
|
|
91
|
+
|
|
92
|
+
Dependencies define what must be completed before a task can start. A task/subtask can depend on other tasks/subtasks.
|
|
93
|
+
|
|
94
|
+
### Commands
|
|
95
|
+
|
|
96
|
+
```bash
|
|
97
|
+
trekoon dep add <source-id> <depends-on-id> --toon
|
|
98
|
+
trekoon dep list <source-id> --toon
|
|
99
|
+
trekoon dep remove <source-id> <depends-on-id> --toon
|
|
100
|
+
```
|
|
101
|
+
|
|
102
|
+
- `<source-id>`: The task/subtask that has the dependency
|
|
103
|
+
- `<depends-on-id>`: The task/subtask that must be completed first
|
|
11
104
|
|
|
12
|
-
|
|
105
|
+
### Checking Dependencies
|
|
106
|
+
|
|
107
|
+
Before starting any task, always check its dependencies:
|
|
108
|
+
|
|
109
|
+
```bash
|
|
110
|
+
trekoon dep list <task-id> --toon
|
|
111
|
+
```
|
|
112
|
+
|
|
113
|
+
The response `data.dependencies` array contains entries with:
|
|
114
|
+
- `sourceId`: the task you're checking
|
|
115
|
+
- `dependsOnId`: what must be done first
|
|
116
|
+
- `dependsOnKind`: "task" or "subtask"
|
|
117
|
+
|
|
118
|
+
### Dependency Rules
|
|
119
|
+
|
|
120
|
+
1. A task with dependencies should only be marked `in_progress` when ALL dependencies have status `done`
|
|
121
|
+
2. Dependencies can only exist between tasks and subtasks (not epics)
|
|
122
|
+
3. Cycles are automatically detected and rejected
|
|
123
|
+
|
|
124
|
+
## 3) Task Completion Flow
|
|
125
|
+
|
|
126
|
+
### Before Starting a Task
|
|
127
|
+
|
|
128
|
+
1. Check if task has unmet dependencies:
|
|
129
|
+
```bash
|
|
130
|
+
trekoon dep list <task-id> --toon
|
|
131
|
+
```
|
|
132
|
+
|
|
133
|
+
2. If dependencies exist and are not `done`, complete those first
|
|
134
|
+
|
|
135
|
+
3. Only mark `in_progress` when all dependencies are `done`
|
|
136
|
+
|
|
137
|
+
### When Completing a Task
|
|
138
|
+
|
|
139
|
+
1. Mark the task as done:
|
|
140
|
+
```bash
|
|
141
|
+
trekoon task update <task-id> --status done --toon
|
|
142
|
+
```
|
|
143
|
+
|
|
144
|
+
2. To find the next task that was blocked by this one:
|
|
145
|
+
- List all tasks: `trekoon task list --all --toon`
|
|
146
|
+
- Check which tasks have dependencies on the completed task
|
|
147
|
+
- The task(s) with all dependencies now satisfied are ready to start
|
|
148
|
+
|
|
149
|
+
### Finding Next Work
|
|
150
|
+
|
|
151
|
+
```bash
|
|
152
|
+
trekoon task list --status todo --limit 20 --toon
|
|
153
|
+
```
|
|
154
|
+
|
|
155
|
+
Tasks are sorted with `in_progress` first, then `todo`. Look for tasks with no dependencies or all dependencies satisfied.
|
|
156
|
+
|
|
157
|
+
## 4) Load existing work first
|
|
13
158
|
|
|
14
159
|
Before creating or changing anything, inspect current context:
|
|
15
160
|
|
|
@@ -36,7 +181,14 @@ trekoon task list --all --toon
|
|
|
36
181
|
- `epic show <id> --all --toon`: full epic tree (tasks + subtasks)
|
|
37
182
|
- `task show <id> --all --toon`: task plus its subtasks
|
|
38
183
|
|
|
39
|
-
|
|
184
|
+
### View Options
|
|
185
|
+
|
|
186
|
+
| Command | `--view` options |
|
|
187
|
+
|---------|------------------|
|
|
188
|
+
| `list` | `table` (default), `compact` |
|
|
189
|
+
| `show` | `table` (default), `compact`, `tree`, `detail` |
|
|
190
|
+
|
|
191
|
+
## 5) Create work (epic/task/subtask)
|
|
40
192
|
|
|
41
193
|
```bash
|
|
42
194
|
trekoon epic create --title "..." --description "..." --status todo --toon
|
|
@@ -47,8 +199,9 @@ trekoon subtask create --task <task-id> --title "..." --description "..." --stat
|
|
|
47
199
|
Notes:
|
|
48
200
|
- `description` is required for epic/task create and it must be well written.
|
|
49
201
|
- `status` defaults to `todo` if omitted.
|
|
202
|
+
- `description` is optional for subtask create.
|
|
50
203
|
|
|
51
|
-
##
|
|
204
|
+
## 6) Update work
|
|
52
205
|
|
|
53
206
|
### Single-item update
|
|
54
207
|
|
|
@@ -73,7 +226,7 @@ Rules:
|
|
|
73
226
|
- In bulk mode, do not pass a positional ID.
|
|
74
227
|
- Bulk update supports `--append` and/or `--status`.
|
|
75
228
|
|
|
76
|
-
##
|
|
229
|
+
## 7) Setup/install/init (if `trekoon` is unavailable)
|
|
77
230
|
|
|
78
231
|
1. Install Trekoon (or make sure it is on `PATH`).
|
|
79
232
|
2. In the target repository/worktree, initialize tracker state:
|
|
@@ -81,11 +234,12 @@ Rules:
|
|
|
81
234
|
```bash
|
|
82
235
|
trekoon init
|
|
83
236
|
```
|
|
237
|
+
|
|
84
238
|
3. You can always run `trekoon quickstart` or `trekoon --help` to get more information.
|
|
85
239
|
|
|
86
240
|
If `.trekoon/trekoon.db` is missing, initialize before any create/update commands.
|
|
87
241
|
|
|
88
|
-
##
|
|
242
|
+
## 8) Safety
|
|
89
243
|
|
|
90
244
|
- Never edit `.trekoon/trekoon.db` directly.
|
|
91
245
|
- `trekoon wipe --yes` is prohibited unless the user explicitly confirms they want a destructive wipe.
|
package/README.md
CHANGED
|
@@ -89,7 +89,7 @@ List defaults and filters (`epic list`, `task list`):
|
|
|
89
89
|
- All rows and statuses: `--all`
|
|
90
90
|
- `--all` is mutually exclusive with `--status` and `--limit`
|
|
91
91
|
|
|
92
|
-
Bulk updates (`epic update`, `task update`):
|
|
92
|
+
Bulk updates (`epic update`, `task update`, `subtask update`):
|
|
93
93
|
|
|
94
94
|
- Target all rows: `--all`
|
|
95
95
|
- Target specific rows: `--ids <id1,id2,...>`
|
|
@@ -103,6 +103,8 @@ Examples:
|
|
|
103
103
|
```bash
|
|
104
104
|
trekoon task update --all --status in_progress
|
|
105
105
|
trekoon task update --ids <task-1>,<task-2> --append "\nFollow-up note"
|
|
106
|
+
trekoon subtask update --all --status done
|
|
107
|
+
trekoon subtask update --ids <subtask-1>,<subtask-2> --append "\nFollow-up note"
|
|
106
108
|
trekoon epic update --ids <epic-1>,<epic-2> --status done
|
|
107
109
|
```
|
|
108
110
|
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "trekoon",
|
|
3
|
-
"version": "0.1.
|
|
3
|
+
"version": "0.1.4",
|
|
4
4
|
"description": "AI-first local issue tracker CLI.",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"type": "module",
|
|
@@ -17,7 +17,8 @@
|
|
|
17
17
|
"scripts": {
|
|
18
18
|
"run": "bun run ./src/index.ts",
|
|
19
19
|
"build": "bun build ./src/index.ts --outdir ./dist --target bun",
|
|
20
|
-
"test": "bun test ./tests"
|
|
20
|
+
"test": "bun test ./tests",
|
|
21
|
+
"lint": "bunx tsc --noEmit"
|
|
21
22
|
},
|
|
22
23
|
"devDependencies": {
|
|
23
24
|
"@types/bun": "^1.3.9",
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
import { hasFlag, parseArgs, parseStrictPositiveInt, readMissingOptionValue, readOption } from "./arg-parser";
|
|
2
|
+
|
|
3
|
+
import { failResult, okResult } from "../io/output";
|
|
4
|
+
import { type CliContext, type CliResult } from "../runtime/command-types";
|
|
5
|
+
import { openTrekoonDatabase } from "../storage/database";
|
|
6
|
+
import { DEFAULT_EVENT_RETENTION_DAYS, pruneEvents } from "../storage/events-retention";
|
|
7
|
+
|
|
8
|
+
const EVENTS_USAGE = "Usage: trekoon events prune [--dry-run] [--archive] [--retention-days <n>]";
|
|
9
|
+
|
|
10
|
+
function usage(message: string): CliResult {
|
|
11
|
+
return failResult({
|
|
12
|
+
command: "events",
|
|
13
|
+
human: `${message}\n${EVENTS_USAGE}`,
|
|
14
|
+
data: { message },
|
|
15
|
+
error: {
|
|
16
|
+
code: "invalid_args",
|
|
17
|
+
message,
|
|
18
|
+
},
|
|
19
|
+
});
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
function invalidInput(command: string, message: string, option: string): CliResult {
|
|
23
|
+
return failResult({
|
|
24
|
+
command,
|
|
25
|
+
human: message,
|
|
26
|
+
data: {
|
|
27
|
+
option,
|
|
28
|
+
},
|
|
29
|
+
error: {
|
|
30
|
+
code: "invalid_input",
|
|
31
|
+
message,
|
|
32
|
+
},
|
|
33
|
+
});
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
export async function runEvents(context: CliContext): Promise<CliResult> {
|
|
37
|
+
const parsed = parseArgs(context.args);
|
|
38
|
+
const subcommand: string | undefined = parsed.positional[0];
|
|
39
|
+
|
|
40
|
+
if (!subcommand) {
|
|
41
|
+
return usage("Missing events subcommand.");
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
if (subcommand !== "prune") {
|
|
45
|
+
return usage(`Unknown events subcommand '${subcommand}'.`);
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
if (parsed.positional.length > 1) {
|
|
49
|
+
return usage("Unexpected positional arguments for events prune.");
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
const missingOption: string | undefined = readMissingOptionValue(parsed.missingOptionValues, "retention-days");
|
|
53
|
+
if (missingOption !== undefined) {
|
|
54
|
+
return invalidInput("events.prune", `Option --${missingOption} requires a value.`, missingOption);
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
const parsedRetentionDays: number | undefined = parseStrictPositiveInt(readOption(parsed.options, "retention-days"));
|
|
58
|
+
if (Number.isNaN(parsedRetentionDays)) {
|
|
59
|
+
return invalidInput("events.prune", "--retention-days must be a positive integer.", "retention-days");
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
const retentionDays: number = parsedRetentionDays ?? DEFAULT_EVENT_RETENTION_DAYS;
|
|
63
|
+
const dryRun: boolean = hasFlag(parsed.flags, "dry-run");
|
|
64
|
+
const archive: boolean = hasFlag(parsed.flags, "archive");
|
|
65
|
+
const storage = openTrekoonDatabase(context.cwd);
|
|
66
|
+
|
|
67
|
+
try {
|
|
68
|
+
const summary = pruneEvents(storage.db, {
|
|
69
|
+
retentionDays,
|
|
70
|
+
dryRun,
|
|
71
|
+
archive,
|
|
72
|
+
});
|
|
73
|
+
|
|
74
|
+
return okResult({
|
|
75
|
+
command: "events.prune",
|
|
76
|
+
human: [
|
|
77
|
+
dryRun ? "Dry run complete." : "Prune complete.",
|
|
78
|
+
`Retention days: ${summary.retentionDays}`,
|
|
79
|
+
`Candidates: ${summary.candidateCount}`,
|
|
80
|
+
`Archived: ${summary.archivedCount}`,
|
|
81
|
+
`Deleted: ${summary.deletedCount}`,
|
|
82
|
+
].join("\n"),
|
|
83
|
+
data: summary,
|
|
84
|
+
});
|
|
85
|
+
} finally {
|
|
86
|
+
storage.close();
|
|
87
|
+
}
|
|
88
|
+
}
|
package/src/commands/help.ts
CHANGED
|
@@ -21,6 +21,8 @@ const ROOT_HELP = [
|
|
|
21
21
|
" task Task lifecycle commands",
|
|
22
22
|
" subtask Subtask lifecycle commands",
|
|
23
23
|
" dep Dependency graph commands",
|
|
24
|
+
" events Event retention and cleanup commands",
|
|
25
|
+
" migrate Migration status and rollback commands",
|
|
24
26
|
" sync Cross-branch sync commands",
|
|
25
27
|
" skills Project-local skill install/link commands",
|
|
26
28
|
].join("\n");
|
|
@@ -33,8 +35,11 @@ const COMMAND_HELP: Record<string, string> = {
|
|
|
33
35
|
"Usage: trekoon epic <subcommand> [options] (list defaults: open statuses + limit 10; list flags: --status <csv> | --limit <n> | --all | --view table|compact; show: compact=epic summary, tree=hierarchy, detail=descriptions, and --all defaults to detail in machine modes; update bulk flags: --all | --ids <csv> with --append <text> and/or --status <status>)",
|
|
34
36
|
task:
|
|
35
37
|
"Usage: trekoon task <subcommand> [options] (list defaults: open statuses + limit 10; list flags: --status <csv> | --limit <n> | --all | --view table|compact; show: compact=task summary, tree=hierarchy, detail=descriptions, and --all defaults to detail in machine modes; update bulk flags: --all | --ids <csv> with --append <text> and/or --status <status>)",
|
|
36
|
-
subtask:
|
|
38
|
+
subtask:
|
|
39
|
+
"Usage: trekoon subtask <subcommand> [options] (list supports --view table|compact; update bulk flags: --all | --ids <csv> with --append <text> and/or --status <status>)",
|
|
37
40
|
dep: "Usage: trekoon dep <subcommand> [options]",
|
|
41
|
+
events: "Usage: trekoon events prune [--dry-run] [--archive] [--retention-days <n>]",
|
|
42
|
+
migrate: "Usage: trekoon migrate <status|rollback> [--to-version <n>]",
|
|
38
43
|
sync: "Usage: trekoon sync <subcommand> [options]",
|
|
39
44
|
skills:
|
|
40
45
|
"Usage: trekoon skills install [--link --editor opencode|claude] [--to <path>] (--to sets symlink root for --link only; install path always <cwd>/.agents/skills/trekoon/SKILL.md)",
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
import { parseArgs, readMissingOptionValue, readOption } from "./arg-parser";
|
|
2
|
+
|
|
3
|
+
import { failResult, okResult } from "../io/output";
|
|
4
|
+
import { type CliContext, type CliResult } from "../runtime/command-types";
|
|
5
|
+
import { openTrekoonDatabase } from "../storage/database";
|
|
6
|
+
import { describeMigrations, rollbackDatabase } from "../storage/migrations";
|
|
7
|
+
|
|
8
|
+
const MIGRATE_USAGE = "Usage: trekoon migrate <status|rollback> [--to-version <n>]";
|
|
9
|
+
|
|
10
|
+
function usage(message: string): CliResult {
|
|
11
|
+
return failResult({
|
|
12
|
+
command: "migrate",
|
|
13
|
+
human: `${message}\n${MIGRATE_USAGE}`,
|
|
14
|
+
data: { message },
|
|
15
|
+
error: {
|
|
16
|
+
code: "invalid_args",
|
|
17
|
+
message,
|
|
18
|
+
},
|
|
19
|
+
});
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
function parseVersion(rawValue: string | undefined): number | null {
|
|
23
|
+
if (rawValue === undefined) {
|
|
24
|
+
return null;
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
if (!/^\d+$/.test(rawValue)) {
|
|
28
|
+
return Number.NaN;
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
return Number.parseInt(rawValue, 10);
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
export async function runMigrate(context: CliContext): Promise<CliResult> {
|
|
35
|
+
const parsed = parseArgs(context.args);
|
|
36
|
+
const subcommand: string | undefined = parsed.positional[0];
|
|
37
|
+
|
|
38
|
+
if (!subcommand) {
|
|
39
|
+
return usage("Missing migrate subcommand.");
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
const missingOption = readMissingOptionValue(parsed.missingOptionValues, "to-version");
|
|
43
|
+
if (missingOption !== undefined) {
|
|
44
|
+
return failResult({
|
|
45
|
+
command: "migrate",
|
|
46
|
+
human: `Option --${missingOption} requires a value.`,
|
|
47
|
+
data: {
|
|
48
|
+
option: missingOption,
|
|
49
|
+
},
|
|
50
|
+
error: {
|
|
51
|
+
code: "invalid_input",
|
|
52
|
+
message: `Option --${missingOption} requires a value.`,
|
|
53
|
+
},
|
|
54
|
+
});
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
const storage = openTrekoonDatabase(context.cwd, { autoMigrate: false });
|
|
58
|
+
|
|
59
|
+
try {
|
|
60
|
+
if (subcommand === "status") {
|
|
61
|
+
const status = describeMigrations(storage.db);
|
|
62
|
+
|
|
63
|
+
return okResult({
|
|
64
|
+
command: "migrate.status",
|
|
65
|
+
human: [
|
|
66
|
+
`Current version: ${status.currentVersion}`,
|
|
67
|
+
`Latest version: ${status.latestVersion}`,
|
|
68
|
+
`Pending migrations: ${status.pending.length}`,
|
|
69
|
+
].join("\n"),
|
|
70
|
+
data: status,
|
|
71
|
+
});
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
if (subcommand === "rollback") {
|
|
75
|
+
const status = describeMigrations(storage.db);
|
|
76
|
+
const parsedVersion: number | null = parseVersion(readOption(parsed.options, "to-version"));
|
|
77
|
+
|
|
78
|
+
if (Number.isNaN(parsedVersion)) {
|
|
79
|
+
return failResult({
|
|
80
|
+
command: "migrate.rollback",
|
|
81
|
+
human: "--to-version must be a non-negative integer.",
|
|
82
|
+
data: {
|
|
83
|
+
option: "to-version",
|
|
84
|
+
},
|
|
85
|
+
error: {
|
|
86
|
+
code: "invalid_input",
|
|
87
|
+
message: "--to-version must be a non-negative integer.",
|
|
88
|
+
},
|
|
89
|
+
});
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
const targetVersion: number = parsedVersion ?? Math.max(0, status.currentVersion - 1);
|
|
93
|
+
const summary = rollbackDatabase(storage.db, targetVersion);
|
|
94
|
+
|
|
95
|
+
return okResult({
|
|
96
|
+
command: "migrate.rollback",
|
|
97
|
+
human: [
|
|
98
|
+
`Rolled back ${summary.rolledBack} migration(s).`,
|
|
99
|
+
`From version ${summary.fromVersion} to ${summary.toVersion}.`,
|
|
100
|
+
].join("\n"),
|
|
101
|
+
data: summary,
|
|
102
|
+
});
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
return usage(`Unknown migrate subcommand '${subcommand}'.`);
|
|
106
|
+
} catch (error: unknown) {
|
|
107
|
+
const message = error instanceof Error ? error.message : "Unknown migration failure.";
|
|
108
|
+
|
|
109
|
+
return failResult({
|
|
110
|
+
command: "migrate",
|
|
111
|
+
human: message,
|
|
112
|
+
data: {
|
|
113
|
+
reason: "migrate_failed",
|
|
114
|
+
},
|
|
115
|
+
error: {
|
|
116
|
+
code: "migrate_failed",
|
|
117
|
+
message,
|
|
118
|
+
},
|
|
119
|
+
});
|
|
120
|
+
} finally {
|
|
121
|
+
storage.close();
|
|
122
|
+
}
|
|
123
|
+
}
|
package/src/commands/subtask.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { parseArgs, readEnumOption, readMissingOptionValue, readOption } from "./arg-parser";
|
|
1
|
+
import { hasFlag, parseArgs, readEnumOption, readMissingOptionValue, readOption } from "./arg-parser";
|
|
2
2
|
|
|
3
3
|
import { DomainError, type SubtaskRecord } from "../domain/types";
|
|
4
4
|
import { TrackerDomain } from "../domain/tracker-domain";
|
|
@@ -13,6 +13,21 @@ function formatSubtask(subtask: SubtaskRecord): string {
|
|
|
13
13
|
|
|
14
14
|
const VIEW_MODES = ["table", "compact"] as const;
|
|
15
15
|
|
|
16
|
+
function parseIdsOption(rawIds: string | undefined): string[] {
|
|
17
|
+
if (rawIds === undefined) {
|
|
18
|
+
return [];
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
return rawIds
|
|
22
|
+
.split(",")
|
|
23
|
+
.map((value) => value.trim())
|
|
24
|
+
.filter((value) => value.length > 0);
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
function appendLine(existing: string, line: string): string {
|
|
28
|
+
return existing.length > 0 ? `${existing}\n${line}` : line;
|
|
29
|
+
}
|
|
30
|
+
|
|
16
31
|
function formatSubtaskListTable(subtasks: readonly SubtaskRecord[]): string {
|
|
17
32
|
return formatHumanTable(
|
|
18
33
|
["ID", "TASK", "TITLE", "STATUS"],
|
|
@@ -138,6 +153,8 @@ export async function runSubtask(context: CliContext): Promise<CliResult> {
|
|
|
138
153
|
}
|
|
139
154
|
case "update": {
|
|
140
155
|
const missingUpdateOption =
|
|
156
|
+
readMissingOptionValue(parsed.missingOptionValues, "ids") ??
|
|
157
|
+
readMissingOptionValue(parsed.missingOptionValues, "append") ??
|
|
141
158
|
readMissingOptionValue(parsed.missingOptionValues, "description", "d") ??
|
|
142
159
|
readMissingOptionValue(parsed.missingOptionValues, "status", "s");
|
|
143
160
|
if (missingUpdateOption !== undefined) {
|
|
@@ -145,10 +162,112 @@ export async function runSubtask(context: CliContext): Promise<CliResult> {
|
|
|
145
162
|
}
|
|
146
163
|
|
|
147
164
|
const subtaskId: string = parsed.positional[1] ?? "";
|
|
165
|
+
const updateAll: boolean = hasFlag(parsed.flags, "all");
|
|
166
|
+
const rawIds: string | undefined = readOption(parsed.options, "ids");
|
|
167
|
+
const ids = parseIdsOption(rawIds);
|
|
148
168
|
const title: string | undefined = readOption(parsed.options, "title");
|
|
149
169
|
const description: string | undefined = readOption(parsed.options, "description", "d");
|
|
170
|
+
const append: string | undefined = readOption(parsed.options, "append");
|
|
150
171
|
const status: string | undefined = readOption(parsed.options, "status", "s");
|
|
151
|
-
|
|
172
|
+
|
|
173
|
+
if (updateAll && ids.length > 0) {
|
|
174
|
+
return failResult({
|
|
175
|
+
command: "subtask.update",
|
|
176
|
+
human: "Use either --all or --ids, not both.",
|
|
177
|
+
data: { code: "invalid_input", target: ["all", "ids"] },
|
|
178
|
+
error: {
|
|
179
|
+
code: "invalid_input",
|
|
180
|
+
message: "--all and --ids are mutually exclusive",
|
|
181
|
+
},
|
|
182
|
+
});
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
if (append !== undefined && description !== undefined) {
|
|
186
|
+
return failResult({
|
|
187
|
+
command: "subtask.update",
|
|
188
|
+
human: "Use either --append or --description, not both.",
|
|
189
|
+
data: { code: "invalid_input", fields: ["append", "description"] },
|
|
190
|
+
error: {
|
|
191
|
+
code: "invalid_input",
|
|
192
|
+
message: "--append and --description are mutually exclusive",
|
|
193
|
+
},
|
|
194
|
+
});
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
const hasBulkTarget = updateAll || ids.length > 0;
|
|
198
|
+
if (hasBulkTarget) {
|
|
199
|
+
if (subtaskId.length > 0) {
|
|
200
|
+
return failResult({
|
|
201
|
+
command: "subtask.update",
|
|
202
|
+
human: "Do not pass a subtask id when using --all or --ids.",
|
|
203
|
+
data: { code: "invalid_input", id: subtaskId },
|
|
204
|
+
error: {
|
|
205
|
+
code: "invalid_input",
|
|
206
|
+
message: "Positional id is not allowed with --all/--ids",
|
|
207
|
+
},
|
|
208
|
+
});
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
if (title !== undefined || description !== undefined) {
|
|
212
|
+
return failResult({
|
|
213
|
+
command: "subtask.update",
|
|
214
|
+
human: "Bulk update supports only --append and/or --status.",
|
|
215
|
+
data: { code: "invalid_input" },
|
|
216
|
+
error: {
|
|
217
|
+
code: "invalid_input",
|
|
218
|
+
message: "Bulk update supports only --append and --status",
|
|
219
|
+
},
|
|
220
|
+
});
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
if (append === undefined && status === undefined) {
|
|
224
|
+
return failResult({
|
|
225
|
+
command: "subtask.update",
|
|
226
|
+
human: "Bulk update requires --append and/or --status.",
|
|
227
|
+
data: { code: "invalid_input" },
|
|
228
|
+
error: {
|
|
229
|
+
code: "invalid_input",
|
|
230
|
+
message: "Missing bulk update fields",
|
|
231
|
+
},
|
|
232
|
+
});
|
|
233
|
+
}
|
|
234
|
+
|
|
235
|
+
const targets = updateAll ? [...domain.listSubtasks()] : ids.map((id) => domain.getSubtaskOrThrow(id));
|
|
236
|
+
const subtasks = targets.map((target) =>
|
|
237
|
+
domain.updateSubtask(target.id, {
|
|
238
|
+
status,
|
|
239
|
+
description: append === undefined ? undefined : appendLine(target.description, append),
|
|
240
|
+
}),
|
|
241
|
+
);
|
|
242
|
+
|
|
243
|
+
return okResult({
|
|
244
|
+
command: "subtask.update",
|
|
245
|
+
human: `Updated ${subtasks.length} subtask(s)`,
|
|
246
|
+
data: {
|
|
247
|
+
subtasks,
|
|
248
|
+
target: updateAll ? "all" : "ids",
|
|
249
|
+
ids: subtasks.map((subtask) => subtask.id),
|
|
250
|
+
},
|
|
251
|
+
});
|
|
252
|
+
}
|
|
253
|
+
|
|
254
|
+
if (subtaskId.length === 0) {
|
|
255
|
+
return failResult({
|
|
256
|
+
command: "subtask.update",
|
|
257
|
+
human: "Provide a subtask id, or use --all/--ids for bulk update.",
|
|
258
|
+
data: { code: "invalid_input" },
|
|
259
|
+
error: {
|
|
260
|
+
code: "invalid_input",
|
|
261
|
+
message: "Missing subtask id",
|
|
262
|
+
},
|
|
263
|
+
});
|
|
264
|
+
}
|
|
265
|
+
|
|
266
|
+
const nextDescription =
|
|
267
|
+
append === undefined
|
|
268
|
+
? description
|
|
269
|
+
: appendLine(domain.getSubtaskOrThrow(subtaskId).description, append);
|
|
270
|
+
const subtask = domain.updateSubtask(subtaskId, { title, description: nextDescription, status });
|
|
152
271
|
|
|
153
272
|
return okResult({
|
|
154
273
|
command: "subtask.update",
|
|
@@ -522,44 +522,25 @@ export class TrackerDomain {
|
|
|
522
522
|
}
|
|
523
523
|
|
|
524
524
|
private wouldCreateCycle(sourceId: string, dependsOnId: string): boolean {
|
|
525
|
-
const
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
|
|
542
|
-
const queue: string[] = [dependsOnId];
|
|
543
|
-
|
|
544
|
-
while (queue.length > 0) {
|
|
545
|
-
const next = queue.shift();
|
|
546
|
-
if (!next) {
|
|
547
|
-
continue;
|
|
548
|
-
}
|
|
549
|
-
if (next === sourceId) {
|
|
550
|
-
return true;
|
|
551
|
-
}
|
|
552
|
-
if (visited.has(next)) {
|
|
553
|
-
continue;
|
|
554
|
-
}
|
|
555
|
-
visited.add(next);
|
|
556
|
-
const outgoing = adjacency.get(next) ?? [];
|
|
557
|
-
for (const neighbor of outgoing) {
|
|
558
|
-
queue.push(neighbor);
|
|
559
|
-
}
|
|
560
|
-
}
|
|
525
|
+
const row = this.#db
|
|
526
|
+
.query(
|
|
527
|
+
`
|
|
528
|
+
WITH RECURSIVE reachable(id) AS (
|
|
529
|
+
SELECT ?
|
|
530
|
+
UNION
|
|
531
|
+
SELECT d.depends_on_id
|
|
532
|
+
FROM dependencies d
|
|
533
|
+
INNER JOIN reachable r ON d.source_id = r.id
|
|
534
|
+
)
|
|
535
|
+
SELECT 1 AS has_cycle
|
|
536
|
+
FROM reachable
|
|
537
|
+
WHERE id = ?
|
|
538
|
+
LIMIT 1;
|
|
539
|
+
`,
|
|
540
|
+
)
|
|
541
|
+
.get(dependsOnId, sourceId) as { has_cycle: number } | null;
|
|
561
542
|
|
|
562
|
-
return
|
|
543
|
+
return row !== null;
|
|
563
544
|
}
|
|
564
545
|
}
|
|
565
546
|
|
package/src/runtime/cli-shell.ts
CHANGED
|
@@ -1,7 +1,9 @@
|
|
|
1
1
|
import { runHelp } from "../commands/help";
|
|
2
2
|
import { runDep } from "../commands/dep";
|
|
3
3
|
import { runEpic } from "../commands/epic";
|
|
4
|
+
import { runEvents } from "../commands/events";
|
|
4
5
|
import { runInit } from "../commands/init";
|
|
6
|
+
import { runMigrate } from "../commands/migrate";
|
|
5
7
|
import { runQuickstart } from "../commands/quickstart";
|
|
6
8
|
import { runSkills } from "../commands/skills";
|
|
7
9
|
import { runSubtask } from "../commands/subtask";
|
|
@@ -21,6 +23,8 @@ const SUPPORTED_ROOT_COMMANDS: readonly string[] = [
|
|
|
21
23
|
"task",
|
|
22
24
|
"subtask",
|
|
23
25
|
"dep",
|
|
26
|
+
"events",
|
|
27
|
+
"migrate",
|
|
24
28
|
"sync",
|
|
25
29
|
"skills",
|
|
26
30
|
"wipe",
|
|
@@ -147,6 +151,10 @@ export async function executeShell(parsed: ParsedInvocation, cwd: string = proce
|
|
|
147
151
|
return runSubtask(context);
|
|
148
152
|
case "dep":
|
|
149
153
|
return runDep(context);
|
|
154
|
+
case "events":
|
|
155
|
+
return runEvents(context);
|
|
156
|
+
case "migrate":
|
|
157
|
+
return runMigrate(context);
|
|
150
158
|
case "sync":
|
|
151
159
|
return runSync(context);
|
|
152
160
|
case "skills":
|
package/src/storage/database.ts
CHANGED
|
@@ -11,7 +11,14 @@ export interface TrekoonDatabase {
|
|
|
11
11
|
close(): void;
|
|
12
12
|
}
|
|
13
13
|
|
|
14
|
-
export
|
|
14
|
+
export interface OpenTrekoonDatabaseOptions {
|
|
15
|
+
readonly autoMigrate?: boolean;
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
export function openTrekoonDatabase(
|
|
19
|
+
workingDirectory: string = process.cwd(),
|
|
20
|
+
options: OpenTrekoonDatabaseOptions = {},
|
|
21
|
+
): TrekoonDatabase {
|
|
15
22
|
const paths: StoragePaths = resolveStoragePaths(workingDirectory);
|
|
16
23
|
|
|
17
24
|
mkdirSync(paths.storageDir, { recursive: true });
|
|
@@ -22,7 +29,9 @@ export function openTrekoonDatabase(workingDirectory: string = process.cwd()): T
|
|
|
22
29
|
db.exec("PRAGMA journal_mode = WAL;");
|
|
23
30
|
db.exec("PRAGMA foreign_keys = ON;");
|
|
24
31
|
|
|
25
|
-
|
|
32
|
+
if (options.autoMigrate ?? true) {
|
|
33
|
+
migrateDatabase(db);
|
|
34
|
+
}
|
|
26
35
|
|
|
27
36
|
return {
|
|
28
37
|
db,
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
import { type Database } from "bun:sqlite";
|
|
2
|
+
|
|
3
|
+
export const DEFAULT_EVENT_RETENTION_DAYS = 90;
|
|
4
|
+
const DAY_IN_MILLISECONDS = 24 * 60 * 60 * 1000;
|
|
5
|
+
|
|
6
|
+
export interface EventPruneOptions {
|
|
7
|
+
readonly retentionDays?: number;
|
|
8
|
+
readonly dryRun?: boolean;
|
|
9
|
+
readonly archive?: boolean;
|
|
10
|
+
readonly now?: number;
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
export interface EventPruneSummary {
|
|
14
|
+
readonly retentionDays: number;
|
|
15
|
+
readonly cutoffTimestamp: number;
|
|
16
|
+
readonly dryRun: boolean;
|
|
17
|
+
readonly archive: boolean;
|
|
18
|
+
readonly candidateCount: number;
|
|
19
|
+
readonly archivedCount: number;
|
|
20
|
+
readonly deletedCount: number;
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
function ensureArchiveTable(db: Database): void {
|
|
24
|
+
db.exec(`
|
|
25
|
+
CREATE TABLE IF NOT EXISTS event_archive (
|
|
26
|
+
id TEXT PRIMARY KEY,
|
|
27
|
+
entity_kind TEXT NOT NULL,
|
|
28
|
+
entity_id TEXT NOT NULL,
|
|
29
|
+
operation TEXT NOT NULL,
|
|
30
|
+
payload TEXT NOT NULL,
|
|
31
|
+
git_branch TEXT,
|
|
32
|
+
git_head TEXT,
|
|
33
|
+
created_at INTEGER NOT NULL,
|
|
34
|
+
updated_at INTEGER NOT NULL,
|
|
35
|
+
version INTEGER NOT NULL DEFAULT 1
|
|
36
|
+
);
|
|
37
|
+
`);
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
function assertRetentionDays(value: number): number {
|
|
41
|
+
if (!Number.isInteger(value) || value < 1) {
|
|
42
|
+
throw new Error("retentionDays must be a positive integer.");
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
return value;
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
function countCandidates(db: Database, cutoffTimestamp: number): number {
|
|
49
|
+
const row = db.query("SELECT COUNT(*) AS count FROM events WHERE created_at < ?;").get(cutoffTimestamp) as
|
|
50
|
+
| { count: number }
|
|
51
|
+
| null;
|
|
52
|
+
|
|
53
|
+
return row?.count ?? 0;
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
/**
 * Deletes events older than the retention window, optionally archiving them first.
 *
 * The cutoff is now - retentionDays; only rows with created_at strictly below
 * it are affected. With dryRun, nothing is written and the summary reports how
 * many rows would be pruned. With archive, candidates are upserted into
 * event_archive before deletion so re-running a prune over an overlapping
 * window cannot fail on duplicate ids. Archive + delete happen in one
 * transaction, so a failure leaves the events table untouched.
 *
 * @throws Error when retentionDays is not a positive integer.
 */
export function pruneEvents(db: Database, options: EventPruneOptions = {}): EventPruneSummary {
  const retentionDays: number = assertRetentionDays(options.retentionDays ?? DEFAULT_EVENT_RETENTION_DAYS);
  const dryRun: boolean = options.dryRun ?? false;
  const archive: boolean = options.archive ?? false;
  const now: number = options.now ?? Date.now();
  const cutoffTimestamp: number = now - retentionDays * DAY_IN_MILLISECONDS;
  const candidateCount: number = countCandidates(db, cutoffTimestamp);

  // Fast path: dry run or nothing to prune — report counts without writing.
  if (dryRun || candidateCount === 0) {
    return {
      retentionDays,
      cutoffTimestamp,
      dryRun,
      archive,
      candidateCount,
      archivedCount: 0,
      deletedCount: 0,
    };
  }

  // Archive (when requested) and delete atomically in a single transaction.
  return db.transaction((): EventPruneSummary => {
    let archivedCount = 0;

    if (archive) {
      ensureArchiveTable(db);
      // ON CONFLICT upserts so rows already archived by a previous prune are
      // refreshed instead of causing a primary-key constraint failure.
      const archived = db
        .query(
          `
          INSERT INTO event_archive (
            id,
            entity_kind,
            entity_id,
            operation,
            payload,
            git_branch,
            git_head,
            created_at,
            updated_at,
            version
          )
          SELECT
            id,
            entity_kind,
            entity_id,
            operation,
            payload,
            git_branch,
            git_head,
            created_at,
            updated_at,
            version
          FROM events
          WHERE created_at < ?
          ON CONFLICT(id) DO UPDATE SET
            entity_kind = excluded.entity_kind,
            entity_id = excluded.entity_id,
            operation = excluded.operation,
            payload = excluded.payload,
            git_branch = excluded.git_branch,
            git_head = excluded.git_head,
            created_at = excluded.created_at,
            updated_at = excluded.updated_at,
            version = excluded.version;
          `,
        )
        .run(cutoffTimestamp);

      archivedCount = archived.changes;
    }

    const deleted = db.query("DELETE FROM events WHERE created_at < ?;").run(cutoffTimestamp);

    return {
      retentionDays,
      cutoffTimestamp,
      dryRun,
      archive,
      candidateCount,
      archivedCount,
      deletedCount: deleted.changes,
    };
  })();
}
|
|
@@ -2,10 +2,142 @@ import { Database } from "bun:sqlite";
|
|
|
2
2
|
|
|
3
3
|
import { BASE_SCHEMA_STATEMENTS, SCHEMA_VERSION } from "./schema";
|
|
4
4
|
|
|
5
|
+
// The initial migration: creates the full base schema. Its recorded name
// embeds the schema version so older databases can be recognized by name.
const BASE_MIGRATION_VERSION = 1;
const BASE_MIGRATION_NAME = `0001_base_schema_v${SCHEMA_VERSION}`;
// GLOB patterns for base-migration names written by older releases; used to
// backfill the version column during the legacy schema_migrations upgrade.
const LEGACY_BASE_MIGRATION_NAME_PATTERNS: readonly string[] = [
  "0001_base_schema_v*",
];

// Inverse of the base schema: drop tables children-first so foreign-key
// references never dangle mid-rollback.
const BASE_ROLLBACK_STATEMENTS: readonly string[] = [
  "DROP TABLE IF EXISTS sync_conflicts;",
  "DROP TABLE IF EXISTS sync_cursors;",
  "DROP TABLE IF EXISTS git_context;",
  "DROP TABLE IF EXISTS events;",
  "DROP TABLE IF EXISTS dependencies;",
  "DROP TABLE IF EXISTS subtasks;",
  "DROP TABLE IF EXISTS tasks;",
  "DROP TABLE IF EXISTS epics;",
];

// Migration 2: indexes for event scans (by time, branch, and time+id) and
// for dependency lookups in both directions.
const INDEX_MIGRATION_UP_STATEMENTS: readonly string[] = [
  "CREATE INDEX IF NOT EXISTS idx_events_created_at ON events(created_at);",
  "CREATE INDEX IF NOT EXISTS idx_events_git_branch ON events(git_branch);",
  "CREATE INDEX IF NOT EXISTS idx_events_created_at_id ON events(created_at, id);",
  "CREATE INDEX IF NOT EXISTS idx_dependencies_source ON dependencies(source_id);",
  "CREATE INDEX IF NOT EXISTS idx_dependencies_depends_on ON dependencies(depends_on_id);",
];

const INDEX_MIGRATION_DOWN_STATEMENTS: readonly string[] = [
  "DROP INDEX IF EXISTS idx_events_created_at;",
  "DROP INDEX IF EXISTS idx_events_git_branch;",
  "DROP INDEX IF EXISTS idx_events_created_at_id;",
  "DROP INDEX IF EXISTS idx_dependencies_source;",
  "DROP INDEX IF EXISTS idx_dependencies_depends_on;",
];

// Migration 3: archive table used by event retention pruning (columns mirror
// the events table) plus an index for time-ranged queries over the archive.
const EVENT_ARCHIVE_MIGRATION_UP_STATEMENTS: readonly string[] = [
  `
  CREATE TABLE IF NOT EXISTS event_archive (
    id TEXT PRIMARY KEY,
    entity_kind TEXT NOT NULL,
    entity_id TEXT NOT NULL,
    operation TEXT NOT NULL,
    payload TEXT NOT NULL,
    git_branch TEXT,
    git_head TEXT,
    created_at INTEGER NOT NULL,
    updated_at INTEGER NOT NULL,
    version INTEGER NOT NULL DEFAULT 1
  );
  `,
  "CREATE INDEX IF NOT EXISTS idx_event_archive_created_at ON event_archive(created_at);",
];

const EVENT_ARCHIVE_MIGRATION_DOWN_STATEMENTS: readonly string[] = [
  "DROP INDEX IF EXISTS idx_event_archive_created_at;",
  "DROP TABLE IF EXISTS event_archive;",
];

// A single reversible schema change. Versions are contiguous from 1 (enforced
// by validateMigrationPlan).
interface Migration {
  readonly version: number;
  readonly name: string;
  up(db: Database): void;
  down(db: Database): void;
}

// Raw row shape of the schema_migrations bookkeeping table.
interface AppliedMigrationRow {
  readonly version: number;
  readonly name: string;
  readonly applied_at: number;
}

// Public (camelCased) view of one applied migration.
export interface AppliedMigration {
  readonly version: number;
  readonly name: string;
  readonly appliedAt: number;
}

// Snapshot of migration state returned by describeMigrations.
export interface MigrationStatus {
  readonly currentVersion: number;
  readonly latestVersion: number;
  readonly applied: readonly AppliedMigration[];
  readonly pending: ReadonlyArray<{ version: number; name: string }>;
}

// Result of rollbackDatabase: the version span and the migrations undone.
export interface RollbackSummary {
  readonly fromVersion: number;
  readonly toVersion: number;
  readonly rolledBack: number;
  readonly rolledBackMigrations: readonly string[];
}

// Ordered migration plan. Append new migrations at the end with
// version = previous length + 1; never reorder or renumber existing entries.
const MIGRATIONS: readonly Migration[] = [
  {
    version: BASE_MIGRATION_VERSION,
    name: BASE_MIGRATION_NAME,
    up(db: Database): void {
      for (const statement of BASE_SCHEMA_STATEMENTS) {
        db.exec(statement);
      }
    },
    down(db: Database): void {
      for (const statement of BASE_ROLLBACK_STATEMENTS) {
        db.exec(statement);
      }
    },
  },
  {
    version: 2,
    name: "0002_sync_dependency_indexes",
    up(db: Database): void {
      for (const statement of INDEX_MIGRATION_UP_STATEMENTS) {
        db.exec(statement);
      }
    },
    down(db: Database): void {
      for (const statement of INDEX_MIGRATION_DOWN_STATEMENTS) {
        db.exec(statement);
      }
    },
  },
  {
    version: 3,
    name: "0003_event_archive_retention",
    up(db: Database): void {
      for (const statement of EVENT_ARCHIVE_MIGRATION_UP_STATEMENTS) {
        db.exec(statement);
      }
    },
    down(db: Database): void {
      for (const statement of EVENT_ARCHIVE_MIGRATION_DOWN_STATEMENTS) {
        db.exec(statement);
      }
    },
  },
];
|
|
138
|
+
|
|
139
|
+
function migrationTableExists(db: Database): boolean {
|
|
140
|
+
const row = db
|
|
9
141
|
.query(
|
|
10
142
|
`
|
|
11
143
|
SELECT COUNT(*) AS count
|
|
@@ -15,32 +147,221 @@ function hasMigration(db: Database, name: string): boolean {
|
|
|
15
147
|
)
|
|
16
148
|
.get() as { count: number } | null;
|
|
17
149
|
|
|
18
|
-
|
|
19
|
-
|
|
150
|
+
return (row?.count ?? 0) > 0;
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
function hasMigrationVersionColumn(db: Database): boolean {
|
|
154
|
+
const columns = db.query("PRAGMA table_info(schema_migrations);").all() as Array<{ name: string }>;
|
|
155
|
+
return columns.some((column) => column.name === "version");
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
/**
 * Creates the schema_migrations bookkeeping table if it does not exist.
 * Each row records one applied migration: its version, name, and the
 * epoch-ms timestamp at which it was applied.
 */
function ensureMigrationTable(db: Database): void {
  db.exec(`
    CREATE TABLE IF NOT EXISTS schema_migrations (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      version INTEGER NOT NULL UNIQUE,
      name TEXT NOT NULL UNIQUE,
      applied_at INTEGER NOT NULL
    );
  `);
}
|
|
168
|
+
|
|
169
|
+
/**
 * Upgrades a legacy schema_migrations table (created before the version
 * column existed) in place: adds the column, backfills version 1 onto known
 * base-migration rows (exact name first, then legacy GLOB patterns), and adds
 * a unique index over version. Throws if any row's version cannot be
 * inferred, because later version arithmetic would silently misbehave.
 */
function ensureMigrationVersionColumn(db: Database): void {
  // No-op for fresh databases and for tables that were already upgraded.
  if (!migrationTableExists(db) || hasMigrationVersionColumn(db)) {
    return;
  }

  db.exec("ALTER TABLE schema_migrations ADD COLUMN version INTEGER;");
  // Backfill the current base-migration name first…
  db.query("UPDATE schema_migrations SET version = ? WHERE version IS NULL AND name = ?;").run(BASE_MIGRATION_VERSION, BASE_MIGRATION_NAME);

  // …then any older name formats matched by GLOB.
  for (const legacyPattern of LEGACY_BASE_MIGRATION_NAME_PATTERNS) {
    db.query("UPDATE schema_migrations SET version = ? WHERE version IS NULL AND name GLOB ?;").run(
      BASE_MIGRATION_VERSION,
      legacyPattern,
    );
  }

  db.exec("CREATE UNIQUE INDEX IF NOT EXISTS idx_schema_migrations_version ON schema_migrations(version);");

  // Any row still without a version is unrecognized; refuse to continue.
  const unresolvedRow = db
    .query(
      `
      SELECT COUNT(*) AS count
      FROM schema_migrations
      WHERE version IS NULL;
      `,
    )
    .get() as { count: number } | null;

  if ((unresolvedRow?.count ?? 0) > 0) {
    throw new Error(
      "Unable to infer one or more schema_migrations.version values during legacy upgrade. Repair schema_migrations entries manually so every row has a valid version, then rerun migrations.",
    );
  }
}
|
|
202
|
+
|
|
203
|
+
function validateMigrationPlan(): void {
|
|
204
|
+
const seen = new Set<number>();
|
|
205
|
+
|
|
206
|
+
for (let index = 0; index < MIGRATIONS.length; index += 1) {
|
|
207
|
+
const migration: Migration = MIGRATIONS[index]!;
|
|
208
|
+
|
|
209
|
+
if (migration.version !== index + 1) {
|
|
210
|
+
throw new Error(`Migration versions must be contiguous from 1 (found ${migration.version} at index ${index}).`);
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
if (seen.has(migration.version)) {
|
|
214
|
+
throw new Error(`Duplicate migration version ${migration.version}.`);
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
seen.add(migration.version);
|
|
218
|
+
}
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
function runExclusive<T>(db: Database, operation: () => T): T {
|
|
222
|
+
db.exec("BEGIN EXCLUSIVE TRANSACTION;");
|
|
223
|
+
|
|
224
|
+
try {
|
|
225
|
+
const result: T = operation();
|
|
226
|
+
db.exec("COMMIT;");
|
|
227
|
+
return result;
|
|
228
|
+
} catch (error: unknown) {
|
|
229
|
+
db.exec("ROLLBACK;");
|
|
230
|
+
throw error;
|
|
231
|
+
}
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
function currentVersion(db: Database): number {
|
|
235
|
+
if (!migrationTableExists(db)) {
|
|
236
|
+
return 0;
|
|
20
237
|
}
|
|
21
238
|
|
|
22
|
-
const row
|
|
23
|
-
.query("SELECT
|
|
24
|
-
.get(
|
|
239
|
+
const row = db
|
|
240
|
+
.query("SELECT COALESCE(MAX(version), 0) AS version FROM schema_migrations;")
|
|
241
|
+
.get() as { version: number } | null;
|
|
25
242
|
|
|
26
|
-
return
|
|
243
|
+
return row?.version ?? 0;
|
|
244
|
+
}
|
|
245
|
+
|
|
246
|
+
/**
 * Returns every recorded migration with a non-null version, oldest first.
 * Returns an empty list when the bookkeeping table does not exist yet.
 */
function listAppliedMigrations(db: Database): AppliedMigrationRow[] {
  if (!migrationTableExists(db)) {
    return [];
  }

  return db
    .query(
      `
      SELECT version, name, applied_at
      FROM schema_migrations
      WHERE version IS NOT NULL
      ORDER BY version ASC;
      `,
    )
    .all() as AppliedMigrationRow[];
}
|
|
262
|
+
|
|
263
|
+
function migrationForVersion(version: number): Migration {
|
|
264
|
+
const found = MIGRATIONS.find((migration) => migration.version === version);
|
|
265
|
+
|
|
266
|
+
if (!found) {
|
|
267
|
+
throw new Error(`No migration definition found for version ${version}.`);
|
|
268
|
+
}
|
|
269
|
+
|
|
270
|
+
return found;
|
|
271
|
+
}
|
|
272
|
+
|
|
273
|
+
function recordMigration(db: Database, migration: Migration): void {
|
|
274
|
+
db.query("INSERT INTO schema_migrations (version, name, applied_at) VALUES (?, ?, ?);").run(
|
|
275
|
+
migration.version,
|
|
276
|
+
migration.name,
|
|
277
|
+
Date.now(),
|
|
278
|
+
);
|
|
27
279
|
}
|
|
28
280
|
|
|
29
281
|
export function migrateDatabase(db: Database): void {
|
|
30
|
-
|
|
31
|
-
|
|
282
|
+
runExclusive(db, (): void => {
|
|
283
|
+
ensureMigrationTable(db);
|
|
284
|
+
ensureMigrationVersionColumn(db);
|
|
285
|
+
validateMigrationPlan();
|
|
286
|
+
|
|
287
|
+
const version: number = currentVersion(db);
|
|
288
|
+
|
|
289
|
+
for (const migration of MIGRATIONS) {
|
|
290
|
+
if (migration.version <= version) {
|
|
291
|
+
continue;
|
|
292
|
+
}
|
|
293
|
+
|
|
294
|
+
migration.up(db);
|
|
295
|
+
recordMigration(db, migration);
|
|
296
|
+
}
|
|
297
|
+
});
|
|
298
|
+
}
|
|
299
|
+
|
|
300
|
+
export function describeMigrations(db: Database): MigrationStatus {
|
|
301
|
+
ensureMigrationTable(db);
|
|
302
|
+
ensureMigrationVersionColumn(db);
|
|
303
|
+
validateMigrationPlan();
|
|
304
|
+
|
|
305
|
+
const appliedRows: AppliedMigrationRow[] = listAppliedMigrations(db);
|
|
306
|
+
const latestVersion: number = MIGRATIONS[MIGRATIONS.length - 1]?.version ?? 0;
|
|
307
|
+
const activeVersion: number = appliedRows[appliedRows.length - 1]?.version ?? 0;
|
|
308
|
+
const appliedVersions = new Set(appliedRows.map((row) => row.version));
|
|
309
|
+
|
|
310
|
+
return {
|
|
311
|
+
currentVersion: activeVersion,
|
|
312
|
+
latestVersion,
|
|
313
|
+
applied: appliedRows.map((row) => ({
|
|
314
|
+
version: row.version,
|
|
315
|
+
name: row.name,
|
|
316
|
+
appliedAt: row.applied_at,
|
|
317
|
+
})),
|
|
318
|
+
pending: MIGRATIONS.filter((migration) => !appliedVersions.has(migration.version)).map((migration) => ({
|
|
319
|
+
version: migration.version,
|
|
320
|
+
name: migration.name,
|
|
321
|
+
})),
|
|
322
|
+
};
|
|
323
|
+
}
|
|
324
|
+
|
|
325
|
+
/**
 * Rolls the database back to targetVersion by running each applied
 * migration's down() in descending version order and deleting its bookkeeping
 * row, all inside one exclusive transaction. Rolling back to the current
 * version is a no-op; rolling back to 0 reverts everything.
 *
 * @throws Error when targetVersion is negative or non-integer, when it
 *   exceeds the current version, or when an applied version has no
 *   corresponding migration definition.
 */
export function rollbackDatabase(db: Database, targetVersion: number): RollbackSummary {
  if (!Number.isInteger(targetVersion) || targetVersion < 0) {
    throw new Error("Rollback target version must be a non-negative integer.");
  }

  return runExclusive(db, (): RollbackSummary => {
    ensureMigrationTable(db);
    ensureMigrationVersionColumn(db);
    validateMigrationPlan();

    const fromVersion: number = currentVersion(db);
    if (targetVersion > fromVersion) {
      throw new Error(`Cannot roll back to version ${targetVersion}; current version is ${fromVersion}.`);
    }

    // Everything strictly above the target, newest first, so each down()
    // runs against the schema state its up() produced.
    const appliedDescending = db
      .query(
        `
        SELECT version, name, applied_at
        FROM schema_migrations
        WHERE version IS NOT NULL AND version > ?
        ORDER BY version DESC;
        `,
      )
      .all(targetVersion) as AppliedMigrationRow[];

    const rolledBackMigrations: string[] = [];

    for (const row of appliedDescending) {
      const migration: Migration = migrationForVersion(row.version);
      migration.down(db);
      db.query("DELETE FROM schema_migrations WHERE version = ?;").run(row.version);
      rolledBackMigrations.push(migration.name);
    }

    return {
      fromVersion,
      toVersion: targetVersion,
      rolledBack: appliedDescending.length,
      rolledBackMigrations,
    };
  });
}
|
package/src/storage/schema.ts
CHANGED
package/src/storage/types.ts
CHANGED
package/src/sync/service.ts
CHANGED
|
@@ -41,11 +41,19 @@ interface EventPayload {
|
|
|
41
41
|
readonly fields?: Record<string, unknown>;
|
|
42
42
|
}
|
|
43
43
|
|
|
44
|
+
function isObjectRecord(value: unknown): value is Record<string, unknown> {
|
|
45
|
+
return typeof value === "object" && value !== null && !Array.isArray(value);
|
|
46
|
+
}
|
|
47
|
+
|
|
44
48
|
function parsePayload(rawPayload: string): EventPayload {
|
|
45
49
|
try {
|
|
46
50
|
const parsed: unknown = JSON.parse(rawPayload);
|
|
47
51
|
|
|
48
|
-
if (
|
|
52
|
+
if (isObjectRecord(parsed)) {
|
|
53
|
+
if ("fields" in parsed && !isObjectRecord(parsed.fields)) {
|
|
54
|
+
return {};
|
|
55
|
+
}
|
|
56
|
+
|
|
49
57
|
return parsed as EventPayload;
|
|
50
58
|
}
|
|
51
59
|
} catch {
|