@tashks/core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/src/query.d.ts +294 -0
- package/dist/src/query.d.ts.map +1 -0
- package/dist/src/query.js +303 -0
- package/dist/src/query.test.d.ts +2 -0
- package/dist/src/query.test.d.ts.map +1 -0
- package/dist/src/query.test.js +673 -0
- package/dist/src/repository.d.ts +100 -0
- package/dist/src/repository.d.ts.map +1 -0
- package/dist/src/repository.js +742 -0
- package/dist/src/repository.test.d.ts +2 -0
- package/dist/src/repository.test.d.ts.map +1 -0
- package/dist/src/repository.test.js +1439 -0
- package/dist/src/schema.d.ts +460 -0
- package/dist/src/schema.d.ts.map +1 -0
- package/dist/src/schema.js +166 -0
- package/dist/src/schema.test.d.ts +2 -0
- package/dist/src/schema.test.d.ts.map +1 -0
- package/dist/src/schema.test.js +123 -0
- package/dist/tsconfig.tsbuildinfo +1 -0
- package/package.json +43 -0
|
@@ -0,0 +1,1439 @@
|
|
|
1
|
+
import { describe, expect, it } from "bun:test";
|
|
2
|
+
import { chmod, mkdtemp, mkdir, readFile, rm, writeFile, } from "node:fs/promises";
|
|
3
|
+
import { tmpdir } from "node:os";
|
|
4
|
+
import { join } from "node:path";
|
|
5
|
+
import * as Cause from "effect/Cause";
|
|
6
|
+
import * as Effect from "effect/Effect";
|
|
7
|
+
import * as Exit from "effect/Exit";
|
|
8
|
+
import * as Option from "effect/Option";
|
|
9
|
+
import YAML from "yaml";
|
|
10
|
+
import { applyTaskPatch, applyWorkLogPatch, createTaskFromInput, discoverHooksForEvent, generateTaskId, parseTaskRecord, parseWorkLogRecord, TaskRepository, TaskRepositoryLive, todayIso, } from "./repository.js";
|
|
11
|
+
// Matches an ISO-8601 calendar date (YYYY-MM-DD), the format todayIso() emits.
const isoDatePattern = /^\d{4}-\d{2}-\d{2}$/;
// Canonical fully-populated valid Task fixture. Returns a fresh object per
// call so individual tests can mutate their copy without cross-test bleed.
const baseTask = () => ({
    id: "revive-unzen",
    title: "Revive unzen server",
    status: "active",
    area: "infrastructure",
    project: "homelab",
    tags: ["hardware", "weekend"],
    created: "2026-02-16",
    updated: "2026-02-20",
    urgency: "high",
    energy: "high",
    due: "2026-03-01",
    context: "Mini-ITX build",
    subtasks: [
        { text: "Test PSU", done: true },
        { text: "Reassemble drives", done: false },
    ],
    blocked_by: [],
    estimated_minutes: 240,
    actual_minutes: null,
    completed_at: null,
    last_surfaced: "2026-02-19",
    defer_until: null,
    nudge_count: 2,
    recurrence: "FREQ=WEEKLY;BYDAY=MO",
    recurrence_trigger: "clock",
    recurrence_strategy: "replace",
    recurrence_last_generated: "2026-02-24T08:00:00Z",
});
|
|
41
|
+
// Canonical valid WorkLogEntry fixture referencing the baseTask id.
// Fresh object per call, like baseTask.
const baseWorkLogEntry = () => ({
    id: "revive-unzen-20260220T0900",
    task_id: "revive-unzen",
    started_at: "2026-02-20T09:00:00Z",
    ended_at: "2026-02-20T10:30:00Z",
    date: "2026-02-20",
});
|
|
48
|
+
// Failing effect used as the default for every repository method a test does
// not expect to be invoked — calling one fails the test loudly.
const unexpectedCall = () => Effect.fail("unexpected method call");
// Builds a stub TaskRepository service. All methods fail via unexpectedCall()
// except listTasks (succeeds with an empty list); pass overrides to replace
// exactly the methods a test exercises.
const makeRepositoryService = (overrides = {}) => {
    const strictMethods = [
        "getTask",
        "createTask",
        "updateTask",
        "completeTask",
        "generateNextRecurrence",
        "processDueRecurrences",
        "deleteTask",
        "setDailyHighlight",
        "listStale",
        "listWorkLog",
        "createWorkLogEntry",
        "updateWorkLogEntry",
        "deleteWorkLogEntry",
    ];
    const strictDefaults = Object.fromEntries(strictMethods.map((name) => [name, () => unexpectedCall()]));
    return {
        listTasks: () => Effect.succeed([]),
        ...strictDefaults,
        ...overrides,
    };
};
|
|
66
|
+
// Runs repository.listTasks(filters) against the live layer rooted at
// dataDir and resolves to the listed tasks.
const runListTasks = (dataDir, filters) => Effect.runPromise(TaskRepository.pipe(
    Effect.flatMap((repository) => repository.listTasks(filters)),
    Effect.provide(TaskRepositoryLive({ dataDir })),
));
|
|
70
|
+
// Promise adapter around the hook-discovery effect for async tests.
const runDiscoverHooks = (event, options) => {
    const discovery = discoverHooksForEvent(event, options);
    return Effect.runPromise(discovery);
};
|
|
71
|
+
// Shifts an ISO calendar date (YYYY-MM-DD) by a signed number of days.
// Anchors the date at UTC midnight so local-timezone DST transitions can
// never move the result to the wrong day; setUTCDate handles month and
// year rollover in both directions.
const addDaysToIsoDate = (date, days) => {
    const shifted = new Date(`${date}T00:00:00.000Z`);
    shifted.setUTCDate(shifted.getUTCDate() + days);
    const [calendarDay] = shifted.toISOString().split("T");
    return calendarDay;
};
|
|
76
|
+
// Shared wiring: acquire the TaskRepository service, apply the caller's
// program to it, and provide the live layer built from options.
const repositoryEffectFor = (options, run) => TaskRepository.pipe(
    Effect.flatMap((repository) => run(repository)),
    Effect.provide(TaskRepositoryLive(options)),
);
// Run a repository program and resolve with its success value.
const runRepositoryWithOptions = (options, run) => Effect.runPromise(repositoryEffectFor(options, run));
const runRepository = (dataDir, run) => runRepositoryWithOptions({ dataDir }, run);
// Run a repository program and resolve with its Exit (for failure assertions).
const runRepositoryWithOptionsExit = (options, run) => Effect.runPromiseExit(repositoryEffectFor(options, run));
const runRepositoryExit = (dataDir, run) => runRepositoryWithOptionsExit({ dataDir }, run);
|
|
86
|
+
// Fixture helper: persist each task as <dataDir>/tasks/<id>.yaml, creating
// the directory first. Writes run concurrently.
const writeTaskFiles = async (dataDir, tasks) => {
    const tasksDir = join(dataDir, "tasks");
    await mkdir(tasksDir, { recursive: true });
    const pendingWrites = tasks.map((task) => {
        const taskPath = join(tasksDir, `${task.id}.yaml`);
        return writeFile(taskPath, YAML.stringify(task), "utf8");
    });
    await Promise.all(pendingWrites);
};
|
|
91
|
+
// Fixture helper: persist each entry as <dataDir>/work-log/<id>.yaml,
// creating the directory first. Writes run concurrently.
const writeWorkLogFiles = async (dataDir, entries) => {
    const workLogDir = join(dataDir, "work-log");
    await mkdir(workLogDir, { recursive: true });
    const pendingWrites = entries.map((entry) => {
        const entryPath = join(workLogDir, `${entry.id}.yaml`);
        return writeFile(entryPath, YAML.stringify(entry), "utf8");
    });
    await Promise.all(pendingWrites);
};
|
|
96
|
+
// Writes a hand-authored YAML fixture verbatim (no YAML serialization),
// creating the target directory if needed.
const writeRawYamlFile = async (directory, fileName, source) => {
    await mkdir(directory, { recursive: true });
    const targetPath = join(directory, fileName);
    await writeFile(targetPath, source, "utf8");
};
|
|
100
|
+
// Creates a hook script and marks it executable (0o755) so hook discovery
// treats it as runnable. chmod runs after the write so the final mode is
// exact and unaffected by the process umask.
const writeExecutableHook = async (hooksDir, fileName, source) => {
    await mkdir(hooksDir, { recursive: true });
    const scriptPath = join(hooksDir, fileName);
    await writeFile(scriptPath, source, "utf8");
    await chmod(scriptPath, 0o755);
};
|
|
106
|
+
// Unit tests for the pure helpers exported by repository.js, plus the
// filesystem-backed hook-discovery helper.
describe("repository pure helpers", () => {
    it("generateTaskId slugifies title and appends a six character suffix", () => {
        const id = generateTaskId(" Repair ARRAY!!! ");
        expect(id).toMatch(/^repair-array-[a-z0-9]{6}$/);
    });
    it("generateTaskId falls back to task slug when title has no slug content", () => {
        // A title of only punctuation produces an empty slug; the id falls
        // back to the "task-" prefix.
        const id = generateTaskId("!!!");
        expect(id).toMatch(/^task-[a-z0-9]{6}$/);
    });
    it("todayIso returns an ISO calendar date", () => {
        expect(todayIso()).toMatch(isoDatePattern);
    });
    it("parseTaskRecord returns task for valid data and null for invalid data", () => {
        const task = baseTask();
        expect(parseTaskRecord(task)).toEqual(task);
        expect(parseTaskRecord({ ...task, status: "invalid-status" })).toBeNull();
    });
    it("parseWorkLogRecord returns entry for valid data and null for invalid data", () => {
        const entry = baseWorkLogEntry();
        expect(parseWorkLogRecord(entry)).toEqual(entry);
        // ended_at must be a string or null, never a number.
        expect(parseWorkLogRecord({ ...entry, ended_at: 123 })).toBeNull();
    });
    it("createTaskFromInput applies defaults and generates a task id", () => {
        const created = createTaskFromInput({ title: "Capture outage notes" });
        expect(created.id).toMatch(/^capture-outage-notes-[a-z0-9]{6}$/);
        // Every field not supplied in the input gets its documented default.
        expect(created).toMatchObject({
            title: "Capture outage notes",
            status: "active",
            area: "personal",
            project: null,
            tags: [],
            urgency: "medium",
            energy: "medium",
            due: null,
            context: "",
            subtasks: [],
            blocked_by: [],
            estimated_minutes: null,
            actual_minutes: null,
            completed_at: null,
            last_surfaced: null,
            defer_until: null,
            nudge_count: 0,
            recurrence: null,
            recurrence_trigger: "clock",
            recurrence_strategy: "replace",
            recurrence_last_generated: null,
        });
        expect(created.created).toMatch(isoDatePattern);
        expect(created.updated).toMatch(isoDatePattern);
    });
    it("applyTaskPatch merges patch fields and always refreshes updated date", () => {
        const task = baseTask();
        const patched = applyTaskPatch(task, {
            title: "Repair array",
            tags: ["hardware"],
            updated: "1999-01-01",
        });
        expect(patched.title).toBe("Repair array");
        expect(patched.tags).toEqual(["hardware"]);
        // A caller-supplied "updated" value is ignored: the patch always
        // stamps today's date.
        expect(patched.updated).toBe(todayIso());
        expect(patched.id).toBe(task.id);
        expect(patched.created).toBe(task.created);
    });
    it("applyWorkLogPatch merges only provided fields", () => {
        const entry = baseWorkLogEntry();
        const patched = applyWorkLogPatch(entry, {
            ended_at: null,
            date: "2026-02-21",
        });
        expect(patched).toEqual({
            ...entry,
            ended_at: null,
            date: "2026-02-21",
        });
    });
    it("discoverHooksForEvent returns empty when the hook directory does not exist", async () => {
        const dataDir = await mkdtemp(join(tmpdir(), "tasks-hooks-missing-"));
        try {
            const hooks = await runDiscoverHooks("create", {
                hooksDir: join(dataDir, "hooks"),
            });
            expect(hooks).toEqual([]);
        }
        finally {
            await rm(dataDir, { recursive: true, force: true });
        }
    });
    it("discoverHooksForEvent finds only executable matching hooks in lexicographic order", async () => {
        const dataDir = await mkdtemp(join(tmpdir(), "tasks-hooks-discovery-"));
        const hooksDir = join(dataDir, "hooks");
        try {
            await mkdir(hooksDir, { recursive: true });
            // Matching and executable: discovered.
            await writeFile(join(hooksDir, "on-create"), "#!/usr/bin/env bash\n", "utf8");
            await chmod(join(hooksDir, "on-create"), 0o755);
            // Matching ".<suffix>" variant, executable: discovered after the base name.
            await writeFile(join(hooksDir, "on-create.10-second"), "#!/usr/bin/env bash\n", "utf8");
            await chmod(join(hooksDir, "on-create.10-second"), 0o755);
            // Matching name but not executable: skipped.
            await writeFile(join(hooksDir, "on-create.20-not-executable"), "#!/usr/bin/env bash\n", "utf8");
            await chmod(join(hooksDir, "on-create.20-not-executable"), 0o644);
            // "on-create-helper" is not "on-create" or "on-create.<suffix>": skipped.
            await writeFile(join(hooksDir, "on-create-helper"), "#!/usr/bin/env bash\n", "utf8");
            await chmod(join(hooksDir, "on-create-helper"), 0o755);
            // Different event: skipped.
            await writeFile(join(hooksDir, "on-modify"), "#!/usr/bin/env bash\n", "utf8");
            await chmod(join(hooksDir, "on-modify"), 0o755);
            const hooks = await runDiscoverHooks("create", { hooksDir });
            expect(hooks).toEqual([
                join(hooksDir, "on-create"),
                join(hooksDir, "on-create.10-second"),
            ]);
        }
        finally {
            await rm(dataDir, { recursive: true, force: true });
        }
    });
    it("discoverHooksForEvent resolves the XDG hook directory by default", async () => {
        const xdgConfigHome = await mkdtemp(join(tmpdir(), "tasks-hooks-xdg-"));
        const hooksDir = join(xdgConfigHome, "tashks", "hooks");
        try {
            await mkdir(hooksDir, { recursive: true });
            await writeFile(join(hooksDir, "on-create"), "#!/usr/bin/env bash\n", "utf8");
            await chmod(join(hooksDir, "on-create"), 0o755);
            // No hooksDir override: XDG_CONFIG_HOME wins over HOME.
            const hooks = await runDiscoverHooks("create", {
                env: {
                    XDG_CONFIG_HOME: xdgConfigHome,
                    HOME: "/home/ignored",
                },
            });
            expect(hooks).toEqual([join(hooksDir, "on-create")]);
        }
        finally {
            await rm(xdgConfigHome, { recursive: true, force: true });
        }
    });
});
|
|
239
|
+
// Filesystem-backed tests for TaskRepository.listTasks: default sort order
// and the full filter matrix.
describe("TaskRepository listTasks", () => {
    it("returns all tasks sorted by updated desc then title asc when no filters are provided", async () => {
        const dataDir = await mkdtemp(join(tmpdir(), "tasks-list-sort-"));
        try {
            // "alpha" and "bravo" share an updated date, so title breaks the tie.
            const tasks = [
                {
                    ...baseTask(),
                    id: "zeta",
                    title: "Zeta",
                    updated: "2026-02-20",
                },
                {
                    ...baseTask(),
                    id: "alpha",
                    title: "Alpha",
                    updated: "2026-02-21",
                },
                {
                    ...baseTask(),
                    id: "bravo",
                    title: "Bravo",
                    updated: "2026-02-21",
                },
            ];
            await writeTaskFiles(dataDir, tasks);
            const result = await runListTasks(dataDir);
            expect(result.map((task) => task.id)).toEqual(["alpha", "bravo", "zeta"]);
        }
        finally {
            await rm(dataDir, { recursive: true, force: true });
        }
    });
    it("applies status, area, project, tags, due, defer_until, and unblocked filters", async () => {
        const dataDir = await mkdtemp(join(tmpdir(), "tasks-list-filters-"));
        try {
            // One fixture satisfies every filter; each of the others fails
            // exactly one criterion (see per-fixture comments).
            const tasks = [
                // Passes all filters. Its only blocker has status "done",
                // so unblocked_only still accepts it.
                {
                    ...baseTask(),
                    id: "match-task",
                    title: "Match task",
                    status: "active",
                    area: "work",
                    project: "homelab",
                    tags: ["hardware", "weekend"],
                    due: "2026-03-05",
                    defer_until: "2026-03-01",
                    updated: "2026-02-25",
                    blocked_by: ["done-blocker"],
                },
                // Excluded: status "done" (also serves as the completed blocker).
                {
                    ...baseTask(),
                    id: "done-blocker",
                    title: "Done blocker",
                    status: "done",
                    area: "work",
                    project: "homelab",
                    tags: ["ops"],
                    due: null,
                    defer_until: null,
                    updated: "2026-02-22",
                    blocked_by: [],
                },
                // Excluded: project is null (also serves as the live blocker).
                {
                    ...baseTask(),
                    id: "active-blocker",
                    title: "Active blocker",
                    status: "active",
                    area: "work",
                    project: null,
                    tags: ["ops"],
                    due: null,
                    defer_until: null,
                    updated: "2026-02-23",
                    blocked_by: [],
                },
                // Excluded by unblocked_only: blocked by a still-active task.
                {
                    ...baseTask(),
                    id: "blocked-task",
                    title: "Blocked task",
                    status: "active",
                    area: "work",
                    project: "homelab",
                    tags: ["hardware"],
                    due: "2026-03-05",
                    defer_until: "2026-03-01",
                    updated: "2026-02-24",
                    blocked_by: ["active-blocker"],
                },
                // Excluded: due 2026-03-02 is before due_after 2026-03-03.
                {
                    ...baseTask(),
                    id: "due-too-early",
                    title: "Due too early",
                    status: "active",
                    area: "work",
                    project: "homelab",
                    tags: ["hardware"],
                    due: "2026-03-02",
                    defer_until: "2026-03-01",
                    updated: "2026-02-21",
                    blocked_by: [],
                },
                // Excluded: defer_until 2026-03-10 is after the query date 2026-03-05.
                {
                    ...baseTask(),
                    id: "deferred-past-date",
                    title: "Deferred past date",
                    status: "active",
                    area: "work",
                    project: "homelab",
                    tags: ["hardware"],
                    due: "2026-03-05",
                    defer_until: "2026-03-10",
                    updated: "2026-02-21",
                    blocked_by: [],
                },
                // Excluded: status "backlog" does not match the "active" filter.
                {
                    ...baseTask(),
                    id: "wrong-status",
                    title: "Wrong status",
                    status: "backlog",
                    area: "work",
                    project: "homelab",
                    tags: ["hardware"],
                    due: "2026-03-05",
                    defer_until: "2026-03-01",
                    updated: "2026-02-21",
                    blocked_by: [],
                },
                // Excluded: area "personal" does not match "work".
                {
                    ...baseTask(),
                    id: "wrong-area",
                    title: "Wrong area",
                    status: "active",
                    area: "personal",
                    project: "homelab",
                    tags: ["hardware"],
                    due: "2026-03-05",
                    defer_until: "2026-03-01",
                    updated: "2026-02-21",
                    blocked_by: [],
                },
                // Excluded: project "side-quest" does not match "homelab".
                {
                    ...baseTask(),
                    id: "wrong-project",
                    title: "Wrong project",
                    status: "active",
                    area: "work",
                    project: "side-quest",
                    tags: ["hardware"],
                    due: "2026-03-05",
                    defer_until: "2026-03-01",
                    updated: "2026-02-21",
                    blocked_by: [],
                },
                // Excluded: shares no tag with the requested tag list.
                {
                    ...baseTask(),
                    id: "wrong-tag",
                    title: "Wrong tag",
                    status: "active",
                    area: "work",
                    project: "homelab",
                    tags: ["errands"],
                    due: "2026-03-05",
                    defer_until: "2026-03-01",
                    updated: "2026-02-21",
                    blocked_by: [],
                },
                // Excluded: due 2026-03-09 is after due_before 2026-03-07.
                {
                    ...baseTask(),
                    id: "due-too-late",
                    title: "Due too late",
                    status: "active",
                    area: "work",
                    project: "homelab",
                    tags: ["hardware"],
                    due: "2026-03-09",
                    defer_until: "2026-03-01",
                    updated: "2026-02-21",
                    blocked_by: [],
                },
            ];
            await writeTaskFiles(dataDir, tasks);
            // match-task has only "hardware" from this tag list, so the tag
            // filter behaves as any-of, not all-of.
            const result = await runListTasks(dataDir, {
                status: "active",
                area: "work",
                project: "homelab",
                tags: ["hardware", "important"],
                due_after: "2026-03-03",
                due_before: "2026-03-07",
                unblocked_only: true,
                date: "2026-03-05",
            });
            expect(result.map((task) => task.id)).toEqual(["match-task"]);
        }
        finally {
            await rm(dataDir, { recursive: true, force: true });
        }
    });
});
|
|
437
|
+
// Integration tests that feed hand-written YAML (not round-tripped through
// YAML.stringify) to the repository, covering both .yaml and .yml extensions.
// NOTE(review): the leading whitespace inside these YAML template literals was
// lost in the diff rendering this file was recovered from; the indentation
// below is reconstructed to the minimal valid YAML the assertions require —
// confirm against the original package contents.
describe("TaskRepository integration with literal YAML files", () => {
    it("decodes hand-written task YAML files and supports .yaml/.yml extensions", async () => {
        const dataDir = await mkdtemp(join(tmpdir(), "tasks-literal-task-yaml-"));
        try {
            await writeRawYamlFile(join(dataDir, "tasks"), "literal-primary.yaml", `id: literal-primary
title: Literal primary
status: active
area: code
project: tasks
tags:
  - yaml
  - literal
created: '2026-02-20'
updated: '2026-02-26'
urgency: high
energy: low
due: 2026-03-01
context: |
  Investigate parser behavior from sample files.
  Keep YAML formatting realistic.
subtasks:
  - text: Capture fixture
    done: true
  - text: Verify list sorting
    done: false
blocked_by: []
estimated_minutes: 30
actual_minutes: null
completed_at: null
last_surfaced: 2026-02-25
defer_until: null
nudge_count: 1
recurrence: FREQ=WEEKLY;BYDAY=MO
recurrence_trigger: clock
recurrence_strategy: replace
recurrence_last_generated: 2026-02-24T08:00:00Z
`);
            await writeRawYamlFile(join(dataDir, "tasks"), "literal-secondary.yml", `id: literal-secondary
title: Literal secondary
status: active
area: code
project: null
tags: []
created: '2026-02-20'
updated: '2026-02-25'
urgency: medium
energy: medium
due: null
context: ''
subtasks: []
blocked_by:
  - literal-primary
estimated_minutes: null
actual_minutes: null
completed_at: null
last_surfaced: null
defer_until: null
nudge_count: 0
recurrence: null
recurrence_trigger: completion
recurrence_strategy: accumulate
recurrence_last_generated: null
`);
            // primary (updated 2026-02-26) sorts before secondary (2026-02-25).
            const listed = await runListTasks(dataDir);
            expect(listed.map((task) => task.id)).toEqual([
                "literal-primary",
                "literal-secondary",
            ]);
            expect(listed[0]).toMatchObject({
                id: "literal-primary",
                area: "code",
                project: "tasks",
                tags: ["yaml", "literal"],
                due: "2026-03-01",
                subtasks: [
                    { text: "Capture fixture", done: true },
                    { text: "Verify list sorting", done: false },
                ],
            });
            // The .yml file is decoded too, including its non-default
            // recurrence settings.
            const fetched = await runRepository(dataDir, (repository) => repository.getTask("literal-secondary"));
            expect(fetched).toMatchObject({
                id: "literal-secondary",
                project: null,
                blocked_by: ["literal-primary"],
                recurrence_trigger: "completion",
                recurrence_strategy: "accumulate",
            });
        }
        finally {
            await rm(dataDir, { recursive: true, force: true });
        }
    });
    it("decodes hand-written work log YAML files and applies date filtering", async () => {
        const dataDir = await mkdtemp(join(tmpdir(), "tasks-literal-worklog-yaml-"));
        try {
            await writeRawYamlFile(join(dataDir, "work-log"), "literal-primary-20260226T0900.yaml", `id: literal-primary-20260226T0900
task_id: literal-primary
started_at: 2026-02-26T09:00:00Z
ended_at: 2026-02-26T10:15:00Z
date: 2026-02-26
`);
            await writeRawYamlFile(join(dataDir, "work-log"), "literal-primary-20260225T0900.yml", `id: literal-primary-20260225T0900
task_id: literal-primary
started_at: 2026-02-25T09:00:00Z
ended_at: null
date: 2026-02-25
`);
            // Unfiltered listing: newest date first.
            const listed = await runRepository(dataDir, (repository) => repository.listWorkLog());
            expect(listed.map((entry) => entry.id)).toEqual([
                "literal-primary-20260226T0900",
                "literal-primary-20260225T0900",
            ]);
            expect(listed[0]).toMatchObject({
                task_id: "literal-primary",
                date: "2026-02-26",
                ended_at: "2026-02-26T10:15:00Z",
            });
            // Date filter returns only the matching entry, fully decoded
            // (including the null ended_at of the still-open session).
            const filtered = await runRepository(dataDir, (repository) => repository.listWorkLog({ date: "2026-02-25" }));
            expect(filtered).toEqual([
                {
                    id: "literal-primary-20260225T0900",
                    task_id: "literal-primary",
                    started_at: "2026-02-25T09:00:00Z",
                    ended_at: null,
                    date: "2026-02-25",
                },
            ]);
        }
        finally {
            await rm(dataDir, { recursive: true, force: true });
        }
    });
});
|
|
570
|
+
describe("TaskRepository service", () => {
|
|
571
|
+
it("listStale returns only active tasks staler than the requested threshold", async () => {
|
|
572
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-list-stale-"));
|
|
573
|
+
try {
|
|
574
|
+
const today = todayIso();
|
|
575
|
+
const staleOld = addDaysToIsoDate(today, -30);
|
|
576
|
+
const staleLessOld = addDaysToIsoDate(today, -20);
|
|
577
|
+
const atBoundary = addDaysToIsoDate(today, -14);
|
|
578
|
+
const tasks = [
|
|
579
|
+
{
|
|
580
|
+
...baseTask(),
|
|
581
|
+
id: "stale-a",
|
|
582
|
+
title: "Stale A",
|
|
583
|
+
status: "active",
|
|
584
|
+
updated: staleOld,
|
|
585
|
+
},
|
|
586
|
+
{
|
|
587
|
+
...baseTask(),
|
|
588
|
+
id: "stale-b",
|
|
589
|
+
title: "Stale B",
|
|
590
|
+
status: "active",
|
|
591
|
+
updated: staleLessOld,
|
|
592
|
+
},
|
|
593
|
+
{
|
|
594
|
+
...baseTask(),
|
|
595
|
+
id: "boundary",
|
|
596
|
+
title: "Boundary",
|
|
597
|
+
status: "active",
|
|
598
|
+
updated: atBoundary,
|
|
599
|
+
},
|
|
600
|
+
{
|
|
601
|
+
...baseTask(),
|
|
602
|
+
id: "fresh",
|
|
603
|
+
title: "Fresh",
|
|
604
|
+
status: "active",
|
|
605
|
+
updated: today,
|
|
606
|
+
},
|
|
607
|
+
{
|
|
608
|
+
...baseTask(),
|
|
609
|
+
id: "done-stale",
|
|
610
|
+
title: "Done stale",
|
|
611
|
+
status: "done",
|
|
612
|
+
updated: staleOld,
|
|
613
|
+
},
|
|
614
|
+
];
|
|
615
|
+
await writeTaskFiles(dataDir, tasks);
|
|
616
|
+
const result = await runRepository(dataDir, (repository) => repository.listStale(14));
|
|
617
|
+
expect(result.map((task) => task.id)).toEqual(["stale-b", "stale-a"]);
|
|
618
|
+
}
|
|
619
|
+
finally {
|
|
620
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
621
|
+
}
|
|
622
|
+
});
|
|
623
|
+
it("supports dependency injection via TaskRepository tag", async () => {
|
|
624
|
+
const service = makeRepositoryService({
|
|
625
|
+
listTasks: () => Effect.succeed([baseTask()]),
|
|
626
|
+
});
|
|
627
|
+
const tasks = await Effect.runPromise(Effect.gen(function* () {
|
|
628
|
+
const repository = yield* TaskRepository;
|
|
629
|
+
return yield* repository.listTasks({ status: "active" });
|
|
630
|
+
}).pipe(Effect.provideService(TaskRepository, service)));
|
|
631
|
+
expect(tasks).toHaveLength(1);
|
|
632
|
+
expect(tasks[0]?.id).toBe("revive-unzen");
|
|
633
|
+
});
|
|
634
|
+
it("TaskRepositoryLive provides a service with all repository methods", async () => {
|
|
635
|
+
const methodNames = await Effect.runPromise(Effect.gen(function* () {
|
|
636
|
+
const repository = yield* TaskRepository;
|
|
637
|
+
return Object.keys(repository).sort();
|
|
638
|
+
}).pipe(Effect.provide(TaskRepositoryLive())));
|
|
639
|
+
expect(methodNames).toEqual([
|
|
640
|
+
"completeTask",
|
|
641
|
+
"createTask",
|
|
642
|
+
"createWorkLogEntry",
|
|
643
|
+
"deleteTask",
|
|
644
|
+
"deleteWorkLogEntry",
|
|
645
|
+
"generateNextRecurrence",
|
|
646
|
+
"getTask",
|
|
647
|
+
"listStale",
|
|
648
|
+
"listTasks",
|
|
649
|
+
"listWorkLog",
|
|
650
|
+
"processDueRecurrences",
|
|
651
|
+
"setDailyHighlight",
|
|
652
|
+
"updateTask",
|
|
653
|
+
"updateWorkLogEntry",
|
|
654
|
+
]);
|
|
655
|
+
});
|
|
656
|
+
it("createTask writes a task that can be retrieved by getTask", async () => {
|
|
657
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-create-get-"));
|
|
658
|
+
try {
|
|
659
|
+
const created = await runRepository(dataDir, (repository) => repository.createTask({
|
|
660
|
+
title: "Capture outage notes",
|
|
661
|
+
project: "ops",
|
|
662
|
+
tags: ["incident"],
|
|
663
|
+
area: "work",
|
|
664
|
+
}));
|
|
665
|
+
const storedSource = await readFile(join(dataDir, "tasks", `${created.id}.yaml`), "utf8");
|
|
666
|
+
const stored = YAML.parse(storedSource);
|
|
667
|
+
expect(created.id).toMatch(/^capture-outage-notes-[a-z0-9]{6}$/);
|
|
668
|
+
expect(stored).toEqual(created);
|
|
669
|
+
const fetched = await runRepository(dataDir, (repository) => repository.getTask(created.id));
|
|
670
|
+
expect(fetched).toEqual(created);
|
|
671
|
+
}
|
|
672
|
+
finally {
|
|
673
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
674
|
+
}
|
|
675
|
+
});
|
|
676
|
+
it("updateTask merges the patch, refreshes updated, and persists the result", async () => {
|
|
677
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-update-"));
|
|
678
|
+
try {
|
|
679
|
+
await writeTaskFiles(dataDir, [baseTask()]);
|
|
680
|
+
const updated = await runRepository(dataDir, (repository) => repository.updateTask("revive-unzen", {
|
|
681
|
+
title: "Repair array",
|
|
682
|
+
tags: ["storage"],
|
|
683
|
+
updated: "1999-01-01",
|
|
684
|
+
}));
|
|
685
|
+
expect(updated.title).toBe("Repair array");
|
|
686
|
+
expect(updated.tags).toEqual(["storage"]);
|
|
687
|
+
expect(updated.updated).toBe(todayIso());
|
|
688
|
+
const fetched = await runRepository(dataDir, (repository) => repository.getTask("revive-unzen"));
|
|
689
|
+
expect(fetched).toEqual(updated);
|
|
690
|
+
}
|
|
691
|
+
finally {
|
|
692
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
693
|
+
}
|
|
694
|
+
});
|
|
695
|
+
it("createTask applies on-create mutating hooks in order", async () => {
|
|
696
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-create-hook-"));
|
|
697
|
+
const hooksDir = join(dataDir, "hooks");
|
|
698
|
+
try {
|
|
699
|
+
await writeExecutableHook(hooksDir, "on-create", `#!/usr/bin/env node
|
|
700
|
+
const fs = require("node:fs");
|
|
701
|
+
const task = JSON.parse(fs.readFileSync(0, "utf8"));
|
|
702
|
+
task.tags = [...task.tags, "hooked"];
|
|
703
|
+
task.context = "Mutated by on-create";
|
|
704
|
+
process.stdout.write(JSON.stringify(task));
|
|
705
|
+
`);
|
|
706
|
+
const created = await runRepositoryWithOptions({ dataDir, hooksDir }, (repository) => repository.createTask({
|
|
707
|
+
title: "Capture outage notes",
|
|
708
|
+
area: "work",
|
|
709
|
+
}));
|
|
710
|
+
expect(created.tags).toEqual(["hooked"]);
|
|
711
|
+
expect(created.context).toBe("Mutated by on-create");
|
|
712
|
+
const storedSource = await readFile(join(dataDir, "tasks", `${created.id}.yaml`), "utf8");
|
|
713
|
+
expect(YAML.parse(storedSource)).toEqual(created);
|
|
714
|
+
}
|
|
715
|
+
finally {
|
|
716
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
717
|
+
}
|
|
718
|
+
});
|
|
719
|
+
it("createTask hooks receive TASHKS_EVENT, TASHKS_ID, and TASHKS_DATA_DIR", async () => {
|
|
720
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-create-hook-env-"));
|
|
721
|
+
const hooksDir = join(dataDir, "hooks");
|
|
722
|
+
const markerPath = join(dataDir, "on-create-env.json");
|
|
723
|
+
try {
|
|
724
|
+
await writeExecutableHook(hooksDir, "on-create", `#!/usr/bin/env node
|
|
725
|
+
const fs = require("node:fs");
|
|
726
|
+
const task = JSON.parse(fs.readFileSync(0, "utf8"));
|
|
727
|
+
const payload = {
|
|
728
|
+
event: process.env.TASHKS_EVENT ?? null,
|
|
729
|
+
id: process.env.TASHKS_ID ?? null,
|
|
730
|
+
dataDir: process.env.TASHKS_DATA_DIR ?? null,
|
|
731
|
+
};
|
|
732
|
+
fs.writeFileSync(${JSON.stringify(markerPath)}, JSON.stringify(payload), "utf8");
|
|
733
|
+
process.stdout.write(JSON.stringify(task));
|
|
734
|
+
`);
|
|
735
|
+
const created = await runRepositoryWithOptions({ dataDir, hooksDir }, (repository) => repository.createTask({
|
|
736
|
+
title: "Capture outage notes",
|
|
737
|
+
}));
|
|
738
|
+
const payload = JSON.parse(await readFile(markerPath, "utf8"));
|
|
739
|
+
expect(payload).toEqual({
|
|
740
|
+
event: "create",
|
|
741
|
+
id: created.id,
|
|
742
|
+
dataDir,
|
|
743
|
+
});
|
|
744
|
+
}
|
|
745
|
+
finally {
|
|
746
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
747
|
+
}
|
|
748
|
+
});
|
|
749
|
+
it("updateTask applies on-modify mutating hooks using old and new task payload", async () => {
|
|
750
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-update-hook-"));
|
|
751
|
+
const hooksDir = join(dataDir, "hooks");
|
|
752
|
+
try {
|
|
753
|
+
await writeTaskFiles(dataDir, [baseTask()]);
|
|
754
|
+
await writeExecutableHook(hooksDir, "on-modify", `#!/usr/bin/env node
|
|
755
|
+
const fs = require("node:fs");
|
|
756
|
+
const payload = JSON.parse(fs.readFileSync(0, "utf8"));
|
|
757
|
+
payload.new.context = payload.old.title + " -> " + payload.new.title;
|
|
758
|
+
payload.new.tags = [...payload.new.tags, "modified-hook"];
|
|
759
|
+
process.stdout.write(JSON.stringify(payload.new));
|
|
760
|
+
`);
|
|
761
|
+
const updated = await runRepositoryWithOptions({ dataDir, hooksDir }, (repository) => repository.updateTask("revive-unzen", {
|
|
762
|
+
title: "Repair array",
|
|
763
|
+
}));
|
|
764
|
+
expect(updated.title).toBe("Repair array");
|
|
765
|
+
expect(updated.context).toBe("Revive unzen server -> Repair array");
|
|
766
|
+
expect(updated.tags).toEqual(["hardware", "weekend", "modified-hook"]);
|
|
767
|
+
}
|
|
768
|
+
finally {
|
|
769
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
770
|
+
}
|
|
771
|
+
});
|
|
772
|
+
it("updateTask hooks receive TASHKS_EVENT, TASHKS_ID, and TASHKS_DATA_DIR", async () => {
|
|
773
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-update-hook-env-"));
|
|
774
|
+
const hooksDir = join(dataDir, "hooks");
|
|
775
|
+
const markerPath = join(dataDir, "on-modify-env.json");
|
|
776
|
+
try {
|
|
777
|
+
await writeTaskFiles(dataDir, [baseTask()]);
|
|
778
|
+
await writeExecutableHook(hooksDir, "on-modify", `#!/usr/bin/env node
|
|
779
|
+
const fs = require("node:fs");
|
|
780
|
+
const payload = JSON.parse(fs.readFileSync(0, "utf8"));
|
|
781
|
+
const envPayload = {
|
|
782
|
+
event: process.env.TASHKS_EVENT ?? null,
|
|
783
|
+
id: process.env.TASHKS_ID ?? null,
|
|
784
|
+
dataDir: process.env.TASHKS_DATA_DIR ?? null,
|
|
785
|
+
};
|
|
786
|
+
fs.writeFileSync(${JSON.stringify(markerPath)}, JSON.stringify(envPayload), "utf8");
|
|
787
|
+
process.stdout.write(JSON.stringify(payload.new));
|
|
788
|
+
`);
|
|
789
|
+
await runRepositoryWithOptions({ dataDir, hooksDir }, (repository) => repository.updateTask("revive-unzen", {
|
|
790
|
+
title: "Repair array",
|
|
791
|
+
}));
|
|
792
|
+
const payload = JSON.parse(await readFile(markerPath, "utf8"));
|
|
793
|
+
expect(payload).toEqual({
|
|
794
|
+
event: "modify",
|
|
795
|
+
id: "revive-unzen",
|
|
796
|
+
dataDir,
|
|
797
|
+
});
|
|
798
|
+
}
|
|
799
|
+
finally {
|
|
800
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
801
|
+
}
|
|
802
|
+
});
|
|
803
|
+
it("createTask aborts when an on-create hook exits non-zero", async () => {
|
|
804
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-create-hook-fail-"));
|
|
805
|
+
const hooksDir = join(dataDir, "hooks");
|
|
806
|
+
try {
|
|
807
|
+
await writeExecutableHook(hooksDir, "on-create", `#!/usr/bin/env bash
|
|
808
|
+
echo "create hook rejected task" >&2
|
|
809
|
+
exit 23
|
|
810
|
+
`);
|
|
811
|
+
const result = await runRepositoryWithOptionsExit({ dataDir, hooksDir }, (repository) => repository.createTask({
|
|
812
|
+
title: "Should fail",
|
|
813
|
+
}));
|
|
814
|
+
expect(Exit.isFailure(result)).toBe(true);
|
|
815
|
+
if (Exit.isFailure(result)) {
|
|
816
|
+
const failure = Option.getOrNull(Cause.failureOption(result.cause));
|
|
817
|
+
expect(failure).toContain("create hook rejected task");
|
|
818
|
+
}
|
|
819
|
+
const listed = await runListTasks(dataDir);
|
|
820
|
+
expect(listed).toEqual([]);
|
|
821
|
+
}
|
|
822
|
+
finally {
|
|
823
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
824
|
+
}
|
|
825
|
+
});
|
|
826
|
+
it("updateTask aborts when an on-modify hook exits non-zero", async () => {
|
|
827
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-update-hook-fail-"));
|
|
828
|
+
const hooksDir = join(dataDir, "hooks");
|
|
829
|
+
try {
|
|
830
|
+
await writeTaskFiles(dataDir, [baseTask()]);
|
|
831
|
+
await writeExecutableHook(hooksDir, "on-modify", `#!/usr/bin/env bash
|
|
832
|
+
echo "modify hook rejected patch" >&2
|
|
833
|
+
exit 19
|
|
834
|
+
`);
|
|
835
|
+
const result = await runRepositoryWithOptionsExit({ dataDir, hooksDir }, (repository) => repository.updateTask("revive-unzen", {
|
|
836
|
+
title: "Repair array",
|
|
837
|
+
}));
|
|
838
|
+
expect(Exit.isFailure(result)).toBe(true);
|
|
839
|
+
if (Exit.isFailure(result)) {
|
|
840
|
+
const failure = Option.getOrNull(Cause.failureOption(result.cause));
|
|
841
|
+
expect(failure).toContain("modify hook rejected patch");
|
|
842
|
+
}
|
|
843
|
+
const fetched = await runRepository(dataDir, (repository) => repository.getTask("revive-unzen"));
|
|
844
|
+
expect(fetched.title).toBe("Revive unzen server");
|
|
845
|
+
}
|
|
846
|
+
finally {
|
|
847
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
848
|
+
}
|
|
849
|
+
});
|
|
850
|
+
it("updateTask aborts when an on-modify hook changes task id", async () => {
|
|
851
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-update-hook-id-"));
|
|
852
|
+
const hooksDir = join(dataDir, "hooks");
|
|
853
|
+
try {
|
|
854
|
+
await writeTaskFiles(dataDir, [baseTask()]);
|
|
855
|
+
await writeExecutableHook(hooksDir, "on-modify", `#!/usr/bin/env node
|
|
856
|
+
const fs = require("node:fs");
|
|
857
|
+
const payload = JSON.parse(fs.readFileSync(0, "utf8"));
|
|
858
|
+
payload.new.id = "mutated-id";
|
|
859
|
+
process.stdout.write(JSON.stringify(payload.new));
|
|
860
|
+
`);
|
|
861
|
+
const result = await runRepositoryWithOptionsExit({ dataDir, hooksDir }, (repository) => repository.updateTask("revive-unzen", {
|
|
862
|
+
title: "Repair array",
|
|
863
|
+
}));
|
|
864
|
+
expect(Exit.isFailure(result)).toBe(true);
|
|
865
|
+
if (Exit.isFailure(result)) {
|
|
866
|
+
const failure = Option.getOrNull(Cause.failureOption(result.cause));
|
|
867
|
+
expect(failure).toContain("on-modify hooks cannot change task id");
|
|
868
|
+
}
|
|
869
|
+
const fetched = await runRepository(dataDir, (repository) => repository.getTask("revive-unzen"));
|
|
870
|
+
expect(fetched.id).toBe("revive-unzen");
|
|
871
|
+
expect(fetched.title).toBe("Revive unzen server");
|
|
872
|
+
}
|
|
873
|
+
finally {
|
|
874
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
875
|
+
}
|
|
876
|
+
});
|
|
877
|
+
it("completeTask runs on-complete hooks with the completed task payload", async () => {
|
|
878
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-complete-hook-"));
|
|
879
|
+
const hooksDir = join(dataDir, "hooks");
|
|
880
|
+
const markerPath = join(dataDir, "on-complete.json");
|
|
881
|
+
try {
|
|
882
|
+
await writeTaskFiles(dataDir, [baseTask()]);
|
|
883
|
+
await writeExecutableHook(hooksDir, "on-complete", `#!/usr/bin/env node
|
|
884
|
+
const fs = require("node:fs");
|
|
885
|
+
const task = JSON.parse(fs.readFileSync(0, "utf8"));
|
|
886
|
+
fs.writeFileSync(${JSON.stringify(markerPath)}, JSON.stringify(task), "utf8");
|
|
887
|
+
`);
|
|
888
|
+
const completed = await runRepositoryWithOptions({ dataDir, hooksDir }, (repository) => repository.completeTask("revive-unzen"));
|
|
889
|
+
expect(completed.status).toBe("done");
|
|
890
|
+
const payload = JSON.parse(await readFile(markerPath, "utf8"));
|
|
891
|
+
expect(payload.id).toBe("revive-unzen");
|
|
892
|
+
expect(payload.status).toBe("done");
|
|
893
|
+
expect(payload.completed_at).toBe(completed.completed_at);
|
|
894
|
+
}
|
|
895
|
+
finally {
|
|
896
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
897
|
+
}
|
|
898
|
+
});
|
|
899
|
+
it("completeTask hooks receive TASHKS_EVENT, TASHKS_ID, and TASHKS_DATA_DIR", async () => {
|
|
900
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-complete-hook-env-"));
|
|
901
|
+
const hooksDir = join(dataDir, "hooks");
|
|
902
|
+
const markerPath = join(dataDir, "on-complete-env.json");
|
|
903
|
+
try {
|
|
904
|
+
await writeTaskFiles(dataDir, [baseTask()]);
|
|
905
|
+
await writeExecutableHook(hooksDir, "on-complete", `#!/usr/bin/env node
|
|
906
|
+
const fs = require("node:fs");
|
|
907
|
+
const task = JSON.parse(fs.readFileSync(0, "utf8"));
|
|
908
|
+
const payload = {
|
|
909
|
+
event: process.env.TASHKS_EVENT ?? null,
|
|
910
|
+
id: process.env.TASHKS_ID ?? null,
|
|
911
|
+
dataDir: process.env.TASHKS_DATA_DIR ?? null,
|
|
912
|
+
taskId: task.id,
|
|
913
|
+
};
|
|
914
|
+
fs.writeFileSync(${JSON.stringify(markerPath)}, JSON.stringify(payload), "utf8");
|
|
915
|
+
`);
|
|
916
|
+
await runRepositoryWithOptions({ dataDir, hooksDir }, (repository) => repository.completeTask("revive-unzen"));
|
|
917
|
+
const payload = JSON.parse(await readFile(markerPath, "utf8"));
|
|
918
|
+
expect(payload).toEqual({
|
|
919
|
+
event: "complete",
|
|
920
|
+
id: "revive-unzen",
|
|
921
|
+
dataDir,
|
|
922
|
+
taskId: "revive-unzen",
|
|
923
|
+
});
|
|
924
|
+
}
|
|
925
|
+
finally {
|
|
926
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
927
|
+
}
|
|
928
|
+
});
|
|
929
|
+
it("completeTask does not fail when an on-complete hook exits non-zero", async () => {
|
|
930
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-complete-hook-fail-"));
|
|
931
|
+
const hooksDir = join(dataDir, "hooks");
|
|
932
|
+
try {
|
|
933
|
+
await writeTaskFiles(dataDir, [baseTask()]);
|
|
934
|
+
await writeExecutableHook(hooksDir, "on-complete", `#!/usr/bin/env bash
|
|
935
|
+
echo "complete hook failed" >&2
|
|
936
|
+
exit 7
|
|
937
|
+
`);
|
|
938
|
+
const completed = await runRepositoryWithOptions({ dataDir, hooksDir }, (repository) => repository.completeTask("revive-unzen"));
|
|
939
|
+
expect(completed.status).toBe("done");
|
|
940
|
+
const fetched = await runRepository(dataDir, (repository) => repository.getTask("revive-unzen"));
|
|
941
|
+
expect(fetched.status).toBe("done");
|
|
942
|
+
}
|
|
943
|
+
finally {
|
|
944
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
945
|
+
}
|
|
946
|
+
});
|
|
947
|
+
it("completeTask marks the task done and does not create recurrence for clock-driven tasks", async () => {
|
|
948
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-complete-clock-"));
|
|
949
|
+
try {
|
|
950
|
+
await writeTaskFiles(dataDir, [
|
|
951
|
+
{
|
|
952
|
+
...baseTask(),
|
|
953
|
+
recurrence: "FREQ=DAILY",
|
|
954
|
+
recurrence_trigger: "clock",
|
|
955
|
+
},
|
|
956
|
+
]);
|
|
957
|
+
const completed = await runRepository(dataDir, (repository) => repository.completeTask("revive-unzen"));
|
|
958
|
+
expect(completed.status).toBe("done");
|
|
959
|
+
expect(completed.updated).toBe(todayIso());
|
|
960
|
+
expect(completed.completed_at).not.toBeNull();
|
|
961
|
+
const fetched = await runRepository(dataDir, (repository) => repository.getTask("revive-unzen"));
|
|
962
|
+
expect(fetched).toEqual(completed);
|
|
963
|
+
const listed = await runRepository(dataDir, (repository) => repository.listTasks());
|
|
964
|
+
expect(listed).toHaveLength(1);
|
|
965
|
+
}
|
|
966
|
+
finally {
|
|
967
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
968
|
+
}
|
|
969
|
+
});
|
|
970
|
+
it("completeTask creates the next completion-driven recurrence instance", async () => {
|
|
971
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-complete-recur-"));
|
|
972
|
+
try {
|
|
973
|
+
await writeTaskFiles(dataDir, [
|
|
974
|
+
{
|
|
975
|
+
...baseTask(),
|
|
976
|
+
recurrence: "FREQ=WEEKLY;INTERVAL=2",
|
|
977
|
+
recurrence_trigger: "completion",
|
|
978
|
+
recurrence_last_generated: null,
|
|
979
|
+
},
|
|
980
|
+
]);
|
|
981
|
+
const completed = await runRepository(dataDir, (repository) => repository.completeTask("revive-unzen"));
|
|
982
|
+
expect(completed.status).toBe("done");
|
|
983
|
+
expect(completed.completed_at).not.toBeNull();
|
|
984
|
+
const listed = await runRepository(dataDir, (repository) => repository.listTasks());
|
|
985
|
+
expect(listed).toHaveLength(2);
|
|
986
|
+
const next = listed.find((task) => task.id !== "revive-unzen");
|
|
987
|
+
expect(next).toBeDefined();
|
|
988
|
+
if (next === undefined || completed.completed_at === null) {
|
|
989
|
+
throw new Error("Expected completed task and next recurrence task");
|
|
990
|
+
}
|
|
991
|
+
const completionDate = completed.completed_at.slice(0, 10);
|
|
992
|
+
expect(next.id).toMatch(/^revive-unzen-server-[a-z0-9]{6}$/);
|
|
993
|
+
expect(next.status).toBe("active");
|
|
994
|
+
expect(next.created).toBe(completionDate);
|
|
995
|
+
expect(next.updated).toBe(completionDate);
|
|
996
|
+
expect(next.due).toBe(addDaysToIsoDate("2026-03-01", 14));
|
|
997
|
+
expect(next.completed_at).toBeNull();
|
|
998
|
+
expect(next.last_surfaced).toBeNull();
|
|
999
|
+
expect(next.defer_until).toBe(addDaysToIsoDate(completionDate, 14));
|
|
1000
|
+
expect(next.nudge_count).toBe(0);
|
|
1001
|
+
expect(next.recurrence).toBe("FREQ=WEEKLY;INTERVAL=2");
|
|
1002
|
+
expect(next.recurrence_trigger).toBe("completion");
|
|
1003
|
+
expect(next.recurrence_last_generated).toBe(completed.completed_at);
|
|
1004
|
+
}
|
|
1005
|
+
finally {
|
|
1006
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1007
|
+
}
|
|
1008
|
+
});
|
|
1009
|
+
it("completeTask parses RRULE-prefixed completion recurrence strings", async () => {
|
|
1010
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-complete-recur-rrule-prefix-"));
|
|
1011
|
+
try {
|
|
1012
|
+
await writeTaskFiles(dataDir, [
|
|
1013
|
+
{
|
|
1014
|
+
...baseTask(),
|
|
1015
|
+
recurrence: "RRULE:FREQ=WEEKLY;INTERVAL=2;BYDAY=MO",
|
|
1016
|
+
recurrence_trigger: "completion",
|
|
1017
|
+
recurrence_last_generated: null,
|
|
1018
|
+
},
|
|
1019
|
+
]);
|
|
1020
|
+
const completed = await runRepository(dataDir, (repository) => repository.completeTask("revive-unzen"));
|
|
1021
|
+
expect(completed.completed_at).not.toBeNull();
|
|
1022
|
+
const listed = await runRepository(dataDir, (repository) => repository.listTasks());
|
|
1023
|
+
expect(listed).toHaveLength(2);
|
|
1024
|
+
const next = listed.find((task) => task.id !== "revive-unzen");
|
|
1025
|
+
expect(next).toBeDefined();
|
|
1026
|
+
if (next === undefined || completed.completed_at === null) {
|
|
1027
|
+
throw new Error("Expected completed task and next recurrence task");
|
|
1028
|
+
}
|
|
1029
|
+
const completionDate = completed.completed_at.slice(0, 10);
|
|
1030
|
+
expect(next.defer_until).toBe(addDaysToIsoDate(completionDate, 14));
|
|
1031
|
+
expect(next.recurrence).toBe("RRULE:FREQ=WEEKLY;INTERVAL=2;BYDAY=MO");
|
|
1032
|
+
expect(next.recurrence_trigger).toBe("completion");
|
|
1033
|
+
expect(next.recurrence_last_generated).toBe(completed.completed_at);
|
|
1034
|
+
}
|
|
1035
|
+
finally {
|
|
1036
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1037
|
+
}
|
|
1038
|
+
});
|
|
1039
|
+
it("completeTask creates completion-driven recurrence when strategy is accumulate", async () => {
|
|
1040
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-complete-recur-accumulate-"));
|
|
1041
|
+
try {
|
|
1042
|
+
await writeTaskFiles(dataDir, [
|
|
1043
|
+
{
|
|
1044
|
+
...baseTask(),
|
|
1045
|
+
recurrence: "FREQ=WEEKLY;INTERVAL=1",
|
|
1046
|
+
recurrence_trigger: "completion",
|
|
1047
|
+
recurrence_strategy: "accumulate",
|
|
1048
|
+
recurrence_last_generated: null,
|
|
1049
|
+
},
|
|
1050
|
+
]);
|
|
1051
|
+
const completed = await runRepository(dataDir, (repository) => repository.completeTask("revive-unzen"));
|
|
1052
|
+
expect(completed.status).toBe("done");
|
|
1053
|
+
expect(completed.completed_at).not.toBeNull();
|
|
1054
|
+
const original = await runRepository(dataDir, (repository) => repository.getTask("revive-unzen"));
|
|
1055
|
+
expect(original.status).toBe("done");
|
|
1056
|
+
const listed = await runRepository(dataDir, (repository) => repository.listTasks());
|
|
1057
|
+
expect(listed).toHaveLength(2);
|
|
1058
|
+
const next = listed.find((task) => task.id !== "revive-unzen");
|
|
1059
|
+
expect(next).toBeDefined();
|
|
1060
|
+
if (next === undefined || completed.completed_at === null) {
|
|
1061
|
+
throw new Error("Expected completed task and next recurrence task");
|
|
1062
|
+
}
|
|
1063
|
+
const completionDate = completed.completed_at.slice(0, 10);
|
|
1064
|
+
expect(next.status).toBe("active");
|
|
1065
|
+
expect(next.recurrence_trigger).toBe("completion");
|
|
1066
|
+
expect(next.recurrence_strategy).toBe("accumulate");
|
|
1067
|
+
expect(next.due).toBe(addDaysToIsoDate("2026-03-01", 7));
|
|
1068
|
+
expect(next.defer_until).toBe(addDaysToIsoDate(completionDate, 7));
|
|
1069
|
+
}
|
|
1070
|
+
finally {
|
|
1071
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1072
|
+
}
|
|
1073
|
+
});
|
|
1074
|
+
it("generateNextRecurrence with replace drops the current instance and creates a new task", async () => {
|
|
1075
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-next-recur-replace-"));
|
|
1076
|
+
try {
|
|
1077
|
+
await writeTaskFiles(dataDir, [
|
|
1078
|
+
{
|
|
1079
|
+
...baseTask(),
|
|
1080
|
+
recurrence: "FREQ=WEEKLY;BYDAY=MO",
|
|
1081
|
+
recurrence_trigger: "clock",
|
|
1082
|
+
recurrence_strategy: "replace",
|
|
1083
|
+
recurrence_last_generated: null,
|
|
1084
|
+
},
|
|
1085
|
+
]);
|
|
1086
|
+
const next = await runRepository(dataDir, (repository) => repository.generateNextRecurrence("revive-unzen"));
|
|
1087
|
+
expect(next.id).toMatch(/^revive-unzen-server-[a-z0-9]{6}$/);
|
|
1088
|
+
expect(next.status).toBe("active");
|
|
1089
|
+
expect(next.created).toBe(todayIso());
|
|
1090
|
+
expect(next.updated).toBe(todayIso());
|
|
1091
|
+
expect(next.actual_minutes).toBeNull();
|
|
1092
|
+
expect(next.completed_at).toBeNull();
|
|
1093
|
+
expect(next.last_surfaced).toBeNull();
|
|
1094
|
+
expect(next.defer_until).toBeNull();
|
|
1095
|
+
expect(next.nudge_count).toBe(0);
|
|
1096
|
+
expect(next.recurrence).toBe("FREQ=WEEKLY;BYDAY=MO");
|
|
1097
|
+
expect(next.recurrence_last_generated).not.toBeNull();
|
|
1098
|
+
const replaced = await runRepository(dataDir, (repository) => repository.getTask("revive-unzen"));
|
|
1099
|
+
expect(replaced.status).toBe("dropped");
|
|
1100
|
+
expect(replaced.updated).toBe(todayIso());
|
|
1101
|
+
const listed = await runRepository(dataDir, (repository) => repository.listTasks());
|
|
1102
|
+
expect(listed).toHaveLength(2);
|
|
1103
|
+
}
|
|
1104
|
+
finally {
|
|
1105
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1106
|
+
}
|
|
1107
|
+
});
|
|
1108
|
+
it("generateNextRecurrence with accumulate keeps the current instance active", async () => {
|
|
1109
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-next-recur-accumulate-"));
|
|
1110
|
+
try {
|
|
1111
|
+
await writeTaskFiles(dataDir, [
|
|
1112
|
+
{
|
|
1113
|
+
...baseTask(),
|
|
1114
|
+
recurrence: "FREQ=DAILY",
|
|
1115
|
+
recurrence_trigger: "clock",
|
|
1116
|
+
recurrence_strategy: "accumulate",
|
|
1117
|
+
recurrence_last_generated: null,
|
|
1118
|
+
},
|
|
1119
|
+
]);
|
|
1120
|
+
const next = await runRepository(dataDir, (repository) => repository.generateNextRecurrence("revive-unzen"));
|
|
1121
|
+
expect(next.status).toBe("active");
|
|
1122
|
+
const original = await runRepository(dataDir, (repository) => repository.getTask("revive-unzen"));
|
|
1123
|
+
expect(original.status).toBe("active");
|
|
1124
|
+
const listed = await runRepository(dataDir, (repository) => repository.listTasks());
|
|
1125
|
+
expect(listed).toHaveLength(2);
|
|
1126
|
+
}
|
|
1127
|
+
finally {
|
|
1128
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1129
|
+
}
|
|
1130
|
+
});
|
|
1131
|
+
it("processDueRecurrences creates due clock-driven tasks and reports replaced ids", async () => {
|
|
1132
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-process-due-replace-"));
|
|
1133
|
+
try {
|
|
1134
|
+
await writeTaskFiles(dataDir, [
|
|
1135
|
+
{
|
|
1136
|
+
...baseTask(),
|
|
1137
|
+
id: "clock-replace",
|
|
1138
|
+
title: "Clock replace",
|
|
1139
|
+
recurrence: "FREQ=DAILY",
|
|
1140
|
+
recurrence_trigger: "clock",
|
|
1141
|
+
recurrence_strategy: "replace",
|
|
1142
|
+
recurrence_last_generated: "2026-02-24T08:00:00Z",
|
|
1143
|
+
status: "active",
|
|
1144
|
+
},
|
|
1145
|
+
{
|
|
1146
|
+
...baseTask(),
|
|
1147
|
+
id: "completion-driven",
|
|
1148
|
+
title: "Completion driven",
|
|
1149
|
+
recurrence: "FREQ=DAILY",
|
|
1150
|
+
recurrence_trigger: "completion",
|
|
1151
|
+
recurrence_strategy: "replace",
|
|
1152
|
+
recurrence_last_generated: "2026-02-24T08:00:00Z",
|
|
1153
|
+
status: "active",
|
|
1154
|
+
},
|
|
1155
|
+
{
|
|
1156
|
+
...baseTask(),
|
|
1157
|
+
id: "done-clock",
|
|
1158
|
+
title: "Done clock",
|
|
1159
|
+
recurrence: "FREQ=DAILY",
|
|
1160
|
+
recurrence_trigger: "clock",
|
|
1161
|
+
recurrence_strategy: "replace",
|
|
1162
|
+
recurrence_last_generated: "2026-02-24T08:00:00Z",
|
|
1163
|
+
status: "done",
|
|
1164
|
+
},
|
|
1165
|
+
]);
|
|
1166
|
+
const now = new Date("2026-02-25T09:30:00.000Z");
|
|
1167
|
+
const result = await runRepository(dataDir, (repository) => repository.processDueRecurrences(now));
|
|
1168
|
+
expect(result.replaced).toEqual(["clock-replace"]);
|
|
1169
|
+
expect(result.created).toHaveLength(1);
|
|
1170
|
+
expect(result.created[0]?.id).toMatch(/^clock-replace-[a-z0-9]{6}$/);
|
|
1171
|
+
expect(result.created[0]?.recurrence_last_generated).toBe("2026-02-25T09:30:00.000Z");
|
|
1172
|
+
const replacedTask = await runRepository(dataDir, (repository) => repository.getTask("clock-replace"));
|
|
1173
|
+
expect(replacedTask.status).toBe("dropped");
|
|
1174
|
+
expect(replacedTask.recurrence_last_generated).toBe("2026-02-25T09:30:00.000Z");
|
|
1175
|
+
const allTasks = await runRepository(dataDir, (repository) => repository.listTasks());
|
|
1176
|
+
expect(allTasks).toHaveLength(4);
|
|
1177
|
+
}
|
|
1178
|
+
finally {
|
|
1179
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1180
|
+
}
|
|
1181
|
+
});
|
|
1182
|
+
it("processDueRecurrences is idempotent for the same run timestamp", async () => {
|
|
1183
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-process-due-idem-"));
|
|
1184
|
+
try {
|
|
1185
|
+
await writeTaskFiles(dataDir, [
|
|
1186
|
+
{
|
|
1187
|
+
...baseTask(),
|
|
1188
|
+
id: "clock-accumulate",
|
|
1189
|
+
title: "Clock accumulate",
|
|
1190
|
+
recurrence: "FREQ=DAILY",
|
|
1191
|
+
recurrence_trigger: "clock",
|
|
1192
|
+
recurrence_strategy: "accumulate",
|
|
1193
|
+
recurrence_last_generated: "2026-02-24T08:00:00Z",
|
|
1194
|
+
status: "active",
|
|
1195
|
+
},
|
|
1196
|
+
]);
|
|
1197
|
+
const now = new Date("2026-02-25T09:30:00.000Z");
|
|
1198
|
+
const first = await runRepository(dataDir, (repository) => repository.processDueRecurrences(now));
|
|
1199
|
+
expect(first.created).toHaveLength(1);
|
|
1200
|
+
expect(first.replaced).toEqual([]);
|
|
1201
|
+
const second = await runRepository(dataDir, (repository) => repository.processDueRecurrences(now));
|
|
1202
|
+
expect(second).toEqual({
|
|
1203
|
+
created: [],
|
|
1204
|
+
replaced: [],
|
|
1205
|
+
});
|
|
1206
|
+
const original = await runRepository(dataDir, (repository) => repository.getTask("clock-accumulate"));
|
|
1207
|
+
expect(original.status).toBe("active");
|
|
1208
|
+
expect(original.recurrence_last_generated).toBe("2026-02-25T09:30:00.000Z");
|
|
1209
|
+
const allTasks = await runRepository(dataDir, (repository) => repository.listTasks());
|
|
1210
|
+
expect(allTasks).toHaveLength(2);
|
|
1211
|
+
}
|
|
1212
|
+
finally {
|
|
1213
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1214
|
+
}
|
|
1215
|
+
});
|
|
1216
|
+
it("setDailyHighlight persists the highlighted task id and replaces previous highlight", async () => {
|
|
1217
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-highlight-"));
|
|
1218
|
+
try {
|
|
1219
|
+
const firstTask = {
|
|
1220
|
+
...baseTask(),
|
|
1221
|
+
id: "first-task",
|
|
1222
|
+
title: "First task",
|
|
1223
|
+
};
|
|
1224
|
+
const secondTask = {
|
|
1225
|
+
...baseTask(),
|
|
1226
|
+
id: "second-task",
|
|
1227
|
+
title: "Second task",
|
|
1228
|
+
};
|
|
1229
|
+
await writeTaskFiles(dataDir, [firstTask, secondTask]);
|
|
1230
|
+
const firstHighlight = await runRepository(dataDir, (repository) => repository.setDailyHighlight("first-task"));
|
|
1231
|
+
expect(firstHighlight.id).toBe("first-task");
|
|
1232
|
+
expect(YAML.parse(await readFile(join(dataDir, "daily-highlight.yaml"), "utf8"))).toEqual({
|
|
1233
|
+
id: "first-task",
|
|
1234
|
+
});
|
|
1235
|
+
const secondHighlight = await runRepository(dataDir, (repository) => repository.setDailyHighlight("second-task"));
|
|
1236
|
+
expect(secondHighlight.id).toBe("second-task");
|
|
1237
|
+
expect(YAML.parse(await readFile(join(dataDir, "daily-highlight.yaml"), "utf8"))).toEqual({
|
|
1238
|
+
id: "second-task",
|
|
1239
|
+
});
|
|
1240
|
+
}
|
|
1241
|
+
finally {
|
|
1242
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1243
|
+
}
|
|
1244
|
+
});
|
|
1245
|
+
it("setDailyHighlight fails when the task does not exist", async () => {
|
|
1246
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-highlight-missing-"));
|
|
1247
|
+
try {
|
|
1248
|
+
const result = await runRepositoryExit(dataDir, (repository) => repository.setDailyHighlight("missing-task"));
|
|
1249
|
+
expect(Exit.isFailure(result)).toBe(true);
|
|
1250
|
+
if (Exit.isFailure(result)) {
|
|
1251
|
+
const failure = Option.getOrNull(Cause.failureOption(result.cause));
|
|
1252
|
+
expect(failure).toBe("TaskRepository failed to read task missing-task: Task not found: missing-task");
|
|
1253
|
+
}
|
|
1254
|
+
}
|
|
1255
|
+
finally {
|
|
1256
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1257
|
+
}
|
|
1258
|
+
});
|
|
1259
|
+
it("deleteTask removes the task file and returns deleted", async () => {
|
|
1260
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-delete-"));
|
|
1261
|
+
try {
|
|
1262
|
+
await writeTaskFiles(dataDir, [baseTask()]);
|
|
1263
|
+
const deleted = await runRepository(dataDir, (repository) => repository.deleteTask("revive-unzen"));
|
|
1264
|
+
expect(deleted).toEqual({ deleted: true });
|
|
1265
|
+
const result = await runRepositoryExit(dataDir, (repository) => repository.getTask("revive-unzen"));
|
|
1266
|
+
expect(Exit.isFailure(result)).toBe(true);
|
|
1267
|
+
if (Exit.isFailure(result)) {
|
|
1268
|
+
const failure = Option.getOrNull(Cause.failureOption(result.cause));
|
|
1269
|
+
expect(failure).toBe("TaskRepository failed to read task revive-unzen: Task not found: revive-unzen");
|
|
1270
|
+
}
|
|
1271
|
+
}
|
|
1272
|
+
finally {
|
|
1273
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1274
|
+
}
|
|
1275
|
+
});
|
|
1276
|
+
it("deleteTask runs on-delete hooks with the deleted task payload", async () => {
|
|
1277
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-delete-hook-"));
|
|
1278
|
+
const hooksDir = join(dataDir, "hooks");
|
|
1279
|
+
const markerPath = join(dataDir, "on-delete.json");
|
|
1280
|
+
try {
|
|
1281
|
+
await writeTaskFiles(dataDir, [baseTask()]);
|
|
1282
|
+
await writeExecutableHook(hooksDir, "on-delete", `#!/usr/bin/env node
|
|
1283
|
+
const fs = require("node:fs");
|
|
1284
|
+
const task = JSON.parse(fs.readFileSync(0, "utf8"));
|
|
1285
|
+
fs.writeFileSync(${JSON.stringify(markerPath)}, JSON.stringify(task), "utf8");
|
|
1286
|
+
`);
|
|
1287
|
+
const deleted = await runRepositoryWithOptions({ dataDir, hooksDir }, (repository) => repository.deleteTask("revive-unzen"));
|
|
1288
|
+
expect(deleted).toEqual({ deleted: true });
|
|
1289
|
+
const payload = JSON.parse(await readFile(markerPath, "utf8"));
|
|
1290
|
+
expect(payload.id).toBe("revive-unzen");
|
|
1291
|
+
expect(payload.title).toBe("Revive unzen server");
|
|
1292
|
+
}
|
|
1293
|
+
finally {
|
|
1294
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1295
|
+
}
|
|
1296
|
+
});
|
|
1297
|
+
it("deleteTask hooks receive TASHKS_EVENT, TASHKS_ID, and TASHKS_DATA_DIR", async () => {
|
|
1298
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-delete-hook-env-"));
|
|
1299
|
+
const hooksDir = join(dataDir, "hooks");
|
|
1300
|
+
const markerPath = join(dataDir, "on-delete-env.json");
|
|
1301
|
+
try {
|
|
1302
|
+
await writeTaskFiles(dataDir, [baseTask()]);
|
|
1303
|
+
await writeExecutableHook(hooksDir, "on-delete", `#!/usr/bin/env node
|
|
1304
|
+
const fs = require("node:fs");
|
|
1305
|
+
const task = JSON.parse(fs.readFileSync(0, "utf8"));
|
|
1306
|
+
const payload = {
|
|
1307
|
+
event: process.env.TASHKS_EVENT ?? null,
|
|
1308
|
+
id: process.env.TASHKS_ID ?? null,
|
|
1309
|
+
dataDir: process.env.TASHKS_DATA_DIR ?? null,
|
|
1310
|
+
taskId: task.id,
|
|
1311
|
+
};
|
|
1312
|
+
fs.writeFileSync(${JSON.stringify(markerPath)}, JSON.stringify(payload), "utf8");
|
|
1313
|
+
`);
|
|
1314
|
+
await runRepositoryWithOptions({ dataDir, hooksDir }, (repository) => repository.deleteTask("revive-unzen"));
|
|
1315
|
+
const payload = JSON.parse(await readFile(markerPath, "utf8"));
|
|
1316
|
+
expect(payload).toEqual({
|
|
1317
|
+
event: "delete",
|
|
1318
|
+
id: "revive-unzen",
|
|
1319
|
+
dataDir,
|
|
1320
|
+
taskId: "revive-unzen",
|
|
1321
|
+
});
|
|
1322
|
+
}
|
|
1323
|
+
finally {
|
|
1324
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1325
|
+
}
|
|
1326
|
+
});
|
|
1327
|
+
it("deleteTask does not fail when an on-delete hook exits non-zero", async () => {
|
|
1328
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-delete-hook-fail-"));
|
|
1329
|
+
const hooksDir = join(dataDir, "hooks");
|
|
1330
|
+
try {
|
|
1331
|
+
await writeTaskFiles(dataDir, [baseTask()]);
|
|
1332
|
+
await writeExecutableHook(hooksDir, "on-delete", `#!/usr/bin/env bash
|
|
1333
|
+
echo "delete hook failed" >&2
|
|
1334
|
+
exit 5
|
|
1335
|
+
`);
|
|
1336
|
+
const deleted = await runRepositoryWithOptions({ dataDir, hooksDir }, (repository) => repository.deleteTask("revive-unzen"));
|
|
1337
|
+
expect(deleted).toEqual({ deleted: true });
|
|
1338
|
+
const result = await runRepositoryExit(dataDir, (repository) => repository.getTask("revive-unzen"));
|
|
1339
|
+
expect(Exit.isFailure(result)).toBe(true);
|
|
1340
|
+
}
|
|
1341
|
+
finally {
|
|
1342
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1343
|
+
}
|
|
1344
|
+
});
|
|
1345
|
+
it("createWorkLogEntry persists a derived entry from create input", async () => {
|
|
1346
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-worklog-create-"));
|
|
1347
|
+
try {
|
|
1348
|
+
const created = await runRepository(dataDir, (repository) => repository.createWorkLogEntry({
|
|
1349
|
+
task_id: "revive-unzen",
|
|
1350
|
+
started_at: "2026-02-20T09:00:00Z",
|
|
1351
|
+
}));
|
|
1352
|
+
expect(created).toEqual({
|
|
1353
|
+
id: "revive-unzen-20260220T0900",
|
|
1354
|
+
task_id: "revive-unzen",
|
|
1355
|
+
started_at: "2026-02-20T09:00:00Z",
|
|
1356
|
+
ended_at: null,
|
|
1357
|
+
date: "2026-02-20",
|
|
1358
|
+
});
|
|
1359
|
+
const storedSource = await readFile(join(dataDir, "work-log", `${created.id}.yaml`), "utf8");
|
|
1360
|
+
expect(YAML.parse(storedSource)).toEqual(created);
|
|
1361
|
+
}
|
|
1362
|
+
finally {
|
|
1363
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1364
|
+
}
|
|
1365
|
+
});
|
|
1366
|
+
it("listWorkLog applies date filtering and sorts by started_at descending", async () => {
|
|
1367
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-worklog-list-"));
|
|
1368
|
+
try {
|
|
1369
|
+
const entries = [
|
|
1370
|
+
{
|
|
1371
|
+
...baseWorkLogEntry(),
|
|
1372
|
+
id: "revive-unzen-20260220T0900",
|
|
1373
|
+
started_at: "2026-02-20T09:00:00Z",
|
|
1374
|
+
date: "2026-02-20",
|
|
1375
|
+
},
|
|
1376
|
+
{
|
|
1377
|
+
...baseWorkLogEntry(),
|
|
1378
|
+
id: "revive-unzen-20260221T0800",
|
|
1379
|
+
started_at: "2026-02-21T08:00:00Z",
|
|
1380
|
+
date: "2026-02-21",
|
|
1381
|
+
},
|
|
1382
|
+
{
|
|
1383
|
+
...baseWorkLogEntry(),
|
|
1384
|
+
id: "revive-unzen-20260221T0900",
|
|
1385
|
+
started_at: "2026-02-21T09:00:00Z",
|
|
1386
|
+
date: "2026-02-21",
|
|
1387
|
+
},
|
|
1388
|
+
];
|
|
1389
|
+
await writeWorkLogFiles(dataDir, entries);
|
|
1390
|
+
const allEntries = await runRepository(dataDir, (repository) => repository.listWorkLog());
|
|
1391
|
+
expect(allEntries.map((entry) => entry.id)).toEqual([
|
|
1392
|
+
"revive-unzen-20260221T0900",
|
|
1393
|
+
"revive-unzen-20260221T0800",
|
|
1394
|
+
"revive-unzen-20260220T0900",
|
|
1395
|
+
]);
|
|
1396
|
+
const filteredEntries = await runRepository(dataDir, (repository) => repository.listWorkLog({ date: "2026-02-21" }));
|
|
1397
|
+
expect(filteredEntries.map((entry) => entry.id)).toEqual([
|
|
1398
|
+
"revive-unzen-20260221T0900",
|
|
1399
|
+
"revive-unzen-20260221T0800",
|
|
1400
|
+
]);
|
|
1401
|
+
}
|
|
1402
|
+
finally {
|
|
1403
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1404
|
+
}
|
|
1405
|
+
});
|
|
1406
|
+
it("updateWorkLogEntry merges and persists changes", async () => {
|
|
1407
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-worklog-update-"));
|
|
1408
|
+
try {
|
|
1409
|
+
await writeWorkLogFiles(dataDir, [baseWorkLogEntry()]);
|
|
1410
|
+
const updated = await runRepository(dataDir, (repository) => repository.updateWorkLogEntry("revive-unzen-20260220T0900", {
|
|
1411
|
+
ended_at: null,
|
|
1412
|
+
date: "2026-02-21",
|
|
1413
|
+
}));
|
|
1414
|
+
expect(updated).toEqual({
|
|
1415
|
+
...baseWorkLogEntry(),
|
|
1416
|
+
ended_at: null,
|
|
1417
|
+
date: "2026-02-21",
|
|
1418
|
+
});
|
|
1419
|
+
const listed = await runRepository(dataDir, (repository) => repository.listWorkLog());
|
|
1420
|
+
expect(listed).toEqual([updated]);
|
|
1421
|
+
}
|
|
1422
|
+
finally {
|
|
1423
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1424
|
+
}
|
|
1425
|
+
});
|
|
1426
|
+
it("deleteWorkLogEntry removes the entry file and returns deleted", async () => {
|
|
1427
|
+
const dataDir = await mkdtemp(join(tmpdir(), "tasks-worklog-delete-"));
|
|
1428
|
+
try {
|
|
1429
|
+
await writeWorkLogFiles(dataDir, [baseWorkLogEntry()]);
|
|
1430
|
+
const deleted = await runRepository(dataDir, (repository) => repository.deleteWorkLogEntry("revive-unzen-20260220T0900"));
|
|
1431
|
+
expect(deleted).toEqual({ deleted: true });
|
|
1432
|
+
const entries = await runRepository(dataDir, (repository) => repository.listWorkLog());
|
|
1433
|
+
expect(entries).toEqual([]);
|
|
1434
|
+
}
|
|
1435
|
+
finally {
|
|
1436
|
+
await rm(dataDir, { recursive: true, force: true });
|
|
1437
|
+
}
|
|
1438
|
+
});
|
|
1439
|
+
});
|