@eunjae/il 0.0.1 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/scripts/il.d.mts +1 -0
- package/dist/scripts/il.mjs +977 -0
- package/package.json +10 -3
- package/dist/lib/__tests__/aliasRepo.test.js +0 -17
- package/dist/lib/__tests__/edit.test.js +0 -32
- package/dist/lib/__tests__/fsm.test.js +0 -22
- package/dist/lib/__tests__/id.test.js +0 -25
- package/dist/lib/__tests__/schema.test.js +0 -37
- package/dist/lib/__tests__/taskOperations.test.js +0 -33
- package/dist/lib/aliasReconcile.js +0 -28
- package/dist/lib/aliasRepo.js +0 -58
- package/dist/lib/config.js +0 -35
- package/dist/lib/constants.js +0 -6
- package/dist/lib/edit.js +0 -47
- package/dist/lib/format.js +0 -52
- package/dist/lib/fsm.js +0 -25
- package/dist/lib/gitignore.js +0 -21
- package/dist/lib/id.js +0 -30
- package/dist/lib/json.js +0 -10
- package/dist/lib/lock.js +0 -25
- package/dist/lib/pr.js +0 -64
- package/dist/lib/schema.js +0 -64
- package/dist/lib/search.js +0 -17
- package/dist/lib/taskFactory.js +0 -19
- package/dist/lib/taskOperations.js +0 -57
- package/dist/lib/taskRepo.js +0 -67
- package/dist/lib/taskResolver.js +0 -31
- package/dist/lib/taskStore.js +0 -9
- package/dist/lib/time.js +0 -18
- package/dist/lib/types.js +0 -3
- package/dist/lib/workspace.js +0 -58
- package/dist/scripts/il.js +0 -396
- package/dist/scripts/suivi.js +0 -396
|
@@ -0,0 +1,977 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { access, mkdir, readFile, readdir, rename, writeFile } from "node:fs/promises";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { fileURLToPath } from "node:url";
|
|
5
|
+
import { Command } from "commander";
|
|
6
|
+
import writeFileAtomic from "write-file-atomic";
|
|
7
|
+
import { z } from "zod";
|
|
8
|
+
import { homedir } from "node:os";
|
|
9
|
+
import { Octokit } from "octokit";
|
|
10
|
+
import { DateTime } from "luxon";
|
|
11
|
+
import lockfile from "proper-lockfile";
|
|
12
|
+
import { createHash } from "node:crypto";
|
|
13
|
+
import { ulid } from "ulid";
|
|
14
|
+
|
|
15
|
+
//#region lib/json.ts
|
|
16
|
+
/** Read a file and parse its contents as JSON. */
const readJsonFile = async (filePath) => JSON.parse(await readFile(filePath, "utf8"));

/**
 * Serialize `data` as pretty-printed JSON (two-space indent, trailing
 * newline) and write it atomically with an fsync so a crash never leaves a
 * half-written store file.
 */
const writeJsonAtomic = async (filePath, data) => {
	const payload = `${JSON.stringify(data, null, 2)}\n`;
	await writeFileAtomic(filePath, payload, { encoding: "utf8", fsync: true });
};
|
|
26
|
+
|
|
27
|
+
//#endregion
|
|
28
|
+
//#region lib/aliasRepo.ts
|
|
29
|
+
/** Fresh, empty alias table with a "T" (task) and "R" (PR review) namespace. */
const emptyAliases = () => {
	return { T: {}, R: {} };
};

/** Location of the alias table inside a workspace. */
const aliasFilePath = (workspaceRoot) => path.join(workspaceRoot, "aliases.json");

/** True when `filePath` exists and is accessible. */
const fileExists$2 = async (filePath) => {
	try {
		await access(filePath);
	} catch {
		return false;
	}
	return true;
};
|
|
42
|
+
/**
 * Load the alias table for a workspace. A missing file or missing
 * namespace degrades to an empty map instead of failing.
 */
const readAliases = async (workspaceRoot) => {
	const filePath = aliasFilePath(workspaceRoot);
	const present = await fileExists$2(filePath);
	if (!present) return emptyAliases();
	const data = await readJsonFile(filePath);
	return { T: data.T ?? {}, R: data.R ?? {} };
};

/** Persist the alias table atomically. */
const writeAliases = async (workspaceRoot, aliases) => {
	const target = aliasFilePath(workspaceRoot);
	await writeJsonAtomic(target, aliases);
};
|
|
54
|
+
/**
 * Parse a short alias like "T01" or "R7" into its prefix and key.
 *
 * The numeric key is normalized to at least two digits (zero-padded), so
 * "T2" and "T02" name the same alias-table entry; allocation always stores
 * zero-padded keys (see formatAliasKey), and without this normalization
 * "T2" would silently fail to resolve.
 *
 * @param {string} alias - user-supplied identifier.
 * @returns {{prefix: string, key: string} | null} null when the input does
 *   not look like an alias at all.
 */
const parseAlias = (alias) => {
	const match = alias.match(/^([TR])(\d+)$/);
	if (!match) return null;
	return {
		prefix: match[1],
		key: match[2].padStart(2, "0")
	};
};
|
|
62
|
+
/** Zero-pad an alias number to at least two digits ("1" -> "01"). */
const formatAliasKey = (value) => String(value).padStart(2, "0");

/**
 * Allocate the lowest free numeric alias under `prefix` and record it in
 * `aliases` (which is mutated in place). Returns the full alias string
 * plus the updated map.
 */
const allocateAliasInMap = (aliases, prefix, taskId) => {
	const taken = new Set();
	for (const key of Object.keys(aliases[prefix])) {
		taken.add(Number.parseInt(key, 10));
	}
	let candidate = 1;
	while (taken.has(candidate)) candidate += 1;
	const key = formatAliasKey(candidate);
	aliases[prefix][key] = taskId;
	return { alias: `${prefix}${key}`, aliases };
};
|
|
74
|
+
/** Allocate a fresh alias for `taskId` against the table stored on disk. */
const allocateAlias = async (workspaceRoot, prefix, taskId) => {
	const current = await readAliases(workspaceRoot);
	return allocateAliasInMap(current, prefix, taskId);
};

/** Resolve an alias string to a task id; null when unknown or malformed. */
const resolveAlias = async (workspaceRoot, alias) => {
	const parsed = parseAlias(alias);
	if (parsed === null) return null;
	const table = await readAliases(workspaceRoot);
	return table[parsed.prefix][parsed.key] ?? null;
};
|
|
82
|
+
|
|
83
|
+
//#endregion
|
|
84
|
+
//#region lib/constants.ts
|
|
85
|
+
// Application name; also used for config and data directory names.
const APP_NAME = "il";
// Per-repository workspace directory name (".il").
const APP_DIR = `.${APP_NAME}`;
// Directory (inside a workspace) holding the lock file; kept out of git.
const LOCK_DIR = ".lock";
// Lock file guarding concurrent mutations of the task store.
const LOCK_FILE = "store.lock";
// Directory (inside a workspace) with one subdirectory per task status.
const TASKS_DIR = "tasks";
// Canonical status ordering, used for directory scans and listings.
const STATUS_ORDER = [
	"backlog",
	"active",
	"paused",
	"completed",
	"cancelled"
];

//#endregion
//#region lib/types.ts
// All valid task statuses (mirrors STATUS_ORDER; feeds the zod enums).
const taskStatuses = [
	"backlog",
	"active",
	"paused",
	"completed",
	"cancelled"
];
// Supported task kinds: plain tasks and GitHub PR review tasks.
const taskTypes = ["regular", "pr_review"];
|
|
108
|
+
|
|
109
|
+
//#endregion
|
|
110
|
+
//#region lib/schema.ts
|
|
111
|
+
// One timeline entry on a task; `status` is present when the entry records
// a status transition.
const logEntrySchema = z.object({
	ts: z.string(),
	msg: z.string(),
	status: z.enum(taskStatuses).optional()
});
// One assign/unassign event tying a task to a calendar day. The history is
// event-sourced: the latest event for a date wins (see isAssignedOnDate).
const dayAssignmentSchema = z.object({
	date: z.string(),
	action: z.enum(["assign", "unassign"]),
	ts: z.string(),
	msg: z.string().optional()
});
// Repository coordinates of an attached pull request.
const prRepoSchema = z.object({
	host: z.string(),
	owner: z.string(),
	name: z.string()
});
// Snapshot of PR details fetched from GitHub; `at` is the fetch timestamp.
const prFetchedSchema = z.object({
	at: z.string(),
	title: z.string(),
	author: z.object({ login: z.string() }),
	state: z.enum([
		"open",
		"closed",
		"merged"
	]),
	draft: z.boolean(),
	updated_at: z.string()
});
// A pull-request attachment. repo/number/fetched are optional because the
// URL may not have been parsed or fetched (yet).
const prAttachmentSchema = z.object({
	url: z.string(),
	provider: z.literal("github"),
	repo: prRepoSchema.optional(),
	number: z.number().int().positive().optional(),
	fetched: prFetchedSchema.optional()
});
// Free-form task metadata: an arbitrary URL and/or a PR attachment.
const taskMetadataSchema = z.object({
	url: z.string().optional(),
	pr: prAttachmentSchema.optional()
});
// Full task record as stored on disk (one JSON file per task).
const taskSchema = z.object({
	id: z.string(),
	ref: z.string(),
	type: z.enum(taskTypes),
	title: z.string(),
	status: z.enum(taskStatuses),
	created_at: z.string(),
	updated_at: z.string(),
	metadata: taskMetadataSchema,
	logs: z.array(logEntrySchema),
	day_assignments: z.array(dayAssignmentSchema)
}).superRefine((task, ctx) => {
	// Cross-field rule: a pr_review task must carry the PR it reviews.
	if (task.type === "pr_review" && !task.metadata.pr?.url) ctx.addIssue({
		code: z.ZodIssueCode.custom,
		message: "PR review tasks require metadata.pr.url",
		path: [
			"metadata",
			"pr",
			"url"
		]
	});
});
// Validate a task against the schema, throwing a ZodError on failure.
const validateTaskOrThrow = (task) => {
	return taskSchema.parse(task);
};
|
|
175
|
+
|
|
176
|
+
//#endregion
|
|
177
|
+
//#region lib/taskRepo.ts
|
|
178
|
+
/** File name for a task's on-disk JSON record. */
const taskFileName = (taskId) => `${taskId}.json`;

/** Directory holding every task that currently has `status`. */
const getStatusDir = (workspaceRoot, status) => path.join(workspaceRoot, TASKS_DIR, status);

/** Full path of a task's JSON file under its status directory. */
const getTaskPath = (workspaceRoot, status, taskId) => path.join(getStatusDir(workspaceRoot, status), taskFileName(taskId));

/** True when `filePath` exists and is accessible. */
const fileExists$1 = async (filePath) => {
	try {
		await access(filePath);
	} catch {
		return false;
	}
	return true;
};
|
|
191
|
+
/**
 * Locate a task file by id by probing each status directory in canonical
 * order. Returns the path and status, or null when the id is unknown.
 */
const findTaskPathById = async (workspaceRoot, taskId) => {
	for (const status of STATUS_ORDER) {
		const candidate = getTaskPath(workspaceRoot, status, taskId);
		const present = await fileExists$1(candidate);
		if (present) {
			return { filePath: candidate, status };
		}
	}
	return null;
};

/** Read a task file and validate it against the task schema. */
const loadTaskFromPath = async (filePath) => {
	const raw = await readJsonFile(filePath);
	return taskSchema.parse(raw);
};
|
|
205
|
+
/**
 * Load every task currently stored under `status`.
 *
 * A missing status directory (e.g. a workspace whose layout has not been
 * created yet) yields an empty list instead of an ENOENT crash; any other
 * readdir failure is rethrown.
 *
 * @param {string} workspaceRoot - workspace directory.
 * @param {string} status - one of STATUS_ORDER.
 * @returns {Promise<Array<{task: object, status: string, filePath: string}>>}
 */
const listTasksByStatus = async (workspaceRoot, status) => {
	const statusDir = getStatusDir(workspaceRoot, status);
	let entries;
	try {
		entries = await readdir(statusDir, { withFileTypes: true });
	} catch (error) {
		if (error instanceof Error && error.code === "ENOENT") return [];
		throw error;
	}
	const taskFiles = entries.filter((entry) => entry.isFile() && entry.name.endsWith(".json"));
	return await Promise.all(taskFiles.map(async (entry) => {
		const filePath = path.join(statusDir, entry.name);
		return {
			task: await loadTaskFromPath(filePath),
			status,
			filePath
		};
	}));
};
|
|
217
|
+
/** Gather tasks from every status directory into a single flat list. */
const listAllTasks = async (workspaceRoot) => {
	const perStatus = await Promise.all(STATUS_ORDER.map((status) => listTasksByStatus(workspaceRoot, status)));
	return perStatus.flat();
};

/** Write `task` under the given status directory; returns the file path. */
const saveTask = async (workspaceRoot, status, task) => {
	const filePath = getTaskPath(workspaceRoot, status, task.id);
	await writeJsonAtomic(filePath, task);
	return filePath;
};

/** Move a task's file between status directories; returns the new path. */
const moveTaskFile = async (workspaceRoot, taskId, fromStatus, toStatus) => {
	const fromPath = getTaskPath(workspaceRoot, fromStatus, taskId);
	const toPath = getTaskPath(workspaceRoot, toStatus, taskId);
	await rename(fromPath, toPath);
	return toPath;
};
|
|
231
|
+
|
|
232
|
+
//#endregion
|
|
233
|
+
//#region lib/aliasReconcile.ts
|
|
234
|
+
/**
 * Bring the alias table back in sync with the tasks on disk: drop aliases
 * whose task no longer exists, then allocate aliases for tasks that lack
 * one (oldest first, so numbering stays stable), and persist the result.
 */
const reconcileAliases = async (workspaceRoot) => {
	const aliases = await readAliases(workspaceRoot);
	const tasks = await listAllTasks(workspaceRoot);
	const tasksById = new Map();
	for (const stored of tasks) tasksById.set(stored.task.id, stored.task);
	// Remove entries pointing at deleted tasks.
	const normalizePrefix = (prefix) => {
		const kept = {};
		for (const [key, value] of Object.entries(aliases[prefix])) {
			if (tasksById.has(value)) kept[key] = value;
		}
		aliases[prefix] = kept;
	};
	normalizePrefix("T");
	normalizePrefix("R");
	// Allocate aliases for tasks that have none, oldest first.
	const aliased = new Set([...Object.values(aliases.T), ...Object.values(aliases.R)]);
	const unaliased = tasks
		.filter((stored) => !aliased.has(stored.task.id))
		.sort((a, b) => a.task.created_at.localeCompare(b.task.created_at));
	for (const stored of unaliased) {
		const prefix = stored.task.type === "pr_review" ? "R" : "T";
		allocateAliasInMap(aliases, prefix, stored.task.id);
	}
	await writeAliases(workspaceRoot, aliases);
	return { updated: true };
};
|
|
251
|
+
|
|
252
|
+
//#endregion
|
|
253
|
+
//#region lib/config.ts
|
|
254
|
+
/** True when `filePath` exists and is accessible. */
const fileExists = async (filePath) => {
	try {
		await access(filePath);
	} catch {
		return false;
	}
	return true;
};
|
|
262
|
+
/** Path of the global config file under XDG_CONFIG_HOME (or ~/.config). */
const resolveGlobalConfigPath = () => {
	const configHome = process.env.XDG_CONFIG_HOME ?? path.join(homedir(), ".config");
	return path.join(configHome, APP_NAME, "config.json");
};

/** Read a JSON config file; a missing or null file yields an empty object. */
const readConfigFile = async (filePath) => {
	const present = await fileExists(filePath);
	if (!present) return {};
	return await readJsonFile(filePath) ?? {};
};

/**
 * Find a GitHub token, in priority order: IL_GITHUB_TOKEN env var, the
 * workspace config, then the global config. Returns null when none is set.
 */
const resolveGithubToken = async (workspaceRoot) => {
	const fromEnv = process.env.IL_GITHUB_TOKEN;
	if (fromEnv) return fromEnv;
	const workspaceConfig = await readConfigFile(path.join(workspaceRoot, "config.json"));
	const fromWorkspace = workspaceConfig.github?.token;
	if (fromWorkspace) return fromWorkspace;
	const globalConfig = await readConfigFile(resolveGlobalConfigPath());
	return globalConfig.github?.token ?? null;
};
|
|
276
|
+
|
|
277
|
+
//#endregion
|
|
278
|
+
//#region lib/time.ts
|
|
279
|
+
// All timestamps are generated in this fixed zone so the on-disk store is
// stable regardless of the machine's local time zone.
const DEFAULT_ZONE = "Europe/Paris";

/** Current time as an ISO-8601 string in DEFAULT_ZONE. */
const nowIso = () => {
	const value = DateTime.now().setZone(DEFAULT_ZONE).toISO();
	if (!value) throw new Error("Failed to generate timestamp");
	return value;
};

/**
 * ISO date (yyyy-mm-dd) for the given ISO input, or for "now" when no
 * argument is supplied, interpreted in DEFAULT_ZONE.
 */
const todayDate = (date) => {
	const base = date ? DateTime.fromISO(date, { zone: DEFAULT_ZONE }) : DateTime.now().setZone(DEFAULT_ZONE);
	const value = base.toISODate();
	if (!value) throw new Error("Failed to generate date");
	return value;
};
|
|
290
|
+
|
|
291
|
+
//#endregion
|
|
292
|
+
//#region lib/pr.ts
|
|
293
|
+
/**
 * Parse a GitHub pull-request URL of the form
 * https://<host>/<owner>/<repo>/pull/<number>[/...].
 *
 * The number must be a positive integer so the result always satisfies
 * prAttachmentSchema's `.int().positive()` rule (previously "/pull/0" was
 * accepted here and then rejected by the schema, and its falsy number was
 * silently skipped by fetchGitHubPr).
 *
 * @param {string} value - candidate URL.
 * @returns {object | null} PR attachment skeleton (url, provider, repo,
 *   number) or null when the URL is not a GitHub PR URL.
 */
const parseGitHubPrUrl = (value) => {
	let url;
	try {
		url = new URL(value);
	} catch {
		return null;
	}
	const parts = url.pathname.split("/").filter(Boolean);
	if (parts.length < 4 || parts[2] !== "pull") return null;
	const number = Number(parts[3]);
	if (!Number.isInteger(number) || number <= 0) return null;
	return {
		url: value,
		provider: "github",
		repo: {
			host: url.host,
			owner: parts[0],
			name: parts[1]
		},
		number
	};
};
|
|
315
|
+
/** Combine a parsed PR URL with an optional previously fetched snapshot. */
const buildPrAttachment = (parsed, fetched) => ({
	url: parsed.url,
	provider: "github",
	repo: parsed.repo,
	number: parsed.number,
	fetched
});
|
|
324
|
+
/**
 * Fetch live PR details from the GitHub REST API. Returns null when the
 * attachment has no repo/number or no token is available — enrichment is
 * strictly best-effort.
 */
const fetchGitHubPr = async (parsed, token) => {
	if (!parsed.repo || !parsed.number) return null;
	if (!token) return null;
	const octokit = new Octokit({ auth: token });
	const response = await octokit.rest.pulls.get({
		owner: parsed.repo.owner,
		repo: parsed.repo.name,
		pull_number: parsed.number
	});
	const data = response.data;
	// GitHub reports merged PRs as state "closed" + merged flag; fold that
	// into the three-way state the schema expects.
	let state;
	if (data.merged) state = "merged";
	else if (data.state === "open") state = "open";
	else state = "closed";
	return {
		at: nowIso(),
		title: data.title,
		author: { login: data.user?.login ?? "unknown" },
		state,
		draft: data.draft ?? false,
		updated_at: data.updated_at
	};
};
|
|
342
|
+
|
|
343
|
+
//#endregion
|
|
344
|
+
//#region lib/edit.ts
|
|
345
|
+
/** Interpret raw CLI input as JSON when possible, else as a plain string. */
const parseValue = (raw) => {
	let parsed;
	try {
		parsed = JSON.parse(raw);
	} catch {
		return raw;
	}
	return parsed;
};
|
|
352
|
+
// Path segments that would let a dotted path escape the task object and
// mutate Object.prototype (prototype pollution via e.g.
// "constructor.prototype.x" or "__proto__.x").
const FORBIDDEN_SEGMENTS = new Set(["__proto__", "prototype", "constructor"]);

/**
 * Return a validated copy of `task` with the value at `dottedPath`
 * replaced by the JSON-parsed `rawValue` (falling back to the raw string).
 *
 * Setting "metadata.pr.url" re-derives the whole PR attachment from the
 * URL, preserving any previously fetched snapshot.
 *
 * @throws {Error} on status edits (use status commands), unsafe path
 *   segments, traversal through a non-object, or an invalid PR URL;
 *   validation failures surface as ZodError.
 */
const applyTaskEdit = (task, dottedPath, rawValue) => {
	if (dottedPath === "status" || dottedPath.startsWith("status.")) throw new Error("Use status commands to change status");
	const value = parseValue(rawValue);
	const updated = structuredClone(task);
	const segments = dottedPath.split(".");
	// Reject segments that would traverse into the prototype chain; the
	// previous `in`-based walk let "__proto__"/"constructor" reach and
	// pollute Object.prototype.
	if (segments.some((segment) => FORBIDDEN_SEGMENTS.has(segment))) throw new Error(`Cannot set ${dottedPath}: unsafe path segment`);
	let current = updated;
	for (const segment of segments.slice(0, -1)) {
		// Only own properties count; inherited names get a fresh object.
		if (!Object.hasOwn(current, segment)) current[segment] = {};
		const next = current[segment];
		if (typeof next !== "object" || next === null || Array.isArray(next)) throw new Error(`Cannot set ${dottedPath} on non-object path`);
		current = next;
	}
	const last = segments[segments.length - 1];
	current[last] = value;
	if (dottedPath === "metadata.pr.url") {
		if (typeof value !== "string") throw new Error("metadata.pr.url must be a string");
		const parsed = parseGitHubPrUrl(value);
		if (!parsed) throw new Error("Invalid PR URL");
		updated.metadata.pr = buildPrAttachment(parsed, updated.metadata.pr?.fetched);
	}
	return validateTaskOrThrow({
		...updated,
		updated_at: nowIso()
	});
};
|
|
377
|
+
|
|
378
|
+
//#endregion
|
|
379
|
+
//#region lib/format.ts
|
|
380
|
+
/** Invert the alias table: task id -> alias string ("T01", "R02", ...). */
const buildAliasLookup = (aliases) => {
	const byTaskId = new Map();
	for (const [key, taskId] of Object.entries(aliases.T)) byTaskId.set(taskId, `T${key}`);
	for (const [key, taskId] of Object.entries(aliases.R)) byTaskId.set(taskId, `R${key}`);
	return byTaskId;
};

/** One listing row: alias, stable ref and status in fixed-width columns. */
const formatTaskListLine = (task, alias) => {
	const aliasCol = (alias ?? "--").padEnd(4);
	const refCol = task.ref.padEnd(9);
	const statusCol = task.status.padEnd(9);
	return `${aliasCol} ${refCol} ${statusCol} ${task.title}`;
};
|
|
389
|
+
/**
 * Multi-line human-readable dump of a task: header line, core fields,
 * optional URL/PR details, then the log and day-assignment histories.
 */
const formatTaskDetails = (task, alias) => {
	const lines = [
		`${alias ?? "--"} ${task.ref} ${task.title}`,
		`id: ${task.id}`,
		`type: ${task.type}`,
		`status: ${task.status}`,
		`created: ${task.created_at}`,
		`updated: ${task.updated_at}`
	];
	if (task.metadata.url) lines.push(`url: ${task.metadata.url}`);
	const pr = task.metadata.pr;
	if (pr?.url) {
		lines.push(`pr: ${pr.url}`);
		const fetched = pr.fetched;
		if (fetched) {
			lines.push(
				`pr_state: ${fetched.state}`,
				`pr_title: ${fetched.title}`,
				`pr_author: ${fetched.author.login}`,
				`pr_updated_at: ${fetched.updated_at}`,
				`pr_refreshed_at: ${fetched.at}`
			);
		}
	}
	if (task.logs.length > 0) {
		lines.push("logs:");
		for (const log of task.logs) {
			const statusSuffix = log.status ? ` [${log.status}]` : "";
			lines.push(`- ${log.ts}${statusSuffix} ${log.msg}`);
		}
	}
	if (task.day_assignments.length > 0) {
		lines.push("day_assignments:");
		for (const entry of task.day_assignments) {
			const msgSuffix = entry.msg ? ` (${entry.msg})` : "";
			lines.push(`- ${entry.date} ${entry.action} ${entry.ts}${msgSuffix}`);
		}
	}
	return lines.join("\n");
};
|
|
425
|
+
|
|
426
|
+
//#endregion
|
|
427
|
+
//#region lib/gitignore.ts
|
|
428
|
+
// .gitignore entry for the workspace lock directory. Derived from the
// shared constants so it cannot drift from LOCK_DIR (same value as the
// previous hard-coded ".il/.lock/").
const lockEntry = `${APP_DIR}/${LOCK_DIR}/`;

/**
 * Ensure the repository's .gitignore ignores the workspace lock directory.
 *
 * @param {string} repoRoot - git repository root.
 * @returns {Promise<boolean>} true when an entry was appended, false when
 *   it was already present.
 * @throws when .gitignore exists but cannot be read — only a missing file
 *   (ENOENT) is treated as empty, so an unreadable .gitignore is never
 *   silently overwritten (previously every read error was swallowed).
 */
const ensureLockIgnored = async (repoRoot) => {
	const gitignorePath = path.join(repoRoot, ".gitignore");
	let current = "";
	try {
		current = await readFile(gitignorePath, "utf8");
	} catch (error) {
		if (!(error instanceof Error) || error.code !== "ENOENT") throw error;
		current = "";
	}
	const alreadyIgnored = current.split("\n").some((line) => line.trim() === lockEntry);
	if (alreadyIgnored) return false;
	const separator = current.endsWith("\n") || current.length === 0 ? "" : "\n";
	await writeFile(gitignorePath, `${current}${separator}${lockEntry}\n`, "utf8");
	return true;
};
|
|
442
|
+
|
|
443
|
+
//#endregion
|
|
444
|
+
//#region lib/workspace.ts
|
|
445
|
+
/** True when `filePath` exists and is accessible. */
const exists = async (filePath) => {
	try {
		await access(filePath);
	} catch {
		return false;
	}
	return true;
};

/** Walk upward from `startDir` until a directory containing .git is found. */
const findGitRoot = async (startDir) => {
	let current = path.resolve(startDir);
	for (;;) {
		const hasGit = await exists(path.join(current, ".git"));
		if (hasGit) return current;
		const parent = path.dirname(current);
		if (parent === current) return null;
		current = parent;
	}
};

/** Global workspace directory under XDG_DATA_HOME (or ~/.local/share). */
const resolveGlobalWorkspaceRoot = () => {
	const dataHome = process.env.XDG_DATA_HOME ?? path.join(homedir(), ".local", "share");
	return path.join(dataHome, APP_NAME);
};
|
|
466
|
+
/**
 * Decide which workspace to use, in priority order:
 * 1. --store <path>  -> that explicit path
 * 2. --repo          -> <git root>/.il (errors outside a repository)
 * 3. --global        -> the XDG data-dir workspace
 * 4. an existing <git root>/.il when inside a repository
 * 5. fallback        -> the global workspace
 */
const resolveWorkspace = async (options) => {
	if (options.store) {
		return { root: path.resolve(options.store), kind: "explicit" };
	}
	const repoRoot = await findGitRoot(options.cwd ?? process.cwd());
	if (options.repo) {
		if (!repoRoot) throw new Error("Not inside a git repository");
		return { root: path.join(repoRoot, APP_DIR), kind: "repo" };
	}
	if (options.global) {
		return { root: resolveGlobalWorkspaceRoot(), kind: "global" };
	}
	if (repoRoot) {
		const repoWorkspace = path.join(repoRoot, APP_DIR);
		const hasRepoWorkspace = await exists(repoWorkspace);
		if (hasRepoWorkspace) return { root: repoWorkspace, kind: "repo" };
	}
	return { root: resolveGlobalWorkspaceRoot(), kind: "global" };
};

/** Create the workspace tree: tasks/<status> directories plus .lock. */
const ensureWorkspaceLayout = async (workspaceRoot) => {
	await mkdir(path.join(workspaceRoot, TASKS_DIR), { recursive: true });
	await mkdir(path.join(workspaceRoot, LOCK_DIR), { recursive: true });
	const statusDirs = STATUS_ORDER.map((status) => path.join(workspaceRoot, TASKS_DIR, status));
	await Promise.all(statusDirs.map((dir) => mkdir(dir, { recursive: true })));
};
|
|
500
|
+
|
|
501
|
+
//#endregion
|
|
502
|
+
//#region lib/lock.ts
|
|
503
|
+
/**
 * Run `fn` while holding the cross-process workspace lock, guaranteeing
 * release even when `fn` throws. Returns whatever `fn` resolves to.
 */
const withWorkspaceLock = async (workspaceRoot, fn) => {
	// Layout must exist before the lock file can be created inside it.
	await ensureWorkspaceLayout(workspaceRoot);
	const lockPath = path.join(workspaceRoot, LOCK_DIR, LOCK_FILE);
	// Create the lock target if missing ("a" = append; never truncates an
	// existing file) so proper-lockfile has something to lock.
	await writeFile(lockPath, "", { flag: "a" });
	const release = await lockfile.lock(lockPath, {
		stale: 6e4, // treat a lock older than 60s as abandoned (crashed holder)
		retries: {
			retries: 5,
			factor: 1.5,
			minTimeout: 50,
			maxTimeout: 1e3
		}
	});
	try {
		return await fn();
	} finally {
		await release();
	}
};
|
|
522
|
+
|
|
523
|
+
//#endregion
|
|
524
|
+
//#region lib/search.ts
|
|
525
|
+
/**
 * Case-insensitive substring search over a task's title, log messages,
 * attached PR URL and fetched PR title.
 */
const taskMatchesQuery = (task, query) => {
	const needle = query.toLowerCase();
	const contains = (value) => Boolean(value) && value.toLowerCase().includes(needle);
	return (
		contains(task.title) ||
		task.logs.some((log) => contains(log.msg)) ||
		contains(task.metadata.pr?.url) ||
		contains(task.metadata.pr?.fetched?.title)
	);
};
|
|
534
|
+
|
|
535
|
+
//#endregion
|
|
536
|
+
//#region lib/id.ts
|
|
537
|
+
// RFC 4648 base32 alphabet (output carries no "=" padding).
const BASE32_ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567";

/**
 * Encode a byte sequence (e.g. a Buffer) as unpadded RFC 4648 base32.
 * @param {Iterable<number>} input - bytes to encode.
 */
const base32Encode = (input) => {
	let pendingBits = 0;
	let accumulator = 0;
	const chars = [];
	for (const byte of input) {
		accumulator = (accumulator << 8) | byte;
		pendingBits += 8;
		while (pendingBits >= 5) {
			pendingBits -= 5;
			chars.push(BASE32_ALPHABET[(accumulator >>> pendingBits) & 31]);
		}
	}
	// Flush the final partial group, left-aligned to 5 bits.
	if (pendingBits > 0) chars.push(BASE32_ALPHABET[(accumulator << (5 - pendingBits)) & 31]);
	return chars.join("");
};

/** New task id: "T"/"R" prefix (by type) followed by a ULID. */
const generateTaskId = (taskType) => `${taskType === "pr_review" ? "R" : "T"}${ulid()}`;

/**
 * Deterministic short ref ("T-XXXXXX"/"R-XXXXXX") derived from the task id
 * via SHA-256 + base32, so it is stable for the life of the task.
 */
const generateStableRef = (taskType, taskId) => {
	const digest = createHash("sha256").update(taskId).digest();
	const encoded = base32Encode(digest).slice(0, 6);
	return `${taskType === "pr_review" ? "R" : "T"}-${encoded}`;
};
|
|
559
|
+
|
|
560
|
+
//#endregion
|
|
561
|
+
//#region lib/taskFactory.ts
|
|
562
|
+
/**
 * Construct a brand-new task in "backlog" with a fresh id, a stable ref
 * derived from it, and identical created/updated timestamps.
 * `input`: { type, title, metadata? }.
 */
const buildTask = (input) => {
	const id = generateTaskId(input.type);
	const timestamp = nowIso();
	return {
		id,
		ref: generateStableRef(input.type, id),
		type: input.type,
		title: input.title,
		status: "backlog",
		created_at: timestamp,
		updated_at: timestamp,
		metadata: input.metadata ?? {},
		logs: [],
		day_assignments: []
	};
};
|
|
579
|
+
|
|
580
|
+
//#endregion
|
|
581
|
+
//#region lib/fsm.ts
|
|
582
|
+
// Allowed status transitions: transitionTable[current][action] -> next.
// "completed" and "cancelled" are terminal (no outgoing transitions).
const transitionTable = {
	backlog: {
		start: "active",
		cancel: "cancelled"
	},
	active: {
		pause: "paused",
		complete: "completed",
		cancel: "cancelled"
	},
	paused: {
		start: "active",
		complete: "completed",
		cancel: "cancelled"
	},
	completed: {},
	cancelled: {}
};

/**
 * Resolve the status that `action` leads to from `current`.
 *
 * @throws {Error} "Invalid transition" for an unknown action AND for an
 *   unknown current status (which previously crashed with a bare TypeError
 *   on the table lookup).
 */
const getNextStatus = (current, action) => {
	const next = transitionTable[current]?.[action];
	if (!next) throw new Error(`Invalid transition: ${current} -> ${action}`);
	return next;
};
|
|
605
|
+
|
|
606
|
+
//#endregion
|
|
607
|
+
//#region lib/taskOperations.ts
|
|
608
|
+
// Fallback log messages per transition action.
const defaultMessages = {
	start: "Started task",
	pause: "Paused task",
	complete: "Completed task",
	cancel: "Cancelled task"
};

/** Return a copy of `task` with a timeline entry appended. */
const appendLog = (task, message, status) => ({
	...task,
	logs: [...task.logs, { ts: nowIso(), msg: message, status }],
	updated_at: nowIso()
});

/**
 * Apply a status transition to a task (immutably): validates the move via
 * the FSM, logs it, and stamps updated_at. Returns { task, nextStatus }.
 */
const applyTransition = (task, action, message) => {
	const nextStatus = getNextStatus(task.status, action);
	const entry = {
		ts: nowIso(),
		msg: message ?? defaultMessages[action],
		status: nextStatus
	};
	const updatedTask = {
		...task,
		status: nextStatus,
		updated_at: nowIso(),
		logs: [...task.logs, entry]
	};
	return { task: updatedTask, nextStatus };
};
|
|
644
|
+
/**
 * Whether the task is assigned to `date`. The day-assignment history is
 * event-sourced, so the most recent event for that date decides.
 */
const isAssignedOnDate = (task, date) => {
	const events = task.day_assignments.filter((entry) => entry.date === date);
	const latest = events.at(-1);
	return latest !== undefined && latest.action === "assign";
};

/** Return a copy of `task` with an assign/unassign event appended. */
const appendDayAssignment = (task, date, action, msg) => ({
	...task,
	day_assignments: [...task.day_assignments, { date, action, ts: nowIso(), msg }],
	updated_at: nowIso()
});
|
|
662
|
+
|
|
663
|
+
//#endregion
|
|
664
|
+
//#region lib/taskStore.ts
|
|
665
|
+
/**
 * Persist a new task: allocate an alias in the matching namespace, save the
 * updated alias table, then write the task file. Returns { alias, task }.
 */
const createTaskInStore = async (workspaceRoot, task) => {
	const prefix = task.type === "pr_review" ? "R" : "T";
	const { alias, aliases } = await allocateAlias(workspaceRoot, prefix, task.id);
	await writeAliases(workspaceRoot, aliases);
	await saveTask(workspaceRoot, task.status, task);
	return { alias, task };
};
|
|
674
|
+
|
|
675
|
+
//#endregion
|
|
676
|
+
//#region lib/taskResolver.ts
|
|
677
|
+
// Shape of a stable ref: prefix, dash, six base32 characters.
const stableRefPattern = /^[TR]-[A-Z0-9]{6}$/;

/**
 * Turn a user-supplied identifier (alias, stable ref, or raw task id) into
 * a task id, throwing when nothing matches.
 */
const resolveTaskId = async (workspaceRoot, identifier) => {
	const fromAlias = await resolveAlias(workspaceRoot, identifier);
	if (fromAlias) return fromAlias;
	if (stableRefPattern.test(identifier)) {
		const all = await listAllTasks(workspaceRoot);
		const match = all.find((stored) => stored.task.ref === identifier);
		if (!match) throw new Error(`Task not found for ref: ${identifier}`);
		return match.task.id;
	}
	const located = await findTaskPathById(workspaceRoot, identifier);
	if (!located) throw new Error(`Task not found: ${identifier}`);
	return identifier;
};

/** Resolve an identifier and load the task it names from disk. */
const resolveTask = async (workspaceRoot, identifier) => {
	const taskId = await resolveTaskId(workspaceRoot, identifier);
	const found = await findTaskPathById(workspaceRoot, taskId);
	if (!found) throw new Error(`Task not found: ${identifier}`);
	return {
		task: await loadTaskFromPath(found.filePath),
		status: found.status,
		filePath: found.filePath
	};
};
|
|
698
|
+
|
|
699
|
+
//#endregion
|
|
700
|
+
//#region scripts/il.ts
|
|
701
|
+
const program = new Command();
|
|
702
|
+
/**
 * Wrap a commander action so a rejection prints to stderr and sets exit
 * code 1 instead of crashing the CLI with an unhandled rejection.
 */
const handleAction = (fn) => {
	return async (...args) => {
		try {
			await fn(...args);
		} catch (error) {
			const message = error instanceof Error ? error.message : String(error);
			process.stderr.write(`${message}\n`);
			process.exitCode = 1;
		}
	};
};
|
|
711
|
+
/**
 * Resolve the workspace root from the program's global options, optionally
 * creating its directory layout first.
 */
const resolveWorkspaceFor = async (ensure) => {
	const opts = program.opts();
	const workspace = await resolveWorkspace({
		store: opts.store,
		global: opts.global,
		repo: opts.repo
	});
	if (ensure) await ensureWorkspaceLayout(workspace.root);
	return workspace.root;
};

/** Print lines to stdout, or a placeholder when there is nothing to show. */
const printLines = (lines) => {
	if (lines.length === 0) {
		process.stdout.write("No tasks.\n");
		return;
	}
	process.stdout.write(`${lines.join("\n")}\n`);
};
|
|
728
|
+
/** Type guard: is `value` one of the known task statuses? */
const isTaskStatus = (value) => taskStatuses.includes(value);

/** Loose check that a parsed JSON value looks like a package.json. */
const isPackageJson = (value) => {
	if (value === null || typeof value !== "object") return false;
	return "version" in value;
};
|
|
734
|
+
/**
 * Walk upward from this module's directory to the nearest package.json
 * with a string `version`. Returns undefined when none is found. Only
 * ENOENT is tolerated; other read errors (and JSON.parse failures, which
 * carry no `code`) are rethrown.
 */
const resolvePackageVersion = async () => {
	let current = path.dirname(fileURLToPath(import.meta.url));
	for (;;) {
		const candidate = path.join(current, "package.json");
		try {
			const parsed = JSON.parse(await readFile(candidate, "utf8"));
			if (isPackageJson(parsed) && typeof parsed.version === "string") return parsed.version;
		} catch (error) {
			if (!(error instanceof Error)) throw error;
			if (error.code !== "ENOENT") throw error;
		}
		const parent = path.dirname(current);
		if (parent === current) return;
		current = parent;
	}
};
|
|
751
|
+
// Resolve the CLI's own version once at startup (may be undefined in
// unusual install layouts with no reachable package.json).
const packageVersion = await resolvePackageVersion();
// Global options shared by every subcommand (workspace selection).
program.name("il").description("Terminal task manager").option("--store <path>", "explicit workspace path").option("--global", "use global workspace").option("--repo", "use repo workspace");
if (packageVersion) program.version(packageVersion);
// `il init` — create <git root>/.il and ensure the lock dir is gitignored.
program.command("init").description("initialize repo workspace").action(handleAction(async () => {
	const repoRoot = await findGitRoot(process.cwd());
	if (!repoRoot) throw new Error("Not inside a git repository");
	const workspaceRoot = path.join(repoRoot, APP_DIR);
	await ensureWorkspaceLayout(workspaceRoot);
	await ensureLockIgnored(repoRoot);
	process.stdout.write(`Initialized workspace at ${workspaceRoot}\n`);
}));
// `il where` — print which workspace would be used and why.
program.command("where").description("show resolved workspace").action(handleAction(async () => {
	const opts = program.opts();
	const workspace = await resolveWorkspace({
		store: opts.store,
		global: opts.global,
		repo: opts.repo
	});
	process.stdout.write(`${workspace.root} (${workspace.kind})\n`);
}));
|
|
771
|
+
// `il add [title]` — create a task. PR-review tasks must reference a PR;
// when only a PR URL is given, the task title is derived from the PR
// (best effort: the GitHub fetch may return nothing without a token).
program
	.command("add")
	.description("add a task")
	.argument("[title]", "task title")
	.option("--type <type>", "regular|pr_review", "regular")
	.option("--url <url>", "attach URL")
	.option("--pr <url>", "attach PR URL")
	.action(handleAction(async (title, options, command) => {
		const taskType = options.type;
		if (!taskTypes.includes(taskType)) throw new Error(`Invalid type: ${taskType}`);
		if (taskType === "pr_review" && !options.pr) throw new Error("PR review tasks require --pr");
		const workspaceRoot = await resolveWorkspaceFor(true);
		await withWorkspaceLock(workspaceRoot, async () => {
			// Resolve the PR first so its title can back-fill a missing title.
			let prAttachment;
			let prTitle;
			if (options.pr) {
				const parsed = parseGitHubPrUrl(options.pr);
				if (!parsed) throw new Error("Invalid PR URL");
				const token = await resolveGithubToken(workspaceRoot);
				const fetched = await fetchGitHubPr(parsed, token);
				prAttachment = buildPrAttachment(parsed, fetched ?? void 0);
				prTitle = fetched?.title ?? `PR #${parsed.number} ${parsed.repo.owner}/${parsed.repo.name}`;
			}
			const isExplicitTitle = Boolean(title);
			let finalTitle = title ?? prTitle;
			if (!finalTitle) throw new Error("Title is required when no PR URL is provided");
			// Only derived titles get the "Review:" prefix; explicit ones are kept verbatim.
			if (taskType === "pr_review" && !isExplicitTitle) finalTitle = `Review: ${finalTitle}`;
			const task = buildTask({
				type: taskType,
				title: finalTitle,
				metadata: {
					url: options.url,
					pr: prAttachment
				}
			});
			// Validate before touching the store so a bad task never hits disk.
			taskSchema.parse(task);
			const created = await createTaskInStore(workspaceRoot, task);
			process.stdout.write(`Created ${created.alias} ${created.task.ref} ${created.task.title}\n`);
		});
	}));
|
|
804
|
+
// `il list [status]` — print tasks, optionally restricted to one status.
// Tasks with fetched PR metadata get their PR state appended to the line.
program
	.command("list")
	.description("list tasks")
	.argument("[status]", "task status")
	.action(handleAction(async (status, command) => {
		const workspaceRoot = await resolveWorkspaceFor(true);
		const aliasLookup = buildAliasLookup(await readAliases(workspaceRoot));
		if (status && !isTaskStatus(status)) throw new Error(`Invalid status: ${status}`);
		const statuses = status ? [status] : [...taskStatuses];
		const lines = [];
		for (const currentStatus of statuses) {
			for (const entry of await listTasksByStatus(workspaceRoot, currentStatus)) {
				const base = formatTaskListLine(entry.task, aliasLookup.get(entry.task.id));
				const prState = entry.task.metadata.pr?.fetched?.state;
				lines.push(prState ? `${base} PR:${prState}` : base);
			}
		}
		printLines(lines);
	}));
|
|
821
|
+
// `il show <id>` — print full details for one task, including its alias
// when the alias mapping knows about it.
program
	.command("show")
	.description("show a task")
	.argument("<id>", "task identifier")
	.action(handleAction(async (identifier, command) => {
		const workspaceRoot = await resolveWorkspaceFor(true);
		const stored = await resolveTask(workspaceRoot, identifier);
		const aliases = buildAliasLookup(await readAliases(workspaceRoot));
		process.stdout.write(`${formatTaskDetails(stored.task, aliases.get(stored.task.id))}\n`);
	}));
|
|
827
|
+
// Register one status-transition subcommand (start/pause/complete/cancel).
// applyTransition's FSM decides the destination status; the task file is
// rewritten in place first, then moved between status directories.
const addTransitionCommand = (name) => {
	program
		.command(name)
		.description(`${name} a task`)
		.argument("<id>", "task identifier")
		.option("-m, --message <message>", "custom log message")
		.action(handleAction(async (identifier, options, command) => {
			const workspaceRoot = await resolveWorkspaceFor(true);
			await withWorkspaceLock(workspaceRoot, async () => {
				const stored = await resolveTask(workspaceRoot, identifier);
				const transition = applyTransition(stored.task, name, options.message);
				await saveTask(workspaceRoot, stored.status, transition.task);
				await moveTaskFile(workspaceRoot, transition.task.id, stored.status, transition.nextStatus);
				process.stdout.write(`Updated ${transition.task.ref} -> ${transition.nextStatus}\n`);
			});
		}));
};
|
|
839
|
+
// The lifecycle verbs all share a single implementation.
for (const verb of ["start", "pause", "complete", "cancel"]) {
	addTransitionCommand(verb);
}
|
|
843
|
+
// `il log <id> <message>` — append a free-form log entry to a task,
// optionally tagging the entry with a status.
program
	.command("log")
	.description("append a log entry")
	.argument("<id>", "task identifier")
	.argument("<message>", "log message")
	.option("--status <status>", "include status in log entry")
	.action(handleAction(async (identifier, message, options, command) => {
		const workspaceRoot = await resolveWorkspaceFor(true);
		const status = options.status;
		// Use the shared guard so status validation stays consistent with `list`.
		if (status && !isTaskStatus(status)) throw new Error(`Invalid status: ${status}`);
		await withWorkspaceLock(workspaceRoot, async () => {
			const stored = await resolveTask(workspaceRoot, identifier);
			const updated = appendLog(stored.task, message, status);
			await saveTask(workspaceRoot, stored.status, updated);
			process.stdout.write(`Logged on ${updated.ref}\n`);
		});
	}));
|
|
854
|
+
// `il edit <id> <path> <value>` — set one task field addressed by a
// dotted path (e.g. "metadata.url").
program
	.command("edit")
	.description("edit a task field")
	.argument("<id>", "task identifier")
	.argument("<path>", "dotted path")
	.argument("<value>", "new value")
	.action(handleAction(async (identifier, dottedPath, value, command) => {
		const workspaceRoot = await resolveWorkspaceFor(true);
		await withWorkspaceLock(workspaceRoot, async () => {
			const stored = await resolveTask(workspaceRoot, identifier);
			const edited = applyTaskEdit(stored.task, dottedPath, value);
			await saveTask(workspaceRoot, stored.status, edited);
			process.stdout.write(`Updated ${edited.ref}\n`);
		});
	}));
|
|
863
|
+
// `il search <query>` — print every task matching the query, with the PR
// state appended when fetched metadata is available.
program
	.command("search")
	.description("search tasks")
	.argument("<query>", "search query")
	.action(handleAction(async (query, command) => {
		const workspaceRoot = await resolveWorkspaceFor(true);
		const aliasLookup = buildAliasLookup(await readAliases(workspaceRoot));
		const all = await listAllTasks(workspaceRoot);
		const lines = all
			.filter((stored) => taskMatchesQuery(stored.task, query))
			.map((stored) => {
				const base = formatTaskListLine(stored.task, aliasLookup.get(stored.task.id));
				const prState = stored.task.metadata.pr?.fetched?.state;
				return prState ? `${base} PR:${prState}` : base;
			});
		printLines(lines);
	}));
|
|
873
|
+
// `il today [id]` — with no id, list tasks assigned to the given (or
// current) date; with an id, assign that task to the date. Assigning an
// already-assigned task is a no-op with a friendly message.
program
	.command("today")
	.description("assign or list today tasks")
	.argument("[id]", "task identifier")
	.option("--date <date>", "YYYY-MM-DD")
	.option("-m, --message <message>", "assignment message")
	.action(handleAction(async (identifier, options, command) => {
		const workspaceRoot = await resolveWorkspaceFor(true);
		const date = todayDate(options.date);
		if (!identifier) {
			// List mode is read-only: no workspace lock required.
			const aliasLookup = buildAliasLookup(await readAliases(workspaceRoot));
			const assigned = (await listAllTasks(workspaceRoot))
				.filter((stored) => isAssignedOnDate(stored.task, date))
				.map((stored) => formatTaskListLine(stored.task, aliasLookup.get(stored.task.id)));
			printLines(assigned);
			return;
		}
		await withWorkspaceLock(workspaceRoot, async () => {
			const stored = await resolveTask(workspaceRoot, identifier);
			if (isAssignedOnDate(stored.task, date)) {
				process.stdout.write(`Already assigned ${stored.task.ref} to ${date}\n`);
				return;
			}
			const updated = appendDayAssignment(stored.task, date, "assign", options.message);
			await saveTask(workspaceRoot, stored.status, updated);
			process.stdout.write(`Assigned ${updated.ref} to ${date}\n`);
		});
	}));
|
|
892
|
+
// `il untoday <id>` — remove a task's assignment for the given (or
// current) date; unassigning a task that isn't assigned is a no-op.
program
	.command("untoday")
	.description("unassign a task from today")
	.argument("<id>", "task identifier")
	.option("--date <date>", "YYYY-MM-DD")
	.action(handleAction(async (identifier, options, command) => {
		const workspaceRoot = await resolveWorkspaceFor(true);
		const date = todayDate(options.date);
		await withWorkspaceLock(workspaceRoot, async () => {
			const stored = await resolveTask(workspaceRoot, identifier);
			if (!isAssignedOnDate(stored.task, date)) {
				process.stdout.write(`Already unassigned ${stored.task.ref} from ${date}\n`);
				return;
			}
			const updated = appendDayAssignment(stored.task, date, "unassign");
			await saveTask(workspaceRoot, stored.status, updated);
			process.stdout.write(`Unassigned ${updated.ref} from ${date}\n`);
		});
	}));
|
|
906
|
+
// `il attach-pr <id> <url>` — attach PR metadata to a task (fetching
// details best-effort) and record a log entry on the task.
program
	.command("attach-pr")
	.description("attach PR metadata to a task")
	.argument("<id>", "task identifier")
	.argument("<url>", "PR URL")
	.option("-m, --message <message>", "log message")
	.action(handleAction(async (identifier, url, options, command) => {
		const workspaceRoot = await resolveWorkspaceFor(true);
		await withWorkspaceLock(workspaceRoot, async () => {
			const parsed = parseGitHubPrUrl(url);
			if (!parsed) throw new Error("Invalid PR URL");
			const fetched = await fetchGitHubPr(parsed, await resolveGithubToken(workspaceRoot));
			const stored = await resolveTask(workspaceRoot, identifier);
			// Keep previously fetched PR data when this fetch returned nothing.
			const withPr = {
				...stored.task,
				metadata: {
					...stored.task.metadata,
					pr: buildPrAttachment(parsed, fetched ?? stored.task.metadata.pr?.fetched)
				},
				updated_at: nowIso()
			};
			const next = appendLog(withPr, options.message ?? `Attached PR ${url}`);
			await saveTask(workspaceRoot, stored.status, next);
			process.stdout.write(`Attached PR to ${next.ref}\n`);
		});
	}));
|
|
925
|
+
// `il refresh <id>` — re-fetch PR metadata for a task. When the PR's state
// changed and reached a terminal state (merged/closed), the task is
// auto-transitioned (complete/cancel) via the normal FSM — unless the task
// itself is already completed or cancelled.
program
	.command("refresh")
	.description("refresh PR metadata")
	.argument("<id>", "task identifier")
	.option("-m, --message <message>", "log message")
	.action(handleAction(async (identifier, options, command) => {
		const workspaceRoot = await resolveWorkspaceFor(true);
		await withWorkspaceLock(workspaceRoot, async () => {
			const stored = await resolveTask(workspaceRoot, identifier);
			const prUrl = stored.task.metadata.pr?.url;
			if (!prUrl) {
				process.stdout.write("No PR attached.\n");
				return;
			}
			const parsed = parseGitHubPrUrl(prUrl);
			if (!parsed) throw new Error("Invalid PR URL");
			const fetched = await fetchGitHubPr(parsed, await resolveGithubToken(workspaceRoot));
			if (!fetched) throw new Error("No GitHub token configured");
			const baseUpdated = {
				...stored.task,
				metadata: {
					...stored.task.metadata,
					pr: buildPrAttachment(parsed, fetched)
				},
				updated_at: nowIso()
			};
			const previousState = stored.task.metadata.pr?.fetched?.state;
			const isTerminal = stored.task.status === "completed" || stored.task.status === "cancelled";
			let finalTask = baseUpdated;
			let nextStatus = stored.status;
			if (!isTerminal && previousState !== fetched.state) {
				// Mirror the PR outcome onto the task status.
				if (fetched.state === "merged") {
					({ task: finalTask, nextStatus } = applyTransition(baseUpdated, "complete", options.message ?? "PR merged"));
				} else if (fetched.state === "closed") {
					({ task: finalTask, nextStatus } = applyTransition(baseUpdated, "cancel", options.message ?? "PR closed without merge"));
				}
			}
			// Rewrite under the old status directory first, then move if needed.
			await saveTask(workspaceRoot, stored.status, finalTask);
			if (nextStatus !== stored.status) await moveTaskFile(workspaceRoot, finalTask.id, stored.status, nextStatus);
			process.stdout.write(`Refreshed ${finalTask.ref}\n`);
		});
	}));
|
|
967
|
+
// `il alias reconcile` — rebuild the alias mapping from the task store.
const aliasCommand = program.command("alias").description("alias helpers");
aliasCommand
	.command("reconcile")
	.description("reconcile alias mapping")
	.action(handleAction(async (command) => {
		const workspaceRoot = await resolveWorkspaceFor(true);
		await withWorkspaceLock(workspaceRoot, async () => {
			await reconcileAliases(workspaceRoot);
			process.stdout.write("Aliases reconciled.\n");
		});
	}));
|
|
974
|
+
// Run the CLI. Await the parse so that a rejected action surfaces as a
// top-level failure instead of an unhandled promise rejection (the file
// already uses top-level await for resolvePackageVersion above).
await program.parseAsync(process.argv);

//#endregion
export { };
|