@xiaofandegeng/rmemo 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/CONTRIBUTING.md +28 -0
- package/DEVELOPMENT_PLAN.md +77 -0
- package/LICENSE +22 -0
- package/README.md +150 -0
- package/README.zh-CN.md +156 -0
- package/bin/rmemo.js +63 -0
- package/package.json +47 -0
- package/src/cmd/check.js +30 -0
- package/src/cmd/context.js +15 -0
- package/src/cmd/done.js +55 -0
- package/src/cmd/hook.js +108 -0
- package/src/cmd/init.js +86 -0
- package/src/cmd/log.js +12 -0
- package/src/cmd/print.js +15 -0
- package/src/cmd/scan.js +25 -0
- package/src/cmd/start.js +41 -0
- package/src/cmd/status.js +147 -0
- package/src/cmd/todo.js +85 -0
- package/src/core/check.js +396 -0
- package/src/core/context.js +114 -0
- package/src/core/journal.js +31 -0
- package/src/core/scan.js +282 -0
- package/src/core/todos.js +143 -0
- package/src/lib/args.js +81 -0
- package/src/lib/git.js +35 -0
- package/src/lib/io.js +43 -0
- package/src/lib/paths.js +38 -0
- package/src/lib/stdin.js +12 -0
- package/src/lib/time.js +13 -0
- package/src/lib/walk.js +44 -0
- package/test/smoke.test.js +395 -0
package/src/core/scan.js
ADDED
|
@@ -0,0 +1,282 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { hasGit, isGitRepo, listGitFiles } from "../lib/git.js";
|
|
4
|
+
import { walkFiles } from "../lib/walk.js";
|
|
5
|
+
import { readText } from "../lib/io.js";
|
|
6
|
+
|
|
7
|
+
/**
 * Return the lowercase file extension of a posix path, without the dot.
 * Extensionless dotfiles (".env", ".gitignore") return their full basename,
 * including the leading dot; names with no dot at all return "".
 */
function extOf(p) {
  const base = path.posix.basename(p);
  // BUG FIX: the old check `base.startsWith(".") && !base.includes(".")` was
  // dead code (a name starting with "." always includes "."), so ".env"
  // wrongly reported "env". Ignore the leading dot when probing for more dots.
  if (base.startsWith(".") && !base.slice(1).includes(".")) return base; // ".env" style
  const idx = base.lastIndexOf(".");
  return idx === -1 ? "" : base.slice(idx + 1).toLowerCase();
}
|
|
13
|
+
|
|
14
|
+
/**
 * Extract framework/tooling hints from a parsed package.json object.
 * Dependencies and devDependencies are pooled before matching.
 * @param {object} pkg parsed package.json
 * @returns {{frameworks: string[], pkgManager: string|null, workspaces: any, scripts: object}}
 */
function detectFromPackageJson(pkg) {
  const pooled = { ...(pkg.dependencies || {}), ...(pkg.devDependencies || {}) };
  const depNames = new Set(Object.keys(pooled));

  // Ordered mapping: any matching dependency name yields the framework label.
  const FRAMEWORK_HINTS = [
    [["vue"], "vue"],
    [["react"], "react"],
    [["next"], "nextjs"],
    [["nuxt"], "nuxt"],
    [["uni-app", "@dcloudio/uni-app"], "uni-app"],
    [["@tarojs/taro"], "taro"],
    [["vite"], "vite"]
  ];
  const frameworks = FRAMEWORK_HINTS.filter(([names]) => names.some((n) => depNames.has(n))).map(
    ([, label]) => label
  );

  return {
    frameworks,
    pkgManager: pkg.packageManager || null,
    workspaces: pkg.workspaces || null,
    scripts: pkg.scripts || {}
  };
}
|
|
32
|
+
|
|
33
|
+
/**
 * Detect monorepo tooling from well-known root config files.
 * Output order is fixed by the marker table below.
 */
function detectMonorepoSignals(files) {
  const present = new Set(files);
  const MARKERS = [
    ["pnpm-workspace.yaml", "pnpm-workspace"],
    ["lerna.json", "lerna"],
    ["turbo.json", "turborepo"],
    ["nx.json", "nx"],
    ["rush.json", "rush"],
    ["workspace.json", "workspace.json"]
  ];
  return MARKERS.filter(([file]) => present.has(file)).map(([, signal]) => signal);
}

/**
 * Find conventional documentation root directories that actually contain
 * files. Returns a sorted, de-duplicated list of directory names.
 */
function detectDocsRoots(files) {
  const KNOWN_ROOTS = ["docs", "doc", "documentation"];
  const found = new Set();
  for (const f of files) {
    for (const root of KNOWN_ROOTS) {
      if (f.startsWith(root + "/")) found.add(root);
    }
  }
  return [...found].sort();
}

/**
 * Collect OpenAPI/Swagger contract files anywhere in the tree, matched by
 * lowercased basename. Capped at 30 entries, original file order preserved.
 */
function detectApiContracts(files) {
  const CONTRACT_NAMES = new Set([
    "openapi.yaml",
    "openapi.yml",
    "swagger.yaml",
    "swagger.yml",
    "openapi.json",
    "swagger.json"
  ]);
  const hits = files.filter((f) => CONTRACT_NAMES.has(path.posix.basename(f).toLowerCase()));
  return [...new Set(hits)].slice(0, 30);
}
|
|
64
|
+
|
|
65
|
+
/**
 * Heuristically locate subproject roots: shallow directories (path depth <= 3
 * below the repo root) containing a well-known manifest/config file.
 * Candidates nested inside an already-accepted candidate are dropped, and the
 * output is capped at 30 entries.
 * @returns {{dir: string, reasons: string[], hintFiles: string[]}[]}
 */
function detectSubprojectCandidates(files) {
  const candidates = new Map(); // dir -> { reasons: Set<string>, files: Set<string> }

  const record = (dir, reason, file) => {
    if (!dir || dir === ".") return;
    let entry = candidates.get(dir);
    if (!entry) {
      entry = { reasons: new Set(), files: new Set() };
      candidates.set(dir, entry);
    }
    entry.reasons.add(reason);
    if (file) entry.files.add(file);
  };

  // basename -> reason label (pyproject/requirements collapse to "python")
  const MANIFEST_REASONS = new Map([
    ["package.json", "package.json"],
    ["pom.xml", "pom.xml"],
    ["go.mod", "go.mod"],
    ["Cargo.toml", "Cargo.toml"],
    ["composer.json", "composer.json"],
    ["pyproject.toml", "python"],
    ["requirements.txt", "python"]
  ]);

  for (const f of files) {
    const parts = f.split("/");
    // Root-level files have no candidate dir; deep paths are mostly noise.
    if (parts.length < 2 || parts.length > 4) continue;
    const dir = parts.slice(0, -1).join("/");
    const base = parts[parts.length - 1];

    const reason = MANIFEST_REASONS.get(base);
    if (reason) record(dir, reason, f);

    // Miniapp-ish hints (generic, not tied to one vendor)
    if (base === "project.config.json") record(dir, "miniapp:project.config.json", f);
    if (base === "app.json" && dir.includes("mini")) record(dir, "miniapp:app.json", f);
  }

  // Keep only the outermost candidates: shortest paths win, nested ones drop.
  const shallowFirst = [...candidates.keys()].sort((a, b) => a.length - b.length);
  const roots = [];
  for (const dir of shallowFirst) {
    if (!roots.some((r) => dir.startsWith(r + "/"))) roots.push(dir);
  }

  return roots.slice(0, 30).map((dir) => {
    const entry = candidates.get(dir);
    return {
      dir,
      reasons: [...entry.reasons].sort(),
      hintFiles: [...entry.files].sort().slice(0, 10)
    };
  });
}
|
|
115
|
+
|
|
116
|
+
/**
 * Read and JSON-parse a file, returning null on any read or parse failure
 * instead of throwing (used for optional files such as package.json).
 */
async function safeJson(absPath) {
  try {
    return JSON.parse(await readText(absPath));
  } catch {
    return null;
  }
}
|
|
124
|
+
|
|
125
|
+
/**
 * Count files per top-level directory and return the 20 busiest as
 * { name, fileCount }, most files first. Hidden directories (".github", ...)
 * are excluded.
 */
function pickTopLevelDirs(files) {
  const counts = new Map();
  for (const f of files) {
    const parts = f.split("/");
    // BUG FIX: files sitting directly at the repo root ("README.md") have no
    // directory component; previously they were counted as fake "dirs".
    if (parts.length < 2) continue;
    const top = parts[0];
    if (!top || top.startsWith(".")) continue;
    counts.set(top, (counts.get(top) || 0) + 1);
  }
  return [...counts.entries()]
    .sort((a, b) => b[1] - a[1])
    .slice(0, 20)
    .map(([name, fileCount]) => ({ name, fileCount }));
}
|
|
138
|
+
|
|
139
|
+
/**
 * Guess the language ecosystems present in the repo from well-known manifest
 * files. Hint order is fixed: jvm, go, node, rust, php, python.
 */
function detectRepoType(files) {
  const present = new Set(files);
  const hasGradle = files.some((p) => p.endsWith("build.gradle") || p.endsWith("build.gradle.kts"));
  const hints = [];
  if (present.has("pom.xml") || hasGradle) hints.push("jvm");
  if (present.has("go.mod")) hints.push("go");
  if (present.has("package.json")) hints.push("node");
  if (present.has("Cargo.toml")) hints.push("rust");
  if (present.has("composer.json")) hints.push("php");
  if (present.has("requirements.txt") || present.has("pyproject.toml")) hints.push("python");
  return hints;
}

/**
 * Report which JS package-manager lockfiles exist at the repo root, in a
 * fixed priority order (pnpm, npm, yarn, bun).
 */
function detectLockfiles(files) {
  const present = new Set(files);
  const KNOWN_LOCKS = ["pnpm-lock.yaml", "package-lock.json", "yarn.lock", "bun.lockb"];
  return KNOWN_LOCKS.filter((name) => present.has(name));
}
|
|
160
|
+
|
|
161
|
+
/**
 * Pick well-known "orientation" files: READMEs, LICENSE, API specs,
 * docker-compose, env templates, plus GitHub Actions workflow files.
 * At most 50 unique paths; wanted-list order first, then workflows in
 * file-list order.
 */
function pickKeyFiles(files) {
  const WANTED = [
    "README.md",
    "README.zh-CN.md",
    "README.zh.md",
    "CONTRIBUTING.md",
    "LICENSE",
    "openapi.yaml",
    "openapi.yml",
    "swagger.yaml",
    "swagger.yml",
    "docker-compose.yml",
    "docker-compose.yaml",
    ".env.example",
    ".env.sample"
  ];
  const present = new Set(files);
  const hits = WANTED.filter((p) => present.has(p));

  // GitHub Actions workflow definitions are key files too.
  const isWorkflow = (f) =>
    f.startsWith(".github/workflows/") && (f.endsWith(".yml") || f.endsWith(".yaml"));
  hits.push(...files.filter(isWorkflow));

  return [...new Set(hits)].slice(0, 50);
}

/**
 * Histogram of file extensions across the repo: top 20 as { ext, count },
 * sorted by count descending. Uses extOf() for the extension key; files with
 * no extension are skipped.
 */
function summarizeExtensions(files) {
  const counts = new Map();
  for (const f of files) {
    const ext = extOf(f);
    if (!ext) continue;
    counts.set(ext, (counts.get(ext) || 0) + 1);
  }
  return [...counts.entries()]
    .sort((a, b) => b[1] - a[1])
    .slice(0, 20)
    .map(([ext, count]) => ({ ext, count }));
}
|
|
199
|
+
|
|
200
|
+
/**
 * Scan a repository and build two artifacts:
 *  - manifest: summary for humans/LLMs (ecosystems, monorepo signals,
 *    lockfiles, key files, docs roots, API contracts, subproject candidates,
 *    package.json highlights, README head, title)
 *  - index: the raw file list, so context generation can skip re-walking.
 *
 * Uses `git ls-files` when available (respects .gitignore), otherwise falls
 * back to a filesystem walk.
 *
 * @param {string} root absolute repo root
 * @param {{maxFiles?: number, preferGit?: boolean}} [opts]
 * @returns {Promise<{manifest: object, index: object}>}
 */
export async function scanRepo(root, { maxFiles = 4000, preferGit = true } = {}) {
  let files = [];
  const gitOk = preferGit && (await hasGit()) && (await isGitRepo(root));

  if (gitOk) {
    files = await listGitFiles(root);
  } else {
    files = await walkFiles(root, { maxFiles });
  }

  // git listings are not capped at the source, so enforce the limit here too.
  if (files.length > maxFiles) files = files.slice(0, maxFiles);

  const repoHints = detectRepoType(files);
  const monorepoSignals = detectMonorepoSignals(files);
  const lockfiles = detectLockfiles(files);
  const keyFiles = pickKeyFiles(files);
  const docsRoots = detectDocsRoots(files);
  const apiContracts = detectApiContracts(files);
  const subprojects = detectSubprojectCandidates(files);
  const topDirs = pickTopLevelDirs(files);
  const topExts = summarizeExtensions(files);

  const pkgAbs = path.join(root, "package.json");
  const pkg = await safeJson(pkgAbs);
  const pkgInfo = pkg ? detectFromPackageJson(pkg) : null;
  const monorepo = {
    signals: monorepoSignals,
    rootWorkspaces: pkgInfo?.workspaces || null
  };

  // CONSISTENCY FIX: stamp both artifacts with the same instant. Previously
  // two separate `new Date()` calls could yield differing timestamps for
  // manifest and index produced by the same scan.
  const generatedAt = new Date().toISOString();

  const manifest = {
    schema: 1,
    generatedAt,
    root,
    usingGit: gitOk,
    fileCount: files.length,
    repoHints,
    monorepo,
    lockfiles,
    topDirs,
    topExts,
    keyFiles,
    docsRoots,
    apiContracts,
    subprojects,
    packageJson: pkg
      ? {
          name: pkg.name || null,
          private: !!pkg.private,
          scripts: pkgInfo?.scripts || {},
          frameworks: pkgInfo?.frameworks || [],
          packageManager: pkgInfo?.pkgManager || null,
          workspaces: pkgInfo?.workspaces || null
        }
      : null
  };

  // Index is for fast context generation without re-walking every time.
  const index = {
    schema: 1,
    generatedAt,
    files
  };

  // Rough repo title: package name when present, else directory name.
  manifest.title = pkg?.name || path.basename(root);

  // Try read repo README short summary (first 40 lines, read capped at 64 kB).
  const readme = keyFiles.find((p) => /^README(\..+)?\.md$/i.test(p));
  if (readme) {
    try {
      const s = await readText(path.join(root, readme), 64_000);
      manifest.readmeHead = s.split("\n").slice(0, 40).join("\n");
    } catch {
      // Best-effort: an unreadable README is not fatal to the scan.
    }
  }

  return { manifest, index };
}
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import { ensureDir, fileExists, writeText } from "../lib/io.js";
|
|
3
|
+
import { memDir, todosPath } from "../lib/paths.js";
|
|
4
|
+
|
|
5
|
+
const DEFAULT_TODOS = `# Todos
|
|
6
|
+
|
|
7
|
+
## Next
|
|
8
|
+
- (Write the next concrete step)
|
|
9
|
+
|
|
10
|
+
## Blockers
|
|
11
|
+
- (If any)
|
|
12
|
+
`;
|
|
13
|
+
|
|
14
|
+
/**
 * Parse a todos.md document into { next, blockers, raw }.
 * Bullets ("- item") are collected under the nearest preceding "## Next..." /
 * "## Block..." header (prefix match, case-insensitive); bullets under any
 * other header — or before the first header — are ignored. `raw` is the
 * right-trimmed original text. Accepts null/undefined input.
 */
export function parseTodos(md) {
  const text = String(md || "");
  const result = { next: [], blockers: [], raw: text.trimEnd() };

  let bucket = null; // "next" | "blockers" | null (ignore bullets)
  for (const line of text.split("\n")) {
    const header = line.match(/^##\s+(.*)\s*$/);
    if (header) {
      const label = header[1].toLowerCase();
      bucket = label.startsWith("next") ? "next" : label.startsWith("block") ? "blockers" : null;
      continue;
    }
    if (!bucket) continue;
    const bullet = line.match(/^\s*-\s+(.*)\s*$/);
    if (bullet) result[bucket].push(bullet[1]);
  }
  return result;
}
|
|
32
|
+
|
|
33
|
+
// ROBUSTNESS FIX: section titles were interpolated into RegExp sources
// unescaped, so a title containing regex metacharacters (e.g. "C++") would
// build an invalid or wrong pattern. Escape them so titles match literally.
function escapeRegExp(s) {
  return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}

/**
 * Ensure `md` contains a "## <title>" section header (case-insensitive),
 * appending one at the end if missing.
 * @returns {string} the (possibly updated) markdown text
 */
function ensureSection(md, title) {
  const has = new RegExp(`^##\\s+${escapeRegExp(title)}\\s*$`, "im").test(md);
  if (has) return md;
  const trimmed = md.trimEnd();
  if (!trimmed) return `## ${title}\n`;
  return trimmed + `\n\n## ${title}\n`;
}

/**
 * Locate a "## <title>" section in an array of lines.
 * @returns {{start: number, end: number}|null} start = index of the header
 *   line; end = index of the next "## " header (or lines.length); null when
 *   the section is absent.
 */
function findSectionRange(lines, title) {
  const headerRe = new RegExp(`^##\\s+${escapeRegExp(title)}\\s*$`, "i");
  const start = lines.findIndex((l) => headerRe.test(l.trim()));
  if (start === -1) return null;
  let end = start + 1;
  while (end < lines.length && !/^##\s+/.test(lines[end])) end++;
  return { start, end };
}
|
|
49
|
+
|
|
50
|
+
/**
 * Append "- <bulletText>" to the "## <title>" section of a todos markdown
 * document, creating the section first when missing. The bullet is inserted
 * at the END of the section (just before the next "## " header, or at EOF),
 * and the DEFAULT_TODOS placeholder bullet for that section is dropped the
 * first time a real item is added.
 *
 * NOTE(review): `title` is interpolated into a RegExp unescaped — fine for
 * the fixed "Next"/"Blockers" titles the callers use, but a title containing
 * regex metacharacters would misbehave; confirm before widening callers.
 *
 * @param {string} md existing markdown (null/undefined treated as empty)
 * @param {string} title section title, matched case-insensitively
 * @param {string} bulletText text for the new bullet
 * @returns {string} updated markdown, right-trimmed with a trailing newline
 */
function insertBulletIntoSection(md, title, bulletText) {
  let s = String(md || "");
  s = ensureSection(s, title);
  const lines = s.split("\n");
  const headerRe = new RegExp(`^##\\s+${title}\\s*$`, "i");
  const idx = lines.findIndex((l) => headerRe.test(l.trim()));
  // Defensive fallback: ensureSection just added the header, so idx should
  // never be -1; if it somehow is, append the bullet at the end of the doc.
  if (idx === -1) return s.trimEnd() + `\n- ${bulletText}\n`;

  // Drop default template placeholder bullets once real items are being added.
  const range = findSectionRange(lines, title);
  if (range) {
    // Placeholder patterns must mirror the bullets in DEFAULT_TODOS exactly.
    const placeholder =
      title.toLowerCase() === "next"
        ? /^\s*-\s+\(Write the next concrete step\)\s*$/
        : title.toLowerCase() === "blockers"
          ? /^\s*-\s+\(If any\)\s*$/
          : null;
    if (placeholder) {
      for (let i = range.start + 1; i < range.end; i++) {
        if (placeholder.test(lines[i])) {
          lines.splice(i, 1);
          break; // at most one placeholder exists per section
        }
      }
    }
  }

  // Insert after header + blank lines, but before next section header.
  // The second loop advances past every non-header line, so the bullet in
  // practice lands at the end of the section's existing content.
  let insertAt = idx + 1;
  while (insertAt < lines.length && lines[insertAt].trim() === "") insertAt++;
  while (insertAt < lines.length && !/^##\s+/.test(lines[insertAt])) insertAt++;

  lines.splice(insertAt, 0, `- ${bulletText}`);
  return lines.join("\n").trimEnd() + "\n";
}
|
|
85
|
+
|
|
86
|
+
/**
 * Remove the n-th bullet (1-based) from the "## <title>" section of a todos
 * markdown document.
 * @throws {Error} when the index is not a positive integer, the section is
 *   missing, or the section holds fewer than n bullets.
 * @returns {string} updated markdown, right-trimmed with a trailing newline
 */
function removeNthBulletFromSection(md, title, n1) {
  const n = Number(n1);
  if (!Number.isInteger(n) || n <= 0) throw new Error(`Index must be a positive integer (got: ${n1})`);

  const lines = String(md || "").split("\n");
  const range = findSectionRange(lines, title);
  if (!range) throw new Error(`Missing section: ## ${title}`);

  // Collect the line indices of every bullet inside the section body.
  const bullets = [];
  for (let i = range.start + 1; i < range.end; i++) {
    if (/^\s*-\s+/.test(lines[i])) bullets.push(i);
  }
  if (n > bullets.length) throw new Error(`No such item: ${title} #${n} (total: ${bullets.length})`);

  const removeAt = bullets[n - 1];
  const kept = [...lines.slice(0, removeAt), ...lines.slice(removeAt + 1)];
  return kept.join("\n").trimEnd() + "\n";
}
|
|
104
|
+
|
|
105
|
+
/**
 * Make sure .repo-memory/todos.md exists, seeding it with DEFAULT_TODOS on
 * first use. Returns the absolute path to the todos file.
 */
export async function ensureTodosFile(root) {
  await ensureDir(memDir(root));
  const p = todosPath(root);
  const alreadyThere = await fileExists(p);
  if (!alreadyThere) {
    await writeText(p, DEFAULT_TODOS);
  }
  return p;
}
|
|
112
|
+
|
|
113
|
+
/**
 * Shared read-modify-write cycle for todos.md: ensure the file exists, apply
 * `transform` to its current text, persist the result, return the file path.
 * (DECOMPOSITION: the four exported mutators below previously repeated this
 * exact sequence verbatim.)
 */
async function updateTodosFile(root, transform) {
  const p = await ensureTodosFile(root);
  const current = await fs.readFile(p, "utf8");
  await fs.writeFile(p, transform(current), "utf8");
  return p;
}

/** Add a bullet under "## Next". @returns {Promise<string>} todos.md path */
export async function addTodoNext(root, text) {
  return updateTodosFile(root, (s) => insertBulletIntoSection(s, "Next", text));
}

/** Add a bullet under "## Blockers". @returns {Promise<string>} todos.md path */
export async function addTodoBlocker(root, text) {
  return updateTodosFile(root, (s) => insertBulletIntoSection(s, "Blockers", text));
}

/** Remove the nth (1-based) bullet under "## Next". @returns {Promise<string>} */
export async function removeTodoNextByIndex(root, index1) {
  return updateTodosFile(root, (s) => removeNthBulletFromSection(s, "Next", index1));
}

/** Remove the nth (1-based) bullet under "## Blockers". @returns {Promise<string>} */
export async function removeTodoBlockerByIndex(root, index1) {
  return updateTodosFile(root, (s) => removeNthBulletFromSection(s, "Blockers", index1));
}
|
package/src/lib/args.js
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
/**
 * Minimal argv parser.
 * - "--key=value" and "--key value" set string flags
 * - boolean-only flags ("--help", "--force", "--staged") and the "--no-*"
 *   convention never consume the following token as a value
 * - "-x" sets a boolean short flag
 * - everything after "--" is passed through as positionals
 * The first positional becomes `cmd`; the remainder is `rest`.
 * @returns {{cmd: string|undefined, rest: string[], flags: object}}
 */
export function parseArgs(argv) {
  const flags = {};
  const positionals = [];
  const BOOLEAN_FLAGS = new Set(["help", "force", "staged"]);

  let i = 0;
  while (i < argv.length) {
    const token = argv[i];
    i++;
    if (!token) continue;

    if (token === "--") {
      positionals.push(...argv.slice(i));
      break;
    }

    if (token.startsWith("--")) {
      const eq = token.indexOf("=");
      if (eq >= 0) {
        flags[token.slice(2, eq)] = token.slice(eq + 1);
        continue;
      }
      const key = token.slice(2);
      // Boolean flags must not swallow the next positional as their value.
      if (key.startsWith("no-") || BOOLEAN_FLAGS.has(key)) {
        flags[key] = true;
        continue;
      }
      const lookahead = argv[i];
      if (lookahead && !lookahead.startsWith("-")) {
        flags[key] = lookahead;
        i++;
      } else {
        flags[key] = true;
      }
      continue;
    }

    if (token.startsWith("-") && token.length > 1) {
      flags[token.slice(1)] = true;
      continue;
    }

    positionals.push(token);
  }

  const cmd = positionals.shift();
  return { cmd, rest: positionals, flags };
}
|
|
45
|
+
|
|
46
|
+
/**
 * Print CLI usage to stdout.
 * Kept as one template literal so the help text is edited in one place; the
 * leading newline is stripped by trimStart() before writing.
 */
export function printHelp() {
  const help = `
rmemo - repo memory + dev journal CLI

Usage:
  rmemo init               Initialize .repo-memory/ and run a scan
  rmemo scan               Scan repo and update manifest/index
  rmemo log <text>         Append a note to today's journal
  rmemo status             Print a paste-ready status summary (rules/todos/journal)
  rmemo check              Enforce .repo-memory/rules.json (for CI / hooks)
  rmemo hook install       Install a git pre-commit hook that runs \`rmemo check\`
  rmemo start              Scan + generate context + print status (daily entrypoint)
  rmemo done               Append end-of-day notes to journal (supports stdin) and optionally update todos
  rmemo todo add <text>    Add a todo item under "## Next"
  rmemo todo block <text>  Add a blocker under "## Blockers"
  rmemo todo done <n>      Remove the nth item from "## Next"
  rmemo todo unblock <n>   Remove the nth item from "## Blockers"
  rmemo todo ls            Print parsed todos (Next/Blockers)
  rmemo context            Generate .repo-memory/context.md
  rmemo print              Print context.md to stdout (generate first if missing)

Options:
  --root <path>        Repo root (default: cwd)
  --format <md|json>   Output format for status (default: md)
  --mode <brief|full>  Output detail level for status/start (default: full)
  --next <text>        Append a bullet to .repo-memory/todos.md under "## Next"
  --blocker <text>     Append a bullet to .repo-memory/todos.md under "## Blockers"
  --staged             For check: only validate staged (git index) files
  --force              Overwrite existing git hook (creates backup)
  --max-files <n>      Max files to analyze (default: 4000)
  --snip-lines <n>     Max lines per snippet (default: 120)
  --recent-days <n>    Include recent journal entries (default: 7)
  --no-git             Don't use git for scanning (fallback to filesystem walk)
`;
  process.stdout.write(help.trimStart());
}
|
package/src/lib/git.js
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { execFile } from "node:child_process";
|
|
2
|
+
import { promisify } from "node:util";
|
|
3
|
+
|
|
4
|
+
// Promise-returning wrapper around child_process.execFile.
const execFileAsync = promisify(execFile);

/**
 * True when a `git` executable is available on PATH.
 */
export async function hasGit() {
  try {
    await execFileAsync("git", ["--version"]);
  } catch {
    return false;
  }
  return true;
}

/**
 * True when `root` lies inside a git work tree. Any failure (no git, bad
 * path, not a repo) yields false rather than throwing.
 */
export async function isGitRepo(root) {
  try {
    const res = await execFileAsync("git", ["rev-parse", "--is-inside-work-tree"], { cwd: root });
    return res.stdout.trim() === "true";
  } catch {
    return false;
  }
}
|
|
23
|
+
|
|
24
|
+
/**
 * List tracked plus untracked-but-not-ignored files in the repo at `root`,
 * as sorted, de-duplicated, repo-relative paths. git always emits "/"
 * separators, so the paths are already posix-like.
 * Throws if `root` is not a git work tree.
 */
export async function listGitFiles(root) {
  const opts = { cwd: root, maxBuffer: 1024 * 1024 * 50 };
  // PERF: the two listings are independent — run them in parallel instead of
  // sequentially awaiting each one.
  const [tracked, untracked] = await Promise.all([
    execFileAsync("git", ["ls-files", "-z"], opts),
    execFileAsync("git", ["ls-files", "--others", "--exclude-standard", "-z"], opts)
  ]);

  // -z output is NUL-delimited; filter drops the trailing empty entry.
  const out = (tracked.stdout + untracked.stdout).split("\0").filter(Boolean);
  return Array.from(new Set(out)).sort();
}
|
|
35
|
+
|
package/src/lib/io.js
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
/**
 * Create directory `p` (and any missing parents); no-op when it exists.
 */
export async function ensureDir(p) {
  await fs.mkdir(p, { recursive: true });
}

/**
 * True when path `p` exists and is accessible to the current process.
 */
export async function fileExists(p) {
  try {
    await fs.access(p);
  } catch {
    return false;
  }
  return true;
}

/**
 * Read a file as UTF-8 text. Content beyond `maxBytes` is cut off and the
 * text is suffixed with "\n[...truncated]". (The cut is byte-based, so a
 * multi-byte character straddling the boundary may be mangled.)
 * @param {string} p file path
 * @param {number} [maxBytes=2_000_000] size cap in bytes
 */
export async function readText(p, maxBytes = 2_000_000) {
  const buf = await fs.readFile(p);
  if (buf.byteLength <= maxBytes) {
    return buf.toString("utf8");
  }
  return buf.subarray(0, maxBytes).toString("utf8") + "\n[...truncated]";
}
|
|
24
|
+
|
|
25
|
+
/**
 * Write UTF-8 text to `p`, creating parent directories as needed.
 */
export async function writeText(p, s) {
  await ensureDir(path.dirname(p));
  await fs.writeFile(p, s, "utf8");
}

/**
 * Serialize `obj` as pretty-printed JSON (2-space indent, trailing newline)
 * and write it to `p`.
 */
export async function writeJson(p, obj) {
  const body = JSON.stringify(obj, null, 2) + "\n";
  await writeText(p, body);
}

/**
 * Read and parse a JSON file. Throws on read or parse failure (including a
 * file so large that readText truncates it into invalid JSON).
 */
export async function readJson(p) {
  return JSON.parse(await readText(p));
}
|
|
38
|
+
|
|
39
|
+
/**
 * Report an error on stderr (with a single trailing newline) and mark the
 * process to exit with status 1 — without terminating immediately, so
 * pending writes and cleanup can still run.
 */
export function exitWithError(msg) {
  const text = String(msg).trimEnd();
  process.stderr.write(`${text}\n`);
  process.exitCode = 1;
}
|
|
43
|
+
|
package/src/lib/paths.js
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
|
|
3
|
+
/**
 * Resolve the repo root from parsed CLI flags (`--root`), defaulting to the
 * current working directory. Always returns an absolute path.
 */
export function resolveRoot(flags) {
  const raw = flags?.root ? String(flags.root) : process.cwd();
  return path.resolve(raw);
}

// All rmemo state lives under <root>/.repo-memory/. The helpers below are
// the single source of truth for file locations inside that directory.
const MEM_DIR_NAME = ".repo-memory";

/** Directory holding all rmemo state for a repo. */
export function memDir(root) {
  return path.join(root, MEM_DIR_NAME);
}

/** Directory of daily journal files. */
export function journalDir(root) {
  return path.join(memDir(root), "journal");
}

/** Repo scan summary produced by scanRepo(). */
export function manifestPath(root) {
  return path.join(memDir(root), "manifest.json");
}

/** Raw file list from the last scan. */
export function indexPath(root) {
  return path.join(memDir(root), "index.json");
}

/** Human-readable project rules. */
export function rulesPath(root) {
  return path.join(memDir(root), "rules.md");
}

/** Machine-enforced rules (consumed by `rmemo check`). */
export function rulesJsonPath(root) {
  return path.join(memDir(root), "rules.json");
}

/** Generated context document. */
export function contextPath(root) {
  return path.join(memDir(root), "context.md");
}

/** Todo list (Next/Blockers sections). */
export function todosPath(root) {
  return path.join(memDir(root), "todos.md");
}
|
package/src/lib/stdin.js
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/**
 * Read all of stdin as UTF-8 text until EOF. Returns "" immediately when
 * stdin is a TTY (nothing is piped in), so interactive runs never hang
 * waiting for end-of-input.
 */
export async function readStdinText() {
  if (process.stdin.isTTY) return "";
  const chunks = [];
  return await new Promise((resolve, reject) => {
    process.stdin.setEncoding("utf8");
    process.stdin.on("data", (chunk) => chunks.push(chunk));
    process.stdin.on("end", () => resolve(chunks.join("")));
    process.stdin.on("error", reject);
    process.stdin.resume();
  });
}
|
package/src/lib/time.js
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
/**
 * Format a date as local-time "YYYY-MM-DD".
 */
export function todayYmd(d = new Date()) {
  const pad2 = (n) => String(n).padStart(2, "0");
  return `${d.getFullYear()}-${pad2(d.getMonth() + 1)}-${pad2(d.getDate())}`;
}

/**
 * Format a time as local "HH:MM" (24-hour clock).
 */
export function nowHm(d = new Date()) {
  const pad2 = (n) => String(n).padStart(2, "0");
  return `${pad2(d.getHours())}:${pad2(d.getMinutes())}`;
}
|
|
13
|
+
|
package/src/lib/walk.js
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import fs from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
// Directory names skipped during filesystem walks: VCS metadata, rmemo's own
// state dir, dependency/build output, and editor config directories.
const DEFAULT_IGNORES = new Set([
  ".git",
  ".repo-memory",
  "node_modules",
  "dist",
  "build",
  "out",
  "coverage",
  ".next",
  ".nuxt",
  ".turbo",
  ".vite",
  ".idea",
  ".vscode"
]);

/**
 * Recursively list regular files under `root` as sorted, posix-style
 * relative paths, skipping DEFAULT_IGNORES directories and stopping once
 * `maxFiles` entries are collected. Fallback used when git is unavailable.
 *
 * @param {string} root absolute directory to walk
 * @param {{maxFiles?: number}} [opts]
 * @returns {Promise<string[]>}
 */
export async function walkFiles(root, { maxFiles = 4000 } = {}) {
  const result = [];

  async function walkDir(rel) {
    const abs = path.join(root, rel);
    const entries = await fs.readdir(abs, { withFileTypes: true });
    for (const ent of entries) {
      const name = ent.name;
      if (DEFAULT_IGNORES.has(name)) continue;
      // CLEANUP FIX: removed an unused `absPath` that was computed per entry.
      const relPath = rel ? path.posix.join(rel, name) : name;
      if (ent.isDirectory()) {
        await walkDir(relPath);
      } else if (ent.isFile()) {
        result.push(relPath);
        if (result.length >= maxFiles) return;
      }
      // Re-check after each entry so the cap also unwinds recursive callers.
      if (result.length >= maxFiles) return;
    }
  }

  await walkDir("");
  return result.sort();
}
|
|
44
|
+
|