nogrep 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +91 -0
- package/commands/init.md +241 -0
- package/commands/off.md +11 -0
- package/commands/on.md +21 -0
- package/commands/query.md +13 -0
- package/commands/status.md +15 -0
- package/commands/update.md +89 -0
- package/dist/chunk-SMUAF6SM.js +12 -0
- package/dist/chunk-SMUAF6SM.js.map +1 -0
- package/dist/query.d.ts +12 -0
- package/dist/query.js +272 -0
- package/dist/query.js.map +1 -0
- package/dist/settings.d.ts +6 -0
- package/dist/settings.js +75 -0
- package/dist/settings.js.map +1 -0
- package/dist/signals.d.ts +9 -0
- package/dist/signals.js +174 -0
- package/dist/signals.js.map +1 -0
- package/dist/trim.d.ts +3 -0
- package/dist/trim.js +266 -0
- package/dist/trim.js.map +1 -0
- package/dist/types.d.ts +141 -0
- package/dist/types.js +7 -0
- package/dist/types.js.map +1 -0
- package/dist/validate.d.ts +10 -0
- package/dist/validate.js +143 -0
- package/dist/validate.js.map +1 -0
- package/dist/write.d.ts +8 -0
- package/dist/write.js +267 -0
- package/dist/write.js.map +1 -0
- package/docs/ARCHITECTURE.md +239 -0
- package/docs/CLAUDE.md +161 -0
- package/docs/CONVENTIONS.md +162 -0
- package/docs/SPEC.md +803 -0
- package/docs/TASKS.md +216 -0
- package/hooks/hooks.json +35 -0
- package/hooks/pre-tool-use.sh +37 -0
- package/hooks/prompt-submit.sh +26 -0
- package/hooks/session-start.sh +21 -0
- package/package.json +24 -0
- package/scripts/query.ts +290 -0
- package/scripts/settings.ts +98 -0
- package/scripts/signals.ts +237 -0
- package/scripts/trim.ts +379 -0
- package/scripts/types.ts +186 -0
- package/scripts/validate.ts +181 -0
- package/scripts/write.ts +346 -0
- package/templates/claude-md-patch.md +8 -0
package/dist/trim.js
ADDED
|
@@ -0,0 +1,266 @@
|
|
|
1
|
+
// scripts/trim.ts
|
|
2
|
+
import { readFile } from "fs/promises";
|
|
3
|
+
import { resolve, extname } from "path";
|
|
4
|
+
// Hard upper bound on the total number of output lines trimCluster emits
// for one cluster of files (headers and truncation markers included).
var MAX_CLUSTER_LINES = 300;
|
|
5
|
+
// Strips function/method bodies from TypeScript/JavaScript source using a
// brace-depth state machine: signature lines are kept, the statements inside
// their `{ ... }` bodies are dropped, and the closing brace line is kept.
// Imports, type-level declarations, decorators, and comments are preserved.
function trimTypeScript(content) {
  const lines = content.split("\n");
  const result = [];
  let braceDepth = 0; // current nesting depth of unclosed `{`
  let inBody = false; // true while skipping lines inside a function body
  let bodyStartDepth = 0; // braceDepth just after the skipped body's opening `{`
  for (const line of lines) {
    const trimmed = line.trim();
    // Outside a body: always keep imports, type declarations, decorators,
    // comments, and blank lines — but still track their brace counts.
    if (braceDepth === 0 || !inBody) {
      if (trimmed === "" || trimmed.startsWith("import ") || trimmed.startsWith("export type ") || trimmed.startsWith("export interface ") || trimmed.startsWith("export enum ") || trimmed.startsWith("export const ") || trimmed.startsWith("type ") || trimmed.startsWith("interface ") || trimmed.startsWith("enum ") || trimmed.startsWith("@") || trimmed.startsWith("//") || trimmed.startsWith("/*") || trimmed.startsWith("*") || trimmed.startsWith("declare ")) {
        result.push(line);
        braceDepth += countChar(trimmed, "{") - countChar(trimmed, "}");
        continue;
      }
    }
    const openBraces = countChar(trimmed, "{");
    const closeBraces = countChar(trimmed, "}");
    if (!inBody) {
      // A signature line that opens more braces than it closes starts a
      // body to be skipped.
      // NOTE(review): isSignatureLine's `name(` pattern also matches
      // control-flow lines like `if (x) {`, so top-level control-flow bodies
      // are stripped as well — confirm whether that is intended.
      if (isSignatureLine(trimmed) && openBraces > closeBraces) {
        result.push(line);
        braceDepth += openBraces - closeBraces;
        inBody = true;
        bodyStartDepth = braceDepth;
        continue;
      }
      // Class/interface/namespace headers are kept; their members are then
      // processed line by line rather than skipped wholesale.
      if (isClassOrInterfaceLine(trimmed)) {
        result.push(line);
        braceDepth += openBraces - closeBraces;
        continue;
      }
      // Any other line outside a body (property, statement) is kept.
      result.push(line);
      braceDepth += openBraces - closeBraces;
    } else {
      // Inside a skipped body: drop lines until the depth falls back below
      // the level recorded at the opening brace, then keep the closer line.
      braceDepth += openBraces - closeBraces;
      if (braceDepth < bodyStartDepth) {
        result.push(line);
        inBody = false;
      }
    }
  }
  return result.join("\n");
}
|
|
47
|
+
// Strips Python function bodies while keeping module structure: comments,
// imports, decorators, class headers, and UPPER_CASE module constants are
// preserved; each `def`/`async def` keeps its signature (plus docstring when
// one immediately follows) while the rest of the body is skipped by indent.
function trimPython(content) {
  const lines = content.split("\n");
  const result = [];
  let skipIndent = -1; // indent of the def whose body is being skipped; -1 = not skipping
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    const trimmed = line.trim();
    const indent = line.length - line.trimStart().length;
    // While skipping a body: drop blank lines and anything indented deeper
    // than the def; the first line at or above the def's indent ends the skip.
    if (skipIndent >= 0) {
      if (trimmed === "" || indent > skipIndent) {
        continue;
      }
      skipIndent = -1;
    }
    // Always keep structural / module-level lines.
    if (trimmed === "" || trimmed.startsWith("#") || trimmed.startsWith("import ") || trimmed.startsWith("from ") || trimmed.startsWith("@") || trimmed.startsWith("class ") || /^[A-Z_][A-Z_0-9]*\s*=/.test(trimmed)) {
      result.push(line);
      continue;
    }
    // Function definition: keep the signature line (and its docstring, if
    // findDocstring locates one), then skip the body via skipIndent.
    if (trimmed.startsWith("def ") || trimmed.startsWith("async def ")) {
      result.push(line);
      const docIdx = findDocstring(lines, i + 1, indent);
      if (docIdx > i) {
        for (let j = i + 1; j <= docIdx; j++) {
          result.push(lines[j]);
        }
      }
      skipIndent = indent;
      continue;
    }
    // Everything else at module/class level is kept.
    result.push(line);
  }
  return result.join("\n");
}
|
|
80
|
+
// Strips method bodies from Java-family (brace-delimited) source. Mirrors
// trimTypeScript's brace-depth state machine, with Java-specific keep rules
// (package/import lines, annotations, interface/enum headers, comments).
function trimJava(content) {
  const lines = content.split("\n");
  const result = [];
  let braceDepth = 0; // current nesting depth of unclosed `{`
  let inBody = false; // true while skipping a method body
  let bodyStartDepth = 0; // braceDepth just after the skipped body's opening `{`
  for (const line of lines) {
    const trimmed = line.trim();
    // Outside a body: always keep imports, package decls, annotations,
    // comments, interface/enum headers — while still tracking braces.
    if (braceDepth === 0 || !inBody) {
      if (trimmed === "" || trimmed.startsWith("import ") || trimmed.startsWith("package ") || trimmed.startsWith("@") || trimmed.startsWith("//") || trimmed.startsWith("/*") || trimmed.startsWith("*") || trimmed.startsWith("public interface ") || trimmed.startsWith("interface ") || trimmed.startsWith("public enum ") || trimmed.startsWith("enum ")) {
        result.push(line);
        braceDepth += countChar(trimmed, "{") - countChar(trimmed, "}");
        continue;
      }
    }
    const openBraces = countChar(trimmed, "{");
    const closeBraces = countChar(trimmed, "}");
    if (!inBody) {
      // Modifier-prefixed signature opening a brace starts a skipped body.
      // NOTE(review): package-private methods (no modifier keyword) are not
      // matched by isJavaMethodSignature, so their bodies are kept — confirm.
      if (isJavaMethodSignature(trimmed) && openBraces > closeBraces) {
        result.push(line);
        braceDepth += openBraces - closeBraces;
        inBody = true;
        bodyStartDepth = braceDepth;
        continue;
      }
      // Class/interface/enum headers are kept; members handled per-line.
      if (isJavaClassLine(trimmed)) {
        result.push(line);
        braceDepth += openBraces - closeBraces;
        continue;
      }
      // Fields and other non-body lines are kept.
      result.push(line);
      braceDepth += openBraces - closeBraces;
    } else {
      // Inside a skipped body: drop until depth closes below the body's
      // starting depth, then keep the closing-brace line.
      braceDepth += openBraces - closeBraces;
      if (braceDepth < bodyStartDepth) {
        result.push(line);
        inBody = false;
      }
    }
  }
  return result.join("\n");
}
|
|
122
|
+
// Fallback trimmer for file types without a dedicated strategy: the content
// is passed through untouched; overall size is bounded later by the
// per-cluster truncation in trimCluster.
function trimGeneric(content) {
  return content;
}
|
|
125
|
+
// Counts occurrences of `ch` in `s`, ignoring characters that appear inside
// single-quote, double-quote, or backtick string literals (used for brace
// counting so braces inside strings don't corrupt depth tracking).
// Limitation: template-literal interpolation (`${...}`) is not parsed.
function countChar(s, ch) {
  let count = 0;
  let inString = false;
  let stringChar = "";
  for (let i = 0; i < s.length; i++) {
    const c = s[i];
    if (inString) {
      if (c === stringChar) {
        // A quote is escaped only when preceded by an ODD number of
        // backslashes; checking just s[i-1] would misread `\\"` (an escaped
        // backslash followed by a real closing quote) as an escaped quote
        // and leave the scanner stuck "in string" for the rest of the line.
        let backslashes = 0;
        for (let j = i - 1; j >= 0 && s[j] === "\\"; j--) backslashes++;
        if (backslashes % 2 === 0) inString = false;
      }
    } else if (c === '"' || c === "'" || c === "`") {
      inString = true;
      stringChar = c;
    } else if (c === ch) {
      count++;
    }
  }
  return count;
}
|
|
142
|
+
// Heuristically decides whether a (trimmed) line opens a function or method
// definition whose body should be elided: function declarations, class
// methods/accessors, and arrow/function expressions assigned to a binding.
function isSignatureLine(trimmed) {
  // Control-flow statements (`if (...) {`, `for (...) {`, ...) have exactly
  // the `name(args) {` shape matched by the call-pattern regexes below;
  // without this guard, the bodies of top-level control-flow blocks would be
  // stripped as if they were function bodies.
  if (/^(if|for|while|switch|catch|do|else|return|throw|new|await|typeof|delete|void)\b/.test(trimmed)) {
    return false;
  }
  return /^(export\s+)?(async\s+)?function\s/.test(trimmed) || /^(public|private|protected|static|async|get|set|\*)\s/.test(trimmed) || /^(readonly\s+)?[a-zA-Z_$][a-zA-Z0-9_$]*\s*\(/.test(trimmed) || /^(export\s+)?(const|let|var)\s+\w+\s*=\s*(async\s+)?\(/.test(trimmed) || /^(export\s+)?(const|let|var)\s+\w+\s*=\s*(async\s+)?function/.test(trimmed) || // Arrow function assigned at class level
  /^[a-zA-Z_$][a-zA-Z0-9_$]*\s*=\s*(async\s+)?\(/.test(trimmed);
}
|
|
146
|
+
// True when the trimmed line starts a class/interface/enum declaration
// (optionally `export` and/or `abstract`) or a namespace declaration.
function isClassOrInterfaceLine(trimmed) {
  const typeDecl = /^(export\s+)?(abstract\s+)?(class|interface|enum)\s/;
  if (typeDecl.test(trimmed)) {
    return true;
  }
  const namespaceDecl = /^(export\s+)?namespace\s/;
  return namespaceDecl.test(trimmed);
}
|
|
149
|
+
// A Java-family method signature: the line begins with a recognized modifier
// keyword and contains an opening parenthesis (a parameter list).
function isJavaMethodSignature(trimmed) {
  const hasModifier = /^(public|private|protected|static|final|abstract|synchronized|native)\s/.test(trimmed);
  if (!hasModifier) {
    return false;
  }
  return trimmed.includes("(");
}
|
|
152
|
+
// True when the trimmed line starts a Java-family type declaration:
// class/interface/enum, with optional access modifier and `abstract`.
function isJavaClassLine(trimmed) {
  const typeDecl = /^(public|private|protected)?\s*(abstract\s+)?(class|interface|enum)\s/;
  return typeDecl.test(trimmed);
}
|
|
155
|
+
// Locates a Python docstring (triple-quoted string) immediately after a
// `def` line. Returns the index of the docstring's closing line, or
// startIdx - 1 when the first non-blank line is not a triple-quoted string
// (including when the closing quote is never found... actually an
// unterminated multi-line docstring returns its opening line's index).
// `defIndent` is accepted for signature compatibility but is not used.
function findDocstring(lines, startIdx, defIndent) {
  let i = startIdx;
  while (i < lines.length) {
    const text = lines[i].trim();
    if (text !== "") {
      const opensDoc = text.startsWith('"""') || text.startsWith("'''");
      if (!opensDoc) {
        // First non-blank line after the def is ordinary code.
        return startIdx - 1;
      }
      const quote = text.slice(0, 3);
      // One-line docstring, e.g. `"""summary"""`.
      if (text.length > 3 && text.endsWith(quote)) {
        return i;
      }
      // Multi-line docstring: scan forward for the closing triple quote.
      for (let j = i + 1; j < lines.length; j++) {
        if (lines[j].trim().endsWith(quote)) {
          return j;
        }
      }
      return i;
    }
    i++;
  }
  return startIdx - 1;
}
|
|
171
|
+
// Selects the trimming strategy for a file based on its extension:
// TS/JS family -> trimTypeScript, Python -> trimPython, brace-delimited
// languages -> trimJava, anything else -> trimGeneric (passthrough).
function getTrimmer(filePath) {
  const ext = extname(filePath).toLowerCase();
  const tsFamily = [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"];
  // Java strategy doubles as the generic brace-language strategy.
  const braceFamily = [
    ".java", ".kt", ".kts", ".scala", ".groovy",
    ".go", ".rs", ".c", ".cpp", ".h", ".hpp", ".cs", ".swift", ".dart"
  ];
  if (tsFamily.includes(ext)) {
    return trimTypeScript;
  }
  if (ext === ".py") {
    return trimPython;
  }
  if (braceFamily.includes(ext)) {
    return trimJava;
  }
  return trimGeneric;
}
|
|
204
|
+
// Reads each file in `paths` (relative to `projectRoot`), trims it with the
// extension-appropriate strategy, and concatenates the results under
// `// === path ===` headers, bounded by MAX_CLUSTER_LINES total lines.
// Unreadable files are silently skipped (logged only with NOGREP_DEBUG=1).
async function trimCluster(paths, projectRoot) {
  const results = [];
  for (const filePath of paths) {
    const absPath = resolve(projectRoot, filePath);
    try {
      const raw = await readFile(absPath, "utf-8");
      const trimmer = getTrimmer(filePath);
      const trimmed = trimmer(raw);
      results.push({
        path: filePath,
        content: trimmed,
        lines: trimmed.split("\n").length
      });
    } catch {
      // Best-effort: a missing/unreadable file is skipped, not fatal.
      if (process.env["NOGREP_DEBUG"] === "1") {
        process.stderr.write(`[nogrep] Could not read: ${absPath}
`);
      }
    }
  }
  // Ascending by line count: smallest files are emitted first, so the
  // largest files are the ones truncated or dropped when the budget runs out.
  results.sort((a, b) => a.lines - b.lines);
  const output = [];
  let totalLines = 0;
  const maxLines = MAX_CLUSTER_LINES;
  for (const file of results) {
    const header = `// === ${file.path} ===`;
    const fileLines = file.content.split("\n");
    // Reserve 2 lines per file for the header and trailing separator.
    // NOTE(review): the `// ... truncated` marker line is pushed but not
    // counted in totalLines, so output can exceed maxLines by one line per
    // truncated file — confirm whether the budget is meant to be strict.
    const available = maxLines - totalLines - 2;
    if (available <= 0) break;
    output.push(header);
    if (fileLines.length <= available) {
      output.push(file.content);
    } else {
      output.push(fileLines.slice(0, available).join("\n"));
      output.push(`// ... truncated (${fileLines.length - available} more lines)`);
    }
    output.push("");
    totalLines += Math.min(fileLines.length, available) + 2;
  }
  return output.join("\n");
}
|
|
245
|
+
// CLI entry point: treats every argv entry as a file path, trims the cluster
// relative to the current working directory, and writes it to stdout.
// Exits with status 1 (after a usage message) when no paths are given.
async function main() {
  const cliPaths = process.argv.slice(2);
  if (cliPaths.length === 0) {
    process.stderr.write("Usage: node trim.js <path1> <path2> ...\n");
    process.exit(1);
  }
  const output = await trimCluster(cliPaths, process.cwd());
  process.stdout.write(output);
}
|
|
255
|
+
var isDirectRun = process.argv[1]?.endsWith("trim.js") || process.argv[1]?.endsWith("trim.ts");
|
|
256
|
+
if (isDirectRun) {
|
|
257
|
+
main().catch((err) => {
|
|
258
|
+
process.stderr.write(`Error: ${err instanceof Error ? err.message : String(err)}
|
|
259
|
+
`);
|
|
260
|
+
process.exit(1);
|
|
261
|
+
});
|
|
262
|
+
}
|
|
263
|
+
export {
|
|
264
|
+
trimCluster
|
|
265
|
+
};
|
|
266
|
+
//# sourceMappingURL=trim.js.map
|
package/dist/trim.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../scripts/trim.ts"],"sourcesContent":["import { readFile } from 'fs/promises'\nimport { resolve, extname, basename } from 'path'\n\nconst MAX_CLUSTER_LINES = 300\n\ninterface TrimOptions {\n maxLines?: number\n}\n\n// Language-agnostic regex patterns for stripping function/method bodies\n// Strategy: find opening braces after signatures, track depth, remove body content\n\nfunction trimTypeScript(content: string): string {\n const lines = content.split('\\n')\n const result: string[] = []\n let braceDepth = 0\n let inBody = false\n let bodyStartDepth = 0\n\n for (const line of lines) {\n const trimmed = line.trim()\n\n // Always keep: empty lines at top level, imports, type/interface, decorators, exports of types\n if (braceDepth === 0 || !inBody) {\n if (\n trimmed === '' ||\n trimmed.startsWith('import ') ||\n trimmed.startsWith('export type ') ||\n trimmed.startsWith('export interface ') ||\n trimmed.startsWith('export enum ') ||\n trimmed.startsWith('export const ') ||\n trimmed.startsWith('type ') ||\n trimmed.startsWith('interface ') ||\n trimmed.startsWith('enum ') ||\n trimmed.startsWith('@') ||\n trimmed.startsWith('//') ||\n trimmed.startsWith('/*') ||\n trimmed.startsWith('*') ||\n trimmed.startsWith('declare ')\n ) {\n result.push(line)\n // Count braces even in kept lines\n braceDepth += countChar(trimmed, '{') - countChar(trimmed, '}')\n continue\n }\n }\n\n const openBraces = countChar(trimmed, '{')\n const closeBraces = countChar(trimmed, '}')\n\n if (!inBody) {\n // Detect function/method signature — line with opening brace\n if (isSignatureLine(trimmed) && openBraces > closeBraces) {\n result.push(line)\n braceDepth += openBraces - closeBraces\n inBody = true\n bodyStartDepth = braceDepth\n continue\n }\n\n // Class/interface declaration — keep but don't treat as body\n if (isClassOrInterfaceLine(trimmed)) {\n result.push(line)\n braceDepth += openBraces - closeBraces\n continue\n }\n\n // Keep the line (top-level 
statement, property declaration, etc.)\n result.push(line)\n braceDepth += openBraces - closeBraces\n } else {\n // Inside a function body — skip lines\n braceDepth += openBraces - closeBraces\n\n // Check if we've closed back to where the body started\n if (braceDepth < bodyStartDepth) {\n // Add closing brace\n result.push(line)\n inBody = false\n }\n }\n }\n\n return result.join('\\n')\n}\n\nfunction trimPython(content: string): string {\n const lines = content.split('\\n')\n const result: string[] = []\n let skipIndent = -1\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i]!\n const trimmed = line.trim()\n const indent = line.length - line.trimStart().length\n\n // If we're skipping a body and this line is still indented deeper, skip it\n if (skipIndent >= 0) {\n if (trimmed === '' || indent > skipIndent) {\n continue\n }\n // We've exited the body\n skipIndent = -1\n }\n\n // Always keep: comments, imports, class defs, decorators, type hints, module-level assignments\n if (\n trimmed === '' ||\n trimmed.startsWith('#') ||\n trimmed.startsWith('import ') ||\n trimmed.startsWith('from ') ||\n trimmed.startsWith('@') ||\n trimmed.startsWith('class ') ||\n /^[A-Z_][A-Z_0-9]*\\s*=/.test(trimmed)\n ) {\n result.push(line)\n continue\n }\n\n // Function/method definition — keep signature, skip body\n if (trimmed.startsWith('def ') || trimmed.startsWith('async def ')) {\n result.push(line)\n // If the next non-empty line has docstring, keep it\n const docIdx = findDocstring(lines, i + 1, indent)\n if (docIdx > i) {\n for (let j = i + 1; j <= docIdx; j++) {\n result.push(lines[j]!)\n }\n }\n skipIndent = indent\n continue\n }\n\n // Keep everything else at module/class level\n result.push(line)\n }\n\n return result.join('\\n')\n}\n\nfunction trimJava(content: string): string {\n // Java/Kotlin — very similar to TypeScript brace-matching\n const lines = content.split('\\n')\n const result: string[] = []\n let braceDepth = 0\n let inBody = false\n let 
bodyStartDepth = 0\n\n for (const line of lines) {\n const trimmed = line.trim()\n\n if (braceDepth === 0 || !inBody) {\n if (\n trimmed === '' ||\n trimmed.startsWith('import ') ||\n trimmed.startsWith('package ') ||\n trimmed.startsWith('@') ||\n trimmed.startsWith('//') ||\n trimmed.startsWith('/*') ||\n trimmed.startsWith('*') ||\n trimmed.startsWith('public interface ') ||\n trimmed.startsWith('interface ') ||\n trimmed.startsWith('public enum ') ||\n trimmed.startsWith('enum ')\n ) {\n result.push(line)\n braceDepth += countChar(trimmed, '{') - countChar(trimmed, '}')\n continue\n }\n }\n\n const openBraces = countChar(trimmed, '{')\n const closeBraces = countChar(trimmed, '}')\n\n if (!inBody) {\n if (isJavaMethodSignature(trimmed) && openBraces > closeBraces) {\n result.push(line)\n braceDepth += openBraces - closeBraces\n inBody = true\n bodyStartDepth = braceDepth\n continue\n }\n\n if (isJavaClassLine(trimmed)) {\n result.push(line)\n braceDepth += openBraces - closeBraces\n continue\n }\n\n result.push(line)\n braceDepth += openBraces - closeBraces\n } else {\n braceDepth += openBraces - closeBraces\n if (braceDepth < bodyStartDepth) {\n result.push(line)\n inBody = false\n }\n }\n }\n\n return result.join('\\n')\n}\n\nfunction trimGeneric(content: string): string {\n // For unknown languages, just return as-is (truncation handles size)\n return content\n}\n\n// --- Helpers ---\n\nfunction countChar(s: string, ch: string): number {\n let count = 0\n let inString = false\n let stringChar = ''\n for (let i = 0; i < s.length; i++) {\n const c = s[i]!\n if (inString) {\n if (c === stringChar && s[i - 1] !== '\\\\') inString = false\n } else if (c === '\"' || c === \"'\" || c === '`') {\n inString = true\n stringChar = c\n } else if (c === ch) {\n count++\n }\n }\n return count\n}\n\nfunction isSignatureLine(trimmed: string): boolean {\n return /^(export\\s+)?(async\\s+)?function\\s/.test(trimmed) ||\n 
/^(public|private|protected|static|async|get|set|\\*)\\s/.test(trimmed) ||\n /^(readonly\\s+)?[a-zA-Z_$][a-zA-Z0-9_$]*\\s*\\(/.test(trimmed) ||\n /^(export\\s+)?(const|let|var)\\s+\\w+\\s*=\\s*(async\\s+)?\\(/.test(trimmed) ||\n /^(export\\s+)?(const|let|var)\\s+\\w+\\s*=\\s*(async\\s+)?function/.test(trimmed) ||\n // Arrow function assigned at class level\n /^[a-zA-Z_$][a-zA-Z0-9_$]*\\s*=\\s*(async\\s+)?\\(/.test(trimmed)\n}\n\nfunction isClassOrInterfaceLine(trimmed: string): boolean {\n return /^(export\\s+)?(abstract\\s+)?(class|interface|enum)\\s/.test(trimmed) ||\n /^(export\\s+)?namespace\\s/.test(trimmed)\n}\n\nfunction isJavaMethodSignature(trimmed: string): boolean {\n return /^(public|private|protected|static|final|abstract|synchronized|native)\\s/.test(trimmed) &&\n /\\(/.test(trimmed)\n}\n\nfunction isJavaClassLine(trimmed: string): boolean {\n return /^(public|private|protected)?\\s*(abstract\\s+)?(class|interface|enum)\\s/.test(trimmed)\n}\n\nfunction findDocstring(lines: string[], startIdx: number, defIndent: number): number {\n // Find Python docstring (triple-quoted) after a def\n for (let i = startIdx; i < lines.length; i++) {\n const trimmed = lines[i]!.trim()\n if (trimmed === '') continue\n if (trimmed.startsWith('\"\"\"') || trimmed.startsWith(\"'''\")) {\n const quote = trimmed.slice(0, 3)\n // Single-line docstring\n if (trimmed.length > 3 && trimmed.endsWith(quote)) return i\n // Multi-line docstring — find closing\n for (let j = i + 1; j < lines.length; j++) {\n if (lines[j]!.trim().endsWith(quote)) return j\n }\n return i\n }\n // First non-empty line after def is not a docstring\n return startIdx - 1\n }\n return startIdx - 1\n}\n\nfunction getTrimmer(filePath: string): (content: string) => string {\n const ext = extname(filePath).toLowerCase()\n switch (ext) {\n case '.ts':\n case '.tsx':\n case '.js':\n case '.jsx':\n case '.mjs':\n case '.cjs':\n return trimTypeScript\n case '.py':\n return trimPython\n case '.java':\n case '.kt':\n 
case '.kts':\n case '.scala':\n case '.groovy':\n return trimJava\n case '.go':\n case '.rs':\n case '.c':\n case '.cpp':\n case '.h':\n case '.hpp':\n case '.cs':\n case '.swift':\n case '.dart':\n return trimJava // brace-based languages use same strategy\n default:\n return trimGeneric\n }\n}\n\nexport async function trimCluster(paths: string[], projectRoot: string): Promise<string> {\n const results: Array<{ path: string; content: string; lines: number }> = []\n\n for (const filePath of paths) {\n const absPath = resolve(projectRoot, filePath)\n try {\n const raw = await readFile(absPath, 'utf-8')\n const trimmer = getTrimmer(filePath)\n const trimmed = trimmer(raw)\n results.push({\n path: filePath,\n content: trimmed,\n lines: trimmed.split('\\n').length,\n })\n } catch {\n // Skip files that can't be read\n if (process.env['NOGREP_DEBUG'] === '1') {\n process.stderr.write(`[nogrep] Could not read: ${absPath}\\n`)\n }\n }\n }\n\n // Sort by line count descending — truncate least important (largest) files first\n results.sort((a, b) => a.lines - b.lines)\n\n const output: string[] = []\n let totalLines = 0\n const maxLines = MAX_CLUSTER_LINES\n\n for (const file of results) {\n const header = `// === ${file.path} ===`\n const fileLines = file.content.split('\\n')\n const available = maxLines - totalLines - 2 // header + separator\n\n if (available <= 0) break\n\n output.push(header)\n if (fileLines.length <= available) {\n output.push(file.content)\n } else {\n output.push(fileLines.slice(0, available).join('\\n'))\n output.push(`// ... 
truncated (${fileLines.length - available} more lines)`)\n }\n output.push('')\n\n totalLines += Math.min(fileLines.length, available) + 2\n }\n\n return output.join('\\n')\n}\n\n// --- CLI ---\n\nasync function main(): Promise<void> {\n const args = process.argv.slice(2)\n\n if (args.length === 0) {\n process.stderr.write('Usage: node trim.js <path1> <path2> ...\\n')\n process.exit(1)\n }\n\n const projectRoot = process.cwd()\n const result = await trimCluster(args, projectRoot)\n process.stdout.write(result)\n}\n\nconst isDirectRun = process.argv[1]?.endsWith('trim.js') || process.argv[1]?.endsWith('trim.ts')\nif (isDirectRun) {\n main().catch((err: unknown) => {\n process.stderr.write(`Error: ${err instanceof Error ? err.message : String(err)}\\n`)\n process.exit(1)\n })\n}\n"],"mappings":";AAAA,SAAS,gBAAgB;AACzB,SAAS,SAAS,eAAyB;AAE3C,IAAM,oBAAoB;AAS1B,SAAS,eAAe,SAAyB;AAC/C,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,SAAmB,CAAC;AAC1B,MAAI,aAAa;AACjB,MAAI,SAAS;AACb,MAAI,iBAAiB;AAErB,aAAW,QAAQ,OAAO;AACxB,UAAM,UAAU,KAAK,KAAK;AAG1B,QAAI,eAAe,KAAK,CAAC,QAAQ;AAC/B,UACE,YAAY,MACZ,QAAQ,WAAW,SAAS,KAC5B,QAAQ,WAAW,cAAc,KACjC,QAAQ,WAAW,mBAAmB,KACtC,QAAQ,WAAW,cAAc,KACjC,QAAQ,WAAW,eAAe,KAClC,QAAQ,WAAW,OAAO,KAC1B,QAAQ,WAAW,YAAY,KAC/B,QAAQ,WAAW,OAAO,KAC1B,QAAQ,WAAW,GAAG,KACtB,QAAQ,WAAW,IAAI,KACvB,QAAQ,WAAW,IAAI,KACvB,QAAQ,WAAW,GAAG,KACtB,QAAQ,WAAW,UAAU,GAC7B;AACA,eAAO,KAAK,IAAI;AAEhB,sBAAc,UAAU,SAAS,GAAG,IAAI,UAAU,SAAS,GAAG;AAC9D;AAAA,MACF;AAAA,IACF;AAEA,UAAM,aAAa,UAAU,SAAS,GAAG;AACzC,UAAM,cAAc,UAAU,SAAS,GAAG;AAE1C,QAAI,CAAC,QAAQ;AAEX,UAAI,gBAAgB,OAAO,KAAK,aAAa,aAAa;AACxD,eAAO,KAAK,IAAI;AAChB,sBAAc,aAAa;AAC3B,iBAAS;AACT,yBAAiB;AACjB;AAAA,MACF;AAGA,UAAI,uBAAuB,OAAO,GAAG;AACnC,eAAO,KAAK,IAAI;AAChB,sBAAc,aAAa;AAC3B;AAAA,MACF;AAGA,aAAO,KAAK,IAAI;AAChB,oBAAc,aAAa;AAAA,IAC7B,OAAO;AAEL,oBAAc,aAAa;AAG3B,UAAI,aAAa,gBAAgB;AAE/B,eAAO,KAAK,IAAI;AAChB,iBAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,KAAK,IAAI;AACzB;AAEA,SAAS,WAAW,SAAyB;AAC3C,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,SAAmB,CAAC;AAC1B,MAA
I,aAAa;AAEjB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,UAAU,KAAK,KAAK;AAC1B,UAAM,SAAS,KAAK,SAAS,KAAK,UAAU,EAAE;AAG9C,QAAI,cAAc,GAAG;AACnB,UAAI,YAAY,MAAM,SAAS,YAAY;AACzC;AAAA,MACF;AAEA,mBAAa;AAAA,IACf;AAGA,QACE,YAAY,MACZ,QAAQ,WAAW,GAAG,KACtB,QAAQ,WAAW,SAAS,KAC5B,QAAQ,WAAW,OAAO,KAC1B,QAAQ,WAAW,GAAG,KACtB,QAAQ,WAAW,QAAQ,KAC3B,wBAAwB,KAAK,OAAO,GACpC;AACA,aAAO,KAAK,IAAI;AAChB;AAAA,IACF;AAGA,QAAI,QAAQ,WAAW,MAAM,KAAK,QAAQ,WAAW,YAAY,GAAG;AAClE,aAAO,KAAK,IAAI;AAEhB,YAAM,SAAS,cAAc,OAAO,IAAI,GAAG,MAAM;AACjD,UAAI,SAAS,GAAG;AACd,iBAAS,IAAI,IAAI,GAAG,KAAK,QAAQ,KAAK;AACpC,iBAAO,KAAK,MAAM,CAAC,CAAE;AAAA,QACvB;AAAA,MACF;AACA,mBAAa;AACb;AAAA,IACF;AAGA,WAAO,KAAK,IAAI;AAAA,EAClB;AAEA,SAAO,OAAO,KAAK,IAAI;AACzB;AAEA,SAAS,SAAS,SAAyB;AAEzC,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,SAAmB,CAAC;AAC1B,MAAI,aAAa;AACjB,MAAI,SAAS;AACb,MAAI,iBAAiB;AAErB,aAAW,QAAQ,OAAO;AACxB,UAAM,UAAU,KAAK,KAAK;AAE1B,QAAI,eAAe,KAAK,CAAC,QAAQ;AAC/B,UACE,YAAY,MACZ,QAAQ,WAAW,SAAS,KAC5B,QAAQ,WAAW,UAAU,KAC7B,QAAQ,WAAW,GAAG,KACtB,QAAQ,WAAW,IAAI,KACvB,QAAQ,WAAW,IAAI,KACvB,QAAQ,WAAW,GAAG,KACtB,QAAQ,WAAW,mBAAmB,KACtC,QAAQ,WAAW,YAAY,KAC/B,QAAQ,WAAW,cAAc,KACjC,QAAQ,WAAW,OAAO,GAC1B;AACA,eAAO,KAAK,IAAI;AAChB,sBAAc,UAAU,SAAS,GAAG,IAAI,UAAU,SAAS,GAAG;AAC9D;AAAA,MACF;AAAA,IACF;AAEA,UAAM,aAAa,UAAU,SAAS,GAAG;AACzC,UAAM,cAAc,UAAU,SAAS,GAAG;AAE1C,QAAI,CAAC,QAAQ;AACX,UAAI,sBAAsB,OAAO,KAAK,aAAa,aAAa;AAC9D,eAAO,KAAK,IAAI;AAChB,sBAAc,aAAa;AAC3B,iBAAS;AACT,yBAAiB;AACjB;AAAA,MACF;AAEA,UAAI,gBAAgB,OAAO,GAAG;AAC5B,eAAO,KAAK,IAAI;AAChB,sBAAc,aAAa;AAC3B;AAAA,MACF;AAEA,aAAO,KAAK,IAAI;AAChB,oBAAc,aAAa;AAAA,IAC7B,OAAO;AACL,oBAAc,aAAa;AAC3B,UAAI,aAAa,gBAAgB;AAC/B,eAAO,KAAK,IAAI;AAChB,iBAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,KAAK,IAAI;AACzB;AAEA,SAAS,YAAY,SAAyB;AAE5C,SAAO;AACT;AAIA,SAAS,UAAU,GAAW,IAAoB;AAChD,MAAI,QAAQ;AACZ,MAAI,WAAW;AACf,MAAI,aAAa;AACjB,WAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AACjC,UAAM,IAAI,EAAE,CAAC;AACb,QAAI,UAAU;AACZ,UAAI,MAAM,cAAc,EAAE,IAAI,CAAC,MAAM,KAAM,YAAW;AAAA,IACxD,WAAW,MAAM,OAAO,MAAM,OAAO,MAA
M,KAAK;AAC9C,iBAAW;AACX,mBAAa;AAAA,IACf,WAAW,MAAM,IAAI;AACnB;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,gBAAgB,SAA0B;AACjD,SAAO,qCAAqC,KAAK,OAAO,KACtD,wDAAwD,KAAK,OAAO,KACpE,+CAA+C,KAAK,OAAO,KAC3D,yDAAyD,KAAK,OAAO,KACrE,+DAA+D,KAAK,OAAO;AAAA,EAE3E,gDAAgD,KAAK,OAAO;AAChE;AAEA,SAAS,uBAAuB,SAA0B;AACxD,SAAO,sDAAsD,KAAK,OAAO,KACvE,2BAA2B,KAAK,OAAO;AAC3C;AAEA,SAAS,sBAAsB,SAA0B;AACvD,SAAO,0EAA0E,KAAK,OAAO,KAC3F,KAAK,KAAK,OAAO;AACrB;AAEA,SAAS,gBAAgB,SAA0B;AACjD,SAAO,wEAAwE,KAAK,OAAO;AAC7F;AAEA,SAAS,cAAc,OAAiB,UAAkB,WAA2B;AAEnF,WAAS,IAAI,UAAU,IAAI,MAAM,QAAQ,KAAK;AAC5C,UAAM,UAAU,MAAM,CAAC,EAAG,KAAK;AAC/B,QAAI,YAAY,GAAI;AACpB,QAAI,QAAQ,WAAW,KAAK,KAAK,QAAQ,WAAW,KAAK,GAAG;AAC1D,YAAM,QAAQ,QAAQ,MAAM,GAAG,CAAC;AAEhC,UAAI,QAAQ,SAAS,KAAK,QAAQ,SAAS,KAAK,EAAG,QAAO;AAE1D,eAAS,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACzC,YAAI,MAAM,CAAC,EAAG,KAAK,EAAE,SAAS,KAAK,EAAG,QAAO;AAAA,MAC/C;AACA,aAAO;AAAA,IACT;AAEA,WAAO,WAAW;AAAA,EACpB;AACA,SAAO,WAAW;AACpB;AAEA,SAAS,WAAW,UAA+C;AACjE,QAAM,MAAM,QAAQ,QAAQ,EAAE,YAAY;AAC1C,UAAQ,KAAK;AAAA,IACX,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAEA,eAAsB,YAAY,OAAiB,aAAsC;AACvF,QAAM,UAAmE,CAAC;AAE1E,aAAW,YAAY,OAAO;AAC5B,UAAM,UAAU,QAAQ,aAAa,QAAQ;AAC7C,QAAI;AACF,YAAM,MAAM,MAAM,SAAS,SAAS,OAAO;AAC3C,YAAM,UAAU,WAAW,QAAQ;AACnC,YAAM,UAAU,QAAQ,GAAG;AAC3B,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,SAAS;AAAA,QACT,OAAO,QAAQ,MAAM,IAAI,EAAE;AAAA,MAC7B,CAAC;AAAA,IACH,QAAQ;AAEN,UAAI,QAAQ,IAAI,cAAc,MAAM,KAAK;AACvC,gBAAQ,OAAO,MAAM,4BAA4B,OAAO;AAAA,CAAI;AAAA,MAC9D;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAExC,QAAM,SAAmB,CAAC;AAC1B,MAAI,aAAa;AACjB,QAAM,WAAW;AAEjB,aAAW,QAAQ,SAAS;AAC1B,UAAM,SAAS,UAAU,KAAK,IAAI;AAClC,UAAM,YAAY,K
AAK,QAAQ,MAAM,IAAI;AACzC,UAAM,YAAY,WAAW,aAAa;AAE1C,QAAI,aAAa,EAAG;AAEpB,WAAO,KAAK,MAAM;AAClB,QAAI,UAAU,UAAU,WAAW;AACjC,aAAO,KAAK,KAAK,OAAO;AAAA,IAC1B,OAAO;AACL,aAAO,KAAK,UAAU,MAAM,GAAG,SAAS,EAAE,KAAK,IAAI,CAAC;AACpD,aAAO,KAAK,qBAAqB,UAAU,SAAS,SAAS,cAAc;AAAA,IAC7E;AACA,WAAO,KAAK,EAAE;AAEd,kBAAc,KAAK,IAAI,UAAU,QAAQ,SAAS,IAAI;AAAA,EACxD;AAEA,SAAO,OAAO,KAAK,IAAI;AACzB;AAIA,eAAe,OAAsB;AACnC,QAAM,OAAO,QAAQ,KAAK,MAAM,CAAC;AAEjC,MAAI,KAAK,WAAW,GAAG;AACrB,YAAQ,OAAO,MAAM,2CAA2C;AAChE,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,cAAc,QAAQ,IAAI;AAChC,QAAM,SAAS,MAAM,YAAY,MAAM,WAAW;AAClD,UAAQ,OAAO,MAAM,MAAM;AAC7B;AAEA,IAAM,cAAc,QAAQ,KAAK,CAAC,GAAG,SAAS,SAAS,KAAK,QAAQ,KAAK,CAAC,GAAG,SAAS,SAAS;AAC/F,IAAI,aAAa;AACf,OAAK,EAAE,MAAM,CAAC,QAAiB;AAC7B,YAAQ,OAAO,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,CAAI;AACnF,YAAQ,KAAK,CAAC;AAAA,EAChB,CAAC;AACH;","names":[]}
|
package/dist/types.d.ts
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
/** One node of a scanned directory tree; directories may carry `children`. */
interface DirectoryNode {
  name: string;
  path: string;
  type: 'file' | 'directory';
  children?: DirectoryNode[];
}
/** A manifest file found in the project, with its kind and directory depth. */
interface ManifestFile {
  path: string;
  type: string;
  depth: number;
}
/** Per-file change count (used by `SignalResult.gitChurn` — presumably git history; confirm in producer). */
interface ChurnEntry {
  path: string;
  changes: number;
}
/** Per-file size in bytes (used by `SignalResult.largeFiles`). */
interface FileSize {
  path: string;
  bytes: number;
}
/** Aggregate repository signals gathered by the scanner. */
interface SignalResult {
  directoryTree: DirectoryNode[];
  // Presumably a histogram of file extension -> occurrence count — confirm.
  extensionMap: Record<string, number>;
  manifests: ManifestFile[];
  entryPoints: string[];
  gitChurn: ChurnEntry[];
  largeFiles: FileSize[];
  envFiles: string[];
  testFiles: string[];
}
|
|
30
|
+
/** File-layout conventions for the detected stack (pattern strings). */
interface StackConventions {
  entryPattern: string;
  testPattern: string;
  configLocation: string;
}
/** A detected domain grouping. `confidence` scale is not specified in this declaration — confirm with the producer. */
interface DomainCluster {
  name: string;
  path: string;
  confidence: number;
}
/** Result of stack/architecture detection for the repository. */
interface StackResult {
  primaryLanguage: string;
  frameworks: string[];
  architecture: 'monolith' | 'monorepo' | 'multi-repo' | 'microservice' | 'library';
  domainClusters: DomainCluster[];
  conventions: StackConventions;
  stackHints: string;
  // Repository-derived tag values; mirrors the shape of `Taxonomy.dynamic`.
  dynamicTaxonomy: {
    domain: string[];
    tech: string[];
  };
}
/** Tag values grouped by the five tag dimensions. */
interface TagSet {
  domain: string[];
  layer: string[];
  tech: string[];
  concern: string[];
  type: string[];
}
/** Allowed tag vocabulary: fixed (`static`), repo-derived (`dynamic`), and user-defined (`custom`) axes. */
interface Taxonomy {
  static: {
    layer: string[];
    concern: string[];
    type: string[];
  };
  dynamic: {
    domain: string[];
    tech: string[];
  };
  custom: Record<string, string[]>;
}
|
|
71
|
+
/** A directed link to another node (`id`) with a human-readable reason. */
interface Relation {
  id: string;
  reason: string;
}
/** An external dependency name and a description of how it is used. */
interface ExternalDep {
  name: string;
  usage: string;
}
/** Provenance of the last sync between a node and the source tree. */
interface SyncMeta {
  commit: string;
  timestamp: string;
  srcHash: string;
}
/** A context node: category, tags, relations, source paths, and narrative fields describing the area it covers. */
interface NodeResult {
  id: string;
  title: string;
  category: 'domain' | 'architecture' | 'flow' | 'entity';
  tags: TagSet;
  relatesTo: Relation[];
  // Relations pointing back at this node (reverse of `relatesTo`).
  inverseRelations: Relation[];
  srcPaths: string[];
  keywords: string[];
  lastSynced: SyncMeta;
  purpose: string;
  publicSurface: string[];
  doesNotOwn: string[];
  externalDeps: ExternalDep[];
  gotchas: string[];
}
|
|
100
|
+
/** Per-path entry in the index: a short context string plus its tags. */
interface PathEntry {
  context: string;
  tags: string[];
}

/** Shape of the generated .nogrep/_index.json lookup index. */
interface IndexJson {
  version: string;
  generatedAt: string;
  commit: string;
  /** Summary stack info only — the three fields picked from StackResult. */
  stack: Pick<StackResult, 'primaryLanguage' | 'frameworks' | 'architecture'>;
  /** tag → list of values; exact value semantics not visible here — confirm against write.ts buildIndex. */
  tags: Record<string, string[]>;
  keywords: Record<string, string[]>;
  paths: Record<string, PathEntry>;
}

/** Maps a source glob to the context file documenting it. */
interface RegistryMapping {
  glob: string;
  contextFile: string;
  watch: boolean;
}

/** Shape of the registry file: an ordered list of glob → context mappings. */
interface RegistryJson {
  mappings: RegistryMapping[];
}

/** One scored query hit, with the fields that produced the match. */
interface RankedResult {
  contextFile: string;
  score: number;
  matchedOn: string[];
  summary: string;
}
|
|
127
|
+
/** Freshness verdict for a single context file (produced by validate.js checkFreshness). */
interface StaleResult {
  file: string;
  isStale: boolean;
  /** Populated only on stale results: why the node is considered out of date. */
  reason?: string;
}

/** Persisted plugin settings. */
interface NogrepSettings {
  enabled: boolean;
}

/** Machine-readable error categories carried on NogrepError. */
type NogrepErrorCode = 'NO_INDEX' | 'NO_GIT' | 'IO_ERROR' | 'STALE';

/** Error subclass carrying a NogrepErrorCode so callers can branch on `code` (as validate.js's CLI does). */
declare class NogrepError extends Error {
  code: NogrepErrorCode;
  constructor(message: string, code: NogrepErrorCode);
}

export { type ChurnEntry, type DirectoryNode, type DomainCluster, type ExternalDep, type FileSize, type IndexJson, type ManifestFile, type NodeResult, NogrepError, type NogrepErrorCode, type NogrepSettings, type PathEntry, type RankedResult, type RegistryJson, type RegistryMapping, type Relation, type SignalResult, type StackConventions, type StackResult, type StaleResult, type SyncMeta, type TagSet, type Taxonomy };
|
package/dist/types.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { StaleResult } from './types.js';

/** Checks whether one context node is stale relative to its hashed source files. */
declare function checkFreshness(nodeFile: string, projectRoot: string): Promise<StaleResult>;
/** Checks every discovered context node and partitions results into fresh/stale. */
declare function validateAll(projectRoot: string): Promise<{
  total: number;
  fresh: StaleResult[];
  stale: StaleResult[];
}>;

export { checkFreshness, validateAll };
|
package/dist/validate.js
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
import {
|
|
2
|
+
NogrepError
|
|
3
|
+
} from "./chunk-SMUAF6SM.js";
|
|
4
|
+
|
|
5
|
+
// scripts/validate.ts
|
|
6
|
+
import { readFile } from "fs/promises";
|
|
7
|
+
import { join, resolve as resolvePath } from "path";
|
|
8
|
+
import { createHash } from "crypto";
|
|
9
|
+
import { parseArgs } from "util";
|
|
10
|
+
import { glob } from "glob";
|
|
11
|
+
import matter from "gray-matter";
|
|
12
|
+
// Decide whether a context node's recorded src_hash still matches the current
// contents of the files its `src_paths` frontmatter points at.
// Returns a StaleResult; never throws for missing/unreadable files.
async function checkFreshness(nodeFile, projectRoot) {
  const fresh = () => ({ file: nodeFile, isStale: false });
  const stale = (reason) => ({ file: nodeFile, isStale: true, reason });

  let raw;
  try {
    raw = await readFile(join(projectRoot, nodeFile), "utf-8");
  } catch {
    return stale("context file not found");
  }

  const { data } = matter(raw);
  const patterns = data.src_paths ?? [];
  const recorded = data.last_synced;

  // A node that never recorded a hash cannot be verified — treat as stale.
  if (!recorded?.src_hash) {
    return stale("no src_hash in frontmatter");
  }
  // A node with no source paths has nothing to drift from.
  if (patterns.length === 0) {
    return fresh();
  }

  // Expand every glob, skipping build artifacts and VCS internals.
  const matched = [];
  for (const pattern of patterns) {
    const hits = await glob(pattern, {
      cwd: projectRoot,
      nodir: true,
      ignore: ["node_modules/**", "dist/**", "build/**", ".git/**", "coverage/**"]
    });
    matched.push(...hits);
  }
  matched.sort();

  if (matched.length === 0) {
    return stale("no source files match src_paths");
  }

  // Hash all matched files in sorted order; unreadable files are skipped
  // (best effort) rather than failing the whole check.
  const digest = createHash("sha256");
  for (const file of matched) {
    try {
      digest.update(await readFile(join(projectRoot, file)));
    } catch {
      // unreadable file — skip
    }
  }
  const currentHash = `sha256:${digest.digest("hex").slice(0, 12)}`;

  if (currentHash === recorded.src_hash) {
    return fresh();
  }
  return stale(`hash mismatch: expected ${recorded.src_hash}, got ${currentHash}`);
}
|
|
59
|
+
// Enumerate all context node files under .nogrep/ (one glob per node
// category), returned as sorted repo-relative paths.
async function discoverNodes(projectRoot) {
  const nogrepDir = join(projectRoot, ".nogrep");
  const categories = [
    "domains/*.md",
    "architecture/*.md",
    "flows/*.md",
    "entities/*.md"
  ];
  const found = [];
  for (const pattern of categories) {
    const hits = await glob(pattern, { cwd: nogrepDir, nodir: true });
    for (const hit of hits) {
      found.push(`.nogrep/${hit}`);
    }
  }
  return found.sort();
}
|
|
74
|
+
// Validate every context node against its sources.
// Throws NogrepError(NO_INDEX) when .nogrep/_index.json is missing,
// i.e. the project has not been initialised.
async function validateAll(projectRoot) {
  try {
    await readFile(join(projectRoot, ".nogrep", "_index.json"), "utf-8");
  } catch {
    throw new NogrepError(
      "No .nogrep/_index.json found. Run /nogrep:init first.",
      "NO_INDEX"
    );
  }

  const nodeFiles = await discoverNodes(projectRoot);
  // Freshness checks are independent — run them concurrently.
  const results = await Promise.all(
    nodeFiles.map((f) => checkFreshness(f, projectRoot))
  );

  return {
    total: results.length,
    fresh: results.filter((r) => !r.isStale),
    stale: results.filter((r) => r.isStale)
  };
}
|
|
92
|
+
// Render a validation summary as human-readable text: counts first, then a
// bullet list of stale nodes (only when there are any).
function formatText(result) {
  const out = [
    `nogrep index: ${result.total} nodes`,
    ` Fresh: ${result.fresh.length}`,
    ` Stale: ${result.stale.length}`
  ];
  if (result.stale.length > 0) {
    out.push("", "Stale nodes:");
    result.stale.forEach((s) => out.push(` - ${s.file}: ${s.reason}`));
  }
  return out.join("\n");
}
|
|
106
|
+
// Render a validation summary as pretty-printed (2-space) JSON.
function formatJson(result) {
  const indent = 2;
  return JSON.stringify(result, null, indent);
}
|
|
109
|
+
// CLI entry point: parse --format / --root, validate the project, and print
// the result in the chosen format ("text" is the default and the fallback
// for any unrecognised format value).
async function main() {
  const { values } = parseArgs({
    options: {
      format: { type: "string", default: "text" },
      root: { type: "string", default: process.cwd() }
    },
    strict: true
  });

  const projectRoot = resolvePath(values.root ?? process.cwd());
  const result = await validateAll(projectRoot);

  const rendered =
    (values.format ?? "text") === "json" ? formatJson(result) : formatText(result);
  process.stdout.write(rendered + "\n");
}
|
|
130
|
+
// Kick off the CLI. Failures are reported as a single JSON line on stderr —
// NogrepError instances include their machine-readable `code` — and the
// process exits non-zero.
main().catch((err) => {
  const payload =
    err instanceof NogrepError
      ? { error: err.message, code: err.code }
      : { error: err instanceof Error ? err.message : String(err) };
  process.stderr.write(JSON.stringify(payload) + "\n");
  process.exitCode = 1;
});
|
|
139
|
+
// Public API of this bundle (mirrors validate.d.ts).
export {
  checkFreshness,
  validateAll
};
//# sourceMappingURL=validate.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../scripts/validate.ts"],"sourcesContent":["import { readFile } from 'node:fs/promises'\nimport { join, resolve as resolvePath } from 'node:path'\nimport { createHash } from 'node:crypto'\nimport { parseArgs } from 'node:util'\nimport { glob } from 'glob'\nimport matter from 'gray-matter'\nimport type { StaleResult } from './types.js'\nimport { NogrepError } from './types.js'\n\n// --- Freshness check ---\n\nexport async function checkFreshness(\n nodeFile: string,\n projectRoot: string,\n): Promise<StaleResult> {\n let content: string\n try {\n content = await readFile(join(projectRoot, nodeFile), 'utf-8')\n } catch {\n return { file: nodeFile, isStale: true, reason: 'context file not found' }\n }\n\n const parsed = matter(content)\n const srcPaths: string[] = parsed.data.src_paths ?? []\n const lastSynced = parsed.data.last_synced as\n | { src_hash?: string; commit?: string; timestamp?: string }\n | undefined\n\n if (!lastSynced?.src_hash) {\n return { file: nodeFile, isStale: true, reason: 'no src_hash in frontmatter' }\n }\n\n if (srcPaths.length === 0) {\n return { file: nodeFile, isStale: false }\n }\n\n // Glob all matching source files\n const allFiles: string[] = []\n for (const pattern of srcPaths) {\n const matches = await glob(pattern, {\n cwd: projectRoot,\n nodir: true,\n ignore: ['node_modules/**', 'dist/**', 'build/**', '.git/**', 'coverage/**'],\n })\n allFiles.push(...matches)\n }\n\n allFiles.sort()\n\n if (allFiles.length === 0) {\n return { file: nodeFile, isStale: true, reason: 'no source files match src_paths' }\n }\n\n // Compute SHA256 of all file contents concatenated\n const hash = createHash('sha256')\n for (const file of allFiles) {\n try {\n const fileContent = await readFile(join(projectRoot, file))\n hash.update(fileContent)\n } catch {\n // File unreadable — skip\n }\n }\n const currentHash = `sha256:${hash.digest('hex').slice(0, 12)}`\n\n if (currentHash !== lastSynced.src_hash) {\n return {\n file: 
nodeFile,\n isStale: true,\n reason: `hash mismatch: expected ${lastSynced.src_hash}, got ${currentHash}`,\n }\n }\n\n return { file: nodeFile, isStale: false }\n}\n\n// --- Discover all context nodes ---\n\nasync function discoverNodes(projectRoot: string): Promise<string[]> {\n const nogrepDir = join(projectRoot, '.nogrep')\n const patterns = [\n 'domains/*.md',\n 'architecture/*.md',\n 'flows/*.md',\n 'entities/*.md',\n ]\n\n const files: string[] = []\n for (const pattern of patterns) {\n const matches = await glob(pattern, { cwd: nogrepDir, nodir: true })\n files.push(...matches.map(m => `.nogrep/${m}`))\n }\n\n return files.sort()\n}\n\n// --- Validate all nodes ---\n\nexport async function validateAll(\n projectRoot: string,\n): Promise<{ total: number; fresh: StaleResult[]; stale: StaleResult[] }> {\n const indexPath = join(projectRoot, '.nogrep', '_index.json')\n try {\n await readFile(indexPath, 'utf-8')\n } catch {\n throw new NogrepError(\n 'No .nogrep/_index.json found. Run /nogrep:init first.',\n 'NO_INDEX',\n )\n }\n\n const nodeFiles = await discoverNodes(projectRoot)\n const results = await Promise.all(\n nodeFiles.map(f => checkFreshness(f, projectRoot)),\n )\n\n const fresh = results.filter(r => !r.isStale)\n const stale = results.filter(r => r.isStale)\n\n return { total: results.length, fresh, stale }\n}\n\n// --- Formatting ---\n\nfunction formatText(result: { total: number; fresh: StaleResult[]; stale: StaleResult[] }): string {\n const lines: string[] = []\n lines.push(`nogrep index: ${result.total} nodes`)\n lines.push(` Fresh: ${result.fresh.length}`)\n lines.push(` Stale: ${result.stale.length}`)\n\n if (result.stale.length > 0) {\n lines.push('')\n lines.push('Stale nodes:')\n for (const s of result.stale) {\n lines.push(` - ${s.file}: ${s.reason}`)\n }\n }\n\n return lines.join('\\n')\n}\n\nfunction formatJson(result: { total: number; fresh: StaleResult[]; stale: StaleResult[] }): string {\n return JSON.stringify(result, null, 
2)\n}\n\n// --- CLI ---\n\nasync function main(): Promise<void> {\n const { values } = parseArgs({\n options: {\n format: { type: 'string', default: 'text' },\n root: { type: 'string', default: process.cwd() },\n },\n strict: true,\n })\n\n const root = resolvePath(values.root ?? process.cwd())\n const format = values.format ?? 'text'\n\n const result = await validateAll(root)\n\n switch (format) {\n case 'json':\n process.stdout.write(formatJson(result) + '\\n')\n break\n case 'text':\n default:\n process.stdout.write(formatText(result) + '\\n')\n break\n }\n}\n\nmain().catch((err: unknown) => {\n if (err instanceof NogrepError) {\n process.stderr.write(JSON.stringify({ error: err.message, code: err.code }) + '\\n')\n } else {\n const message = err instanceof Error ? err.message : String(err)\n process.stderr.write(JSON.stringify({ error: message }) + '\\n')\n }\n process.exitCode = 1\n})\n"],"mappings":";;;;;AAAA,SAAS,gBAAgB;AACzB,SAAS,MAAM,WAAW,mBAAmB;AAC7C,SAAS,kBAAkB;AAC3B,SAAS,iBAAiB;AAC1B,SAAS,YAAY;AACrB,OAAO,YAAY;AAMnB,eAAsB,eACpB,UACA,aACsB;AACtB,MAAI;AACJ,MAAI;AACF,cAAU,MAAM,SAAS,KAAK,aAAa,QAAQ,GAAG,OAAO;AAAA,EAC/D,QAAQ;AACN,WAAO,EAAE,MAAM,UAAU,SAAS,MAAM,QAAQ,yBAAyB;AAAA,EAC3E;AAEA,QAAM,SAAS,OAAO,OAAO;AAC7B,QAAM,WAAqB,OAAO,KAAK,aAAa,CAAC;AACrD,QAAM,aAAa,OAAO,KAAK;AAI/B,MAAI,CAAC,YAAY,UAAU;AACzB,WAAO,EAAE,MAAM,UAAU,SAAS,MAAM,QAAQ,6BAA6B;AAAA,EAC/E;AAEA,MAAI,SAAS,WAAW,GAAG;AACzB,WAAO,EAAE,MAAM,UAAU,SAAS,MAAM;AAAA,EAC1C;AAGA,QAAM,WAAqB,CAAC;AAC5B,aAAW,WAAW,UAAU;AAC9B,UAAM,UAAU,MAAM,KAAK,SAAS;AAAA,MAClC,KAAK;AAAA,MACL,OAAO;AAAA,MACP,QAAQ,CAAC,mBAAmB,WAAW,YAAY,WAAW,aAAa;AAAA,IAC7E,CAAC;AACD,aAAS,KAAK,GAAG,OAAO;AAAA,EAC1B;AAEA,WAAS,KAAK;AAEd,MAAI,SAAS,WAAW,GAAG;AACzB,WAAO,EAAE,MAAM,UAAU,SAAS,MAAM,QAAQ,kCAAkC;AAAA,EACpF;AAGA,QAAM,OAAO,WAAW,QAAQ;AAChC,aAAW,QAAQ,UAAU;AAC3B,QAAI;AACF,YAAM,cAAc,MAAM,SAAS,KAAK,aAAa,IAAI,CAAC;AAC1D,WAAK,OAAO,WAAW;AAAA,IACzB,QAAQ;AAAA,IAER;AAAA,EACF;AACA,QAAM,cAAc,UAAU,KAAK,OAAO,KAAK,EAAE,MAAM,GAAG,EAAE,CAAC;AAE7D,MAAI,gBAAgB,WAAW,UAAU;
AACvC,WAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS;AAAA,MACT,QAAQ,2BAA2B,WAAW,QAAQ,SAAS,WAAW;AAAA,IAC5E;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,UAAU,SAAS,MAAM;AAC1C;AAIA,eAAe,cAAc,aAAwC;AACnE,QAAM,YAAY,KAAK,aAAa,SAAS;AAC7C,QAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,QAAM,QAAkB,CAAC;AACzB,aAAW,WAAW,UAAU;AAC9B,UAAM,UAAU,MAAM,KAAK,SAAS,EAAE,KAAK,WAAW,OAAO,KAAK,CAAC;AACnE,UAAM,KAAK,GAAG,QAAQ,IAAI,OAAK,WAAW,CAAC,EAAE,CAAC;AAAA,EAChD;AAEA,SAAO,MAAM,KAAK;AACpB;AAIA,eAAsB,YACpB,aACwE;AACxE,QAAM,YAAY,KAAK,aAAa,WAAW,aAAa;AAC5D,MAAI;AACF,UAAM,SAAS,WAAW,OAAO;AAAA,EACnC,QAAQ;AACN,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,YAAY,MAAM,cAAc,WAAW;AACjD,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,UAAU,IAAI,OAAK,eAAe,GAAG,WAAW,CAAC;AAAA,EACnD;AAEA,QAAM,QAAQ,QAAQ,OAAO,OAAK,CAAC,EAAE,OAAO;AAC5C,QAAM,QAAQ,QAAQ,OAAO,OAAK,EAAE,OAAO;AAE3C,SAAO,EAAE,OAAO,QAAQ,QAAQ,OAAO,MAAM;AAC/C;AAIA,SAAS,WAAW,QAA+E;AACjG,QAAM,QAAkB,CAAC;AACzB,QAAM,KAAK,iBAAiB,OAAO,KAAK,QAAQ;AAChD,QAAM,KAAK,YAAY,OAAO,MAAM,MAAM,EAAE;AAC5C,QAAM,KAAK,YAAY,OAAO,MAAM,MAAM,EAAE;AAE5C,MAAI,OAAO,MAAM,SAAS,GAAG;AAC3B,UAAM,KAAK,EAAE;AACb,UAAM,KAAK,cAAc;AACzB,eAAW,KAAK,OAAO,OAAO;AAC5B,YAAM,KAAK,OAAO,EAAE,IAAI,KAAK,EAAE,MAAM,EAAE;AAAA,IACzC;AAAA,EACF;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;AAEA,SAAS,WAAW,QAA+E;AACjG,SAAO,KAAK,UAAU,QAAQ,MAAM,CAAC;AACvC;AAIA,eAAe,OAAsB;AACnC,QAAM,EAAE,OAAO,IAAI,UAAU;AAAA,IAC3B,SAAS;AAAA,MACP,QAAQ,EAAE,MAAM,UAAU,SAAS,OAAO;AAAA,MAC1C,MAAM,EAAE,MAAM,UAAU,SAAS,QAAQ,IAAI,EAAE;AAAA,IACjD;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AAED,QAAM,OAAO,YAAY,OAAO,QAAQ,QAAQ,IAAI,CAAC;AACrD,QAAM,SAAS,OAAO,UAAU;AAEhC,QAAM,SAAS,MAAM,YAAY,IAAI;AAErC,UAAQ,QAAQ;AAAA,IACd,KAAK;AACH,cAAQ,OAAO,MAAM,WAAW,MAAM,IAAI,IAAI;AAC9C;AAAA,IACF,KAAK;AAAA,IACL;AACE,cAAQ,OAAO,MAAM,WAAW,MAAM,IAAI,IAAI;AAC9C;AAAA,EACJ;AACF;AAEA,KAAK,EAAE,MAAM,CAAC,QAAiB;AAC7B,MAAI,eAAe,aAAa;AAC9B,YAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,IAAI,SAAS,MAAM,IAAI,KAAK,CAAC,IAAI,IAAI;AAAA,EACpF,OAAO;AACL,UAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,YAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,QAAQ,CA
AC,IAAI,IAAI;AAAA,EAChE;AACA,UAAQ,WAAW;AACrB,CAAC;","names":[]}
|
package/dist/write.d.ts
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import { NodeResult, StackResult, IndexJson, RegistryJson } from './types.js';

/** Persists the given context nodes beneath outputDir — behavior inferred from signature; see scripts/write.ts. */
declare function writeContextNodes(nodes: NodeResult[], outputDir: string): Promise<void>;
/** Builds the _index.json payload from parsed nodes plus summary stack info (the three picked StackResult fields). */
declare function buildIndex(nodes: NodeResult[], stack: Pick<StackResult, 'primaryLanguage' | 'frameworks' | 'architecture'>): IndexJson;
/** Builds the glob → context-file registry from parsed nodes. */
declare function buildRegistry(nodes: NodeResult[]): RegistryJson;
/** Patches the project's CLAUDE.md — presumably applying templates/claude-md-patch.md; confirm in scripts/write.ts. */
declare function patchClaudeMd(projectRoot: string): Promise<void>;

export { buildIndex, buildRegistry, patchClaudeMd, writeContextNodes };
|