nogrep 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. package/README.md +91 -0
  2. package/commands/init.md +241 -0
  3. package/commands/off.md +11 -0
  4. package/commands/on.md +21 -0
  5. package/commands/query.md +13 -0
  6. package/commands/status.md +15 -0
  7. package/commands/update.md +89 -0
  8. package/dist/chunk-SMUAF6SM.js +12 -0
  9. package/dist/chunk-SMUAF6SM.js.map +1 -0
  10. package/dist/query.d.ts +12 -0
  11. package/dist/query.js +272 -0
  12. package/dist/query.js.map +1 -0
  13. package/dist/settings.d.ts +6 -0
  14. package/dist/settings.js +75 -0
  15. package/dist/settings.js.map +1 -0
  16. package/dist/signals.d.ts +9 -0
  17. package/dist/signals.js +174 -0
  18. package/dist/signals.js.map +1 -0
  19. package/dist/trim.d.ts +3 -0
  20. package/dist/trim.js +266 -0
  21. package/dist/trim.js.map +1 -0
  22. package/dist/types.d.ts +141 -0
  23. package/dist/types.js +7 -0
  24. package/dist/types.js.map +1 -0
  25. package/dist/validate.d.ts +10 -0
  26. package/dist/validate.js +143 -0
  27. package/dist/validate.js.map +1 -0
  28. package/dist/write.d.ts +8 -0
  29. package/dist/write.js +267 -0
  30. package/dist/write.js.map +1 -0
  31. package/docs/ARCHITECTURE.md +239 -0
  32. package/docs/CLAUDE.md +161 -0
  33. package/docs/CONVENTIONS.md +162 -0
  34. package/docs/SPEC.md +803 -0
  35. package/docs/TASKS.md +216 -0
  36. package/hooks/hooks.json +35 -0
  37. package/hooks/pre-tool-use.sh +37 -0
  38. package/hooks/prompt-submit.sh +26 -0
  39. package/hooks/session-start.sh +21 -0
  40. package/package.json +24 -0
  41. package/scripts/query.ts +290 -0
  42. package/scripts/settings.ts +98 -0
  43. package/scripts/signals.ts +237 -0
  44. package/scripts/trim.ts +379 -0
  45. package/scripts/types.ts +186 -0
  46. package/scripts/validate.ts +181 -0
  47. package/scripts/write.ts +346 -0
  48. package/templates/claude-md-patch.md +8 -0
package/dist/query.js ADDED
@@ -0,0 +1,272 @@
1
+ import {
2
+ NogrepError
3
+ } from "./chunk-SMUAF6SM.js";
4
+
5
+ // scripts/query.ts
6
+ import { readFile } from "fs/promises";
7
+ import { join, resolve as resolvePath } from "path";
8
+ import { parseArgs } from "util";
9
// Tokenize a natural-language question and split its tokens into known
// taxonomy tags (fully qualified, e.g. "concern:caching") and free-text
// keywords for fuzzy matching against the index.
function extractTerms(question, taxonomy) {
  // Lookup from lower-cased taxonomy value -> fully qualified tag.
  const tagLookup = new Map();
  const register = (category, values) => {
    for (const val of values) {
      tagLookup.set(val.toLowerCase(), `${category}:${val}`);
    }
  };
  register("layer", taxonomy.static.layer);
  register("concern", taxonomy.static.concern);
  register("type", taxonomy.static.type);
  register("domain", taxonomy.dynamic.domain);
  register("tech", taxonomy.dynamic.tech);
  for (const [cat, values] of Object.entries(taxonomy.custom)) {
    register(cat, values);
  }

  // Common English words that carry no search signal.
  const stopWords = new Set([
    "the", "is", "at", "in", "of", "on", "to", "a", "an", "and", "or",
    "for", "it", "do", "does", "how", "what", "where", "which", "when",
    "who", "why", "this", "that", "with", "from", "by", "be", "as",
    "are", "was", "were", "been", "has", "have", "had", "not", "but",
    "if", "my", "our", "its", "can", "will", "should", "would", "could",
    "about", "after", "work", "works", "use", "uses", "used"
  ]);

  // Normalize: lower-case, strip punctuation (keep hyphens), drop 1-char tokens.
  const words = question
    .toLowerCase()
    .replace(/[^\w\s-]/g, " ")
    .split(/\s+/)
    .filter((w) => w.length > 1);

  const tags = [];
  const keywords = [];
  for (const word of words) {
    const tag = tagLookup.get(word);
    if (tag) {
      if (!tags.includes(tag)) tags.push(tag);
    } else if (!stopWords.has(word)) {
      keywords.push(word);
    }
  }

  // Hyphenated tag values may appear in the question either hyphenated
  // ("error-handling") or spaced ("error handling"); recognize both forms.
  const questionLower = question.toLowerCase();
  for (const [val, tag] of tagLookup.entries()) {
    if (!val.includes("-")) continue;
    const spaced = val.replace(/-/g, " ");
    const present = questionLower.includes(spaced) || questionLower.includes(val);
    if (present && !tags.includes(tag)) {
      tags.push(tag);
    }
  }

  return { tags, keywords };
}
113
// Score context files against the extracted terms and return the top
// `limit` matches. Tag hits score +2, keyword hits (exact or substring in
// either direction) score +1; ties are broken alphabetically by path.
function resolveQuery(terms, index, limit = 5) {
  const scores = new Map();

  const record = (file, points, label) => {
    const entry = scores.get(file);
    if (entry === undefined) {
      scores.set(file, { score: points, matchedOn: [label] });
    } else {
      entry.score += points;
      entry.matchedOn.push(label);
    }
  };

  // Exact tag hits.
  for (const tag of terms.tags) {
    for (const file of index.tags[tag] ?? []) {
      record(file, 2, `tag:${tag}`);
    }
  }

  // Keyword hits: exact lookup first, then substring matches either way.
  for (const kw of terms.keywords) {
    const kwLower = kw.toLowerCase();
    for (const file of index.keywords[kwLower] ?? []) {
      record(file, 1, `keyword:${kwLower}`);
    }
    for (const [indexKw, kwFiles] of Object.entries(index.keywords)) {
      if (indexKw === kwLower) continue; // exact hit already counted above
      if (!indexKw.includes(kwLower) && !kwLower.includes(indexKw)) continue;
      for (const file of kwFiles) {
        record(file, 1, `keyword:${indexKw}`);
      }
    }
  }

  const ranked = [...scores.entries()];
  ranked.sort((a, b) => b[1].score - a[1].score || a[0].localeCompare(b[0]));

  return ranked.slice(0, limit).map(([contextFile, { score, matchedOn }]) => {
    const unique = [...new Set(matchedOn)];
    return {
      contextFile,
      score,
      matchedOn: unique,
      summary: `Matched: ${unique.join(", ")}`,
    };
  });
}
157
// Load and parse .nogrep/_index.json under the project root.
// A missing, unreadable, or unparsable index is reported uniformly as a
// NogrepError with code NO_INDEX so the CLI prints a consistent hint.
async function loadIndex(projectRoot) {
  const indexPath = join(projectRoot, ".nogrep", "_index.json");
  try {
    return JSON.parse(await readFile(indexPath, "utf-8"));
  } catch {
    throw new NogrepError(
      "No .nogrep/_index.json found. Run /nogrep:init first.",
      "NO_INDEX"
    );
  }
}
169
// Load .nogrep/_taxonomy.json under the project root, falling back to the
// built-in default taxonomy when the file is missing or unparsable.
async function loadTaxonomy(projectRoot) {
  const taxonomyPath = join(projectRoot, ".nogrep", "_taxonomy.json");
  try {
    const raw = await readFile(taxonomyPath, "utf-8");
    return JSON.parse(raw);
  } catch {
    // Default taxonomy: static categories only; dynamic values are
    // discovered from the index (see buildTaxonomyFromIndex).
    return {
      static: {
        layer: ["presentation", "business", "data", "infrastructure", "cross-cutting"],
        concern: ["security", "performance", "caching", "validation", "error-handling", "idempotency", "observability"],
        type: ["module", "flow", "entity", "integration", "config", "ui", "test"]
      },
      dynamic: { domain: [], tech: [] },
      custom: {}
    };
  }
}
186
// Merge dynamic domain/tech values discovered in the index tags into the
// base taxonomy, so question matching also recognizes project-specific
// terms that were never written to _taxonomy.json.
// Returns a new taxonomy object; neither input is mutated.
function buildTaxonomyFromIndex(index, baseTaxonomy) {
  const domains = new Set(baseTaxonomy.dynamic.domain);
  const techs = new Set(baseTaxonomy.dynamic.tech);

  for (const tagKey of Object.keys(index.tags)) {
    // Split on the FIRST colon only, so tag values that themselves contain
    // a colon (e.g. "tech:redis:cluster") are preserved intact.
    // (split(":") + destructuring would truncate the value at the second colon.)
    const sep = tagKey.indexOf(":");
    if (sep <= 0 || sep === tagKey.length - 1) continue; // no category or empty value
    const category = tagKey.slice(0, sep);
    const value = tagKey.slice(sep + 1);
    if (category === "domain") domains.add(value);
    else if (category === "tech") techs.add(value);
  }

  return {
    ...baseTaxonomy,
    dynamic: {
      domain: [...domains],
      tech: [...techs]
    }
  };
}
203
// Render results as newline-separated context-file paths (for --format paths).
function formatPaths(results) {
  const paths = [];
  for (const result of results) {
    paths.push(result.contextFile);
  }
  return paths.join("\n");
}
206
// Render results as pretty-printed JSON (for --format json, the default).
function formatJson(results) {
  const indentWidth = 2;
  return JSON.stringify(results, null, indentWidth);
}
209
// Render results as a human-readable bullet list (for --format summary).
function formatSummary(results) {
  if (results.length === 0) {
    return "No matching context files found.";
  }
  const lines = results.map(
    (r) => `- ${r.contextFile} (score: ${r.score}) \u2014 ${r.summary}`
  );
  return lines.join("\n");
}
213
// CLI entry point: parse flags, resolve the query against the index, and
// print results in the requested format (paths | summary | json).
// Terms come either from a free-form --question (run through extractTerms)
// or from explicit comma-separated --tags / --keywords lists.
async function main() {
  const { values } = parseArgs({
    options: {
      tags: { type: "string" },
      keywords: { type: "string" },
      question: { type: "string" },
      format: { type: "string", default: "json" },
      limit: { type: "string", default: "5" },
      root: { type: "string", default: process.cwd() }
    },
    strict: true
  });
  const root = resolvePath(values.root ?? process.cwd());
  const format = values.format ?? "json";
  // Validate --limit: previously a non-numeric value parsed to NaN and
  // slice(0, NaN) silently returned zero results; report it instead.
  const limit = parseInt(values.limit ?? "5", 10);
  if (!Number.isInteger(limit) || limit < 0) {
    process.stderr.write(
      JSON.stringify({ error: `Invalid --limit value: ${values.limit}` }) + "\n"
    );
    process.exitCode = 1;
    return;
  }
  const index = await loadIndex(root);
  const baseTaxonomy = await loadTaxonomy(root);
  const taxonomy = buildTaxonomyFromIndex(index, baseTaxonomy);
  let terms;
  if (values.question) {
    terms = extractTerms(values.question, taxonomy);
  } else if (values.tags || values.keywords) {
    // Explicit comma-separated lists; blank entries are dropped.
    const tags = values.tags ? values.tags.split(",").map((t) => t.trim()).filter(Boolean) : [];
    const keywords = values.keywords ? values.keywords.split(",").map((k) => k.trim()).filter(Boolean) : [];
    terms = { tags, keywords };
  } else {
    process.stderr.write(
      JSON.stringify({ error: "Usage: node query.js --tags <tags> | --keywords <words> | --question <text> [--format paths|json|summary] [--limit N]" }) + "\n"
    );
    process.exitCode = 1;
    return;
  }
  const results = resolveQuery(terms, index, limit);
  switch (format) {
    case "paths":
      process.stdout.write(formatPaths(results) + "\n");
      break;
    case "summary":
      process.stdout.write(formatSummary(results) + "\n");
      break;
    case "json":
    default:
      process.stdout.write(formatJson(results) + "\n");
      break;
  }
}
259
+ main().catch((err) => {
260
+ if (err instanceof NogrepError) {
261
+ process.stderr.write(JSON.stringify({ error: err.message, code: err.code }) + "\n");
262
+ } else {
263
+ const message = err instanceof Error ? err.message : String(err);
264
+ process.stderr.write(JSON.stringify({ error: message }) + "\n");
265
+ }
266
+ process.exitCode = 1;
267
+ });
268
+ export {
269
+ extractTerms,
270
+ resolveQuery
271
+ };
272
+ //# sourceMappingURL=query.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../scripts/query.ts"],"sourcesContent":["import { readFile } from 'node:fs/promises'\nimport { join, resolve as resolvePath } from 'node:path'\nimport { parseArgs } from 'node:util'\nimport type { IndexJson, RankedResult, Taxonomy } from './types.js'\nimport { NogrepError } from './types.js'\n\n// --- Term extraction ---\n\nexport function extractTerms(\n question: string,\n taxonomy: Taxonomy,\n): { tags: string[]; keywords: string[] } {\n const words = question\n .toLowerCase()\n .replace(/[^\\w\\s-]/g, ' ')\n .split(/\\s+/)\n .filter(w => w.length > 1)\n\n const tags: string[] = []\n const keywords: string[] = []\n\n // Collect all taxonomy values for matching\n const tagLookup = new Map<string, string>()\n\n for (const val of taxonomy.static.layer) {\n tagLookup.set(val.toLowerCase(), `layer:${val}`)\n }\n for (const val of taxonomy.static.concern) {\n tagLookup.set(val.toLowerCase(), `concern:${val}`)\n }\n for (const val of taxonomy.static.type) {\n tagLookup.set(val.toLowerCase(), `type:${val}`)\n }\n for (const val of taxonomy.dynamic.domain) {\n tagLookup.set(val.toLowerCase(), `domain:${val}`)\n }\n for (const val of taxonomy.dynamic.tech) {\n tagLookup.set(val.toLowerCase(), `tech:${val}`)\n }\n for (const [cat, values] of Object.entries(taxonomy.custom)) {\n for (const val of values) {\n tagLookup.set(val.toLowerCase(), `${cat}:${val}`)\n }\n }\n\n // Stop words to skip as keywords\n const stopWords = new Set([\n 'the', 'is', 'at', 'in', 'of', 'on', 'to', 'a', 'an', 'and', 'or',\n 'for', 'it', 'do', 'does', 'how', 'what', 'where', 'which', 'when',\n 'who', 'why', 'this', 'that', 'with', 'from', 'by', 'be', 'as',\n 'are', 'was', 'were', 'been', 'has', 'have', 'had', 'not', 'but',\n 'if', 'my', 'our', 'its', 'can', 'will', 'should', 'would', 'could',\n 'about', 'after', 'work', 'works', 'use', 'uses', 'used',\n ])\n\n for (const word of words) {\n const tag = tagLookup.get(word)\n if (tag && !tags.includes(tag)) {\n 
tags.push(tag)\n }\n\n // Also check hyphenated compound matches (e.g. \"error-handling\")\n if (!tag && !stopWords.has(word)) {\n keywords.push(word)\n }\n }\n\n // Check for multi-word tag matches (e.g. \"error handling\" → \"error-handling\")\n const questionLower = question.toLowerCase()\n for (const [val, tag] of tagLookup.entries()) {\n if (val.includes('-')) {\n const spacedVersion = val.replace(/-/g, ' ')\n if (questionLower.includes(spacedVersion) && !tags.includes(tag)) {\n tags.push(tag)\n }\n if (questionLower.includes(val) && !tags.includes(tag)) {\n tags.push(tag)\n }\n }\n }\n\n return { tags, keywords }\n}\n\n// --- Resolution ---\n\nexport function resolveQuery(\n terms: { tags: string[]; keywords: string[] },\n index: IndexJson,\n limit = 5,\n): RankedResult[] {\n const scoreMap = new Map<string, { score: number; matchedOn: string[] }>()\n\n function addMatch(contextFile: string, score: number, matchLabel: string): void {\n const existing = scoreMap.get(contextFile)\n if (existing) {\n existing.score += score\n existing.matchedOn.push(matchLabel)\n } else {\n scoreMap.set(contextFile, { score, matchedOn: [matchLabel] })\n }\n }\n\n // Tag matching: +2 per match\n for (const tag of terms.tags) {\n const files = index.tags[tag]\n if (files) {\n for (const file of files) {\n addMatch(file, 2, `tag:${tag}`)\n }\n }\n }\n\n // Keyword matching: +1 per match\n for (const kw of terms.keywords) {\n const kwLower = kw.toLowerCase()\n\n // Direct keyword lookup\n const files = index.keywords[kwLower]\n if (files) {\n for (const file of files) {\n addMatch(file, 1, `keyword:${kwLower}`)\n }\n }\n\n // Also search all index keywords for partial matches\n for (const [indexKw, kwFiles] of Object.entries(index.keywords)) {\n if (indexKw === kwLower) continue // Already handled\n if (indexKw.includes(kwLower) || kwLower.includes(indexKw)) {\n for (const file of kwFiles) {\n addMatch(file, 1, `keyword:${indexKw}`)\n }\n }\n }\n }\n\n // Sort by score descending, 
then alphabetically for ties\n const results: RankedResult[] = [...scoreMap.entries()]\n .sort((a, b) => b[1].score - a[1].score || a[0].localeCompare(b[0]))\n .slice(0, limit)\n .map(([contextFile, { score, matchedOn }]) => ({\n contextFile,\n score,\n matchedOn: [...new Set(matchedOn)],\n summary: `Matched: ${[...new Set(matchedOn)].join(', ')}`,\n }))\n\n return results\n}\n\n// --- Index + taxonomy loading ---\n\nasync function loadIndex(projectRoot: string): Promise<IndexJson> {\n const indexPath = join(projectRoot, '.nogrep', '_index.json')\n try {\n const content = await readFile(indexPath, 'utf-8')\n return JSON.parse(content) as IndexJson\n } catch {\n throw new NogrepError(\n 'No .nogrep/_index.json found. Run /nogrep:init first.',\n 'NO_INDEX',\n )\n }\n}\n\nasync function loadTaxonomy(projectRoot: string): Promise<Taxonomy> {\n const taxonomyPath = join(projectRoot, '.nogrep', '_taxonomy.json')\n try {\n const content = await readFile(taxonomyPath, 'utf-8')\n return JSON.parse(content) as Taxonomy\n } catch {\n // Return default taxonomy if file doesn't exist\n return {\n static: {\n layer: ['presentation', 'business', 'data', 'infrastructure', 'cross-cutting'],\n concern: ['security', 'performance', 'caching', 'validation', 'error-handling', 'idempotency', 'observability'],\n type: ['module', 'flow', 'entity', 'integration', 'config', 'ui', 'test'],\n },\n dynamic: { domain: [], tech: [] },\n custom: {},\n }\n }\n}\n\nfunction buildTaxonomyFromIndex(index: IndexJson, baseTaxonomy: Taxonomy): Taxonomy {\n // Extract dynamic domain and tech values from the index tags\n const domains = new Set<string>(baseTaxonomy.dynamic.domain)\n const techs = new Set<string>(baseTaxonomy.dynamic.tech)\n\n for (const tagKey of Object.keys(index.tags)) {\n const [category, value] = tagKey.split(':')\n if (!category || !value) continue\n if (category === 'domain') domains.add(value)\n if (category === 'tech') techs.add(value)\n }\n\n return {\n ...baseTaxonomy,\n dynamic: 
{\n domain: [...domains],\n tech: [...techs],\n },\n }\n}\n\n// --- Formatting ---\n\nfunction formatPaths(results: RankedResult[]): string {\n return results.map(r => r.contextFile).join('\\n')\n}\n\nfunction formatJson(results: RankedResult[]): string {\n return JSON.stringify(results, null, 2)\n}\n\nfunction formatSummary(results: RankedResult[]): string {\n if (results.length === 0) return 'No matching context files found.'\n return results\n .map(r => `- ${r.contextFile} (score: ${r.score}) — ${r.summary}`)\n .join('\\n')\n}\n\n// --- CLI ---\n\nasync function main(): Promise<void> {\n const { values } = parseArgs({\n options: {\n tags: { type: 'string' },\n keywords: { type: 'string' },\n question: { type: 'string' },\n format: { type: 'string', default: 'json' },\n limit: { type: 'string', default: '5' },\n root: { type: 'string', default: process.cwd() },\n },\n strict: true,\n })\n\n const root = resolvePath(values.root ?? process.cwd())\n const limit = parseInt(values.limit ?? '5', 10)\n const format = values.format ?? 'json'\n\n const index = await loadIndex(root)\n const baseTaxonomy = await loadTaxonomy(root)\n const taxonomy = buildTaxonomyFromIndex(index, baseTaxonomy)\n\n let terms: { tags: string[]; keywords: string[] }\n\n if (values.question) {\n terms = extractTerms(values.question, taxonomy)\n } else if (values.tags || values.keywords) {\n const tags = values.tags\n ? values.tags.split(',').map(t => t.trim()).filter(Boolean)\n : []\n const keywords = values.keywords\n ? 
values.keywords.split(',').map(k => k.trim()).filter(Boolean)\n : []\n terms = { tags, keywords }\n } else {\n process.stderr.write(\n JSON.stringify({ error: 'Usage: node query.js --tags <tags> | --keywords <words> | --question <text> [--format paths|json|summary] [--limit N]' }) + '\\n',\n )\n process.exitCode = 1\n return\n }\n\n const results = resolveQuery(terms, index, limit)\n\n switch (format) {\n case 'paths':\n process.stdout.write(formatPaths(results) + '\\n')\n break\n case 'summary':\n process.stdout.write(formatSummary(results) + '\\n')\n break\n case 'json':\n default:\n process.stdout.write(formatJson(results) + '\\n')\n break\n }\n}\n\nmain().catch((err: unknown) => {\n if (err instanceof NogrepError) {\n process.stderr.write(JSON.stringify({ error: err.message, code: err.code }) + '\\n')\n } else {\n const message = err instanceof Error ? err.message : String(err)\n process.stderr.write(JSON.stringify({ error: message }) + '\\n')\n }\n process.exitCode = 1\n})\n"],"mappings":";;;;;AAAA,SAAS,gBAAgB;AACzB,SAAS,MAAM,WAAW,mBAAmB;AAC7C,SAAS,iBAAiB;AAMnB,SAAS,aACd,UACA,UACwC;AACxC,QAAM,QAAQ,SACX,YAAY,EACZ,QAAQ,aAAa,GAAG,EACxB,MAAM,KAAK,EACX,OAAO,OAAK,EAAE,SAAS,CAAC;AAE3B,QAAM,OAAiB,CAAC;AACxB,QAAM,WAAqB,CAAC;AAG5B,QAAM,YAAY,oBAAI,IAAoB;AAE1C,aAAW,OAAO,SAAS,OAAO,OAAO;AACvC,cAAU,IAAI,IAAI,YAAY,GAAG,SAAS,GAAG,EAAE;AAAA,EACjD;AACA,aAAW,OAAO,SAAS,OAAO,SAAS;AACzC,cAAU,IAAI,IAAI,YAAY,GAAG,WAAW,GAAG,EAAE;AAAA,EACnD;AACA,aAAW,OAAO,SAAS,OAAO,MAAM;AACtC,cAAU,IAAI,IAAI,YAAY,GAAG,QAAQ,GAAG,EAAE;AAAA,EAChD;AACA,aAAW,OAAO,SAAS,QAAQ,QAAQ;AACzC,cAAU,IAAI,IAAI,YAAY,GAAG,UAAU,GAAG,EAAE;AAAA,EAClD;AACA,aAAW,OAAO,SAAS,QAAQ,MAAM;AACvC,cAAU,IAAI,IAAI,YAAY,GAAG,QAAQ,GAAG,EAAE;AAAA,EAChD;AACA,aAAW,CAAC,KAAK,MAAM,KAAK,OAAO,QAAQ,SAAS,MAAM,GAAG;AAC3D,eAAW,OAAO,QAAQ;AACxB,gBAAU,IAAI,IAAI,YAAY,GAAG,GAAG,GAAG,IAAI,GAAG,EAAE;AAAA,IAClD;AAAA,EACF;AAGA,QAAM,YAAY,oBAAI,IAAI;AAAA,IACxB;AAAA,IAAO;AAAA,IAAM;AAAA,IAAM;AAAA,IAAM;AAAA,IAAM;AAAA,IAAM;AAAA,IAAM;AAAA,IAAK;AAAA,IAAM;AAAA,IAAO;AAAA,
IAC7D;AAAA,IAAO;AAAA,IAAM;AAAA,IAAM;AAAA,IAAQ;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAS;AAAA,IAAS;AAAA,IAC5D;AAAA,IAAO;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAM;AAAA,IAAM;AAAA,IAC1D;AAAA,IAAO;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAO;AAAA,IAAO;AAAA,IAC3D;AAAA,IAAM;AAAA,IAAM;AAAA,IAAO;AAAA,IAAO;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAU;AAAA,IAAS;AAAA,IAC5D;AAAA,IAAS;AAAA,IAAS;AAAA,IAAQ;AAAA,IAAS;AAAA,IAAO;AAAA,IAAQ;AAAA,EACpD,CAAC;AAED,aAAW,QAAQ,OAAO;AACxB,UAAM,MAAM,UAAU,IAAI,IAAI;AAC9B,QAAI,OAAO,CAAC,KAAK,SAAS,GAAG,GAAG;AAC9B,WAAK,KAAK,GAAG;AAAA,IACf;AAGA,QAAI,CAAC,OAAO,CAAC,UAAU,IAAI,IAAI,GAAG;AAChC,eAAS,KAAK,IAAI;AAAA,IACpB;AAAA,EACF;AAGA,QAAM,gBAAgB,SAAS,YAAY;AAC3C,aAAW,CAAC,KAAK,GAAG,KAAK,UAAU,QAAQ,GAAG;AAC5C,QAAI,IAAI,SAAS,GAAG,GAAG;AACrB,YAAM,gBAAgB,IAAI,QAAQ,MAAM,GAAG;AAC3C,UAAI,cAAc,SAAS,aAAa,KAAK,CAAC,KAAK,SAAS,GAAG,GAAG;AAChE,aAAK,KAAK,GAAG;AAAA,MACf;AACA,UAAI,cAAc,SAAS,GAAG,KAAK,CAAC,KAAK,SAAS,GAAG,GAAG;AACtD,aAAK,KAAK,GAAG;AAAA,MACf;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,SAAS;AAC1B;AAIO,SAAS,aACd,OACA,OACA,QAAQ,GACQ;AAChB,QAAM,WAAW,oBAAI,IAAoD;AAEzE,WAAS,SAAS,aAAqB,OAAe,YAA0B;AAC9E,UAAM,WAAW,SAAS,IAAI,WAAW;AACzC,QAAI,UAAU;AACZ,eAAS,SAAS;AAClB,eAAS,UAAU,KAAK,UAAU;AAAA,IACpC,OAAO;AACL,eAAS,IAAI,aAAa,EAAE,OAAO,WAAW,CAAC,UAAU,EAAE,CAAC;AAAA,IAC9D;AAAA,EACF;AAGA,aAAW,OAAO,MAAM,MAAM;AAC5B,UAAM,QAAQ,MAAM,KAAK,GAAG;AAC5B,QAAI,OAAO;AACT,iBAAW,QAAQ,OAAO;AACxB,iBAAS,MAAM,GAAG,OAAO,GAAG,EAAE;AAAA,MAChC;AAAA,IACF;AAAA,EACF;AAGA,aAAW,MAAM,MAAM,UAAU;AAC/B,UAAM,UAAU,GAAG,YAAY;AAG/B,UAAM,QAAQ,MAAM,SAAS,OAAO;AACpC,QAAI,OAAO;AACT,iBAAW,QAAQ,OAAO;AACxB,iBAAS,MAAM,GAAG,WAAW,OAAO,EAAE;AAAA,MACxC;AAAA,IACF;AAGA,eAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,MAAM,QAAQ,GAAG;AAC/D,UAAI,YAAY,QAAS;AACzB,UAAI,QAAQ,SAAS,OAAO,KAAK,QAAQ,SAAS,OAAO,GAAG;AAC1D,mBAAW,QAAQ,SAAS;AAC1B,mBAAS,MAAM,GAAG,WAAW,OAAO,EAAE;AAAA,QACxC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,UAA0B,CAAC,GAAG,SAAS,QAAQ,CAAC,EACnD,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,QAAQ,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,cAAc,EAAE,CAAC,
CAAC,CAAC,EAClE,MAAM,GAAG,KAAK,EACd,IAAI,CAAC,CAAC,aAAa,EAAE,OAAO,UAAU,CAAC,OAAO;AAAA,IAC7C;AAAA,IACA;AAAA,IACA,WAAW,CAAC,GAAG,IAAI,IAAI,SAAS,CAAC;AAAA,IACjC,SAAS,YAAY,CAAC,GAAG,IAAI,IAAI,SAAS,CAAC,EAAE,KAAK,IAAI,CAAC;AAAA,EACzD,EAAE;AAEJ,SAAO;AACT;AAIA,eAAe,UAAU,aAAyC;AAChE,QAAM,YAAY,KAAK,aAAa,WAAW,aAAa;AAC5D,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,WAAW,OAAO;AACjD,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,QAAQ;AACN,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;AAEA,eAAe,aAAa,aAAwC;AAClE,QAAM,eAAe,KAAK,aAAa,WAAW,gBAAgB;AAClE,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,cAAc,OAAO;AACpD,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,QAAQ;AAEN,WAAO;AAAA,MACL,QAAQ;AAAA,QACN,OAAO,CAAC,gBAAgB,YAAY,QAAQ,kBAAkB,eAAe;AAAA,QAC7E,SAAS,CAAC,YAAY,eAAe,WAAW,cAAc,kBAAkB,eAAe,eAAe;AAAA,QAC9G,MAAM,CAAC,UAAU,QAAQ,UAAU,eAAe,UAAU,MAAM,MAAM;AAAA,MAC1E;AAAA,MACA,SAAS,EAAE,QAAQ,CAAC,GAAG,MAAM,CAAC,EAAE;AAAA,MAChC,QAAQ,CAAC;AAAA,IACX;AAAA,EACF;AACF;AAEA,SAAS,uBAAuB,OAAkB,cAAkC;AAElF,QAAM,UAAU,IAAI,IAAY,aAAa,QAAQ,MAAM;AAC3D,QAAM,QAAQ,IAAI,IAAY,aAAa,QAAQ,IAAI;AAEvD,aAAW,UAAU,OAAO,KAAK,MAAM,IAAI,GAAG;AAC5C,UAAM,CAAC,UAAU,KAAK,IAAI,OAAO,MAAM,GAAG;AAC1C,QAAI,CAAC,YAAY,CAAC,MAAO;AACzB,QAAI,aAAa,SAAU,SAAQ,IAAI,KAAK;AAC5C,QAAI,aAAa,OAAQ,OAAM,IAAI,KAAK;AAAA,EAC1C;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,SAAS;AAAA,MACP,QAAQ,CAAC,GAAG,OAAO;AAAA,MACnB,MAAM,CAAC,GAAG,KAAK;AAAA,IACjB;AAAA,EACF;AACF;AAIA,SAAS,YAAY,SAAiC;AACpD,SAAO,QAAQ,IAAI,OAAK,EAAE,WAAW,EAAE,KAAK,IAAI;AAClD;AAEA,SAAS,WAAW,SAAiC;AACnD,SAAO,KAAK,UAAU,SAAS,MAAM,CAAC;AACxC;AAEA,SAAS,cAAc,SAAiC;AACtD,MAAI,QAAQ,WAAW,EAAG,QAAO;AACjC,SAAO,QACJ,IAAI,OAAK,KAAK,EAAE,WAAW,YAAY,EAAE,KAAK,YAAO,EAAE,OAAO,EAAE,EAChE,KAAK,IAAI;AACd;AAIA,eAAe,OAAsB;AACnC,QAAM,EAAE,OAAO,IAAI,UAAU;AAAA,IAC3B,SAAS;AAAA,MACP,MAAM,EAAE,MAAM,SAAS;AAAA,MACvB,UAAU,EAAE,MAAM,SAAS;AAAA,MAC3B,UAAU,EAAE,MAAM,SAAS;AAAA,MAC3B,QAAQ,EAAE,MAAM,UAAU,SAAS,OAAO;AAAA,MAC1C,OAAO,EAAE,MAAM,UAAU,SAAS,IAAI;AAAA,MACtC,MAAM,EAAE,MAAM,UAAU,SAAS,QAAQ,IAAI,EAAE;AAAA,IACjD;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AAED,QAAM,OAAO,YAAY,OAAO,QAAQ,QAAQ,IAAI,CAAC;A
ACrD,QAAM,QAAQ,SAAS,OAAO,SAAS,KAAK,EAAE;AAC9C,QAAM,SAAS,OAAO,UAAU;AAEhC,QAAM,QAAQ,MAAM,UAAU,IAAI;AAClC,QAAM,eAAe,MAAM,aAAa,IAAI;AAC5C,QAAM,WAAW,uBAAuB,OAAO,YAAY;AAE3D,MAAI;AAEJ,MAAI,OAAO,UAAU;AACnB,YAAQ,aAAa,OAAO,UAAU,QAAQ;AAAA,EAChD,WAAW,OAAO,QAAQ,OAAO,UAAU;AACzC,UAAM,OAAO,OAAO,OAChB,OAAO,KAAK,MAAM,GAAG,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC,EAAE,OAAO,OAAO,IACxD,CAAC;AACL,UAAM,WAAW,OAAO,WACpB,OAAO,SAAS,MAAM,GAAG,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC,EAAE,OAAO,OAAO,IAC5D,CAAC;AACL,YAAQ,EAAE,MAAM,SAAS;AAAA,EAC3B,OAAO;AACL,YAAQ,OAAO;AAAA,MACb,KAAK,UAAU,EAAE,OAAO,wHAAwH,CAAC,IAAI;AAAA,IACvJ;AACA,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,QAAM,UAAU,aAAa,OAAO,OAAO,KAAK;AAEhD,UAAQ,QAAQ;AAAA,IACd,KAAK;AACH,cAAQ,OAAO,MAAM,YAAY,OAAO,IAAI,IAAI;AAChD;AAAA,IACF,KAAK;AACH,cAAQ,OAAO,MAAM,cAAc,OAAO,IAAI,IAAI;AAClD;AAAA,IACF,KAAK;AAAA,IACL;AACE,cAAQ,OAAO,MAAM,WAAW,OAAO,IAAI,IAAI;AAC/C;AAAA,EACJ;AACF;AAEA,KAAK,EAAE,MAAM,CAAC,QAAiB;AAC7B,MAAI,eAAe,aAAa;AAC9B,YAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,IAAI,SAAS,MAAM,IAAI,KAAK,CAAC,IAAI,IAAI;AAAA,EACpF,OAAO;AACL,UAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,YAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,QAAQ,CAAC,IAAI,IAAI;AAAA,EAChE;AACA,UAAQ,WAAW;AACrB,CAAC;","names":[]}
@@ -0,0 +1,6 @@
1
import { NogrepSettings } from './types.js';

/**
 * Read the effective nogrep settings for a project, merging values from
 * `.claude/settings.local.json` over `.claude/settings.json`.
 * Missing or unreadable files are treated as empty.
 */
declare function readSettings(projectRoot: string): Promise<NogrepSettings>;
/**
 * Merge a partial settings patch into the shared settings file, or into the
 * local override file when `local` is true. Creates `.claude/` if needed.
 */
declare function writeSettings(projectRoot: string, settings: Partial<NogrepSettings>, local?: boolean): Promise<void>;

export { readSettings, writeSettings };
@@ -0,0 +1,75 @@
1
+ // scripts/settings.ts
2
+ import { readFile, writeFile, mkdir } from "fs/promises";
3
+ import { join } from "path";
4
+ import { parseArgs } from "util";
5
// Shared (project-level) Claude Code settings file, relative to project root.
var SETTINGS_FILE = ".claude/settings.json";
// Local settings file; its values take precedence over the shared file
// (see the ?? chain in readSettings).
var SETTINGS_LOCAL_FILE = ".claude/settings.local.json";
7
// Read and parse a JSON file, returning {} when it is missing, unreadable,
// or not valid JSON (best-effort: settings files are optional).
async function readJsonFile(path) {
  try {
    return JSON.parse(await readFile(path, "utf-8"));
  } catch {
    return {};
  }
}
15
// Create a directory (and any missing parents); no-op if it already exists.
async function ensureDir(dirPath) {
  await mkdir(dirPath, { recursive: true });
}
18
// Resolve the effective settings: the local file wins over the shared file,
// and `enabled` defaults to false when neither file sets it.
async function readSettings(projectRoot) {
  const shared = await readJsonFile(join(projectRoot, SETTINGS_FILE));
  const local = await readJsonFile(join(projectRoot, SETTINGS_LOCAL_FILE));
  const enabled = local.nogrep?.enabled ?? shared.nogrep?.enabled ?? false;
  return { enabled };
}
26
// Merge a partial settings patch into the chosen settings file, creating
// .claude/ first if needed. `local` selects settings.local.json over the
// shared settings.json. Existing non-nogrep keys in the file are preserved.
async function writeSettings(projectRoot, settings, local) {
  const targetFile = local ? SETTINGS_LOCAL_FILE : SETTINGS_FILE;
  const filePath = join(projectRoot, targetFile);
  await ensureDir(join(projectRoot, ".claude"));
  const current = await readJsonFile(filePath);
  current.nogrep = { ...current.nogrep, ...settings };
  await writeFile(filePath, JSON.stringify(current, null, 2) + "\n", "utf-8");
}
36
// CLI entry point: `--get` prints the effective settings as JSON;
// `--set key=value` updates the shared settings file (or, with --local,
// the local override file). Anything else prints usage and exits 1.
async function main() {
  const { values } = parseArgs({
    options: {
      set: { type: "string" },
      get: { type: "boolean", default: false },
      local: { type: "boolean", default: false },
      root: { type: "string", default: process.cwd() }
    },
    strict: true
  });
  const root = values.root ?? process.cwd();
  if (values.get) {
    const settings = await readSettings(root);
    process.stdout.write(JSON.stringify(settings, null, 2) + "\n");
    return;
  }
  if (values.set) {
    // Split on the first "=" only, so values may themselves contain "=".
    const sep = values.set.indexOf("=");
    const key = sep === -1 ? values.set : values.set.slice(0, sep);
    const value = sep === -1 ? void 0 : values.set.slice(sep + 1);
    if (key === "enabled") {
      // Previously any value other than "true" was silently treated as
      // false (e.g. --set enabled=yes disabled nogrep); reject instead.
      if (value !== "true" && value !== "false") {
        process.stderr.write(JSON.stringify({ error: `Invalid value for enabled: ${value}` }) + "\n");
        process.exitCode = 1;
        return;
      }
      await writeSettings(root, { enabled: value === "true" }, values.local);
    } else {
      process.stderr.write(JSON.stringify({ error: `Unknown setting: ${key}` }) + "\n");
      process.exitCode = 1;
    }
    return;
  }
  process.stderr.write(JSON.stringify({ error: "Usage: node settings.js --set enabled=true [--local] | --get" }) + "\n");
  process.exitCode = 1;
}
66
+ main().catch((err) => {
67
+ const message = err instanceof Error ? err.message : String(err);
68
+ process.stderr.write(JSON.stringify({ error: message }) + "\n");
69
+ process.exitCode = 1;
70
+ });
71
+ export {
72
+ readSettings,
73
+ writeSettings
74
+ };
75
+ //# sourceMappingURL=settings.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../scripts/settings.ts"],"sourcesContent":["import { readFile, writeFile, mkdir } from 'node:fs/promises'\nimport { join } from 'node:path'\nimport { parseArgs } from 'node:util'\nimport type { NogrepSettings } from './types.js'\n\nconst SETTINGS_FILE = '.claude/settings.json'\nconst SETTINGS_LOCAL_FILE = '.claude/settings.local.json'\n\ninterface SettingsJson {\n nogrep?: Partial<NogrepSettings>\n [key: string]: unknown\n}\n\nasync function readJsonFile(path: string): Promise<SettingsJson> {\n try {\n const content = await readFile(path, 'utf-8')\n return JSON.parse(content) as SettingsJson\n } catch {\n return {}\n }\n}\n\nasync function ensureDir(dir: string): Promise<void> {\n await mkdir(dir, { recursive: true })\n}\n\nexport async function readSettings(projectRoot: string): Promise<NogrepSettings> {\n const sharedPath = join(projectRoot, SETTINGS_FILE)\n const localPath = join(projectRoot, SETTINGS_LOCAL_FILE)\n\n const shared = await readJsonFile(sharedPath)\n const local = await readJsonFile(localPath)\n\n const enabled =\n local.nogrep?.enabled ?? shared.nogrep?.enabled ?? false\n\n return { enabled }\n}\n\nexport async function writeSettings(\n projectRoot: string,\n settings: Partial<NogrepSettings>,\n local?: boolean,\n): Promise<void> {\n const filePath = join(\n projectRoot,\n local ? SETTINGS_LOCAL_FILE : SETTINGS_FILE,\n )\n\n await ensureDir(join(projectRoot, '.claude'))\n\n const existing = await readJsonFile(filePath)\n existing.nogrep = { ...existing.nogrep, ...settings }\n\n await writeFile(filePath, JSON.stringify(existing, null, 2) + '\\n', 'utf-8')\n}\n\n// CLI interface\nasync function main(): Promise<void> {\n const { values } = parseArgs({\n options: {\n set: { type: 'string' },\n get: { type: 'boolean', default: false },\n local: { type: 'boolean', default: false },\n root: { type: 'string', default: process.cwd() },\n },\n strict: true,\n })\n\n const root = values.root ?? 
process.cwd()\n\n if (values.get) {\n const settings = await readSettings(root)\n process.stdout.write(JSON.stringify(settings, null, 2) + '\\n')\n return\n }\n\n if (values.set) {\n const [key, value] = values.set.split('=')\n if (key === 'enabled') {\n const enabled = value === 'true'\n await writeSettings(root, { enabled }, values.local)\n } else {\n process.stderr.write(JSON.stringify({ error: `Unknown setting: ${key}` }) + '\\n')\n process.exitCode = 1\n }\n return\n }\n\n process.stderr.write(JSON.stringify({ error: 'Usage: node settings.js --set enabled=true [--local] | --get' }) + '\\n')\n process.exitCode = 1\n}\n\nmain().catch((err: unknown) => {\n const message = err instanceof Error ? err.message : String(err)\n process.stderr.write(JSON.stringify({ error: message }) + '\\n')\n process.exitCode = 1\n})\n"],"mappings":";AAAA,SAAS,UAAU,WAAW,aAAa;AAC3C,SAAS,YAAY;AACrB,SAAS,iBAAiB;AAG1B,IAAM,gBAAgB;AACtB,IAAM,sBAAsB;AAO5B,eAAe,aAAa,MAAqC;AAC/D,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,MAAM,OAAO;AAC5C,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAEA,eAAe,UAAU,KAA4B;AACnD,QAAM,MAAM,KAAK,EAAE,WAAW,KAAK,CAAC;AACtC;AAEA,eAAsB,aAAa,aAA8C;AAC/E,QAAM,aAAa,KAAK,aAAa,aAAa;AAClD,QAAM,YAAY,KAAK,aAAa,mBAAmB;AAEvD,QAAM,SAAS,MAAM,aAAa,UAAU;AAC5C,QAAM,QAAQ,MAAM,aAAa,SAAS;AAE1C,QAAM,UACJ,MAAM,QAAQ,WAAW,OAAO,QAAQ,WAAW;AAErD,SAAO,EAAE,QAAQ;AACnB;AAEA,eAAsB,cACpB,aACA,UACA,OACe;AACf,QAAM,WAAW;AAAA,IACf;AAAA,IACA,QAAQ,sBAAsB;AAAA,EAChC;AAEA,QAAM,UAAU,KAAK,aAAa,SAAS,CAAC;AAE5C,QAAM,WAAW,MAAM,aAAa,QAAQ;AAC5C,WAAS,SAAS,EAAE,GAAG,SAAS,QAAQ,GAAG,SAAS;AAEpD,QAAM,UAAU,UAAU,KAAK,UAAU,UAAU,MAAM,CAAC,IAAI,MAAM,OAAO;AAC7E;AAGA,eAAe,OAAsB;AACnC,QAAM,EAAE,OAAO,IAAI,UAAU;AAAA,IAC3B,SAAS;AAAA,MACP,KAAK,EAAE,MAAM,SAAS;AAAA,MACtB,KAAK,EAAE,MAAM,WAAW,SAAS,MAAM;AAAA,MACvC,OAAO,EAAE,MAAM,WAAW,SAAS,MAAM;AAAA,MACzC,MAAM,EAAE,MAAM,UAAU,SAAS,QAAQ,IAAI,EAAE;AAAA,IACjD;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AAED,QAAM,OAAO,OAAO,QAAQ,QAAQ,IAAI;AAExC,MAAI,OAAO,KAAK;AACd,UAAM,WAAW,MAAM,aAAa,IAAI;AACxC,YAAQ,
OAAO,MAAM,KAAK,UAAU,UAAU,MAAM,CAAC,IAAI,IAAI;AAC7D;AAAA,EACF;AAEA,MAAI,OAAO,KAAK;AACd,UAAM,CAAC,KAAK,KAAK,IAAI,OAAO,IAAI,MAAM,GAAG;AACzC,QAAI,QAAQ,WAAW;AACrB,YAAM,UAAU,UAAU;AAC1B,YAAM,cAAc,MAAM,EAAE,QAAQ,GAAG,OAAO,KAAK;AAAA,IACrD,OAAO;AACL,cAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,oBAAoB,GAAG,GAAG,CAAC,IAAI,IAAI;AAChF,cAAQ,WAAW;AAAA,IACrB;AACA;AAAA,EACF;AAEA,UAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,+DAA+D,CAAC,IAAI,IAAI;AACrH,UAAQ,WAAW;AACrB;AAEA,KAAK,EAAE,MAAM,CAAC,QAAiB;AAC7B,QAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,UAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,QAAQ,CAAC,IAAI,IAAI;AAC9D,UAAQ,WAAW;AACrB,CAAC;","names":[]}
@@ -0,0 +1,9 @@
1
+ import { SignalResult } from './types.js';
2
+
+ /** Options accepted by {@link collectSignals}. */
3
+ interface CollectOptions {
+ /** Extra directory names to skip, in addition to the built-in skip list (node_modules, dist, .git, ...). */
4
+ exclude?: string[];
+ /** Maximum recursion depth for the directory walk. Defaults to 4. */
5
+ maxDepth?: number;
6
+ }
+ /**
+  * Walks `root` and gathers repository signals: directory tree, extension
+  * counts, dependency manifests, entry points, git churn, largest files,
+  * env/config files, and test files.
+  */
7
+ declare function collectSignals(root: string, options?: CollectOptions): Promise<SignalResult>;
8
+
9
+ export { collectSignals };
@@ -0,0 +1,174 @@
1
// scripts/signals.ts — compiled output
import { readdir, stat } from "fs/promises";
import { join, extname, relative, resolve } from "path";
import { execFile } from "child_process";
import { promisify } from "util";

// Promisified execFile; used below to shell out to `git log`.
var execFileAsync = promisify(execFile);

// Directory names the walker never descends into.
var SKIP_DIRS = /* @__PURE__ */ new Set([
  "node_modules", "dist", "build", ".git", "coverage",
  ".next", ".nuxt", "__pycache__", ".venv", "venv",
  ".idea", ".vscode", ".nogrep"
]);

// Dependency-manifest file names mapped to their ecosystem label.
var MANIFEST_NAMES = {
  "package.json": "npm",
  "requirements.txt": "pip",
  "pom.xml": "maven",
  "go.mod": "go",
  "Podfile": "cocoapods",
  "Cargo.toml": "cargo",
  "pubspec.yaml": "flutter",
  "composer.json": "composer"
};

// Base names (extension stripped) that usually mark a program entry point.
var ENTRY_NAMES = /* @__PURE__ */ new Set(["main", "index", "app", "server"]);

// File-name patterns identifying test files (JS/TS, Go-style, and Python).
var TEST_PATTERNS = [
  /\.test\.\w+$/,
  /\.spec\.\w+$/,
  /_test\.\w+$/,
  /^test_.*\.py$/
];
39
// Gather all repository signals for `root`: directory tree, extension counts,
// manifests, entry points, git churn, 20 largest files, env files, test files.
async function collectSignals(root, options = {}) {
  // Resolve to an absolute path so relative() output below is stable.
  const absRoot = resolve(root);
  const depthLimit = options.maxDepth ?? 4;
  const skipExtra = new Set(options.exclude ?? []);

  // Shared accumulators that walkDirectory fills in as a side effect.
  const collectors = {
    allFiles: [],
    extensionMap: {},
    manifests: [],
    entryPoints: [],
    envFiles: [],
    testFiles: []
  };

  const directoryTree = await walkDirectory(absRoot, absRoot, 0, depthLimit, skipExtra, collectors);
  const gitChurn = await collectGitChurn(absRoot);

  // Top 20 files by size, largest first.
  const largeFiles = collectors.allFiles
    .sort((a, b) => b.bytes - a.bytes)
    .slice(0, 20)
    .map(({ path, bytes }) => ({ path, bytes }));

  return {
    directoryTree,
    extensionMap: collectors.extensionMap,
    manifests: collectors.manifests,
    entryPoints: collectors.entryPoints,
    gitChurn,
    largeFiles,
    envFiles: collectors.envFiles,
    testFiles: collectors.testFiles
  };
}
70
/**
 * Recursively walk `dir` (at most `maxDepth` levels below `root`), returning a
 * tree of { name, path, type[, children] } nodes while filling the shared
 * `collectors` accumulators (file sizes, extension counts, manifests, entry
 * points, env/config files, test files) as a side effect.
 *
 * Fix: the "src" and "config" directory checks previously assumed "/" path
 * separators (`dir.endsWith("/src")`, `dir.split("/")`) and therefore never
 * matched on Windows, where path.join() produces "\". Both checks now derive
 * the last path segment in a separator-agnostic way.
 */
async function walkDirectory(dir, root, depth, maxDepth, extraSkip, collectors) {
  if (depth > maxDepth) return [];
  let entries;
  try {
    entries = await readdir(dir, { withFileTypes: true });
  } catch {
    // Unreadable directory (permissions, race) — treat as empty.
    return [];
  }
  // Last path segment of `dir`, tolerant of both "/" and "\" separators.
  const dirBase = dir.split(/[\\/]/).pop();
  const nodes = [];
  for (const entry of entries) {
    const fullPath = join(dir, entry.name);
    const relPath = relative(root, fullPath);
    if (entry.isDirectory()) {
      if (SKIP_DIRS.has(entry.name) || extraSkip.has(entry.name)) continue;
      const children = await walkDirectory(fullPath, root, depth + 1, maxDepth, extraSkip, collectors);
      nodes.push({ name: entry.name, path: relPath, type: "directory", children });
    } else if (entry.isFile()) {
      nodes.push({ name: entry.name, path: relPath, type: "file" });
      let fileBytes = 0;
      try {
        const s = await stat(fullPath);
        fileBytes = s.size;
      } catch {
        // stat failure (broken symlink, race) — record size 0.
      }
      collectors.allFiles.push({ path: relPath, bytes: fileBytes });
      const ext = extname(entry.name);
      if (ext) {
        collectors.extensionMap[ext] = (collectors.extensionMap[ext] ?? 0) + 1;
      }
      // Known dependency manifests (package.json, go.mod, ...).
      if (entry.name in MANIFEST_NAMES) {
        collectors.manifests.push({
          path: relPath,
          type: MANIFEST_NAMES[entry.name],
          depth
        });
      }
      // Entry points: root / first level, or files directly inside a src/ dir.
      // (was: dir.endsWith("/src") — broke on Windows backslash paths)
      if (depth <= 1 || (depth === 2 && dirBase === "src")) {
        const nameWithoutExt = entry.name.replace(/\.\w+$/, "");
        if (ENTRY_NAMES.has(nameWithoutExt)) {
          collectors.entryPoints.push(relPath);
        }
      }
      // .env, .env.local, ... anywhere in the tree.
      if (entry.name.startsWith(".env")) {
        collectors.envFiles.push(relPath);
      }
      // config.* files at the project root also count as config signals.
      if (depth === 0 && entry.name.match(/^config\./)) {
        collectors.envFiles.push(relPath);
      }
      const fileName = entry.name;
      if (TEST_PATTERNS.some((p) => p.test(fileName))) {
        collectors.testFiles.push(relPath);
      }
    }
  }
  // A shallow directory literally named "config" is itself a config signal.
  // (was: dir.split("/").pop() — broke on Windows backslash paths)
  if (dirBase === "config" && depth <= 2) {
    collectors.envFiles.push(relative(root, dir));
  }
  return nodes;
}
130
// Aggregate per-file change counts over the last 50 commits via
// `git log --stat`. Returns the 20 most-churned paths, most changes first,
// or [] when git is unavailable or `root` is not a repository.
async function collectGitChurn(root) {
  try {
    const gitArgs = ["log", "--stat", "--oneline", "-50", "--pretty=format:"];
    const { stdout } = await execFileAsync("git", gitArgs, {
      cwd: root,
      maxBuffer: 1024 * 1024
    });
    // Stat lines look like: "  src/billing/service.ts | 42 +++---"
    const statLine = /^\s+(.+?)\s+\|\s+(\d+)/;
    const changeCounts = {};
    for (const line of stdout.split("\n")) {
      const match = statLine.exec(line);
      if (!match) continue;
      const filePath = match[1].trim();
      changeCounts[filePath] = (changeCounts[filePath] ?? 0) + parseInt(match[2], 10);
    }
    const ranked = Object.entries(changeCounts);
    ranked.sort(([, a], [, b]) => b - a);
    return ranked.slice(0, 20).map(([path, changes]) => ({ path, changes }));
  } catch {
    // No git, or git log failed — churn is simply unavailable.
    return [];
  }
}
151
// CLI: node signals.js [--root <dir>] [--exclude a,b,c]
// Prints the collected signals as pretty-printed JSON on stdout.
async function main() {
  const argv = process.argv.slice(2);
  let root = ".";
  const exclude = [];
  let i = 0;
  while (i < argv.length) {
    const flag = argv[i];
    const value = argv[i + 1];
    if (flag === "--root" && value) {
      root = value;
      i += 2;
    } else if (flag === "--exclude" && value) {
      // --exclude takes a comma-separated list of directory names.
      exclude.push(...value.split(","));
      i += 2;
    } else {
      // Unknown arguments are silently ignored (matches prior behavior).
      i += 1;
    }
  }
  const result = await collectSignals(root, { exclude });
  process.stdout.write(JSON.stringify(result, null, 2));
}
167
+ main().catch((err) => {
168
+ process.stderr.write(JSON.stringify({ error: String(err) }));
169
+ process.exit(1);
170
+ });
171
+ export {
172
+ collectSignals
173
+ };
174
+ //# sourceMappingURL=signals.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../scripts/signals.ts"],"sourcesContent":["import { readdir, stat, readFile } from 'fs/promises'\nimport { join, extname, relative, resolve } from 'path'\nimport { execFile } from 'child_process'\nimport { promisify } from 'util'\nimport type { SignalResult, DirectoryNode, ManifestFile, ChurnEntry, FileSize } from './types.js'\n\nconst execFileAsync = promisify(execFile)\n\nconst SKIP_DIRS = new Set([\n 'node_modules', 'dist', 'build', '.git', 'coverage',\n '.next', '.nuxt', '__pycache__', '.venv', 'venv',\n '.idea', '.vscode', '.nogrep',\n])\n\nconst MANIFEST_NAMES: Record<string, string> = {\n 'package.json': 'npm',\n 'requirements.txt': 'pip',\n 'pom.xml': 'maven',\n 'go.mod': 'go',\n 'Podfile': 'cocoapods',\n 'Cargo.toml': 'cargo',\n 'pubspec.yaml': 'flutter',\n 'composer.json': 'composer',\n}\n\nconst ENTRY_NAMES = new Set(['main', 'index', 'app', 'server'])\n\nconst TEST_PATTERNS = [\n /\\.test\\.\\w+$/,\n /\\.spec\\.\\w+$/,\n /_test\\.\\w+$/,\n /^test_.*\\.py$/,\n]\n\ninterface CollectOptions {\n exclude?: string[]\n maxDepth?: number\n}\n\nexport async function collectSignals(\n root: string,\n options: CollectOptions = {},\n): Promise<SignalResult> {\n const absRoot = resolve(root)\n const maxDepth = options.maxDepth ?? 4\n const extraSkip = new Set(options.exclude ?? 
[])\n\n const allFiles: { path: string; bytes: number }[] = []\n const extensionMap: Record<string, number> = {}\n const manifests: ManifestFile[] = []\n const entryPoints: string[] = []\n const envFiles: string[] = []\n const testFiles: string[] = []\n\n const directoryTree = await walkDirectory(absRoot, absRoot, 0, maxDepth, extraSkip, {\n allFiles,\n extensionMap,\n manifests,\n entryPoints,\n envFiles,\n testFiles,\n })\n\n const gitChurn = await collectGitChurn(absRoot)\n\n const largeFiles = allFiles\n .sort((a, b) => b.bytes - a.bytes)\n .slice(0, 20)\n .map(f => ({ path: f.path, bytes: f.bytes }))\n\n return {\n directoryTree,\n extensionMap,\n manifests,\n entryPoints,\n gitChurn,\n largeFiles,\n envFiles,\n testFiles,\n }\n}\n\ninterface Collectors {\n allFiles: { path: string; bytes: number }[]\n extensionMap: Record<string, number>\n manifests: ManifestFile[]\n entryPoints: string[]\n envFiles: string[]\n testFiles: string[]\n}\n\nasync function walkDirectory(\n dir: string,\n root: string,\n depth: number,\n maxDepth: number,\n extraSkip: Set<string>,\n collectors: Collectors,\n): Promise<DirectoryNode[]> {\n if (depth > maxDepth) return []\n\n let entries\n try {\n entries = await readdir(dir, { withFileTypes: true })\n } catch {\n return []\n }\n\n const nodes: DirectoryNode[] = []\n\n for (const entry of entries) {\n const fullPath = join(dir, entry.name)\n const relPath = relative(root, fullPath)\n\n if (entry.isDirectory()) {\n if (SKIP_DIRS.has(entry.name) || extraSkip.has(entry.name)) continue\n\n const children = await walkDirectory(fullPath, root, depth + 1, maxDepth, extraSkip, collectors)\n nodes.push({ name: entry.name, path: relPath, type: 'directory', children })\n } else if (entry.isFile()) {\n nodes.push({ name: entry.name, path: relPath, type: 'file' })\n\n let fileBytes = 0\n try {\n const s = await stat(fullPath)\n fileBytes = s.size\n } catch {\n // skip\n }\n\n collectors.allFiles.push({ path: relPath, bytes: fileBytes })\n\n const 
ext = extname(entry.name)\n if (ext) {\n collectors.extensionMap[ext] = (collectors.extensionMap[ext] ?? 0) + 1\n }\n\n // Manifest check\n if (entry.name in MANIFEST_NAMES) {\n collectors.manifests.push({\n path: relPath,\n type: MANIFEST_NAMES[entry.name]!,\n depth,\n })\n }\n\n // Entry point check — root or src/ level\n if (depth <= 1 || (depth === 2 && dir.endsWith('/src'))) {\n const nameWithoutExt = entry.name.replace(/\\.\\w+$/, '')\n if (ENTRY_NAMES.has(nameWithoutExt)) {\n collectors.entryPoints.push(relPath)\n }\n }\n\n // Env files\n if (entry.name.startsWith('.env')) {\n collectors.envFiles.push(relPath)\n }\n\n // Config directories are handled at directory level\n // But we also detect config files at root\n if (depth === 0 && entry.name.match(/^config\\./)) {\n collectors.envFiles.push(relPath)\n }\n\n // Test files\n const fileName = entry.name\n if (TEST_PATTERNS.some(p => p.test(fileName))) {\n collectors.testFiles.push(relPath)\n }\n }\n }\n\n // Check if this directory is a config directory\n const dirName = dir.split('/').pop()\n if (dirName === 'config' && depth <= 2) {\n collectors.envFiles.push(relative(root, dir))\n }\n\n return nodes\n}\n\nasync function collectGitChurn(root: string): Promise<ChurnEntry[]> {\n try {\n const { stdout } = await execFileAsync(\n 'git',\n ['log', '--stat', '--oneline', '-50', '--pretty=format:'],\n { cwd: root, maxBuffer: 1024 * 1024 },\n )\n\n const changeCounts: Record<string, number> = {}\n\n for (const line of stdout.split('\\n')) {\n // Match lines like: src/billing/service.ts | 42 +++---\n const match = line.match(/^\\s+(.+?)\\s+\\|\\s+(\\d+)/)\n if (match) {\n const filePath = match[1]!.trim()\n const changes = parseInt(match[2]!, 10)\n changeCounts[filePath] = (changeCounts[filePath] ?? 
0) + changes\n }\n }\n\n return Object.entries(changeCounts)\n .sort(([, a], [, b]) => b - a)\n .slice(0, 20)\n .map(([path, changes]) => ({ path, changes }))\n } catch {\n // No git or git log fails — return empty\n return []\n }\n}\n\n// --- CLI interface ---\n\nasync function main(): Promise<void> {\n const args = process.argv.slice(2)\n let root = '.'\n const exclude: string[] = []\n\n for (let i = 0; i < args.length; i++) {\n if (args[i] === '--root' && args[i + 1]) {\n root = args[i + 1]!\n i++\n } else if (args[i] === '--exclude' && args[i + 1]) {\n exclude.push(...args[i + 1]!.split(','))\n i++\n }\n }\n\n const result = await collectSignals(root, { exclude })\n process.stdout.write(JSON.stringify(result, null, 2))\n}\n\nmain().catch(err => {\n process.stderr.write(JSON.stringify({ error: String(err) }))\n process.exit(1)\n})\n"],"mappings":";AAAA,SAAS,SAAS,YAAsB;AACxC,SAAS,MAAM,SAAS,UAAU,eAAe;AACjD,SAAS,gBAAgB;AACzB,SAAS,iBAAiB;AAG1B,IAAM,gBAAgB,UAAU,QAAQ;AAExC,IAAM,YAAY,oBAAI,IAAI;AAAA,EACxB;AAAA,EAAgB;AAAA,EAAQ;AAAA,EAAS;AAAA,EAAQ;AAAA,EACzC;AAAA,EAAS;AAAA,EAAS;AAAA,EAAe;AAAA,EAAS;AAAA,EAC1C;AAAA,EAAS;AAAA,EAAW;AACtB,CAAC;AAED,IAAM,iBAAyC;AAAA,EAC7C,gBAAgB;AAAA,EAChB,oBAAoB;AAAA,EACpB,WAAW;AAAA,EACX,UAAU;AAAA,EACV,WAAW;AAAA,EACX,cAAc;AAAA,EACd,gBAAgB;AAAA,EAChB,iBAAiB;AACnB;AAEA,IAAM,cAAc,oBAAI,IAAI,CAAC,QAAQ,SAAS,OAAO,QAAQ,CAAC;AAE9D,IAAM,gBAAgB;AAAA,EACpB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAOA,eAAsB,eACpB,MACA,UAA0B,CAAC,GACJ;AACvB,QAAM,UAAU,QAAQ,IAAI;AAC5B,QAAM,WAAW,QAAQ,YAAY;AACrC,QAAM,YAAY,IAAI,IAAI,QAAQ,WAAW,CAAC,CAAC;AAE/C,QAAM,WAA8C,CAAC;AACrD,QAAM,eAAuC,CAAC;AAC9C,QAAM,YAA4B,CAAC;AACnC,QAAM,cAAwB,CAAC;AAC/B,QAAM,WAAqB,CAAC;AAC5B,QAAM,YAAsB,CAAC;AAE7B,QAAM,gBAAgB,MAAM,cAAc,SAAS,SAAS,GAAG,UAAU,WAAW;AAAA,IAClF;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,WAAW,MAAM,gBAAgB,OAAO;AAE9C,QAAM,aAAa,SAChB,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAChC,MAAM,GAAG,EAAE,EACX,IAAI,QAAM,EAAE,MAAM,EAAE,MAAM,OAAO,EAAE,MAAM,EAAE;AAE9C,S
AAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAWA,eAAe,cACb,KACA,MACA,OACA,UACA,WACA,YAC0B;AAC1B,MAAI,QAAQ,SAAU,QAAO,CAAC;AAE9B,MAAI;AACJ,MAAI;AACF,cAAU,MAAM,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAAA,EACtD,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,QAAyB,CAAC;AAEhC,aAAW,SAAS,SAAS;AAC3B,UAAM,WAAW,KAAK,KAAK,MAAM,IAAI;AACrC,UAAM,UAAU,SAAS,MAAM,QAAQ;AAEvC,QAAI,MAAM,YAAY,GAAG;AACvB,UAAI,UAAU,IAAI,MAAM,IAAI,KAAK,UAAU,IAAI,MAAM,IAAI,EAAG;AAE5D,YAAM,WAAW,MAAM,cAAc,UAAU,MAAM,QAAQ,GAAG,UAAU,WAAW,UAAU;AAC/F,YAAM,KAAK,EAAE,MAAM,MAAM,MAAM,MAAM,SAAS,MAAM,aAAa,SAAS,CAAC;AAAA,IAC7E,WAAW,MAAM,OAAO,GAAG;AACzB,YAAM,KAAK,EAAE,MAAM,MAAM,MAAM,MAAM,SAAS,MAAM,OAAO,CAAC;AAE5D,UAAI,YAAY;AAChB,UAAI;AACF,cAAM,IAAI,MAAM,KAAK,QAAQ;AAC7B,oBAAY,EAAE;AAAA,MAChB,QAAQ;AAAA,MAER;AAEA,iBAAW,SAAS,KAAK,EAAE,MAAM,SAAS,OAAO,UAAU,CAAC;AAE5D,YAAM,MAAM,QAAQ,MAAM,IAAI;AAC9B,UAAI,KAAK;AACP,mBAAW,aAAa,GAAG,KAAK,WAAW,aAAa,GAAG,KAAK,KAAK;AAAA,MACvE;AAGA,UAAI,MAAM,QAAQ,gBAAgB;AAChC,mBAAW,UAAU,KAAK;AAAA,UACxB,MAAM;AAAA,UACN,MAAM,eAAe,MAAM,IAAI;AAAA,UAC/B;AAAA,QACF,CAAC;AAAA,MACH;AAGA,UAAI,SAAS,KAAM,UAAU,KAAK,IAAI,SAAS,MAAM,GAAI;AACvD,cAAM,iBAAiB,MAAM,KAAK,QAAQ,UAAU,EAAE;AACtD,YAAI,YAAY,IAAI,cAAc,GAAG;AACnC,qBAAW,YAAY,KAAK,OAAO;AAAA,QACrC;AAAA,MACF;AAGA,UAAI,MAAM,KAAK,WAAW,MAAM,GAAG;AACjC,mBAAW,SAAS,KAAK,OAAO;AAAA,MAClC;AAIA,UAAI,UAAU,KAAK,MAAM,KAAK,MAAM,WAAW,GAAG;AAChD,mBAAW,SAAS,KAAK,OAAO;AAAA,MAClC;AAGA,YAAM,WAAW,MAAM;AACvB,UAAI,cAAc,KAAK,OAAK,EAAE,KAAK,QAAQ,CAAC,GAAG;AAC7C,mBAAW,UAAU,KAAK,OAAO;AAAA,MACnC;AAAA,IACF;AAAA,EACF;AAGA,QAAM,UAAU,IAAI,MAAM,GAAG,EAAE,IAAI;AACnC,MAAI,YAAY,YAAY,SAAS,GAAG;AACtC,eAAW,SAAS,KAAK,SAAS,MAAM,GAAG,CAAC;AAAA,EAC9C;AAEA,SAAO;AACT;AAEA,eAAe,gBAAgB,MAAqC;AAClE,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM;AAAA,MACvB;AAAA,MACA,CAAC,OAAO,UAAU,aAAa,OAAO,kBAAkB;AAAA,MACxD,EAAE,KAAK,MAAM,WAAW,OAAO,KAAK;AAAA,IACtC;AAEA,UAAM,eAAuC,CAAC;AAE9C,eAAW,QAAQ,OAAO,MAAM,IAAI,GAAG;AAErC,YAAM,QAAQ,KAAK,MAAM,wBAAwB;AACjD,UAAI,OAAO;AACT,cAAM,WAAW,MAAM,CAAC,EAAG,KAAK;AAChC,
cAAM,UAAU,SAAS,MAAM,CAAC,GAAI,EAAE;AACtC,qBAAa,QAAQ,KAAK,aAAa,QAAQ,KAAK,KAAK;AAAA,MAC3D;AAAA,IACF;AAEA,WAAO,OAAO,QAAQ,YAAY,EAC/B,KAAK,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,IAAI,CAAC,EAC5B,MAAM,GAAG,EAAE,EACX,IAAI,CAAC,CAAC,MAAM,OAAO,OAAO,EAAE,MAAM,QAAQ,EAAE;AAAA,EACjD,QAAQ;AAEN,WAAO,CAAC;AAAA,EACV;AACF;AAIA,eAAe,OAAsB;AACnC,QAAM,OAAO,QAAQ,KAAK,MAAM,CAAC;AACjC,MAAI,OAAO;AACX,QAAM,UAAoB,CAAC;AAE3B,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,QAAI,KAAK,CAAC,MAAM,YAAY,KAAK,IAAI,CAAC,GAAG;AACvC,aAAO,KAAK,IAAI,CAAC;AACjB;AAAA,IACF,WAAW,KAAK,CAAC,MAAM,eAAe,KAAK,IAAI,CAAC,GAAG;AACjD,cAAQ,KAAK,GAAG,KAAK,IAAI,CAAC,EAAG,MAAM,GAAG,CAAC;AACvC;AAAA,IACF;AAAA,EACF;AAEA,QAAM,SAAS,MAAM,eAAe,MAAM,EAAE,QAAQ,CAAC;AACrD,UAAQ,OAAO,MAAM,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AACtD;AAEA,KAAK,EAAE,MAAM,SAAO;AAClB,UAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,OAAO,GAAG,EAAE,CAAC,CAAC;AAC3D,UAAQ,KAAK,CAAC;AAChB,CAAC;","names":[]}
package/dist/trim.d.ts ADDED
@@ -0,0 +1,3 @@
1
+ /**
+  * NOTE(review): implementation lives in scripts/trim.ts, not visible from
+  * this declaration. Presumably reduces the given cluster of `paths`
+  * (relative to `projectRoot`) to a single trimmed string — confirm against
+  * the implementation.
+  */
+ declare function trimCluster(paths: string[], projectRoot: string): Promise<string>;
2
+
3
+ export { trimCluster };