nogrep 1.1.1 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/README.md +15 -11
  2. package/package.json +2 -2
  3. package/{plugin → plugins/nogrep}/.claude-plugin/plugin.json +0 -1
  4. package/{plugin → plugins/nogrep}/commands/init.md +5 -6
  5. package/{plugin → plugins/nogrep}/commands/off.md +1 -1
  6. package/{plugin → plugins/nogrep}/commands/on.md +1 -1
  7. package/plugins/nogrep/commands/query.md +23 -0
  8. package/{plugin → plugins/nogrep}/commands/status.md +1 -1
  9. package/{plugin → plugins/nogrep}/commands/update.md +3 -3
  10. package/plugins/nogrep/package.json +1 -0
  11. package/{plugin/dist/chunk-OJSJ63PH.js → plugins/nogrep/scripts/chunk-ICPV2JWV.js} +1 -1
  12. package/plugins/nogrep/scripts/chunk-ICPV2JWV.js.map +1 -0
  13. package/{plugin/dist → plugins/nogrep/scripts}/query.js +12 -10
  14. package/plugins/nogrep/scripts/query.js.map +1 -0
  15. package/{plugin/dist → plugins/nogrep/scripts}/settings.js +7 -5
  16. package/plugins/nogrep/scripts/settings.js.map +1 -0
  17. package/{plugin/dist → plugins/nogrep/scripts}/signals.js +6 -4
  18. package/plugins/nogrep/scripts/signals.js.map +1 -0
  19. package/plugins/nogrep/scripts/trim.js.map +1 -0
  20. package/plugins/nogrep/scripts/types.js +7 -0
  21. package/{plugin/dist → plugins/nogrep/scripts}/validate.js +12 -10
  22. package/plugins/nogrep/scripts/validate.js.map +1 -0
  23. package/{plugin/dist → plugins/nogrep/scripts}/write.d.ts +1 -2
  24. package/{plugin/dist → plugins/nogrep/scripts}/write.js +8 -37
  25. package/plugins/nogrep/scripts/write.js.map +1 -0
  26. package/scripts/query.ts +11 -9
  27. package/scripts/settings.ts +7 -5
  28. package/scripts/signals.ts +6 -4
  29. package/scripts/validate.ts +11 -9
  30. package/scripts/write.ts +8 -50
  31. package/plugin/commands/query.md +0 -13
  32. package/plugin/dist/chunk-OJSJ63PH.js.map +0 -1
  33. package/plugin/dist/query.js.map +0 -1
  34. package/plugin/dist/settings.js.map +0 -1
  35. package/plugin/dist/signals.js.map +0 -1
  36. package/plugin/dist/trim.js.map +0 -1
  37. package/plugin/dist/types.js +0 -7
  38. package/plugin/dist/validate.js.map +0 -1
  39. package/plugin/dist/write.js.map +0 -1
  40. package/plugin/hooks/hooks.json +0 -53
  41. package/plugin/hooks/pre-tool-use-glob.sh +0 -40
  42. package/plugin/hooks/pre-tool-use-grep.sh +0 -35
  43. package/plugin/hooks/pre-tool-use.sh +0 -37
  44. package/plugin/hooks/prompt-submit.sh +0 -26
  45. package/plugin/hooks/session-start.sh +0 -21
  46. package/plugin/templates/claude-md-patch.md +0 -8
  47. /package/{plugin/dist → plugins/nogrep/scripts}/query.d.ts +0 -0
  48. /package/{plugin/dist → plugins/nogrep/scripts}/settings.d.ts +0 -0
  49. /package/{plugin/dist → plugins/nogrep/scripts}/signals.d.ts +0 -0
  50. /package/{plugin/dist → plugins/nogrep/scripts}/trim.d.ts +0 -0
  51. /package/{plugin/dist → plugins/nogrep/scripts}/trim.js +0 -0
  52. /package/{plugin/dist → plugins/nogrep/scripts}/types.d.ts +0 -0
  53. /package/{plugin/dist → plugins/nogrep/scripts}/types.js.map +0 -0
  54. /package/{plugin/dist → plugins/nogrep/scripts}/validate.d.ts +0 -0
package/README.md CHANGED
@@ -4,7 +4,7 @@ A Claude Code plugin that gives AI agents a navigable index of any codebase, so
4
4
 
5
5
  ## What it does
6
6
 
7
- `nogrep` generates a structured `.nogrep/` directory with a reverse index and thin context nodes (markdown files). When Claude Code needs to find something, it reads 2 files instead of running 20 grep commands.
7
+ `nogrep` generates a structured `.nogrep/` directory with a reverse index and thin context nodes (markdown files). When you need Claude Code to find something, run `/nogrep:query` — it reads 2 context files instead of running 20 grep commands.
8
8
 
9
9
  ## Install
10
10
 
@@ -20,7 +20,7 @@ In Claude Code:
20
20
 
21
21
  1. Open your project in Claude Code
22
22
  2. Run `/nogrep:init` — Claude analyzes your codebase and generates the index
23
- 3. That's it. Hooks automatically inject context when Claude searches your code
23
+ 3. Ask questions with `/nogrep:query where is auth handled?`
24
24
 
25
25
  ## How it works
26
26
 
@@ -39,19 +39,11 @@ Scripts handle data collection and file I/O. Claude does all the analysis work d
39
39
  |---------|-------------|
40
40
  | `/nogrep:init` | Generate the full codebase index |
41
41
  | `/nogrep:update` | Incrementally update stale nodes |
42
- | `/nogrep:query <question>` | Manual index lookup |
42
+ | `/nogrep:query <question>` | Query the index, read matched context files, and get an answer |
43
43
  | `/nogrep:status` | Show index health and freshness |
44
44
  | `/nogrep:on` | Enable nogrep |
45
45
  | `/nogrep:off` | Disable nogrep |
46
46
 
47
- ## Hooks
48
-
49
- nogrep installs three Claude Code hooks:
50
-
51
- - **PreToolUse** — intercepts `grep`/`find`/`rg` commands and injects relevant context files
52
- - **UserPromptSubmit** — injects context for code navigation prompts
53
- - **SessionStart** — checks index freshness and warns if stale
54
-
55
47
  ## Output structure
56
48
 
57
49
  ```
@@ -74,6 +66,18 @@ nogrep stores its enabled state in your project's `.claude/` directory:
74
66
  - `.claude/settings.json` — team settings (commit to repo)
75
67
  - `.claude/settings.local.json` — personal overrides (gitignored)
76
68
 
69
+ ## Contributing
70
+
71
+ Source code lives on the `develop` branch. The `main` branch contains only the built plugin and is updated automatically by CI on every push to `develop`.
72
+
73
+ ```bash
74
+ git clone -b develop https://github.com/alirezanasseh/nogrep
75
+ cd nogrep
76
+ npm install
77
+ npm run build
78
+ npm test
79
+ ```
80
+
77
81
  ## Requirements
78
82
 
79
83
  - Node.js 20+
package/package.json CHANGED
@@ -1,13 +1,13 @@
1
1
  {
2
2
  "name": "nogrep",
3
- "version": "1.1.1",
3
+ "version": "1.2.0",
4
4
  "description": "Navigable codebase index for Claude Code — stop grepping, start navigating",
5
5
  "repository": {
6
6
  "type": "git",
7
7
  "url": "https://github.com/alirezanasseh/nogrep"
8
8
  },
9
9
  "type": "module",
10
- "files": ["plugin/dist", "plugin/commands", "plugin/hooks", "plugin/templates", "plugin/.claude-plugin", "scripts"],
10
+ "files": ["plugins/nogrep/scripts", "plugins/nogrep/commands", "plugins/nogrep/.claude-plugin", "plugins/nogrep/package.json", "scripts"],
11
11
  "scripts": {
12
12
  "build": "tsup",
13
13
  "prepublishOnly": "npm run build",
@@ -1,6 +1,5 @@
1
1
  {
2
2
  "name": "nogrep",
3
- "version": "1.0.5",
4
3
  "description": "Navigable codebase index for Claude Code — stop grepping, start navigating",
5
4
  "author": {
6
5
  "name": "Alireza Nasseh"
@@ -7,7 +7,7 @@ Initialize nogrep for this project. Follow these steps exactly in order.
7
7
  Run the signal collection script to gather project metadata:
8
8
 
9
9
  ```bash
10
- node "${CLAUDE_PLUGIN_ROOT}/dist/signals.js" --root .
10
+ node "${CLAUDE_PLUGIN_ROOT}/scripts/signals.js" --root .
11
11
  ```
12
12
 
13
13
  Save the JSON output — you will use it in Step 2.
@@ -67,7 +67,7 @@ For **each** domain cluster identified in Step 2, do the following:
67
67
  Run the trim script to get signatures-only view of the cluster's source files. Pass all source files in the cluster's path as arguments:
68
68
 
69
69
  ```bash
70
- node "${CLAUDE_PLUGIN_ROOT}/dist/trim.js" <file1> <file2> ...
70
+ node "${CLAUDE_PLUGIN_ROOT}/scripts/trim.js" <file1> <file2> ...
71
71
  ```
72
72
 
73
73
  To find the files in the cluster, use the cluster's `path` from Step 2 to list source files:
@@ -183,14 +183,13 @@ Now assemble the final input for the writer script. Create a JSON object combini
183
183
  Pipe the JSON to the writer script:
184
184
 
185
185
  ```bash
186
- echo '<YOUR_JSON>' | node "${CLAUDE_PLUGIN_ROOT}/dist/write.js" --root .
186
+ echo '<YOUR_JSON>' | node "${CLAUDE_PLUGIN_ROOT}/scripts/write.js" --root .
187
187
  ```
188
188
 
189
189
  This will:
190
190
  - Create `.nogrep/` directory with all context node files
191
191
  - Build `_index.json` (reverse index)
192
192
  - Build `_registry.json` (source path → context file mapping)
193
- - Patch `CLAUDE.md` with navigation instructions
194
193
 
195
194
  ---
196
195
 
@@ -220,7 +219,7 @@ TAXONOMY_EOF
220
219
  ## Step 7 — Enable nogrep
221
220
 
222
221
  ```bash
223
- node "${CLAUDE_PLUGIN_ROOT}/dist/settings.js" --set enabled=true
222
+ node "${CLAUDE_PLUGIN_ROOT}/scripts/settings.js" --set enabled=true
224
223
  ```
225
224
 
226
225
  ---
@@ -235,7 +234,7 @@ Tell the user:
235
234
  > - **Index:** `.nogrep/_index.json` — reverse lookup by tags, keywords, and paths
236
235
  > - **Registry:** `.nogrep/_registry.json` — maps source paths to context files
237
236
  >
238
- > nogrep is now enabled. Hooks will automatically inject context when you search.
237
+ > nogrep is now enabled.
239
238
  >
240
239
  > To update after code changes: `/nogrep:update`
241
240
  > To check index health: `/nogrep:status`
@@ -3,7 +3,7 @@ Disable nogrep for this project.
3
3
  Run this command to disable nogrep:
4
4
 
5
5
  ```bash
6
- node "${CLAUDE_PLUGIN_ROOT}/dist/settings.js" --set enabled=false
6
+ node "${CLAUDE_PLUGIN_ROOT}/scripts/settings.js" --set enabled=false
7
7
  ```
8
8
 
9
9
  Tell the user:
@@ -3,7 +3,7 @@ Enable nogrep for this project.
3
3
  Run this command to enable nogrep:
4
4
 
5
5
  ```bash
6
- node "${CLAUDE_PLUGIN_ROOT}/dist/settings.js" --set enabled=true
6
+ node "${CLAUDE_PLUGIN_ROOT}/scripts/settings.js" --set enabled=true
7
7
  ```
8
8
 
9
9
  Then check if the nogrep index exists:
@@ -0,0 +1,23 @@
1
+ ---
2
+ allowed-tools: Bash(node *), Read
3
+ ---
4
+
5
+ Answer the user's question using the nogrep codebase index. Follow these steps exactly:
6
+
7
+ ## Step 1 — Query the index
8
+
9
+ ```bash
10
+ node "${CLAUDE_PLUGIN_ROOT}/scripts/query.js" --question "$ARGUMENTS" --format summary --limit 5
11
+ ```
12
+
13
+ If no results are found, tell the user and suggest `/nogrep:init` if the index hasn't been created yet. Stop here.
14
+
15
+ ## Step 2 — Read the matched context files
16
+
17
+ For each context file listed in the results, use the Read tool to read the full file contents. These are `.nogrep/domains/*.md` or `.nogrep/flows/*.md` files in the project root.
18
+
19
+ ## Step 3 — Answer the question
20
+
21
+ Using the information from the context files (which contain file paths, public API surfaces, relationships, gotchas, and domain descriptions), answer the user's original question directly.
22
+
23
+ Do NOT launch Explore agents or do broad file searches. The context files already contain the curated knowledge needed to answer navigation and architecture questions. If the user needs to see actual source code, point them to the specific file paths listed in the context files.
@@ -3,7 +3,7 @@ Show the current status of the nogrep index.
3
3
  Run the validation script:
4
4
 
5
5
  ```bash
6
- node "${CLAUDE_PLUGIN_ROOT}/dist/validate.js" --format text
6
+ node "${CLAUDE_PLUGIN_ROOT}/scripts/validate.js" --format text
7
7
  ```
8
8
 
9
9
  If the command fails, tell the user:
@@ -5,7 +5,7 @@ Update stale nogrep context nodes based on recent changes.
5
5
  Run the validation script to find stale nodes:
6
6
 
7
7
  ```bash
8
- node "${CLAUDE_PLUGIN_ROOT}/dist/validate.js" --format json
8
+ node "${CLAUDE_PLUGIN_ROOT}/scripts/validate.js" --format json
9
9
  ```
10
10
 
11
11
  If there are no stale nodes, tell the user:
@@ -36,7 +36,7 @@ For each stale node from Step 1:
36
36
  3. Use the trimming script to get signatures:
37
37
 
38
38
  ```bash
39
- node "${CLAUDE_PLUGIN_ROOT}/dist/trim.js" <source_file_paths>
39
+ node "${CLAUDE_PLUGIN_ROOT}/scripts/trim.js" <source_file_paths>
40
40
  ```
41
41
 
42
42
  ## Step 4: Re-analyze each affected cluster
@@ -77,7 +77,7 @@ Rules:
77
77
  Combine all updated node results with any unchanged nodes. Pipe the full set as JSON to the writer:
78
78
 
79
79
  ```bash
80
- echo '<json_input>' | node "${CLAUDE_PLUGIN_ROOT}/dist/write.js" --root .
80
+ echo '<json_input>' | node "${CLAUDE_PLUGIN_ROOT}/scripts/write.js" --root .
81
81
  ```
82
82
 
83
83
  The writer automatically preserves `## Manual Notes` sections from existing files.
@@ -0,0 +1 @@
1
+ { "type": "module" }
@@ -9,4 +9,4 @@ var NogrepError = class extends Error {
9
9
  export {
10
10
  NogrepError
11
11
  };
12
- //# sourceMappingURL=chunk-OJSJ63PH.js.map
12
+ //# sourceMappingURL=chunk-ICPV2JWV.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../scripts/types.ts"],"sourcesContent":["// --- Directory / File types ---\n\nexport interface DirectoryNode {\n name: string\n path: string\n type: 'file' | 'directory'\n children?: DirectoryNode[]\n}\n\nexport interface ManifestFile {\n path: string\n type: string\n depth: number\n}\n\nexport interface ChurnEntry {\n path: string\n changes: number\n}\n\nexport interface FileSize {\n path: string\n bytes: number\n}\n\n// --- Signal collection ---\n\nexport interface SignalResult {\n directoryTree: DirectoryNode[]\n extensionMap: Record<string, number>\n manifests: ManifestFile[]\n entryPoints: string[]\n gitChurn: ChurnEntry[]\n largeFiles: FileSize[]\n envFiles: string[]\n testFiles: string[]\n}\n\n// --- Stack detection ---\n\nexport interface StackConventions {\n entryPattern: string\n testPattern: string\n configLocation: string\n}\n\nexport interface DomainCluster {\n name: string\n path: string\n confidence: number\n}\n\nexport interface StackResult {\n primaryLanguage: string\n frameworks: string[]\n architecture: 'monolith' | 'monorepo' | 'multi-repo' | 'microservice' | 'library'\n domainClusters: DomainCluster[]\n conventions: StackConventions\n stackHints: string\n dynamicTaxonomy: { domain: string[]; tech: string[] }\n}\n\n// --- Tags ---\n\nexport interface TagSet {\n domain: string[]\n layer: string[]\n tech: string[]\n concern: string[]\n type: string[]\n}\n\nexport interface Taxonomy {\n static: {\n layer: string[]\n concern: string[]\n type: string[]\n }\n dynamic: {\n domain: string[]\n tech: string[]\n }\n custom: Record<string, string[]>\n}\n\n// --- Relations ---\n\nexport interface Relation {\n id: string\n reason: string\n}\n\nexport interface ExternalDep {\n name: string\n usage: string\n}\n\nexport interface SyncMeta {\n commit: string\n timestamp: string\n srcHash: string\n}\n\n// --- Context nodes ---\n\nexport interface NodeResult {\n id: string\n title: string\n category: 'domain' | 'architecture' | 
'flow' | 'entity'\n tags: TagSet\n relatesTo: Relation[]\n inverseRelations: Relation[]\n srcPaths: string[]\n keywords: string[]\n lastSynced: SyncMeta\n purpose: string\n publicSurface: string[]\n doesNotOwn: string[]\n externalDeps: ExternalDep[]\n gotchas: string[]\n}\n\n// --- Index ---\n\nexport interface PathEntry {\n context: string\n tags: string[]\n}\n\nexport interface IndexJson {\n version: string\n generatedAt: string\n commit: string\n stack: Pick<StackResult, 'primaryLanguage' | 'frameworks' | 'architecture'>\n tags: Record<string, string[]>\n keywords: Record<string, string[]>\n paths: Record<string, PathEntry>\n}\n\n// --- Registry ---\n\nexport interface RegistryMapping {\n glob: string\n contextFile: string\n watch: boolean\n}\n\nexport interface RegistryJson {\n mappings: RegistryMapping[]\n}\n\n// --- Query ---\n\nexport interface RankedResult {\n contextFile: string\n score: number\n matchedOn: string[]\n summary: string\n}\n\n// --- Validation ---\n\nexport interface StaleResult {\n file: string\n isStale: boolean\n reason?: string\n}\n\n// --- Settings ---\n\nexport interface NogrepSettings {\n enabled: boolean\n}\n\n// --- Errors ---\n\nexport type NogrepErrorCode = 'NO_INDEX' | 'NO_GIT' | 'IO_ERROR' | 'STALE'\n\nexport class NogrepError extends Error {\n constructor(\n message: string,\n public code: NogrepErrorCode,\n ) {\n super(message)\n }\n}\n"],"mappings":";AAkLO,IAAM,cAAN,cAA0B,MAAM;AAAA,EACrC,YACE,SACO,MACP;AACA,UAAM,OAAO;AAFN;AAAA,EAGT;AACF;","names":[]}
@@ -1,6 +1,6 @@
1
1
  import {
2
2
  NogrepError
3
- } from "./chunk-OJSJ63PH.js";
3
+ } from "./chunk-ICPV2JWV.js";
4
4
 
5
5
  // scripts/query.ts
6
6
  import { readFile } from "fs/promises";
@@ -256,15 +256,17 @@ async function main() {
256
256
  break;
257
257
  }
258
258
  }
259
- main().catch((err) => {
260
- if (err instanceof NogrepError) {
261
- process.stderr.write(JSON.stringify({ error: err.message, code: err.code }) + "\n");
262
- } else {
263
- const message = err instanceof Error ? err.message : String(err);
264
- process.stderr.write(JSON.stringify({ error: message }) + "\n");
265
- }
266
- process.exitCode = 1;
267
- });
259
+ if (import.meta.url === `file://${process.argv[1]}`) {
260
+ main().catch((err) => {
261
+ if (err instanceof NogrepError) {
262
+ process.stderr.write(JSON.stringify({ error: err.message, code: err.code }) + "\n");
263
+ } else {
264
+ const message = err instanceof Error ? err.message : String(err);
265
+ process.stderr.write(JSON.stringify({ error: message }) + "\n");
266
+ }
267
+ process.exitCode = 1;
268
+ });
269
+ }
268
270
  export {
269
271
  extractTerms,
270
272
  resolveQuery
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../scripts/query.ts"],"sourcesContent":["import { readFile } from 'node:fs/promises'\nimport { join, resolve as resolvePath } from 'node:path'\nimport { parseArgs } from 'node:util'\nimport type { IndexJson, RankedResult, Taxonomy } from './types.js'\nimport { NogrepError } from './types.js'\n\n// --- Term extraction ---\n\nexport function extractTerms(\n question: string,\n taxonomy: Taxonomy,\n): { tags: string[]; keywords: string[] } {\n const words = question\n .toLowerCase()\n .replace(/[^\\w\\s-]/g, ' ')\n .split(/\\s+/)\n .filter(w => w.length > 1)\n\n const tags: string[] = []\n const keywords: string[] = []\n\n // Collect all taxonomy values for matching\n const tagLookup = new Map<string, string>()\n\n for (const val of taxonomy.static.layer) {\n tagLookup.set(val.toLowerCase(), `layer:${val}`)\n }\n for (const val of taxonomy.static.concern) {\n tagLookup.set(val.toLowerCase(), `concern:${val}`)\n }\n for (const val of taxonomy.static.type) {\n tagLookup.set(val.toLowerCase(), `type:${val}`)\n }\n for (const val of taxonomy.dynamic.domain) {\n tagLookup.set(val.toLowerCase(), `domain:${val}`)\n }\n for (const val of taxonomy.dynamic.tech) {\n tagLookup.set(val.toLowerCase(), `tech:${val}`)\n }\n for (const [cat, values] of Object.entries(taxonomy.custom)) {\n for (const val of values) {\n tagLookup.set(val.toLowerCase(), `${cat}:${val}`)\n }\n }\n\n // Stop words to skip as keywords\n const stopWords = new Set([\n 'the', 'is', 'at', 'in', 'of', 'on', 'to', 'a', 'an', 'and', 'or',\n 'for', 'it', 'do', 'does', 'how', 'what', 'where', 'which', 'when',\n 'who', 'why', 'this', 'that', 'with', 'from', 'by', 'be', 'as',\n 'are', 'was', 'were', 'been', 'has', 'have', 'had', 'not', 'but',\n 'if', 'my', 'our', 'its', 'can', 'will', 'should', 'would', 'could',\n 'about', 'after', 'work', 'works', 'use', 'uses', 'used',\n ])\n\n for (const word of words) {\n const tag = tagLookup.get(word)\n if (tag && !tags.includes(tag)) {\n 
tags.push(tag)\n }\n\n // Also check hyphenated compound matches (e.g. \"error-handling\")\n if (!tag && !stopWords.has(word)) {\n keywords.push(word)\n }\n }\n\n // Check for multi-word tag matches (e.g. \"error handling\" → \"error-handling\")\n const questionLower = question.toLowerCase()\n for (const [val, tag] of tagLookup.entries()) {\n if (val.includes('-')) {\n const spacedVersion = val.replace(/-/g, ' ')\n if (questionLower.includes(spacedVersion) && !tags.includes(tag)) {\n tags.push(tag)\n }\n if (questionLower.includes(val) && !tags.includes(tag)) {\n tags.push(tag)\n }\n }\n }\n\n return { tags, keywords }\n}\n\n// --- Resolution ---\n\nexport function resolveQuery(\n terms: { tags: string[]; keywords: string[] },\n index: IndexJson,\n limit = 5,\n): RankedResult[] {\n const scoreMap = new Map<string, { score: number; matchedOn: string[] }>()\n\n function addMatch(contextFile: string, score: number, matchLabel: string): void {\n const existing = scoreMap.get(contextFile)\n if (existing) {\n existing.score += score\n existing.matchedOn.push(matchLabel)\n } else {\n scoreMap.set(contextFile, { score, matchedOn: [matchLabel] })\n }\n }\n\n // Tag matching: +2 per match\n for (const tag of terms.tags) {\n const files = index.tags[tag]\n if (files) {\n for (const file of files) {\n addMatch(file, 2, `tag:${tag}`)\n }\n }\n }\n\n // Keyword matching: +1 per match\n for (const kw of terms.keywords) {\n const kwLower = kw.toLowerCase()\n\n // Direct keyword lookup\n const files = index.keywords[kwLower]\n if (files) {\n for (const file of files) {\n addMatch(file, 1, `keyword:${kwLower}`)\n }\n }\n\n // Also search all index keywords for partial matches\n for (const [indexKw, kwFiles] of Object.entries(index.keywords)) {\n if (indexKw === kwLower) continue // Already handled\n if (indexKw.includes(kwLower) || kwLower.includes(indexKw)) {\n for (const file of kwFiles) {\n addMatch(file, 1, `keyword:${indexKw}`)\n }\n }\n }\n }\n\n // Sort by score descending, 
then alphabetically for ties\n const results: RankedResult[] = [...scoreMap.entries()]\n .sort((a, b) => b[1].score - a[1].score || a[0].localeCompare(b[0]))\n .slice(0, limit)\n .map(([contextFile, { score, matchedOn }]) => ({\n contextFile,\n score,\n matchedOn: [...new Set(matchedOn)],\n summary: `Matched: ${[...new Set(matchedOn)].join(', ')}`,\n }))\n\n return results\n}\n\n// --- Index + taxonomy loading ---\n\nasync function loadIndex(projectRoot: string): Promise<IndexJson> {\n const indexPath = join(projectRoot, '.nogrep', '_index.json')\n try {\n const content = await readFile(indexPath, 'utf-8')\n return JSON.parse(content) as IndexJson\n } catch {\n throw new NogrepError(\n 'No .nogrep/_index.json found. Run /nogrep:init first.',\n 'NO_INDEX',\n )\n }\n}\n\nasync function loadTaxonomy(projectRoot: string): Promise<Taxonomy> {\n const taxonomyPath = join(projectRoot, '.nogrep', '_taxonomy.json')\n try {\n const content = await readFile(taxonomyPath, 'utf-8')\n return JSON.parse(content) as Taxonomy\n } catch {\n // Return default taxonomy if file doesn't exist\n return {\n static: {\n layer: ['presentation', 'business', 'data', 'infrastructure', 'cross-cutting'],\n concern: ['security', 'performance', 'caching', 'validation', 'error-handling', 'idempotency', 'observability'],\n type: ['module', 'flow', 'entity', 'integration', 'config', 'ui', 'test'],\n },\n dynamic: { domain: [], tech: [] },\n custom: {},\n }\n }\n}\n\nfunction buildTaxonomyFromIndex(index: IndexJson, baseTaxonomy: Taxonomy): Taxonomy {\n // Extract dynamic domain and tech values from the index tags\n const domains = new Set<string>(baseTaxonomy.dynamic.domain)\n const techs = new Set<string>(baseTaxonomy.dynamic.tech)\n\n for (const tagKey of Object.keys(index.tags)) {\n const [category, value] = tagKey.split(':')\n if (!category || !value) continue\n if (category === 'domain') domains.add(value)\n if (category === 'tech') techs.add(value)\n }\n\n return {\n ...baseTaxonomy,\n dynamic: 
{\n domain: [...domains],\n tech: [...techs],\n },\n }\n}\n\n// --- Formatting ---\n\nfunction formatPaths(results: RankedResult[]): string {\n return results.map(r => r.contextFile).join('\\n')\n}\n\nfunction formatJson(results: RankedResult[]): string {\n return JSON.stringify(results, null, 2)\n}\n\nfunction formatSummary(results: RankedResult[]): string {\n if (results.length === 0) return 'No matching context files found.'\n return results\n .map(r => `- ${r.contextFile} (score: ${r.score}) — ${r.summary}`)\n .join('\\n')\n}\n\n// --- CLI ---\n\nasync function main(): Promise<void> {\n const { values } = parseArgs({\n options: {\n tags: { type: 'string' },\n keywords: { type: 'string' },\n question: { type: 'string' },\n format: { type: 'string', default: 'json' },\n limit: { type: 'string', default: '5' },\n root: { type: 'string', default: process.cwd() },\n },\n strict: true,\n })\n\n const root = resolvePath(values.root ?? process.cwd())\n const limit = parseInt(values.limit ?? '5', 10)\n const format = values.format ?? 'json'\n\n const index = await loadIndex(root)\n const baseTaxonomy = await loadTaxonomy(root)\n const taxonomy = buildTaxonomyFromIndex(index, baseTaxonomy)\n\n let terms: { tags: string[]; keywords: string[] }\n\n if (values.question) {\n terms = extractTerms(values.question, taxonomy)\n } else if (values.tags || values.keywords) {\n const tags = values.tags\n ? values.tags.split(',').map(t => t.trim()).filter(Boolean)\n : []\n const keywords = values.keywords\n ? 
values.keywords.split(',').map(k => k.trim()).filter(Boolean)\n : []\n terms = { tags, keywords }\n } else {\n process.stderr.write(\n JSON.stringify({ error: 'Usage: node query.js --tags <tags> | --keywords <words> | --question <text> [--format paths|json|summary] [--limit N]' }) + '\\n',\n )\n process.exitCode = 1\n return\n }\n\n const results = resolveQuery(terms, index, limit)\n\n switch (format) {\n case 'paths':\n process.stdout.write(formatPaths(results) + '\\n')\n break\n case 'summary':\n process.stdout.write(formatSummary(results) + '\\n')\n break\n case 'json':\n default:\n process.stdout.write(formatJson(results) + '\\n')\n break\n }\n}\n\nif (import.meta.url === `file://${process.argv[1]}`) {\n main().catch((err: unknown) => {\n if (err instanceof NogrepError) {\n process.stderr.write(JSON.stringify({ error: err.message, code: err.code }) + '\\n')\n } else {\n const message = err instanceof Error ? err.message : String(err)\n process.stderr.write(JSON.stringify({ error: message }) + '\\n')\n }\n process.exitCode = 1\n 
})\n}\n"],"mappings":";;;;;AAAA,SAAS,gBAAgB;AACzB,SAAS,MAAM,WAAW,mBAAmB;AAC7C,SAAS,iBAAiB;AAMnB,SAAS,aACd,UACA,UACwC;AACxC,QAAM,QAAQ,SACX,YAAY,EACZ,QAAQ,aAAa,GAAG,EACxB,MAAM,KAAK,EACX,OAAO,OAAK,EAAE,SAAS,CAAC;AAE3B,QAAM,OAAiB,CAAC;AACxB,QAAM,WAAqB,CAAC;AAG5B,QAAM,YAAY,oBAAI,IAAoB;AAE1C,aAAW,OAAO,SAAS,OAAO,OAAO;AACvC,cAAU,IAAI,IAAI,YAAY,GAAG,SAAS,GAAG,EAAE;AAAA,EACjD;AACA,aAAW,OAAO,SAAS,OAAO,SAAS;AACzC,cAAU,IAAI,IAAI,YAAY,GAAG,WAAW,GAAG,EAAE;AAAA,EACnD;AACA,aAAW,OAAO,SAAS,OAAO,MAAM;AACtC,cAAU,IAAI,IAAI,YAAY,GAAG,QAAQ,GAAG,EAAE;AAAA,EAChD;AACA,aAAW,OAAO,SAAS,QAAQ,QAAQ;AACzC,cAAU,IAAI,IAAI,YAAY,GAAG,UAAU,GAAG,EAAE;AAAA,EAClD;AACA,aAAW,OAAO,SAAS,QAAQ,MAAM;AACvC,cAAU,IAAI,IAAI,YAAY,GAAG,QAAQ,GAAG,EAAE;AAAA,EAChD;AACA,aAAW,CAAC,KAAK,MAAM,KAAK,OAAO,QAAQ,SAAS,MAAM,GAAG;AAC3D,eAAW,OAAO,QAAQ;AACxB,gBAAU,IAAI,IAAI,YAAY,GAAG,GAAG,GAAG,IAAI,GAAG,EAAE;AAAA,IAClD;AAAA,EACF;AAGA,QAAM,YAAY,oBAAI,IAAI;AAAA,IACxB;AAAA,IAAO;AAAA,IAAM;AAAA,IAAM;AAAA,IAAM;AAAA,IAAM;AAAA,IAAM;AAAA,IAAM;AAAA,IAAK;AAAA,IAAM;AAAA,IAAO;AAAA,IAC7D;AAAA,IAAO;AAAA,IAAM;AAAA,IAAM;AAAA,IAAQ;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAS;AAAA,IAAS;AAAA,IAC5D;AAAA,IAAO;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAM;AAAA,IAAM;AAAA,IAC1D;AAAA,IAAO;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAO;AAAA,IAAO;AAAA,IAC3D;AAAA,IAAM;AAAA,IAAM;AAAA,IAAO;AAAA,IAAO;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAU;AAAA,IAAS;AAAA,IAC5D;AAAA,IAAS;AAAA,IAAS;AAAA,IAAQ;AAAA,IAAS;AAAA,IAAO;AAAA,IAAQ;AAAA,EACpD,CAAC;AAED,aAAW,QAAQ,OAAO;AACxB,UAAM,MAAM,UAAU,IAAI,IAAI;AAC9B,QAAI,OAAO,CAAC,KAAK,SAAS,GAAG,GAAG;AAC9B,WAAK,KAAK,GAAG;AAAA,IACf;AAGA,QAAI,CAAC,OAAO,CAAC,UAAU,IAAI,IAAI,GAAG;AAChC,eAAS,KAAK,IAAI;AAAA,IACpB;AAAA,EACF;AAGA,QAAM,gBAAgB,SAAS,YAAY;AAC3C,aAAW,CAAC,KAAK,GAAG,KAAK,UAAU,QAAQ,GAAG;AAC5C,QAAI,IAAI,SAAS,GAAG,GAAG;AACrB,YAAM,gBAAgB,IAAI,QAAQ,MAAM,GAAG;AAC3C,UAAI,cAAc,SAAS,aAAa,KAAK,CAAC,KAAK,SAAS,GAAG,GAAG;AAChE,aAAK,KAAK,GAAG;AAAA,MACf;AACA,UAAI,cAAc,SAAS,GAAG,KAAK,CAAC,KAAK,SAAS,GAAG,GAAG;AACtD,aAAK,KAAK,GAAG;AAAA,MACf;AAAA,IACF;AAAA,E
ACF;AAEA,SAAO,EAAE,MAAM,SAAS;AAC1B;AAIO,SAAS,aACd,OACA,OACA,QAAQ,GACQ;AAChB,QAAM,WAAW,oBAAI,IAAoD;AAEzE,WAAS,SAAS,aAAqB,OAAe,YAA0B;AAC9E,UAAM,WAAW,SAAS,IAAI,WAAW;AACzC,QAAI,UAAU;AACZ,eAAS,SAAS;AAClB,eAAS,UAAU,KAAK,UAAU;AAAA,IACpC,OAAO;AACL,eAAS,IAAI,aAAa,EAAE,OAAO,WAAW,CAAC,UAAU,EAAE,CAAC;AAAA,IAC9D;AAAA,EACF;AAGA,aAAW,OAAO,MAAM,MAAM;AAC5B,UAAM,QAAQ,MAAM,KAAK,GAAG;AAC5B,QAAI,OAAO;AACT,iBAAW,QAAQ,OAAO;AACxB,iBAAS,MAAM,GAAG,OAAO,GAAG,EAAE;AAAA,MAChC;AAAA,IACF;AAAA,EACF;AAGA,aAAW,MAAM,MAAM,UAAU;AAC/B,UAAM,UAAU,GAAG,YAAY;AAG/B,UAAM,QAAQ,MAAM,SAAS,OAAO;AACpC,QAAI,OAAO;AACT,iBAAW,QAAQ,OAAO;AACxB,iBAAS,MAAM,GAAG,WAAW,OAAO,EAAE;AAAA,MACxC;AAAA,IACF;AAGA,eAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,MAAM,QAAQ,GAAG;AAC/D,UAAI,YAAY,QAAS;AACzB,UAAI,QAAQ,SAAS,OAAO,KAAK,QAAQ,SAAS,OAAO,GAAG;AAC1D,mBAAW,QAAQ,SAAS;AAC1B,mBAAS,MAAM,GAAG,WAAW,OAAO,EAAE;AAAA,QACxC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,UAA0B,CAAC,GAAG,SAAS,QAAQ,CAAC,EACnD,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,QAAQ,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,cAAc,EAAE,CAAC,CAAC,CAAC,EAClE,MAAM,GAAG,KAAK,EACd,IAAI,CAAC,CAAC,aAAa,EAAE,OAAO,UAAU,CAAC,OAAO;AAAA,IAC7C;AAAA,IACA;AAAA,IACA,WAAW,CAAC,GAAG,IAAI,IAAI,SAAS,CAAC;AAAA,IACjC,SAAS,YAAY,CAAC,GAAG,IAAI,IAAI,SAAS,CAAC,EAAE,KAAK,IAAI,CAAC;AAAA,EACzD,EAAE;AAEJ,SAAO;AACT;AAIA,eAAe,UAAU,aAAyC;AAChE,QAAM,YAAY,KAAK,aAAa,WAAW,aAAa;AAC5D,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,WAAW,OAAO;AACjD,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,QAAQ;AACN,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;AAEA,eAAe,aAAa,aAAwC;AAClE,QAAM,eAAe,KAAK,aAAa,WAAW,gBAAgB;AAClE,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,cAAc,OAAO;AACpD,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,QAAQ;AAEN,WAAO;AAAA,MACL,QAAQ;AAAA,QACN,OAAO,CAAC,gBAAgB,YAAY,QAAQ,kBAAkB,eAAe;AAAA,QAC7E,SAAS,CAAC,YAAY,eAAe,WAAW,cAAc,kBAAkB,eAAe,eAAe;AAAA,QAC9G,MAAM,CAAC,UAAU,QAAQ,UAAU,eAAe,UAAU,MAAM,MAAM;AAAA,MAC1E;AAAA,MACA,SAAS,EAAE,QAAQ,CAAC,GAAG,MAAM,CAAC,EAAE;AAAA,MAChC,QAAQ,CAAC;AAAA,IACX;AAAA,EACF;AACF;AAEA,SAAS,uBAAuB,OAAkB,cAAkC;AAElF,QAAM,UAAU,IAAI,IAAY,aAAa,QAAQ,MAAM;AAC3D,QAAM,QAAQ,IAAI,
IAAY,aAAa,QAAQ,IAAI;AAEvD,aAAW,UAAU,OAAO,KAAK,MAAM,IAAI,GAAG;AAC5C,UAAM,CAAC,UAAU,KAAK,IAAI,OAAO,MAAM,GAAG;AAC1C,QAAI,CAAC,YAAY,CAAC,MAAO;AACzB,QAAI,aAAa,SAAU,SAAQ,IAAI,KAAK;AAC5C,QAAI,aAAa,OAAQ,OAAM,IAAI,KAAK;AAAA,EAC1C;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,SAAS;AAAA,MACP,QAAQ,CAAC,GAAG,OAAO;AAAA,MACnB,MAAM,CAAC,GAAG,KAAK;AAAA,IACjB;AAAA,EACF;AACF;AAIA,SAAS,YAAY,SAAiC;AACpD,SAAO,QAAQ,IAAI,OAAK,EAAE,WAAW,EAAE,KAAK,IAAI;AAClD;AAEA,SAAS,WAAW,SAAiC;AACnD,SAAO,KAAK,UAAU,SAAS,MAAM,CAAC;AACxC;AAEA,SAAS,cAAc,SAAiC;AACtD,MAAI,QAAQ,WAAW,EAAG,QAAO;AACjC,SAAO,QACJ,IAAI,OAAK,KAAK,EAAE,WAAW,YAAY,EAAE,KAAK,YAAO,EAAE,OAAO,EAAE,EAChE,KAAK,IAAI;AACd;AAIA,eAAe,OAAsB;AACnC,QAAM,EAAE,OAAO,IAAI,UAAU;AAAA,IAC3B,SAAS;AAAA,MACP,MAAM,EAAE,MAAM,SAAS;AAAA,MACvB,UAAU,EAAE,MAAM,SAAS;AAAA,MAC3B,UAAU,EAAE,MAAM,SAAS;AAAA,MAC3B,QAAQ,EAAE,MAAM,UAAU,SAAS,OAAO;AAAA,MAC1C,OAAO,EAAE,MAAM,UAAU,SAAS,IAAI;AAAA,MACtC,MAAM,EAAE,MAAM,UAAU,SAAS,QAAQ,IAAI,EAAE;AAAA,IACjD;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AAED,QAAM,OAAO,YAAY,OAAO,QAAQ,QAAQ,IAAI,CAAC;AACrD,QAAM,QAAQ,SAAS,OAAO,SAAS,KAAK,EAAE;AAC9C,QAAM,SAAS,OAAO,UAAU;AAEhC,QAAM,QAAQ,MAAM,UAAU,IAAI;AAClC,QAAM,eAAe,MAAM,aAAa,IAAI;AAC5C,QAAM,WAAW,uBAAuB,OAAO,YAAY;AAE3D,MAAI;AAEJ,MAAI,OAAO,UAAU;AACnB,YAAQ,aAAa,OAAO,UAAU,QAAQ;AAAA,EAChD,WAAW,OAAO,QAAQ,OAAO,UAAU;AACzC,UAAM,OAAO,OAAO,OAChB,OAAO,KAAK,MAAM,GAAG,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC,EAAE,OAAO,OAAO,IACxD,CAAC;AACL,UAAM,WAAW,OAAO,WACpB,OAAO,SAAS,MAAM,GAAG,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC,EAAE,OAAO,OAAO,IAC5D,CAAC;AACL,YAAQ,EAAE,MAAM,SAAS;AAAA,EAC3B,OAAO;AACL,YAAQ,OAAO;AAAA,MACb,KAAK,UAAU,EAAE,OAAO,wHAAwH,CAAC,IAAI;AAAA,IACvJ;AACA,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,QAAM,UAAU,aAAa,OAAO,OAAO,KAAK;AAEhD,UAAQ,QAAQ;AAAA,IACd,KAAK;AACH,cAAQ,OAAO,MAAM,YAAY,OAAO,IAAI,IAAI;AAChD;AAAA,IACF,KAAK;AACH,cAAQ,OAAO,MAAM,cAAc,OAAO,IAAI,IAAI;AAClD;AAAA,IACF,KAAK;AAAA,IACL;AACE,cAAQ,OAAO,MAAM,WAAW,OAAO,IAAI,IAAI;AAC/C;AAAA,EACJ;AACF;AAEA,IAAI,YAAY,QAAQ,UAAU,QAAQ,KAAK,CAAC,CAAC,IAAI;AACnD,OAAK,EAAE,MAAM,CAAC,QAAiB;AAC7B,QAAI,eAAe,aAAa;AAC9B,cAAQ,OAAO,
MAAM,KAAK,UAAU,EAAE,OAAO,IAAI,SAAS,MAAM,IAAI,KAAK,CAAC,IAAI,IAAI;AAAA,IACpF,OAAO;AACL,YAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,cAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,QAAQ,CAAC,IAAI,IAAI;AAAA,IAChE;AACA,YAAQ,WAAW;AAAA,EACrB,CAAC;AACH;","names":[]}
@@ -63,11 +63,13 @@ async function main() {
63
63
  process.stderr.write(JSON.stringify({ error: "Usage: node settings.js --set enabled=true [--local] | --get" }) + "\n");
64
64
  process.exitCode = 1;
65
65
  }
66
- main().catch((err) => {
67
- const message = err instanceof Error ? err.message : String(err);
68
- process.stderr.write(JSON.stringify({ error: message }) + "\n");
69
- process.exitCode = 1;
70
- });
66
+ if (import.meta.url === `file://${process.argv[1]}`) {
67
+ main().catch((err) => {
68
+ const message = err instanceof Error ? err.message : String(err);
69
+ process.stderr.write(JSON.stringify({ error: message }) + "\n");
70
+ process.exitCode = 1;
71
+ });
72
+ }
71
73
  export {
72
74
  readSettings,
73
75
  writeSettings
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../scripts/settings.ts"],"sourcesContent":["import { readFile, writeFile, mkdir } from 'node:fs/promises'\nimport { join } from 'node:path'\nimport { parseArgs } from 'node:util'\nimport type { NogrepSettings } from './types.js'\n\nconst SETTINGS_FILE = '.claude/settings.json'\nconst SETTINGS_LOCAL_FILE = '.claude/settings.local.json'\n\ninterface SettingsJson {\n nogrep?: Partial<NogrepSettings>\n [key: string]: unknown\n}\n\nasync function readJsonFile(path: string): Promise<SettingsJson> {\n try {\n const content = await readFile(path, 'utf-8')\n return JSON.parse(content) as SettingsJson\n } catch {\n return {}\n }\n}\n\nasync function ensureDir(dir: string): Promise<void> {\n await mkdir(dir, { recursive: true })\n}\n\nexport async function readSettings(projectRoot: string): Promise<NogrepSettings> {\n const sharedPath = join(projectRoot, SETTINGS_FILE)\n const localPath = join(projectRoot, SETTINGS_LOCAL_FILE)\n\n const shared = await readJsonFile(sharedPath)\n const local = await readJsonFile(localPath)\n\n const enabled =\n local.nogrep?.enabled ?? shared.nogrep?.enabled ?? false\n\n return { enabled }\n}\n\nexport async function writeSettings(\n projectRoot: string,\n settings: Partial<NogrepSettings>,\n local?: boolean,\n): Promise<void> {\n const filePath = join(\n projectRoot,\n local ? SETTINGS_LOCAL_FILE : SETTINGS_FILE,\n )\n\n await ensureDir(join(projectRoot, '.claude'))\n\n const existing = await readJsonFile(filePath)\n existing.nogrep = { ...existing.nogrep, ...settings }\n\n await writeFile(filePath, JSON.stringify(existing, null, 2) + '\\n', 'utf-8')\n}\n\n// CLI interface\nasync function main(): Promise<void> {\n const { values } = parseArgs({\n options: {\n set: { type: 'string' },\n get: { type: 'boolean', default: false },\n local: { type: 'boolean', default: false },\n root: { type: 'string', default: process.cwd() },\n },\n strict: true,\n })\n\n const root = values.root ?? 
process.cwd()\n\n if (values.get) {\n const settings = await readSettings(root)\n process.stdout.write(JSON.stringify(settings, null, 2) + '\\n')\n return\n }\n\n if (values.set) {\n const [key, value] = values.set.split('=')\n if (key === 'enabled') {\n const enabled = value === 'true'\n await writeSettings(root, { enabled }, values.local)\n } else {\n process.stderr.write(JSON.stringify({ error: `Unknown setting: ${key}` }) + '\\n')\n process.exitCode = 1\n }\n return\n }\n\n process.stderr.write(JSON.stringify({ error: 'Usage: node settings.js --set enabled=true [--local] | --get' }) + '\\n')\n process.exitCode = 1\n}\n\nif (import.meta.url === `file://${process.argv[1]}`) {\n main().catch((err: unknown) => {\n const message = err instanceof Error ? err.message : String(err)\n process.stderr.write(JSON.stringify({ error: message }) + '\\n')\n process.exitCode = 1\n })\n}\n"],"mappings":";AAAA,SAAS,UAAU,WAAW,aAAa;AAC3C,SAAS,YAAY;AACrB,SAAS,iBAAiB;AAG1B,IAAM,gBAAgB;AACtB,IAAM,sBAAsB;AAO5B,eAAe,aAAa,MAAqC;AAC/D,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,MAAM,OAAO;AAC5C,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAEA,eAAe,UAAU,KAA4B;AACnD,QAAM,MAAM,KAAK,EAAE,WAAW,KAAK,CAAC;AACtC;AAEA,eAAsB,aAAa,aAA8C;AAC/E,QAAM,aAAa,KAAK,aAAa,aAAa;AAClD,QAAM,YAAY,KAAK,aAAa,mBAAmB;AAEvD,QAAM,SAAS,MAAM,aAAa,UAAU;AAC5C,QAAM,QAAQ,MAAM,aAAa,SAAS;AAE1C,QAAM,UACJ,MAAM,QAAQ,WAAW,OAAO,QAAQ,WAAW;AAErD,SAAO,EAAE,QAAQ;AACnB;AAEA,eAAsB,cACpB,aACA,UACA,OACe;AACf,QAAM,WAAW;AAAA,IACf;AAAA,IACA,QAAQ,sBAAsB;AAAA,EAChC;AAEA,QAAM,UAAU,KAAK,aAAa,SAAS,CAAC;AAE5C,QAAM,WAAW,MAAM,aAAa,QAAQ;AAC5C,WAAS,SAAS,EAAE,GAAG,SAAS,QAAQ,GAAG,SAAS;AAEpD,QAAM,UAAU,UAAU,KAAK,UAAU,UAAU,MAAM,CAAC,IAAI,MAAM,OAAO;AAC7E;AAGA,eAAe,OAAsB;AACnC,QAAM,EAAE,OAAO,IAAI,UAAU;AAAA,IAC3B,SAAS;AAAA,MACP,KAAK,EAAE,MAAM,SAAS;AAAA,MACtB,KAAK,EAAE,MAAM,WAAW,SAAS,MAAM;AAAA,MACvC,OAAO,EAAE,MAAM,WAAW,SAAS,MAAM;AAAA,MACzC,MAAM,EAAE,MAAM,UAAU,SAAS,QAAQ,IAAI,EAAE;AAAA,IACjD;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AAED,QAAM,OAAO,OAAO,QAAQ,QAAQ,IAAI;A
AExC,MAAI,OAAO,KAAK;AACd,UAAM,WAAW,MAAM,aAAa,IAAI;AACxC,YAAQ,OAAO,MAAM,KAAK,UAAU,UAAU,MAAM,CAAC,IAAI,IAAI;AAC7D;AAAA,EACF;AAEA,MAAI,OAAO,KAAK;AACd,UAAM,CAAC,KAAK,KAAK,IAAI,OAAO,IAAI,MAAM,GAAG;AACzC,QAAI,QAAQ,WAAW;AACrB,YAAM,UAAU,UAAU;AAC1B,YAAM,cAAc,MAAM,EAAE,QAAQ,GAAG,OAAO,KAAK;AAAA,IACrD,OAAO;AACL,cAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,oBAAoB,GAAG,GAAG,CAAC,IAAI,IAAI;AAChF,cAAQ,WAAW;AAAA,IACrB;AACA;AAAA,EACF;AAEA,UAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,+DAA+D,CAAC,IAAI,IAAI;AACrH,UAAQ,WAAW;AACrB;AAEA,IAAI,YAAY,QAAQ,UAAU,QAAQ,KAAK,CAAC,CAAC,IAAI;AACnD,OAAK,EAAE,MAAM,CAAC,QAAiB;AAC7B,UAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,YAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,QAAQ,CAAC,IAAI,IAAI;AAC9D,YAAQ,WAAW;AAAA,EACrB,CAAC;AACH;","names":[]}
@@ -164,10 +164,12 @@ async function main() {
164
164
  const result = await collectSignals(root, { exclude });
165
165
  process.stdout.write(JSON.stringify(result, null, 2));
166
166
  }
167
- main().catch((err) => {
168
- process.stderr.write(JSON.stringify({ error: String(err) }));
169
- process.exit(1);
170
- });
167
+ if (import.meta.url === `file://${process.argv[1]}`) {
168
+ main().catch((err) => {
169
+ process.stderr.write(JSON.stringify({ error: String(err) }));
170
+ process.exit(1);
171
+ });
172
+ }
171
173
  export {
172
174
  collectSignals
173
175
  };
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../scripts/signals.ts"],"sourcesContent":["import { readdir, stat, readFile } from 'fs/promises'\nimport { join, extname, relative, resolve } from 'path'\nimport { execFile } from 'child_process'\nimport { promisify } from 'util'\nimport type { SignalResult, DirectoryNode, ManifestFile, ChurnEntry, FileSize } from './types.js'\n\nconst execFileAsync = promisify(execFile)\n\nconst SKIP_DIRS = new Set([\n 'node_modules', 'dist', 'build', '.git', 'coverage',\n '.next', '.nuxt', '__pycache__', '.venv', 'venv',\n '.idea', '.vscode', '.nogrep',\n])\n\nconst MANIFEST_NAMES: Record<string, string> = {\n 'package.json': 'npm',\n 'requirements.txt': 'pip',\n 'pom.xml': 'maven',\n 'go.mod': 'go',\n 'Podfile': 'cocoapods',\n 'Cargo.toml': 'cargo',\n 'pubspec.yaml': 'flutter',\n 'composer.json': 'composer',\n}\n\nconst ENTRY_NAMES = new Set(['main', 'index', 'app', 'server'])\n\nconst TEST_PATTERNS = [\n /\\.test\\.\\w+$/,\n /\\.spec\\.\\w+$/,\n /_test\\.\\w+$/,\n /^test_.*\\.py$/,\n]\n\ninterface CollectOptions {\n exclude?: string[]\n maxDepth?: number\n}\n\nexport async function collectSignals(\n root: string,\n options: CollectOptions = {},\n): Promise<SignalResult> {\n const absRoot = resolve(root)\n const maxDepth = options.maxDepth ?? 4\n const extraSkip = new Set(options.exclude ?? 
[])\n\n const allFiles: { path: string; bytes: number }[] = []\n const extensionMap: Record<string, number> = {}\n const manifests: ManifestFile[] = []\n const entryPoints: string[] = []\n const envFiles: string[] = []\n const testFiles: string[] = []\n\n const directoryTree = await walkDirectory(absRoot, absRoot, 0, maxDepth, extraSkip, {\n allFiles,\n extensionMap,\n manifests,\n entryPoints,\n envFiles,\n testFiles,\n })\n\n const gitChurn = await collectGitChurn(absRoot)\n\n const largeFiles = allFiles\n .sort((a, b) => b.bytes - a.bytes)\n .slice(0, 20)\n .map(f => ({ path: f.path, bytes: f.bytes }))\n\n return {\n directoryTree,\n extensionMap,\n manifests,\n entryPoints,\n gitChurn,\n largeFiles,\n envFiles,\n testFiles,\n }\n}\n\ninterface Collectors {\n allFiles: { path: string; bytes: number }[]\n extensionMap: Record<string, number>\n manifests: ManifestFile[]\n entryPoints: string[]\n envFiles: string[]\n testFiles: string[]\n}\n\nasync function walkDirectory(\n dir: string,\n root: string,\n depth: number,\n maxDepth: number,\n extraSkip: Set<string>,\n collectors: Collectors,\n): Promise<DirectoryNode[]> {\n if (depth > maxDepth) return []\n\n let entries\n try {\n entries = await readdir(dir, { withFileTypes: true })\n } catch {\n return []\n }\n\n const nodes: DirectoryNode[] = []\n\n for (const entry of entries) {\n const fullPath = join(dir, entry.name)\n const relPath = relative(root, fullPath)\n\n if (entry.isDirectory()) {\n if (SKIP_DIRS.has(entry.name) || extraSkip.has(entry.name)) continue\n\n const children = await walkDirectory(fullPath, root, depth + 1, maxDepth, extraSkip, collectors)\n nodes.push({ name: entry.name, path: relPath, type: 'directory', children })\n } else if (entry.isFile()) {\n nodes.push({ name: entry.name, path: relPath, type: 'file' })\n\n let fileBytes = 0\n try {\n const s = await stat(fullPath)\n fileBytes = s.size\n } catch {\n // skip\n }\n\n collectors.allFiles.push({ path: relPath, bytes: fileBytes })\n\n const 
ext = extname(entry.name)\n if (ext) {\n collectors.extensionMap[ext] = (collectors.extensionMap[ext] ?? 0) + 1\n }\n\n // Manifest check\n if (entry.name in MANIFEST_NAMES) {\n collectors.manifests.push({\n path: relPath,\n type: MANIFEST_NAMES[entry.name]!,\n depth,\n })\n }\n\n // Entry point check — root or src/ level\n if (depth <= 1 || (depth === 2 && dir.endsWith('/src'))) {\n const nameWithoutExt = entry.name.replace(/\\.\\w+$/, '')\n if (ENTRY_NAMES.has(nameWithoutExt)) {\n collectors.entryPoints.push(relPath)\n }\n }\n\n // Env files\n if (entry.name.startsWith('.env')) {\n collectors.envFiles.push(relPath)\n }\n\n // Config directories are handled at directory level\n // But we also detect config files at root\n if (depth === 0 && entry.name.match(/^config\\./)) {\n collectors.envFiles.push(relPath)\n }\n\n // Test files\n const fileName = entry.name\n if (TEST_PATTERNS.some(p => p.test(fileName))) {\n collectors.testFiles.push(relPath)\n }\n }\n }\n\n // Check if this directory is a config directory\n const dirName = dir.split('/').pop()\n if (dirName === 'config' && depth <= 2) {\n collectors.envFiles.push(relative(root, dir))\n }\n\n return nodes\n}\n\nasync function collectGitChurn(root: string): Promise<ChurnEntry[]> {\n try {\n const { stdout } = await execFileAsync(\n 'git',\n ['log', '--stat', '--oneline', '-50', '--pretty=format:'],\n { cwd: root, maxBuffer: 1024 * 1024 },\n )\n\n const changeCounts: Record<string, number> = {}\n\n for (const line of stdout.split('\\n')) {\n // Match lines like: src/billing/service.ts | 42 +++---\n const match = line.match(/^\\s+(.+?)\\s+\\|\\s+(\\d+)/)\n if (match) {\n const filePath = match[1]!.trim()\n const changes = parseInt(match[2]!, 10)\n changeCounts[filePath] = (changeCounts[filePath] ?? 
0) + changes\n }\n }\n\n return Object.entries(changeCounts)\n .sort(([, a], [, b]) => b - a)\n .slice(0, 20)\n .map(([path, changes]) => ({ path, changes }))\n } catch {\n // No git or git log fails — return empty\n return []\n }\n}\n\n// --- CLI interface ---\n\nasync function main(): Promise<void> {\n const args = process.argv.slice(2)\n let root = '.'\n const exclude: string[] = []\n\n for (let i = 0; i < args.length; i++) {\n if (args[i] === '--root' && args[i + 1]) {\n root = args[i + 1]!\n i++\n } else if (args[i] === '--exclude' && args[i + 1]) {\n exclude.push(...args[i + 1]!.split(','))\n i++\n }\n }\n\n const result = await collectSignals(root, { exclude })\n process.stdout.write(JSON.stringify(result, null, 2))\n}\n\nif (import.meta.url === `file://${process.argv[1]}`) {\n main().catch(err => {\n process.stderr.write(JSON.stringify({ error: String(err) }))\n process.exit(1)\n })\n}\n"],"mappings":";AAAA,SAAS,SAAS,YAAsB;AACxC,SAAS,MAAM,SAAS,UAAU,eAAe;AACjD,SAAS,gBAAgB;AACzB,SAAS,iBAAiB;AAG1B,IAAM,gBAAgB,UAAU,QAAQ;AAExC,IAAM,YAAY,oBAAI,IAAI;AAAA,EACxB;AAAA,EAAgB;AAAA,EAAQ;AAAA,EAAS;AAAA,EAAQ;AAAA,EACzC;AAAA,EAAS;AAAA,EAAS;AAAA,EAAe;AAAA,EAAS;AAAA,EAC1C;AAAA,EAAS;AAAA,EAAW;AACtB,CAAC;AAED,IAAM,iBAAyC;AAAA,EAC7C,gBAAgB;AAAA,EAChB,oBAAoB;AAAA,EACpB,WAAW;AAAA,EACX,UAAU;AAAA,EACV,WAAW;AAAA,EACX,cAAc;AAAA,EACd,gBAAgB;AAAA,EAChB,iBAAiB;AACnB;AAEA,IAAM,cAAc,oBAAI,IAAI,CAAC,QAAQ,SAAS,OAAO,QAAQ,CAAC;AAE9D,IAAM,gBAAgB;AAAA,EACpB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAOA,eAAsB,eACpB,MACA,UAA0B,CAAC,GACJ;AACvB,QAAM,UAAU,QAAQ,IAAI;AAC5B,QAAM,WAAW,QAAQ,YAAY;AACrC,QAAM,YAAY,IAAI,IAAI,QAAQ,WAAW,CAAC,CAAC;AAE/C,QAAM,WAA8C,CAAC;AACrD,QAAM,eAAuC,CAAC;AAC9C,QAAM,YAA4B,CAAC;AACnC,QAAM,cAAwB,CAAC;AAC/B,QAAM,WAAqB,CAAC;AAC5B,QAAM,YAAsB,CAAC;AAE7B,QAAM,gBAAgB,MAAM,cAAc,SAAS,SAAS,GAAG,UAAU,WAAW;AAAA,IAClF;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,WAAW,MAAM,gBAAgB,OAAO;AAE9C,QAAM,aAAa,SAChB,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAChC,MAAM,GAAG,EAAE,E
ACX,IAAI,QAAM,EAAE,MAAM,EAAE,MAAM,OAAO,EAAE,MAAM,EAAE;AAE9C,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAWA,eAAe,cACb,KACA,MACA,OACA,UACA,WACA,YAC0B;AAC1B,MAAI,QAAQ,SAAU,QAAO,CAAC;AAE9B,MAAI;AACJ,MAAI;AACF,cAAU,MAAM,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAAA,EACtD,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,QAAyB,CAAC;AAEhC,aAAW,SAAS,SAAS;AAC3B,UAAM,WAAW,KAAK,KAAK,MAAM,IAAI;AACrC,UAAM,UAAU,SAAS,MAAM,QAAQ;AAEvC,QAAI,MAAM,YAAY,GAAG;AACvB,UAAI,UAAU,IAAI,MAAM,IAAI,KAAK,UAAU,IAAI,MAAM,IAAI,EAAG;AAE5D,YAAM,WAAW,MAAM,cAAc,UAAU,MAAM,QAAQ,GAAG,UAAU,WAAW,UAAU;AAC/F,YAAM,KAAK,EAAE,MAAM,MAAM,MAAM,MAAM,SAAS,MAAM,aAAa,SAAS,CAAC;AAAA,IAC7E,WAAW,MAAM,OAAO,GAAG;AACzB,YAAM,KAAK,EAAE,MAAM,MAAM,MAAM,MAAM,SAAS,MAAM,OAAO,CAAC;AAE5D,UAAI,YAAY;AAChB,UAAI;AACF,cAAM,IAAI,MAAM,KAAK,QAAQ;AAC7B,oBAAY,EAAE;AAAA,MAChB,QAAQ;AAAA,MAER;AAEA,iBAAW,SAAS,KAAK,EAAE,MAAM,SAAS,OAAO,UAAU,CAAC;AAE5D,YAAM,MAAM,QAAQ,MAAM,IAAI;AAC9B,UAAI,KAAK;AACP,mBAAW,aAAa,GAAG,KAAK,WAAW,aAAa,GAAG,KAAK,KAAK;AAAA,MACvE;AAGA,UAAI,MAAM,QAAQ,gBAAgB;AAChC,mBAAW,UAAU,KAAK;AAAA,UACxB,MAAM;AAAA,UACN,MAAM,eAAe,MAAM,IAAI;AAAA,UAC/B;AAAA,QACF,CAAC;AAAA,MACH;AAGA,UAAI,SAAS,KAAM,UAAU,KAAK,IAAI,SAAS,MAAM,GAAI;AACvD,cAAM,iBAAiB,MAAM,KAAK,QAAQ,UAAU,EAAE;AACtD,YAAI,YAAY,IAAI,cAAc,GAAG;AACnC,qBAAW,YAAY,KAAK,OAAO;AAAA,QACrC;AAAA,MACF;AAGA,UAAI,MAAM,KAAK,WAAW,MAAM,GAAG;AACjC,mBAAW,SAAS,KAAK,OAAO;AAAA,MAClC;AAIA,UAAI,UAAU,KAAK,MAAM,KAAK,MAAM,WAAW,GAAG;AAChD,mBAAW,SAAS,KAAK,OAAO;AAAA,MAClC;AAGA,YAAM,WAAW,MAAM;AACvB,UAAI,cAAc,KAAK,OAAK,EAAE,KAAK,QAAQ,CAAC,GAAG;AAC7C,mBAAW,UAAU,KAAK,OAAO;AAAA,MACnC;AAAA,IACF;AAAA,EACF;AAGA,QAAM,UAAU,IAAI,MAAM,GAAG,EAAE,IAAI;AACnC,MAAI,YAAY,YAAY,SAAS,GAAG;AACtC,eAAW,SAAS,KAAK,SAAS,MAAM,GAAG,CAAC;AAAA,EAC9C;AAEA,SAAO;AACT;AAEA,eAAe,gBAAgB,MAAqC;AAClE,MAAI;AACF,UAAM,EAAE,OAAO,IAAI,MAAM;AAAA,MACvB;AAAA,MACA,CAAC,OAAO,UAAU,aAAa,OAAO,kBAAkB;AAAA,MACxD,EAAE,KAAK,MAAM,WAAW,OAAO,KAAK;AAAA,IACtC;AAEA,UAAM,eAAuC,CAAC;AAE9C,eAAW,QAAQ,OAAO,MAAM,IAAI,GAAG;AAErC,YAAM,QAAQ,KAAK,MAAM,wBA
AwB;AACjD,UAAI,OAAO;AACT,cAAM,WAAW,MAAM,CAAC,EAAG,KAAK;AAChC,cAAM,UAAU,SAAS,MAAM,CAAC,GAAI,EAAE;AACtC,qBAAa,QAAQ,KAAK,aAAa,QAAQ,KAAK,KAAK;AAAA,MAC3D;AAAA,IACF;AAEA,WAAO,OAAO,QAAQ,YAAY,EAC/B,KAAK,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,MAAM,IAAI,CAAC,EAC5B,MAAM,GAAG,EAAE,EACX,IAAI,CAAC,CAAC,MAAM,OAAO,OAAO,EAAE,MAAM,QAAQ,EAAE;AAAA,EACjD,QAAQ;AAEN,WAAO,CAAC;AAAA,EACV;AACF;AAIA,eAAe,OAAsB;AACnC,QAAM,OAAO,QAAQ,KAAK,MAAM,CAAC;AACjC,MAAI,OAAO;AACX,QAAM,UAAoB,CAAC;AAE3B,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,QAAI,KAAK,CAAC,MAAM,YAAY,KAAK,IAAI,CAAC,GAAG;AACvC,aAAO,KAAK,IAAI,CAAC;AACjB;AAAA,IACF,WAAW,KAAK,CAAC,MAAM,eAAe,KAAK,IAAI,CAAC,GAAG;AACjD,cAAQ,KAAK,GAAG,KAAK,IAAI,CAAC,EAAG,MAAM,GAAG,CAAC;AACvC;AAAA,IACF;AAAA,EACF;AAEA,QAAM,SAAS,MAAM,eAAe,MAAM,EAAE,QAAQ,CAAC;AACrD,UAAQ,OAAO,MAAM,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AACtD;AAEA,IAAI,YAAY,QAAQ,UAAU,QAAQ,KAAK,CAAC,CAAC,IAAI;AACnD,OAAK,EAAE,MAAM,SAAO;AAClB,YAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,OAAO,GAAG,EAAE,CAAC,CAAC;AAC3D,YAAQ,KAAK,CAAC;AAAA,EAChB,CAAC;AACH;","names":[]}
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../scripts/trim.ts"],"sourcesContent":["import { readFile } from 'fs/promises'\nimport { resolve, extname, basename } from 'path'\n\nconst MAX_CLUSTER_LINES = 300\n\ninterface TrimOptions {\n maxLines?: number\n}\n\n// Language-agnostic regex patterns for stripping function/method bodies\n// Strategy: find opening braces after signatures, track depth, remove body content\n\nfunction trimTypeScript(content: string): string {\n const lines = content.split('\\n')\n const result: string[] = []\n let braceDepth = 0\n let inBody = false\n let bodyStartDepth = 0\n\n for (const line of lines) {\n const trimmed = line.trim()\n\n // Always keep: empty lines at top level, imports, type/interface, decorators, exports of types\n if (braceDepth === 0 || !inBody) {\n if (\n trimmed === '' ||\n trimmed.startsWith('import ') ||\n trimmed.startsWith('export type ') ||\n trimmed.startsWith('export interface ') ||\n trimmed.startsWith('export enum ') ||\n trimmed.startsWith('export const ') ||\n trimmed.startsWith('type ') ||\n trimmed.startsWith('interface ') ||\n trimmed.startsWith('enum ') ||\n trimmed.startsWith('@') ||\n trimmed.startsWith('//') ||\n trimmed.startsWith('/*') ||\n trimmed.startsWith('*') ||\n trimmed.startsWith('declare ')\n ) {\n result.push(line)\n // Count braces even in kept lines\n braceDepth += countChar(trimmed, '{') - countChar(trimmed, '}')\n continue\n }\n }\n\n const openBraces = countChar(trimmed, '{')\n const closeBraces = countChar(trimmed, '}')\n\n if (!inBody) {\n // Detect function/method signature — line with opening brace\n if (isSignatureLine(trimmed) && openBraces > closeBraces) {\n result.push(line)\n braceDepth += openBraces - closeBraces\n inBody = true\n bodyStartDepth = braceDepth\n continue\n }\n\n // Class/interface declaration — keep but don't treat as body\n if (isClassOrInterfaceLine(trimmed)) {\n result.push(line)\n braceDepth += openBraces - closeBraces\n continue\n }\n\n // Keep the line (top-level 
statement, property declaration, etc.)\n result.push(line)\n braceDepth += openBraces - closeBraces\n } else {\n // Inside a function body — skip lines\n braceDepth += openBraces - closeBraces\n\n // Check if we've closed back to where the body started\n if (braceDepth < bodyStartDepth) {\n // Add closing brace\n result.push(line)\n inBody = false\n }\n }\n }\n\n return result.join('\\n')\n}\n\nfunction trimPython(content: string): string {\n const lines = content.split('\\n')\n const result: string[] = []\n let skipIndent = -1\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i]!\n const trimmed = line.trim()\n const indent = line.length - line.trimStart().length\n\n // If we're skipping a body and this line is still indented deeper, skip it\n if (skipIndent >= 0) {\n if (trimmed === '' || indent > skipIndent) {\n continue\n }\n // We've exited the body\n skipIndent = -1\n }\n\n // Always keep: comments, imports, class defs, decorators, type hints, module-level assignments\n if (\n trimmed === '' ||\n trimmed.startsWith('#') ||\n trimmed.startsWith('import ') ||\n trimmed.startsWith('from ') ||\n trimmed.startsWith('@') ||\n trimmed.startsWith('class ') ||\n /^[A-Z_][A-Z_0-9]*\\s*=/.test(trimmed)\n ) {\n result.push(line)\n continue\n }\n\n // Function/method definition — keep signature, skip body\n if (trimmed.startsWith('def ') || trimmed.startsWith('async def ')) {\n result.push(line)\n // If the next non-empty line has docstring, keep it\n const docIdx = findDocstring(lines, i + 1, indent)\n if (docIdx > i) {\n for (let j = i + 1; j <= docIdx; j++) {\n result.push(lines[j]!)\n }\n }\n skipIndent = indent\n continue\n }\n\n // Keep everything else at module/class level\n result.push(line)\n }\n\n return result.join('\\n')\n}\n\nfunction trimJava(content: string): string {\n // Java/Kotlin — very similar to TypeScript brace-matching\n const lines = content.split('\\n')\n const result: string[] = []\n let braceDepth = 0\n let inBody = false\n let 
bodyStartDepth = 0\n\n for (const line of lines) {\n const trimmed = line.trim()\n\n if (braceDepth === 0 || !inBody) {\n if (\n trimmed === '' ||\n trimmed.startsWith('import ') ||\n trimmed.startsWith('package ') ||\n trimmed.startsWith('@') ||\n trimmed.startsWith('//') ||\n trimmed.startsWith('/*') ||\n trimmed.startsWith('*') ||\n trimmed.startsWith('public interface ') ||\n trimmed.startsWith('interface ') ||\n trimmed.startsWith('public enum ') ||\n trimmed.startsWith('enum ')\n ) {\n result.push(line)\n braceDepth += countChar(trimmed, '{') - countChar(trimmed, '}')\n continue\n }\n }\n\n const openBraces = countChar(trimmed, '{')\n const closeBraces = countChar(trimmed, '}')\n\n if (!inBody) {\n if (isJavaMethodSignature(trimmed) && openBraces > closeBraces) {\n result.push(line)\n braceDepth += openBraces - closeBraces\n inBody = true\n bodyStartDepth = braceDepth\n continue\n }\n\n if (isJavaClassLine(trimmed)) {\n result.push(line)\n braceDepth += openBraces - closeBraces\n continue\n }\n\n result.push(line)\n braceDepth += openBraces - closeBraces\n } else {\n braceDepth += openBraces - closeBraces\n if (braceDepth < bodyStartDepth) {\n result.push(line)\n inBody = false\n }\n }\n }\n\n return result.join('\\n')\n}\n\nfunction trimGeneric(content: string): string {\n // For unknown languages, just return as-is (truncation handles size)\n return content\n}\n\n// --- Helpers ---\n\nfunction countChar(s: string, ch: string): number {\n let count = 0\n let inString = false\n let stringChar = ''\n for (let i = 0; i < s.length; i++) {\n const c = s[i]!\n if (inString) {\n if (c === stringChar && s[i - 1] !== '\\\\') inString = false\n } else if (c === '\"' || c === \"'\" || c === '`') {\n inString = true\n stringChar = c\n } else if (c === ch) {\n count++\n }\n }\n return count\n}\n\nfunction isSignatureLine(trimmed: string): boolean {\n return /^(export\\s+)?(async\\s+)?function\\s/.test(trimmed) ||\n 
/^(public|private|protected|static|async|get|set|\\*)\\s/.test(trimmed) ||\n /^(readonly\\s+)?[a-zA-Z_$][a-zA-Z0-9_$]*\\s*\\(/.test(trimmed) ||\n /^(export\\s+)?(const|let|var)\\s+\\w+\\s*=\\s*(async\\s+)?\\(/.test(trimmed) ||\n /^(export\\s+)?(const|let|var)\\s+\\w+\\s*=\\s*(async\\s+)?function/.test(trimmed) ||\n // Arrow function assigned at class level\n /^[a-zA-Z_$][a-zA-Z0-9_$]*\\s*=\\s*(async\\s+)?\\(/.test(trimmed)\n}\n\nfunction isClassOrInterfaceLine(trimmed: string): boolean {\n return /^(export\\s+)?(abstract\\s+)?(class|interface|enum)\\s/.test(trimmed) ||\n /^(export\\s+)?namespace\\s/.test(trimmed)\n}\n\nfunction isJavaMethodSignature(trimmed: string): boolean {\n return /^(public|private|protected|static|final|abstract|synchronized|native)\\s/.test(trimmed) &&\n /\\(/.test(trimmed)\n}\n\nfunction isJavaClassLine(trimmed: string): boolean {\n return /^(public|private|protected)?\\s*(abstract\\s+)?(class|interface|enum)\\s/.test(trimmed)\n}\n\nfunction findDocstring(lines: string[], startIdx: number, defIndent: number): number {\n // Find Python docstring (triple-quoted) after a def\n for (let i = startIdx; i < lines.length; i++) {\n const trimmed = lines[i]!.trim()\n if (trimmed === '') continue\n if (trimmed.startsWith('\"\"\"') || trimmed.startsWith(\"'''\")) {\n const quote = trimmed.slice(0, 3)\n // Single-line docstring\n if (trimmed.length > 3 && trimmed.endsWith(quote)) return i\n // Multi-line docstring — find closing\n for (let j = i + 1; j < lines.length; j++) {\n if (lines[j]!.trim().endsWith(quote)) return j\n }\n return i\n }\n // First non-empty line after def is not a docstring\n return startIdx - 1\n }\n return startIdx - 1\n}\n\nfunction getTrimmer(filePath: string): (content: string) => string {\n const ext = extname(filePath).toLowerCase()\n switch (ext) {\n case '.ts':\n case '.tsx':\n case '.js':\n case '.jsx':\n case '.mjs':\n case '.cjs':\n return trimTypeScript\n case '.py':\n return trimPython\n case '.java':\n case '.kt':\n 
case '.kts':\n case '.scala':\n case '.groovy':\n return trimJava\n case '.go':\n case '.rs':\n case '.c':\n case '.cpp':\n case '.h':\n case '.hpp':\n case '.cs':\n case '.swift':\n case '.dart':\n return trimJava // brace-based languages use same strategy\n default:\n return trimGeneric\n }\n}\n\nexport async function trimCluster(paths: string[], projectRoot: string): Promise<string> {\n const results: Array<{ path: string; content: string; lines: number }> = []\n\n for (const filePath of paths) {\n const absPath = resolve(projectRoot, filePath)\n try {\n const raw = await readFile(absPath, 'utf-8')\n const trimmer = getTrimmer(filePath)\n const trimmed = trimmer(raw)\n results.push({\n path: filePath,\n content: trimmed,\n lines: trimmed.split('\\n').length,\n })\n } catch {\n // Skip files that can't be read\n if (process.env['NOGREP_DEBUG'] === '1') {\n process.stderr.write(`[nogrep] Could not read: ${absPath}\\n`)\n }\n }\n }\n\n // Sort by line count descending — truncate least important (largest) files first\n results.sort((a, b) => a.lines - b.lines)\n\n const output: string[] = []\n let totalLines = 0\n const maxLines = MAX_CLUSTER_LINES\n\n for (const file of results) {\n const header = `// === ${file.path} ===`\n const fileLines = file.content.split('\\n')\n const available = maxLines - totalLines - 2 // header + separator\n\n if (available <= 0) break\n\n output.push(header)\n if (fileLines.length <= available) {\n output.push(file.content)\n } else {\n output.push(fileLines.slice(0, available).join('\\n'))\n output.push(`// ... 
truncated (${fileLines.length - available} more lines)`)\n }\n output.push('')\n\n totalLines += Math.min(fileLines.length, available) + 2\n }\n\n return output.join('\\n')\n}\n\n// --- CLI ---\n\nasync function main(): Promise<void> {\n const args = process.argv.slice(2)\n\n if (args.length === 0) {\n process.stderr.write('Usage: node trim.js <path1> <path2> ...\\n')\n process.exit(1)\n }\n\n const projectRoot = process.cwd()\n const result = await trimCluster(args, projectRoot)\n process.stdout.write(result)\n}\n\nconst isDirectRun = process.argv[1]?.endsWith('trim.js') || process.argv[1]?.endsWith('trim.ts')\nif (isDirectRun) {\n main().catch((err: unknown) => {\n process.stderr.write(`Error: ${err instanceof Error ? err.message : String(err)}\\n`)\n process.exit(1)\n })\n}\n"],"mappings":";AAAA,SAAS,gBAAgB;AACzB,SAAS,SAAS,eAAyB;AAE3C,IAAM,oBAAoB;AAS1B,SAAS,eAAe,SAAyB;AAC/C,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,SAAmB,CAAC;AAC1B,MAAI,aAAa;AACjB,MAAI,SAAS;AACb,MAAI,iBAAiB;AAErB,aAAW,QAAQ,OAAO;AACxB,UAAM,UAAU,KAAK,KAAK;AAG1B,QAAI,eAAe,KAAK,CAAC,QAAQ;AAC/B,UACE,YAAY,MACZ,QAAQ,WAAW,SAAS,KAC5B,QAAQ,WAAW,cAAc,KACjC,QAAQ,WAAW,mBAAmB,KACtC,QAAQ,WAAW,cAAc,KACjC,QAAQ,WAAW,eAAe,KAClC,QAAQ,WAAW,OAAO,KAC1B,QAAQ,WAAW,YAAY,KAC/B,QAAQ,WAAW,OAAO,KAC1B,QAAQ,WAAW,GAAG,KACtB,QAAQ,WAAW,IAAI,KACvB,QAAQ,WAAW,IAAI,KACvB,QAAQ,WAAW,GAAG,KACtB,QAAQ,WAAW,UAAU,GAC7B;AACA,eAAO,KAAK,IAAI;AAEhB,sBAAc,UAAU,SAAS,GAAG,IAAI,UAAU,SAAS,GAAG;AAC9D;AAAA,MACF;AAAA,IACF;AAEA,UAAM,aAAa,UAAU,SAAS,GAAG;AACzC,UAAM,cAAc,UAAU,SAAS,GAAG;AAE1C,QAAI,CAAC,QAAQ;AAEX,UAAI,gBAAgB,OAAO,KAAK,aAAa,aAAa;AACxD,eAAO,KAAK,IAAI;AAChB,sBAAc,aAAa;AAC3B,iBAAS;AACT,yBAAiB;AACjB;AAAA,MACF;AAGA,UAAI,uBAAuB,OAAO,GAAG;AACnC,eAAO,KAAK,IAAI;AAChB,sBAAc,aAAa;AAC3B;AAAA,MACF;AAGA,aAAO,KAAK,IAAI;AAChB,oBAAc,aAAa;AAAA,IAC7B,OAAO;AAEL,oBAAc,aAAa;AAG3B,UAAI,aAAa,gBAAgB;AAE/B,eAAO,KAAK,IAAI;AAChB,iBAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,KAAK,IAAI;AACzB;AAEA,SAAS,WAAW,SAAyB;AAC3C,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,SAAmB,CAAC;AAC1B,MAA
I,aAAa;AAEjB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,UAAU,KAAK,KAAK;AAC1B,UAAM,SAAS,KAAK,SAAS,KAAK,UAAU,EAAE;AAG9C,QAAI,cAAc,GAAG;AACnB,UAAI,YAAY,MAAM,SAAS,YAAY;AACzC;AAAA,MACF;AAEA,mBAAa;AAAA,IACf;AAGA,QACE,YAAY,MACZ,QAAQ,WAAW,GAAG,KACtB,QAAQ,WAAW,SAAS,KAC5B,QAAQ,WAAW,OAAO,KAC1B,QAAQ,WAAW,GAAG,KACtB,QAAQ,WAAW,QAAQ,KAC3B,wBAAwB,KAAK,OAAO,GACpC;AACA,aAAO,KAAK,IAAI;AAChB;AAAA,IACF;AAGA,QAAI,QAAQ,WAAW,MAAM,KAAK,QAAQ,WAAW,YAAY,GAAG;AAClE,aAAO,KAAK,IAAI;AAEhB,YAAM,SAAS,cAAc,OAAO,IAAI,GAAG,MAAM;AACjD,UAAI,SAAS,GAAG;AACd,iBAAS,IAAI,IAAI,GAAG,KAAK,QAAQ,KAAK;AACpC,iBAAO,KAAK,MAAM,CAAC,CAAE;AAAA,QACvB;AAAA,MACF;AACA,mBAAa;AACb;AAAA,IACF;AAGA,WAAO,KAAK,IAAI;AAAA,EAClB;AAEA,SAAO,OAAO,KAAK,IAAI;AACzB;AAEA,SAAS,SAAS,SAAyB;AAEzC,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,QAAM,SAAmB,CAAC;AAC1B,MAAI,aAAa;AACjB,MAAI,SAAS;AACb,MAAI,iBAAiB;AAErB,aAAW,QAAQ,OAAO;AACxB,UAAM,UAAU,KAAK,KAAK;AAE1B,QAAI,eAAe,KAAK,CAAC,QAAQ;AAC/B,UACE,YAAY,MACZ,QAAQ,WAAW,SAAS,KAC5B,QAAQ,WAAW,UAAU,KAC7B,QAAQ,WAAW,GAAG,KACtB,QAAQ,WAAW,IAAI,KACvB,QAAQ,WAAW,IAAI,KACvB,QAAQ,WAAW,GAAG,KACtB,QAAQ,WAAW,mBAAmB,KACtC,QAAQ,WAAW,YAAY,KAC/B,QAAQ,WAAW,cAAc,KACjC,QAAQ,WAAW,OAAO,GAC1B;AACA,eAAO,KAAK,IAAI;AAChB,sBAAc,UAAU,SAAS,GAAG,IAAI,UAAU,SAAS,GAAG;AAC9D;AAAA,MACF;AAAA,IACF;AAEA,UAAM,aAAa,UAAU,SAAS,GAAG;AACzC,UAAM,cAAc,UAAU,SAAS,GAAG;AAE1C,QAAI,CAAC,QAAQ;AACX,UAAI,sBAAsB,OAAO,KAAK,aAAa,aAAa;AAC9D,eAAO,KAAK,IAAI;AAChB,sBAAc,aAAa;AAC3B,iBAAS;AACT,yBAAiB;AACjB;AAAA,MACF;AAEA,UAAI,gBAAgB,OAAO,GAAG;AAC5B,eAAO,KAAK,IAAI;AAChB,sBAAc,aAAa;AAC3B;AAAA,MACF;AAEA,aAAO,KAAK,IAAI;AAChB,oBAAc,aAAa;AAAA,IAC7B,OAAO;AACL,oBAAc,aAAa;AAC3B,UAAI,aAAa,gBAAgB;AAC/B,eAAO,KAAK,IAAI;AAChB,iBAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,KAAK,IAAI;AACzB;AAEA,SAAS,YAAY,SAAyB;AAE5C,SAAO;AACT;AAIA,SAAS,UAAU,GAAW,IAAoB;AAChD,MAAI,QAAQ;AACZ,MAAI,WAAW;AACf,MAAI,aAAa;AACjB,WAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AACjC,UAAM,IAAI,EAAE,CAAC;AACb,QAAI,UAAU;AACZ,UAAI,MAAM,cAAc,EAAE,IAAI,CAAC,MAAM,KAAM,YAAW;AAAA,IACxD,WAAW,MAAM,OAAO,MAAM,OAAO,MAA
M,KAAK;AAC9C,iBAAW;AACX,mBAAa;AAAA,IACf,WAAW,MAAM,IAAI;AACnB;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,gBAAgB,SAA0B;AACjD,SAAO,qCAAqC,KAAK,OAAO,KACtD,wDAAwD,KAAK,OAAO,KACpE,+CAA+C,KAAK,OAAO,KAC3D,yDAAyD,KAAK,OAAO,KACrE,+DAA+D,KAAK,OAAO;AAAA,EAE3E,gDAAgD,KAAK,OAAO;AAChE;AAEA,SAAS,uBAAuB,SAA0B;AACxD,SAAO,sDAAsD,KAAK,OAAO,KACvE,2BAA2B,KAAK,OAAO;AAC3C;AAEA,SAAS,sBAAsB,SAA0B;AACvD,SAAO,0EAA0E,KAAK,OAAO,KAC3F,KAAK,KAAK,OAAO;AACrB;AAEA,SAAS,gBAAgB,SAA0B;AACjD,SAAO,wEAAwE,KAAK,OAAO;AAC7F;AAEA,SAAS,cAAc,OAAiB,UAAkB,WAA2B;AAEnF,WAAS,IAAI,UAAU,IAAI,MAAM,QAAQ,KAAK;AAC5C,UAAM,UAAU,MAAM,CAAC,EAAG,KAAK;AAC/B,QAAI,YAAY,GAAI;AACpB,QAAI,QAAQ,WAAW,KAAK,KAAK,QAAQ,WAAW,KAAK,GAAG;AAC1D,YAAM,QAAQ,QAAQ,MAAM,GAAG,CAAC;AAEhC,UAAI,QAAQ,SAAS,KAAK,QAAQ,SAAS,KAAK,EAAG,QAAO;AAE1D,eAAS,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACzC,YAAI,MAAM,CAAC,EAAG,KAAK,EAAE,SAAS,KAAK,EAAG,QAAO;AAAA,MAC/C;AACA,aAAO;AAAA,IACT;AAEA,WAAO,WAAW;AAAA,EACpB;AACA,SAAO,WAAW;AACpB;AAEA,SAAS,WAAW,UAA+C;AACjE,QAAM,MAAM,QAAQ,QAAQ,EAAE,YAAY;AAC1C,UAAQ,KAAK;AAAA,IACX,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAEA,eAAsB,YAAY,OAAiB,aAAsC;AACvF,QAAM,UAAmE,CAAC;AAE1E,aAAW,YAAY,OAAO;AAC5B,UAAM,UAAU,QAAQ,aAAa,QAAQ;AAC7C,QAAI;AACF,YAAM,MAAM,MAAM,SAAS,SAAS,OAAO;AAC3C,YAAM,UAAU,WAAW,QAAQ;AACnC,YAAM,UAAU,QAAQ,GAAG;AAC3B,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,SAAS;AAAA,QACT,OAAO,QAAQ,MAAM,IAAI,EAAE;AAAA,MAC7B,CAAC;AAAA,IACH,QAAQ;AAEN,UAAI,QAAQ,IAAI,cAAc,MAAM,KAAK;AACvC,gBAAQ,OAAO,MAAM,4BAA4B,OAAO;AAAA,CAAI;AAAA,MAC9D;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAExC,QAAM,SAAmB,CAAC;AAC1B,MAAI,aAAa;AACjB,QAAM,WAAW;AAEjB,aAAW,QAAQ,SAAS;AAC1B,UAAM,SAAS,UAAU,KAAK,IAAI;AAClC,UAAM,YAAY,K
AAK,QAAQ,MAAM,IAAI;AACzC,UAAM,YAAY,WAAW,aAAa;AAE1C,QAAI,aAAa,EAAG;AAEpB,WAAO,KAAK,MAAM;AAClB,QAAI,UAAU,UAAU,WAAW;AACjC,aAAO,KAAK,KAAK,OAAO;AAAA,IAC1B,OAAO;AACL,aAAO,KAAK,UAAU,MAAM,GAAG,SAAS,EAAE,KAAK,IAAI,CAAC;AACpD,aAAO,KAAK,qBAAqB,UAAU,SAAS,SAAS,cAAc;AAAA,IAC7E;AACA,WAAO,KAAK,EAAE;AAEd,kBAAc,KAAK,IAAI,UAAU,QAAQ,SAAS,IAAI;AAAA,EACxD;AAEA,SAAO,OAAO,KAAK,IAAI;AACzB;AAIA,eAAe,OAAsB;AACnC,QAAM,OAAO,QAAQ,KAAK,MAAM,CAAC;AAEjC,MAAI,KAAK,WAAW,GAAG;AACrB,YAAQ,OAAO,MAAM,2CAA2C;AAChE,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,cAAc,QAAQ,IAAI;AAChC,QAAM,SAAS,MAAM,YAAY,MAAM,WAAW;AAClD,UAAQ,OAAO,MAAM,MAAM;AAC7B;AAEA,IAAM,cAAc,QAAQ,KAAK,CAAC,GAAG,SAAS,SAAS,KAAK,QAAQ,KAAK,CAAC,GAAG,SAAS,SAAS;AAC/F,IAAI,aAAa;AACf,OAAK,EAAE,MAAM,CAAC,QAAiB;AAC7B,YAAQ,OAAO,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,CAAI;AACnF,YAAQ,KAAK,CAAC;AAAA,EAChB,CAAC;AACH;","names":[]}
@@ -0,0 +1,7 @@
1
+ import {
2
+ NogrepError
3
+ } from "./chunk-ICPV2JWV.js";
4
+ export {
5
+ NogrepError
6
+ };
7
+ //# sourceMappingURL=types.js.map
@@ -1,6 +1,6 @@
1
1
  import {
2
2
  NogrepError
3
- } from "./chunk-OJSJ63PH.js";
3
+ } from "./chunk-ICPV2JWV.js";
4
4
 
5
5
  // scripts/validate.ts
6
6
  import { readFile } from "fs/promises";
@@ -127,15 +127,17 @@ async function main() {
127
127
  break;
128
128
  }
129
129
  }
130
- main().catch((err) => {
131
- if (err instanceof NogrepError) {
132
- process.stderr.write(JSON.stringify({ error: err.message, code: err.code }) + "\n");
133
- } else {
134
- const message = err instanceof Error ? err.message : String(err);
135
- process.stderr.write(JSON.stringify({ error: message }) + "\n");
136
- }
137
- process.exitCode = 1;
138
- });
130
+ if (import.meta.url === `file://${process.argv[1]}`) {
131
+ main().catch((err) => {
132
+ if (err instanceof NogrepError) {
133
+ process.stderr.write(JSON.stringify({ error: err.message, code: err.code }) + "\n");
134
+ } else {
135
+ const message = err instanceof Error ? err.message : String(err);
136
+ process.stderr.write(JSON.stringify({ error: message }) + "\n");
137
+ }
138
+ process.exitCode = 1;
139
+ });
140
+ }
139
141
  export {
140
142
  checkFreshness,
141
143
  validateAll
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../scripts/validate.ts"],"sourcesContent":["import { readFile } from 'node:fs/promises'\nimport { join, resolve as resolvePath } from 'node:path'\nimport { createHash } from 'node:crypto'\nimport { parseArgs } from 'node:util'\nimport { glob } from 'glob'\nimport matter from 'gray-matter'\nimport type { StaleResult } from './types.js'\nimport { NogrepError } from './types.js'\n\n// --- Freshness check ---\n\nexport async function checkFreshness(\n nodeFile: string,\n projectRoot: string,\n): Promise<StaleResult> {\n let content: string\n try {\n content = await readFile(join(projectRoot, nodeFile), 'utf-8')\n } catch {\n return { file: nodeFile, isStale: true, reason: 'context file not found' }\n }\n\n const parsed = matter(content)\n const srcPaths: string[] = parsed.data.src_paths ?? []\n const lastSynced = parsed.data.last_synced as\n | { src_hash?: string; commit?: string; timestamp?: string }\n | undefined\n\n if (!lastSynced?.src_hash) {\n return { file: nodeFile, isStale: true, reason: 'no src_hash in frontmatter' }\n }\n\n if (srcPaths.length === 0) {\n return { file: nodeFile, isStale: false }\n }\n\n // Glob all matching source files\n const allFiles: string[] = []\n for (const pattern of srcPaths) {\n const matches = await glob(pattern, {\n cwd: projectRoot,\n nodir: true,\n ignore: ['node_modules/**', 'dist/**', 'build/**', '.git/**', 'coverage/**'],\n })\n allFiles.push(...matches)\n }\n\n allFiles.sort()\n\n if (allFiles.length === 0) {\n return { file: nodeFile, isStale: true, reason: 'no source files match src_paths' }\n }\n\n // Compute SHA256 of all file contents concatenated\n const hash = createHash('sha256')\n for (const file of allFiles) {\n try {\n const fileContent = await readFile(join(projectRoot, file))\n hash.update(fileContent)\n } catch {\n // File unreadable — skip\n }\n }\n const currentHash = `sha256:${hash.digest('hex').slice(0, 12)}`\n\n if (currentHash !== lastSynced.src_hash) {\n return {\n file: 
nodeFile,\n isStale: true,\n reason: `hash mismatch: expected ${lastSynced.src_hash}, got ${currentHash}`,\n }\n }\n\n return { file: nodeFile, isStale: false }\n}\n\n// --- Discover all context nodes ---\n\nasync function discoverNodes(projectRoot: string): Promise<string[]> {\n const nogrepDir = join(projectRoot, '.nogrep')\n const patterns = [\n 'domains/*.md',\n 'architecture/*.md',\n 'flows/*.md',\n 'entities/*.md',\n ]\n\n const files: string[] = []\n for (const pattern of patterns) {\n const matches = await glob(pattern, { cwd: nogrepDir, nodir: true })\n files.push(...matches.map(m => `.nogrep/${m}`))\n }\n\n return files.sort()\n}\n\n// --- Validate all nodes ---\n\nexport async function validateAll(\n projectRoot: string,\n): Promise<{ total: number; fresh: StaleResult[]; stale: StaleResult[] }> {\n const indexPath = join(projectRoot, '.nogrep', '_index.json')\n try {\n await readFile(indexPath, 'utf-8')\n } catch {\n throw new NogrepError(\n 'No .nogrep/_index.json found. Run /nogrep:init first.',\n 'NO_INDEX',\n )\n }\n\n const nodeFiles = await discoverNodes(projectRoot)\n const results = await Promise.all(\n nodeFiles.map(f => checkFreshness(f, projectRoot)),\n )\n\n const fresh = results.filter(r => !r.isStale)\n const stale = results.filter(r => r.isStale)\n\n return { total: results.length, fresh, stale }\n}\n\n// --- Formatting ---\n\nfunction formatText(result: { total: number; fresh: StaleResult[]; stale: StaleResult[] }): string {\n const lines: string[] = []\n lines.push(`nogrep index: ${result.total} nodes`)\n lines.push(` Fresh: ${result.fresh.length}`)\n lines.push(` Stale: ${result.stale.length}`)\n\n if (result.stale.length > 0) {\n lines.push('')\n lines.push('Stale nodes:')\n for (const s of result.stale) {\n lines.push(` - ${s.file}: ${s.reason}`)\n }\n }\n\n return lines.join('\\n')\n}\n\nfunction formatJson(result: { total: number; fresh: StaleResult[]; stale: StaleResult[] }): string {\n return JSON.stringify(result, null, 
2)\n}\n\n// --- CLI ---\n\nasync function main(): Promise<void> {\n const { values } = parseArgs({\n options: {\n format: { type: 'string', default: 'text' },\n root: { type: 'string', default: process.cwd() },\n },\n strict: true,\n })\n\n const root = resolvePath(values.root ?? process.cwd())\n const format = values.format ?? 'text'\n\n const result = await validateAll(root)\n\n switch (format) {\n case 'json':\n process.stdout.write(formatJson(result) + '\\n')\n break\n case 'text':\n default:\n process.stdout.write(formatText(result) + '\\n')\n break\n }\n}\n\nif (import.meta.url === `file://${process.argv[1]}`) {\n main().catch((err: unknown) => {\n if (err instanceof NogrepError) {\n process.stderr.write(JSON.stringify({ error: err.message, code: err.code }) + '\\n')\n } else {\n const message = err instanceof Error ? err.message : String(err)\n process.stderr.write(JSON.stringify({ error: message }) + '\\n')\n }\n process.exitCode = 1\n })\n}\n"],"mappings":";;;;;AAAA,SAAS,gBAAgB;AACzB,SAAS,MAAM,WAAW,mBAAmB;AAC7C,SAAS,kBAAkB;AAC3B,SAAS,iBAAiB;AAC1B,SAAS,YAAY;AACrB,OAAO,YAAY;AAMnB,eAAsB,eACpB,UACA,aACsB;AACtB,MAAI;AACJ,MAAI;AACF,cAAU,MAAM,SAAS,KAAK,aAAa,QAAQ,GAAG,OAAO;AAAA,EAC/D,QAAQ;AACN,WAAO,EAAE,MAAM,UAAU,SAAS,MAAM,QAAQ,yBAAyB;AAAA,EAC3E;AAEA,QAAM,SAAS,OAAO,OAAO;AAC7B,QAAM,WAAqB,OAAO,KAAK,aAAa,CAAC;AACrD,QAAM,aAAa,OAAO,KAAK;AAI/B,MAAI,CAAC,YAAY,UAAU;AACzB,WAAO,EAAE,MAAM,UAAU,SAAS,MAAM,QAAQ,6BAA6B;AAAA,EAC/E;AAEA,MAAI,SAAS,WAAW,GAAG;AACzB,WAAO,EAAE,MAAM,UAAU,SAAS,MAAM;AAAA,EAC1C;AAGA,QAAM,WAAqB,CAAC;AAC5B,aAAW,WAAW,UAAU;AAC9B,UAAM,UAAU,MAAM,KAAK,SAAS;AAAA,MAClC,KAAK;AAAA,MACL,OAAO;AAAA,MACP,QAAQ,CAAC,mBAAmB,WAAW,YAAY,WAAW,aAAa;AAAA,IAC7E,CAAC;AACD,aAAS,KAAK,GAAG,OAAO;AAAA,EAC1B;AAEA,WAAS,KAAK;AAEd,MAAI,SAAS,WAAW,GAAG;AACzB,WAAO,EAAE,MAAM,UAAU,SAAS,MAAM,QAAQ,kCAAkC;AAAA,EACpF;AAGA,QAAM,OAAO,WAAW,QAAQ;AAChC,aAAW,QAAQ,UAAU;AAC3B,QAAI;AACF,YAAM,cAAc,MAAM,SAAS,KAAK,aAAa,IAAI,CAAC;AAC1D,WAAK,OAAO,WAAW;AAAA,IACzB,QAAQ;AAAA,IAER;AAAA,EACF;AACA,QAAM,cAAc,UAAU,KAAK,OA
AO,KAAK,EAAE,MAAM,GAAG,EAAE,CAAC;AAE7D,MAAI,gBAAgB,WAAW,UAAU;AACvC,WAAO;AAAA,MACL,MAAM;AAAA,MACN,SAAS;AAAA,MACT,QAAQ,2BAA2B,WAAW,QAAQ,SAAS,WAAW;AAAA,IAC5E;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,UAAU,SAAS,MAAM;AAC1C;AAIA,eAAe,cAAc,aAAwC;AACnE,QAAM,YAAY,KAAK,aAAa,SAAS;AAC7C,QAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,QAAM,QAAkB,CAAC;AACzB,aAAW,WAAW,UAAU;AAC9B,UAAM,UAAU,MAAM,KAAK,SAAS,EAAE,KAAK,WAAW,OAAO,KAAK,CAAC;AACnE,UAAM,KAAK,GAAG,QAAQ,IAAI,OAAK,WAAW,CAAC,EAAE,CAAC;AAAA,EAChD;AAEA,SAAO,MAAM,KAAK;AACpB;AAIA,eAAsB,YACpB,aACwE;AACxE,QAAM,YAAY,KAAK,aAAa,WAAW,aAAa;AAC5D,MAAI;AACF,UAAM,SAAS,WAAW,OAAO;AAAA,EACnC,QAAQ;AACN,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,YAAY,MAAM,cAAc,WAAW;AACjD,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,UAAU,IAAI,OAAK,eAAe,GAAG,WAAW,CAAC;AAAA,EACnD;AAEA,QAAM,QAAQ,QAAQ,OAAO,OAAK,CAAC,EAAE,OAAO;AAC5C,QAAM,QAAQ,QAAQ,OAAO,OAAK,EAAE,OAAO;AAE3C,SAAO,EAAE,OAAO,QAAQ,QAAQ,OAAO,MAAM;AAC/C;AAIA,SAAS,WAAW,QAA+E;AACjG,QAAM,QAAkB,CAAC;AACzB,QAAM,KAAK,iBAAiB,OAAO,KAAK,QAAQ;AAChD,QAAM,KAAK,YAAY,OAAO,MAAM,MAAM,EAAE;AAC5C,QAAM,KAAK,YAAY,OAAO,MAAM,MAAM,EAAE;AAE5C,MAAI,OAAO,MAAM,SAAS,GAAG;AAC3B,UAAM,KAAK,EAAE;AACb,UAAM,KAAK,cAAc;AACzB,eAAW,KAAK,OAAO,OAAO;AAC5B,YAAM,KAAK,OAAO,EAAE,IAAI,KAAK,EAAE,MAAM,EAAE;AAAA,IACzC;AAAA,EACF;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;AAEA,SAAS,WAAW,QAA+E;AACjG,SAAO,KAAK,UAAU,QAAQ,MAAM,CAAC;AACvC;AAIA,eAAe,OAAsB;AACnC,QAAM,EAAE,OAAO,IAAI,UAAU;AAAA,IAC3B,SAAS;AAAA,MACP,QAAQ,EAAE,MAAM,UAAU,SAAS,OAAO;AAAA,MAC1C,MAAM,EAAE,MAAM,UAAU,SAAS,QAAQ,IAAI,EAAE;AAAA,IACjD;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AAED,QAAM,OAAO,YAAY,OAAO,QAAQ,QAAQ,IAAI,CAAC;AACrD,QAAM,SAAS,OAAO,UAAU;AAEhC,QAAM,SAAS,MAAM,YAAY,IAAI;AAErC,UAAQ,QAAQ;AAAA,IACd,KAAK;AACH,cAAQ,OAAO,MAAM,WAAW,MAAM,IAAI,IAAI;AAC9C;AAAA,IACF,KAAK;AAAA,IACL;AACE,cAAQ,OAAO,MAAM,WAAW,MAAM,IAAI,IAAI;AAC9C;AAAA,EACJ;AACF;AAEA,IAAI,YAAY,QAAQ,UAAU,QAAQ,KAAK,CAAC,CAAC,IAAI;AACnD,OAAK,EAAE,MAAM,CAAC,QAAiB;AAC7B,QAAI,eAAe,aAAa;AAC9B,cAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,IAAI,SAAS,MAAM,IAAI,KAAK,CAAC,IAAI,IA
AI;AAAA,IACpF,OAAO;AACL,YAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,cAAQ,OAAO,MAAM,KAAK,UAAU,EAAE,OAAO,QAAQ,CAAC,IAAI,IAAI;AAAA,IAChE;AACA,YAAQ,WAAW;AAAA,EACrB,CAAC;AACH;","names":[]}
@@ -3,6 +3,5 @@ import { NodeResult, StackResult, IndexJson, RegistryJson } from './types.js';
3
3
  declare function writeContextNodes(nodes: NodeResult[], outputDir: string): Promise<void>;
4
4
  declare function buildIndex(nodes: NodeResult[], stack: Pick<StackResult, 'primaryLanguage' | 'frameworks' | 'architecture'>): IndexJson;
5
5
  declare function buildRegistry(nodes: NodeResult[]): RegistryJson;
6
- declare function patchClaudeMd(projectRoot: string): Promise<void>;
7
6
 
8
- export { buildIndex, buildRegistry, patchClaudeMd, writeContextNodes };
7
+ export { buildIndex, buildRegistry, writeContextNodes };
@@ -1,6 +1,6 @@
1
1
  // scripts/write.ts
2
2
  import { readFile, writeFile, mkdir } from "fs/promises";
3
- import { join, resolve, dirname } from "path";
3
+ import { join, resolve } from "path";
4
4
  import { execFile } from "child_process";
5
5
  import { promisify } from "util";
6
6
  import yaml from "js-yaml";
@@ -173,35 +173,6 @@ function buildRegistry(nodes) {
173
173
  );
174
174
  return { mappings };
175
175
  }
176
- async function patchClaudeMd(projectRoot) {
177
- const claudeMdPath = join(projectRoot, "CLAUDE.md");
178
- const patchPath = join(dirname(import.meta.url.replace("file://", "")), "..", "templates", "claude-md-patch.md");
179
- let patch;
180
- try {
181
- patch = await readFile(patchPath, "utf-8");
182
- } catch {
183
- patch = [
184
- "<!-- nogrep -->",
185
- "## Code Navigation",
186
- "",
187
- "This project uses [nogrep](https://github.com/techtulp/nogrep).",
188
- "Context files in `.nogrep/` are a navigable index of this codebase.",
189
- "When you see nogrep results injected into your context, trust them \u2014",
190
- "read those files before exploring source.",
191
- "<!-- /nogrep -->"
192
- ].join("\n") + "\n";
193
- }
194
- let existing = "";
195
- try {
196
- existing = await readFile(claudeMdPath, "utf-8");
197
- } catch {
198
- }
199
- if (existing.includes("<!-- nogrep -->")) {
200
- return;
201
- }
202
- const newContent = existing ? existing.trimEnd() + "\n\n" + patch : patch;
203
- await writeFile(claudeMdPath, newContent, "utf-8");
204
- }
205
176
  async function writeAll(input, projectRoot) {
206
177
  const outputDir = join(projectRoot, ".nogrep");
207
178
  await mkdir(outputDir, { recursive: true });
@@ -225,7 +196,6 @@ async function writeAll(input, projectRoot) {
225
196
  JSON.stringify(registry, null, 2) + "\n",
226
197
  "utf-8"
227
198
  );
228
- await patchClaudeMd(projectRoot);
229
199
  }
230
200
  async function main() {
231
201
  const args = process.argv.slice(2);
@@ -253,15 +223,16 @@ async function main() {
253
223
  const input = JSON.parse(rawInput);
254
224
  await writeAll(input, projectRoot);
255
225
  }
256
- main().catch((err) => {
257
- const message = err instanceof Error ? err.message : String(err);
258
- process.stderr.write(JSON.stringify({ error: message }) + "\n");
259
- process.exitCode = 1;
260
- });
226
+ if (import.meta.url === `file://${process.argv[1]}`) {
227
+ main().catch((err) => {
228
+ const message = err instanceof Error ? err.message : String(err);
229
+ process.stderr.write(JSON.stringify({ error: message }) + "\n");
230
+ process.exitCode = 1;
231
+ });
232
+ }
261
233
  export {
262
234
  buildIndex,
263
235
  buildRegistry,
264
- patchClaudeMd,
265
236
  writeContextNodes
266
237
  };
267
238
  //# sourceMappingURL=write.js.map