nogrep 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/README.md +91 -0
  2. package/commands/init.md +241 -0
  3. package/commands/off.md +11 -0
  4. package/commands/on.md +21 -0
  5. package/commands/query.md +13 -0
  6. package/commands/status.md +15 -0
  7. package/commands/update.md +89 -0
  8. package/dist/chunk-SMUAF6SM.js +12 -0
  9. package/dist/chunk-SMUAF6SM.js.map +1 -0
  10. package/dist/query.d.ts +12 -0
  11. package/dist/query.js +272 -0
  12. package/dist/query.js.map +1 -0
  13. package/dist/settings.d.ts +6 -0
  14. package/dist/settings.js +75 -0
  15. package/dist/settings.js.map +1 -0
  16. package/dist/signals.d.ts +9 -0
  17. package/dist/signals.js +174 -0
  18. package/dist/signals.js.map +1 -0
  19. package/dist/trim.d.ts +3 -0
  20. package/dist/trim.js +266 -0
  21. package/dist/trim.js.map +1 -0
  22. package/dist/types.d.ts +141 -0
  23. package/dist/types.js +7 -0
  24. package/dist/types.js.map +1 -0
  25. package/dist/validate.d.ts +10 -0
  26. package/dist/validate.js +143 -0
  27. package/dist/validate.js.map +1 -0
  28. package/dist/write.d.ts +8 -0
  29. package/dist/write.js +267 -0
  30. package/dist/write.js.map +1 -0
  31. package/docs/ARCHITECTURE.md +239 -0
  32. package/docs/CLAUDE.md +161 -0
  33. package/docs/CONVENTIONS.md +162 -0
  34. package/docs/SPEC.md +803 -0
  35. package/docs/TASKS.md +216 -0
  36. package/hooks/hooks.json +35 -0
  37. package/hooks/pre-tool-use.sh +37 -0
  38. package/hooks/prompt-submit.sh +26 -0
  39. package/hooks/session-start.sh +21 -0
  40. package/package.json +24 -0
  41. package/scripts/query.ts +290 -0
  42. package/scripts/settings.ts +98 -0
  43. package/scripts/signals.ts +237 -0
  44. package/scripts/trim.ts +379 -0
  45. package/scripts/types.ts +186 -0
  46. package/scripts/validate.ts +181 -0
  47. package/scripts/write.ts +346 -0
  48. package/templates/claude-md-patch.md +8 -0
package/README.md ADDED
@@ -0,0 +1,91 @@
1
+ # nogrep
2
+
3
+ A Claude Code plugin that gives AI agents a navigable index of any codebase, so they stop doing blind `grep`/`find` exploration.
4
+
5
+ ## What it does
6
+
7
+ `nogrep` generates a structured `.nogrep/` directory with a reverse index and thin context nodes (markdown files). When Claude Code needs to find something, it reads 2 files instead of running 20 grep commands.
8
+
9
+ ## Install
10
+
11
+ Add the marketplace and install the plugin:
12
+
13
+ ```
14
+ /plugin marketplace add alirezanasseh/nogrep
15
+ /plugin install nogrep@nogrep-marketplace
16
+ ```
17
+
18
+ For local development/testing:
19
+
20
+ ```bash
21
+ git clone https://github.com/alirezanasseh/nogrep.git
22
+ cd nogrep && npm install && npm run build
23
+ claude --plugin-dir ./nogrep
24
+ ```
25
+
26
+ ## Quick start
27
+
28
+ 1. Open your project in Claude Code
29
+ 2. Run `/nogrep:init` — Claude analyzes your codebase and generates the index
30
+ 3. That's it. Hooks automatically inject context when Claude searches your code
31
+
32
+ ## How it works
33
+
34
+ ```
35
+ Phase 1: Collect signals (scripts — file tree, deps, git churn, entry points)
36
+ Phase 2: Detect stack (Claude — language, frameworks, domain clusters)
37
+ Phase 3: Analyze clusters (Claude — per-domain context nodes from trimmed source)
38
+ Phase 4: Write index (scripts — .nogrep/ files, _index.json, CLAUDE.md patch)
39
+ ```
40
+
41
+ Scripts handle data collection and file I/O. Claude does all the analysis work directly during the session — no API keys needed.
42
+
43
+ ## Commands
44
+
45
+ | Command | Description |
46
+ |---------|-------------|
47
+ | `/nogrep:init` | Generate the full codebase index |
48
+ | `/nogrep:update` | Incrementally update stale nodes |
49
+ | `/nogrep:query <question>` | Manual index lookup |
50
+ | `/nogrep:status` | Show index health and freshness |
51
+ | `/nogrep:on` | Enable nogrep |
52
+ | `/nogrep:off` | Disable nogrep |
53
+
54
+ ## Hooks
55
+
56
+ nogrep installs three Claude Code hooks:
57
+
58
+ - **PreToolUse** — intercepts `grep`/`find`/`rg` commands and injects relevant context files
59
+ - **UserPromptSubmit** — injects context for code navigation prompts
60
+ - **SessionStart** — checks index freshness and warns if stale
61
+
62
+ ## Output structure
63
+
64
+ ```
65
+ .nogrep/
66
+ ├── _index.json # reverse index (tags → files, keywords → files, paths → context)
67
+ ├── _registry.json # source path → context file mapping
68
+ ├── _taxonomy.json # allowed tags for this project
69
+ ├── domains/ # one file per business domain
70
+ ├── architecture/ # cross-domain architectural concerns
71
+ ├── flows/ # multi-domain business flows
72
+ └── entities/ # data models
73
+ ```
74
+
75
+ Each context node is a thin markdown file with YAML frontmatter — purpose, public surface, gotchas, and tags. Nodes include a `## Manual Notes` section that is never overwritten by updates.
76
+
77
+ ## Settings
78
+
79
+ nogrep stores its enabled state in your project's `.claude/` directory:
80
+
81
+ - `.claude/settings.json` — team settings (commit to repo)
82
+ - `.claude/settings.local.json` — personal overrides (gitignored)
83
+
84
+ ## Requirements
85
+
86
+ - Node.js 20+
87
+ - Claude Code
88
+
89
+ ## License
90
+
91
+ MIT
@@ -0,0 +1,241 @@
1
+ Initialize nogrep for this project. Follow these steps exactly in order.
2
+
3
+ ---
4
+
5
+ ## Step 1 — Collect Signals
6
+
7
+ Run the signal collection script to gather project metadata:
8
+
9
+ ```bash
10
+ node "${CLAUDE_PLUGIN_ROOT}/dist/signals.js" --root .
11
+ ```
12
+
13
+ Save the JSON output — you will use it in Step 2.
14
+
15
+ ---
16
+
17
+ ## Step 2 — Detect Stack
18
+
19
+ Analyze the signals from Step 1. Look at the directory tree, dependency manifests, file extension distribution, and entry point candidates.
20
+
21
+ Produce a JSON object with this exact shape (no prose, no markdown fences in your output — just the raw JSON):
22
+
23
+ ```
24
+ {
25
+ "primary_language": "typescript",
26
+ "frameworks": ["nestjs", "react"],
27
+ "architecture": "monolith",
28
+ "domain_clusters": [
29
+ { "name": "billing", "path": "src/billing/", "confidence": 0.95 }
30
+ ],
31
+ "conventions": {
32
+ "entry_pattern": "*.module.ts",
33
+ "test_pattern": "*.spec.ts",
34
+ "config_location": "src/config/"
35
+ },
36
+ "stack_hints": "NestJS: *.module.ts = module boundary, *.service.ts = business logic",
37
+ "dynamic_taxonomy": {
38
+ "domain": ["billing", "auth", "users"],
39
+ "tech": ["stripe", "redis", "postgres"]
40
+ }
41
+ }
42
+ ```
43
+
44
+ **Architecture detection rules:**
45
+ - `monolith` — single dependency manifest at root, one primary framework
46
+ - `monorepo` — multiple dependency manifests with shared tooling (nx, turborepo, lerna, workspaces)
47
+ - `multi-repo` — multiple dependency manifests at depth 1, no shared tooling, separate stacks per subfolder
48
+ - `microservice` — multiple independently deployable services, often with Docker/K8s config
49
+ - `library` — single package, exports API surface, no application entry points
50
+
51
+ **Domain cluster detection:**
52
+ - Look at top-level directories under `src/`, `app/`, `lib/`, `packages/`
53
+ - Each cluster is a cohesive area of business logic (e.g., `billing`, `auth`, `users`)
54
+ - Set confidence 0.0–1.0 based on how clearly defined the boundary is
55
+ - Include at least the `path` glob pattern (e.g., `src/billing/**`)
56
+
57
+ Save this result — you will use it in Steps 3 and 5.
58
+
59
+ ---
60
+
61
+ ## Step 3 — Analyze Each Domain Cluster
62
+
63
+ For **each** domain cluster identified in Step 2, do the following:
64
+
65
+ ### 3a. Trim the source files
66
+
67
+ Run the trim script to get a signatures-only view of the cluster's source files. Pass all source files in the cluster's path as arguments:
68
+
69
+ ```bash
70
+ node "${CLAUDE_PLUGIN_ROOT}/dist/trim.js" <file1> <file2> ...
71
+ ```
72
+
73
+ To find the files in the cluster, use the cluster's `path` from Step 2 to list source files:
74
+
75
+ ```bash
76
+ find <cluster_path> -type f \( -name "*.ts" -o -name "*.js" -o -name "*.py" -o -name "*.java" -o -name "*.go" -o -name "*.rs" -o -name "*.rb" -o -name "*.swift" -o -name "*.dart" -o -name "*.kt" -o -name "*.cs" -o -name "*.tsx" -o -name "*.jsx" \) | head -30
77
+ ```
78
+
79
+ Then pass those files to the trim script.
80
+
81
+ ### 3b. Generate a context node
82
+
83
+ Using the trimmed source from 3a and the stack info from Step 2, generate a context node.
84
+
85
+ For each cluster, produce a JSON object with this shape:
86
+
87
+ ```
88
+ {
89
+ "id": "billing",
90
+ "title": "Billing & Payments",
91
+ "category": "domain",
92
+ "purpose": "2-3 sentences MAX. Business intent, not technical description.",
93
+ "public_surface": ["BillingService.createSubscription(userId, planId)", "POST /billing/webhook"],
94
+ "does_not_own": ["email delivery → notifications", "user identity → auth"],
95
+ "external_deps": [{"name": "stripe", "usage": "payment processing"}],
96
+ "tags": {
97
+ "domain": ["billing"],
98
+ "layer": ["business", "data"],
99
+ "tech": ["stripe", "postgres"],
100
+ "concern": ["error-handling", "idempotency"],
101
+ "type": ["module"]
102
+ },
103
+ "relates_to": [
104
+ {"id": "notifications", "reason": "triggers invoice emails after payment events"}
105
+ ],
106
+ "src_paths": ["src/billing/**"],
107
+ "keywords": ["stripe", "webhook", "invoice", "retry", "idempotent"],
108
+ "gotchas": ["Webhook handler must be idempotent — check event.id before processing"]
109
+ }
110
+ ```
111
+
112
+ **Allowed tag values (use ONLY these — never invent new ones):**
113
+ - `layer`: presentation, business, data, infrastructure, cross-cutting
114
+ - `concern`: security, performance, caching, validation, error-handling, idempotency, observability
115
+ - `type`: module, flow, entity, integration, config, ui, test
116
+ - `domain`: use values from Step 2's `dynamic_taxonomy.domain`
117
+ - `tech`: use values from Step 2's `dynamic_taxonomy.tech`
118
+
119
+ **Rules:**
120
+ - `purpose`: max 3 sentences — what the domain exists to do, not how
121
+ - `gotchas`: max 5 items — non-obvious behaviors, footguns, constraints
122
+ - `keywords`: 5–15 items — terms a developer would grep for to find this domain
123
+ - `does_not_own`: include explicit redirections ("email delivery → notifications")
124
+ - `id`: kebab-case, matches the cluster name
125
+
126
+ Collect all node results — you will use them in Step 5.
127
+
128
+ ---
129
+
130
+ ## Step 4 — Detect Flows
131
+
132
+ Review all the nodes from Step 3. A cluster qualifies as a cross-domain **flow** when:
133
+ - Its import graph or `relates_to` touches 3+ distinct domain clusters, OR
134
+ - It is named with flow keywords: `checkout`, `onboarding`, `signup`, `pipeline`, `workflow`, `process`
135
+
136
+ For each detected flow, generate a node with `"category": "flow"` using the same schema as Step 3b. Flow nodes go in `.nogrep/flows/`.
137
+
138
+ Add any flow nodes to your collection from Step 3.
139
+
140
+ ---
141
+
142
+ ## Step 5 — Write Everything
143
+
144
+ Now assemble the final input for the writer script. Create a JSON object combining all nodes and the stack info:
145
+
146
+ ```json
147
+ {
148
+ "nodes": [
149
+ {
150
+ "id": "billing",
151
+ "title": "Billing & Payments",
152
+ "category": "domain",
153
+ "tags": { "domain": ["billing"], "layer": ["business"], "tech": ["stripe"], "concern": [], "type": ["module"] },
154
+ "relatesTo": [{"id": "notifications", "reason": "triggers emails"}],
155
+ "inverseRelations": [],
156
+ "srcPaths": ["src/billing/**"],
157
+ "keywords": ["stripe", "webhook"],
158
+ "lastSynced": { "commit": "", "timestamp": "2025-03-13T10:00:00Z", "srcHash": "" },
159
+ "purpose": "Handles all payment processing...",
160
+ "publicSurface": ["BillingService.createSubscription()"],
161
+ "doesNotOwn": ["email delivery → notifications"],
162
+ "externalDeps": [{"name": "stripe", "usage": "payment processing"}],
163
+ "gotchas": ["Webhook must be idempotent"]
164
+ }
165
+ ],
166
+ "stack": {
167
+ "primaryLanguage": "typescript",
168
+ "frameworks": ["nestjs"],
169
+ "architecture": "monolith"
170
+ }
171
+ }
172
+ ```
173
+
174
+ **Important:** When converting from Step 3's output format to the writer's input format:
175
+ - `relates_to` → `relatesTo`
176
+ - `src_paths` → `srcPaths`
177
+ - `does_not_own` → `doesNotOwn`
178
+ - `public_surface` → `publicSurface`
179
+ - `external_deps` → `externalDeps`
180
+ - Add `inverseRelations: []` (the writer populates these automatically)
181
+ - Add `lastSynced` with empty `commit`, current ISO timestamp, and empty `srcHash`
182
+
183
+ Pipe the JSON to the writer script:
184
+
185
+ ```bash
186
+ echo '<YOUR_JSON>' | node "${CLAUDE_PLUGIN_ROOT}/dist/write.js" --root .
187
+ ```
188
+
189
+ This will:
190
+ - Create `.nogrep/` directory with all context node files
191
+ - Build `_index.json` (reverse index)
192
+ - Build `_registry.json` (source path → context file mapping)
193
+ - Patch `CLAUDE.md` with navigation instructions
194
+
195
+ ---
196
+
197
+ ## Step 6 — Write Taxonomy
198
+
199
+ Write `_taxonomy.json` to `.nogrep/` with the detected taxonomy. Use the `dynamic_taxonomy` from Step 2:
200
+
201
+ ```bash
202
+ cat > .nogrep/_taxonomy.json << 'TAXONOMY_EOF'
203
+ {
204
+ "static": {
205
+ "layer": ["presentation", "business", "data", "infrastructure", "cross-cutting"],
206
+ "concern": ["security", "performance", "caching", "validation", "error-handling", "idempotency", "observability"],
207
+ "type": ["module", "flow", "entity", "integration", "config", "ui", "test"]
208
+ },
209
+ "dynamic": {
210
+ "domain": <DOMAIN_VALUES_FROM_STEP_2>,
211
+ "tech": <TECH_VALUES_FROM_STEP_2>
212
+ },
213
+ "custom": {}
214
+ }
215
+ TAXONOMY_EOF
216
+ ```
217
+
218
+ ---
219
+
220
+ ## Step 7 — Enable nogrep
221
+
222
+ ```bash
223
+ node "${CLAUDE_PLUGIN_ROOT}/dist/settings.js" --set enabled=true
224
+ ```
225
+
226
+ ---
227
+
228
+ ## Done
229
+
230
+ Tell the user:
231
+
232
+ > nogrep initialized successfully. The `.nogrep/` directory contains your codebase index.
233
+ >
234
+ > - **Context nodes:** one per domain/flow in `.nogrep/domains/` and `.nogrep/flows/`
235
+ > - **Index:** `.nogrep/_index.json` — reverse lookup by tags, keywords, and paths
236
+ > - **Registry:** `.nogrep/_registry.json` — maps source paths to context files
237
+ >
238
+ > nogrep is now enabled. Hooks will automatically inject context when you search.
239
+ >
240
+ > To update after code changes: `/nogrep:update`
241
+ > To check index health: `/nogrep:status`
@@ -0,0 +1,11 @@
1
+ Disable nogrep for this project.
2
+
3
+ Run this command to disable nogrep:
4
+
5
+ ```bash
6
+ node "${CLAUDE_PLUGIN_ROOT}/dist/settings.js" --set enabled=false
7
+ ```
8
+
9
+ Tell the user:
10
+
11
+ > nogrep is now disabled. Context injection is paused. Run `/nogrep:on` to re-enable.
package/commands/on.md ADDED
@@ -0,0 +1,21 @@
1
+ Enable nogrep for this project.
2
+
3
+ Run this command to enable nogrep:
4
+
5
+ ```bash
6
+ node "${CLAUDE_PLUGIN_ROOT}/dist/settings.js" --set enabled=true
7
+ ```
8
+
9
+ Then check if the nogrep index exists:
10
+
11
+ ```bash
12
+ test -f .nogrep/_index.json && echo "INDEX_EXISTS" || echo "INDEX_MISSING"
13
+ ```
14
+
15
+ If the index is missing, tell the user:
16
+
17
+ > nogrep is now enabled, but no index exists yet. Run `/nogrep:init` to generate the codebase index.
18
+
19
+ If the index exists, tell the user:
20
+
21
+ > nogrep is now enabled. Context will be injected automatically.
@@ -0,0 +1,13 @@
1
+ ---
2
+ allowed-tools: Bash(node *)
3
+ ---
4
+
5
+ Run the nogrep query system to find relevant context files for the given question.
6
+
7
+ ```bash
8
+ node "${CLAUDE_PLUGIN_ROOT}/dist/query.js" --question "$ARGUMENTS" --format summary --limit 5
9
+ ```
10
+
11
+ If results are returned, read the top context files to understand the relevant parts of the codebase before exploring source code directly.
12
+
13
+ If no results are found, let the user know and suggest they run `/nogrep:init` if the index hasn't been created yet.
@@ -0,0 +1,15 @@
1
+ Show the current status of the nogrep index.
2
+
3
+ Run the validation script:
4
+
5
+ ```bash
6
+ node "${CLAUDE_PLUGIN_ROOT}/dist/validate.js" --format text
7
+ ```
8
+
9
+ If the command fails, tell the user:
10
+
11
+ > No nogrep index found. Run `/nogrep:init` to generate the codebase index.
12
+
13
+ If it succeeds, display the output to the user. If there are stale nodes, suggest:
14
+
15
+ > Run `/nogrep:update` to refresh stale nodes.
@@ -0,0 +1,89 @@
1
+ Update stale nogrep context nodes based on recent changes.
2
+
3
+ ## Step 1: Check for stale nodes
4
+
5
+ Run the validation script to find stale nodes:
6
+
7
+ ```bash
8
+ node "${CLAUDE_PLUGIN_ROOT}/dist/validate.js" --format json
9
+ ```
10
+
11
+ If there are no stale nodes, tell the user:
12
+
13
+ > All nogrep context nodes are up to date. Nothing to update.
14
+
15
+ If the validation fails because no index exists, tell the user:
16
+
17
+ > No nogrep index found. Run `/nogrep:init` first.
18
+
19
+ ## Step 2: Identify changed files
20
+
21
+ Run git diff to find recently changed files:
22
+
23
+ ```bash
24
+ git diff origin/main --name-only 2>/dev/null || git diff HEAD~10 --name-only 2>/dev/null || echo "NO_GIT"
25
+ ```
26
+
27
+ If git is not available, tell the user you'll re-analyze all stale nodes based on hash mismatches.
28
+
29
+ ## Step 3: Map changes to affected nodes
30
+
31
+ Read `.nogrep/_registry.json` to map changed files to their context nodes.
32
+
33
+ For each stale node from Step 1:
34
+ 1. Read the existing context file to extract `## Manual Notes` content
35
+ 2. Read the source files listed in the node's `src_paths` frontmatter
36
+ 3. Use the trimming script to get signatures:
37
+
38
+ ```bash
39
+ node "${CLAUDE_PLUGIN_ROOT}/dist/trim.js" <source_file_paths>
40
+ ```
41
+
42
+ ## Step 4: Re-analyze each affected cluster
43
+
44
+ For each stale node, analyze the trimmed source and generate an updated context node.
45
+
46
+ Read `.nogrep/_taxonomy.json` for allowed tag values.
47
+
48
+ Return JSON only, no prose, no markdown fences:
49
+
50
+ ```
51
+ {
52
+ "purpose": "2-3 sentences MAX. Business intent, not technical description.",
53
+ "public_surface": ["list of exported functions/routes/events other domains use"],
54
+ "does_not_own": ["what this module delegates elsewhere, with → target domain"],
55
+ "external_deps": [{"name": "lib", "usage": "what it's used for"}],
56
+ "tags": {
57
+ "domain": [],
58
+ "layer": [],
59
+ "tech": [],
60
+ "concern": [],
61
+ "type": []
62
+ },
63
+ "keywords": ["terms a developer would search to find this domain"],
64
+ "gotchas": ["max 5 non-obvious behaviors, footguns, or constraints"]
65
+ }
66
+ ```
67
+
68
+ Rules:
69
+ - purpose: max 3 sentences
70
+ - gotchas: max 5 items
71
+ - keywords: 5-15 items
72
+ - tags: use taxonomy values only, never invent new tag values
73
+ - does_not_own: include explicit redirections ("email delivery → notifications")
74
+
75
+ ## Step 5: Write updates
76
+
77
+ Combine all updated node results with any unchanged nodes. Pipe the full set as JSON to the writer:
78
+
79
+ ```bash
80
+ echo '<json_input>' | node "${CLAUDE_PLUGIN_ROOT}/dist/write.js" --root .
81
+ ```
82
+
83
+ The writer automatically preserves `## Manual Notes` sections from existing files.
84
+
85
+ ## Step 6: Confirm
86
+
87
+ Tell the user which nodes were updated and suggest reviewing the changes:
88
+
89
+ > Updated N nogrep context nodes. Run `git diff .nogrep/` to review changes.
@@ -0,0 +1,12 @@
1
+ // scripts/types.ts
2
+ var NogrepError = class extends Error {
3
+ constructor(message, code) {
4
+ super(message);
5
+ this.code = code;
6
+ }
7
+ };
8
+
9
+ export {
10
+ NogrepError
11
+ };
12
+ //# sourceMappingURL=chunk-SMUAF6SM.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../scripts/types.ts"],"sourcesContent":["// --- Directory / File types ---\n\nexport interface DirectoryNode {\n name: string\n path: string\n type: 'file' | 'directory'\n children?: DirectoryNode[]\n}\n\nexport interface ManifestFile {\n path: string\n type: string\n depth: number\n}\n\nexport interface ChurnEntry {\n path: string\n changes: number\n}\n\nexport interface FileSize {\n path: string\n bytes: number\n}\n\n// --- Signal collection ---\n\nexport interface SignalResult {\n directoryTree: DirectoryNode[]\n extensionMap: Record<string, number>\n manifests: ManifestFile[]\n entryPoints: string[]\n gitChurn: ChurnEntry[]\n largeFiles: FileSize[]\n envFiles: string[]\n testFiles: string[]\n}\n\n// --- Stack detection ---\n\nexport interface StackConventions {\n entryPattern: string\n testPattern: string\n configLocation: string\n}\n\nexport interface DomainCluster {\n name: string\n path: string\n confidence: number\n}\n\nexport interface StackResult {\n primaryLanguage: string\n frameworks: string[]\n architecture: 'monolith' | 'monorepo' | 'multi-repo' | 'microservice' | 'library'\n domainClusters: DomainCluster[]\n conventions: StackConventions\n stackHints: string\n dynamicTaxonomy: { domain: string[]; tech: string[] }\n}\n\n// --- Tags ---\n\nexport interface TagSet {\n domain: string[]\n layer: string[]\n tech: string[]\n concern: string[]\n type: string[]\n}\n\nexport interface Taxonomy {\n static: {\n layer: string[]\n concern: string[]\n type: string[]\n }\n dynamic: {\n domain: string[]\n tech: string[]\n }\n custom: Record<string, string[]>\n}\n\n// --- Relations ---\n\nexport interface Relation {\n id: string\n reason: string\n}\n\nexport interface ExternalDep {\n name: string\n usage: string\n}\n\nexport interface SyncMeta {\n commit: string\n timestamp: string\n srcHash: string\n}\n\n// --- Context nodes ---\n\nexport interface NodeResult {\n id: string\n title: string\n category: 'domain' | 'architecture' | 'flow' | 
'entity'\n tags: TagSet\n relatesTo: Relation[]\n inverseRelations: Relation[]\n srcPaths: string[]\n keywords: string[]\n lastSynced: SyncMeta\n purpose: string\n publicSurface: string[]\n doesNotOwn: string[]\n externalDeps: ExternalDep[]\n gotchas: string[]\n}\n\n// --- Index ---\n\nexport interface PathEntry {\n context: string\n tags: string[]\n}\n\nexport interface IndexJson {\n version: string\n generatedAt: string\n commit: string\n stack: Pick<StackResult, 'primaryLanguage' | 'frameworks' | 'architecture'>\n tags: Record<string, string[]>\n keywords: Record<string, string[]>\n paths: Record<string, PathEntry>\n}\n\n// --- Registry ---\n\nexport interface RegistryMapping {\n glob: string\n contextFile: string\n watch: boolean\n}\n\nexport interface RegistryJson {\n mappings: RegistryMapping[]\n}\n\n// --- Query ---\n\nexport interface RankedResult {\n contextFile: string\n score: number\n matchedOn: string[]\n summary: string\n}\n\n// --- Validation ---\n\nexport interface StaleResult {\n file: string\n isStale: boolean\n reason?: string\n}\n\n// --- Settings ---\n\nexport interface NogrepSettings {\n enabled: boolean\n}\n\n// --- Errors ---\n\nexport type NogrepErrorCode = 'NO_INDEX' | 'NO_GIT' | 'IO_ERROR' | 'STALE'\n\nexport class NogrepError extends Error {\n constructor(\n message: string,\n public code: NogrepErrorCode,\n ) {\n super(message)\n }\n}\n"],"mappings":";AAkLO,IAAM,cAAN,cAA0B,MAAM;AAAA,EACrC,YACE,SACO,MACP;AACA,UAAM,OAAO;AAFN;AAAA,EAGT;AACF;","names":[]}
@@ -0,0 +1,12 @@
1
+ import { Taxonomy, IndexJson, RankedResult } from './types.js';
2
+
3
+ declare function extractTerms(question: string, taxonomy: Taxonomy): {
4
+ tags: string[];
5
+ keywords: string[];
6
+ };
7
+ declare function resolveQuery(terms: {
8
+ tags: string[];
9
+ keywords: string[];
10
+ }, index: IndexJson, limit?: number): RankedResult[];
11
+
12
+ export { extractTerms, resolveQuery };