@comfanion/usethis_search 4.4.0 → 4.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/api.ts CHANGED
@@ -7,16 +7,38 @@
7
7
 
8
8
  import { GraphDB } from "./vectorizer/graph-db"
9
9
 
10
- // Global GraphDB instance (shared across plugins)
10
+ // Global GraphDB instance (shared across plugins).
11
+ // NOTE: This is a weak reference — the GraphDB may be closed if the indexer
12
+ // is evicted from the pool. All API methods guard against stale references.
11
13
  let graphDBInstance: GraphDB | null = null
12
14
 
13
15
  /**
14
- * Initialize API with GraphDB instance
16
+ * Initialize API with GraphDB instance.
17
+ * Called by file-indexer on startup. The instance may become stale
18
+ * if the indexer pool evicts the owning indexer — API methods handle this.
15
19
  */
16
20
  export function initGraphAPI(db: GraphDB): void {
17
21
  graphDBInstance = db
18
22
  }
19
23
 
24
+ /**
25
+ * Clear the API reference (called when the owning GraphDB is closed).
26
+ */
27
+ export function clearGraphAPI(): void {
28
+ graphDBInstance = null
29
+ }
30
+
31
+ /** Check if the stored GraphDB reference is still usable. */
32
+ function isGraphAlive(): boolean {
33
+ if (!graphDBInstance) return false
34
+ // GraphDB sets db=null in close() — use that as liveness check
35
+ try {
36
+ return (graphDBInstance as any).initialized === true
37
+ } catch {
38
+ return false
39
+ }
40
+ }
41
+
20
42
  /**
21
43
  * Get related files for a given file path
22
44
  *
@@ -39,21 +61,18 @@ export async function getRelatedFiles(
39
61
  filePath: string,
40
62
  maxDepth: number = 1
41
63
  ): Promise<{path: string, relation: string, weight: number}[]> {
42
- if (!graphDBInstance) {
43
- console.warn("[usethis_search API] GraphDB not initialized. Returning empty array.")
44
- return []
45
- }
64
+ if (!isGraphAlive()) return []
46
65
 
47
66
  try {
48
67
  const chunkId = `file:${filePath}`
49
- const related = await graphDBInstance.getRelatedFiles(chunkId, maxDepth)
68
+ const related = await graphDBInstance!.getRelatedFiles(chunkId, maxDepth)
50
69
 
51
70
  // Filter out the input file itself (it might appear in the graph)
52
71
  const filtered = related.filter(r => r.path !== filePath)
53
72
 
54
73
  return filtered
55
- } catch (error) {
56
- console.error(`[usethis_search API] Error getting related files for ${filePath}:`, error)
74
+ } catch {
75
+ // GraphDB may have been closed between the alive check and usage — non-fatal
57
76
  return []
58
77
  }
59
78
  }
@@ -62,22 +81,20 @@ export async function getRelatedFiles(
62
81
  * Check if graph API is available
63
82
  */
64
83
  export function isGraphAPIAvailable(): boolean {
65
- return graphDBInstance !== null
84
+ return isGraphAlive()
66
85
  }
67
86
 
68
87
  /**
69
88
  * Get all graph entries for a file (both incoming and outgoing)
70
89
  */
71
90
  export async function getGraphEntries(filePath: string) {
72
- if (!graphDBInstance) {
73
- return null
74
- }
91
+ if (!isGraphAlive()) return null
75
92
 
76
93
  try {
77
94
  const chunkId = `file:${filePath}`
78
95
  const [outgoing, incoming] = await Promise.all([
79
- graphDBInstance.getOutgoing(chunkId),
80
- graphDBInstance.getIncoming(chunkId),
96
+ graphDBInstance!.getOutgoing(chunkId),
97
+ graphDBInstance!.getIncoming(chunkId),
81
98
  ])
82
99
 
83
100
  return {
@@ -85,8 +102,8 @@ export async function getGraphEntries(filePath: string) {
85
102
  extends: outgoing.filter(t => t.predicate === "extends"),
86
103
  used_by: incoming,
87
104
  }
88
- } catch (error) {
89
- console.error(`[usethis_search API] Error getting graph entries for ${filePath}:`, error)
105
+ } catch {
106
+ // GraphDB may have been closed — non-fatal
90
107
  return null
91
108
  }
92
109
  }
package/cache/manager.ts CHANGED
@@ -224,7 +224,12 @@ class WorkspaceCache {
224
224
  if (this._shutdownRegistered) return
225
225
  this._shutdownRegistered = true
226
226
 
227
+ let flushed = false
227
228
  const flush = () => {
229
+ // Guard against double flush (SIGINT/SIGTERM → exit → flush again)
230
+ if (flushed) return
231
+ flushed = true
232
+
228
233
  // Synchronous-ish flush — best effort
229
234
  // Node process is exiting, so we can't await.
230
235
  // Use writeFileSync as last resort.
@@ -555,7 +560,7 @@ class WorkspaceCache {
555
560
  let removed = 0
556
561
 
557
562
  for (const [chunkId, entry] of this.entries) {
558
- if (entry.path === filePath) {
563
+ if (entry.path === filePath || entry.path.startsWith(filePath)) {
559
564
  this.entries.delete(chunkId)
560
565
  this._totalTokens -= entry.tokens
561
566
  removed++
@@ -570,11 +575,11 @@ class WorkspaceCache {
570
575
  * Get all chunks sorted by: search-main first (by score desc), then search-graph, then manual.
571
576
  */
572
577
  getAll(): WorkspaceEntry[] {
573
- return Array.from(this.entries.values()).sort((a, b) => {
574
- // Main chunks first
575
- const roleOrder = { "search-main": 0, "search-graph": 1, manual: 2 }
576
- const ra = roleOrder[a.role] ?? 2
577
- const rb = roleOrder[b.role] ?? 2
578
+ return Array.from(this.entries.values()).sort((a, b) => {
579
+ // Main chunks first, then context, then graph, then manual
580
+ const roleOrder: Record<string, number> = { "search-main": 0, "search-context": 1, "search-graph": 2, manual: 3 }
581
+ const ra = roleOrder[a.role] ?? 3
582
+ const rb = roleOrder[b.role] ?? 3
578
583
  if (ra !== rb) return ra - rb
579
584
 
580
585
  // Within same role: higher score first
@@ -704,8 +709,8 @@ class WorkspaceCache {
704
709
  * This method only detects changes and removes orphaned chunks.
705
710
  * For full content refresh, chunks must be re-attached from search.
706
711
  *
707
- * Called by message.before hook — ensures AI sees fresh file status.
708
- * Cost: ~2ms for 30 files (stat only, no readFile).
712
+ * Called by message.before hook — ensures AI sees fresh file status.
713
+ * Cost: ~5-50ms for 30 files (reads file content to check chunk presence).
709
714
  *
710
715
  * Returns { updated, removed } counts.
711
716
  */
@@ -729,16 +734,22 @@ class WorkspaceCache {
729
734
  try {
730
735
  const fullPath = path.join(this.projectRoot, filePath)
731
736
  const content = await fs.readFile(fullPath, "utf-8")
732
- const newHash = this.hashContent(content)
733
737
 
734
- // Check if file changed
735
- const fileChanged = chunks.some(c => c.contentHash !== newHash)
738
+ // Check if any chunk's content is no longer present in the file.
739
+ // contentHash is per-chunk, not per-file, so direct hash comparison
740
+ // doesn't work. Instead we check if each chunk's text is still in the file.
741
+ let fileChanged = false
742
+ for (const chunk of chunks) {
743
+ if (!content.includes(chunk.content.trim())) {
744
+ fileChanged = true
745
+ break
746
+ }
747
+ }
748
+
736
749
  if (fileChanged) {
737
750
  // Mark all chunks from this file as stale
738
751
  // (they need re-attachment from search to get fresh content)
739
752
  stale += chunks.length
740
- // TODO(chunk-7): Implement stale flag on WorkspaceEntry
741
- // For now, chunks remain in cache but are marked as needing refresh
742
753
  }
743
754
  } catch {
744
755
  // File no longer exists — remove all chunks from this file
@@ -776,12 +787,12 @@ class WorkspaceCache {
776
787
  return
777
788
  }
778
789
 
779
- // Build eviction priority list
780
- const candidates = Array.from(this.entries.values()).sort((a, b) => {
781
- // Evict graph chunks before main chunks before manual chunks
782
- const roleOrder = { "search-graph": 0, "search-main": 1, manual: 2 }
783
- const ra = roleOrder[a.role] ?? 2
784
- const rb = roleOrder[b.role] ?? 2
790
+ // Build eviction priority list
791
+ const candidates = Array.from(this.entries.values()).sort((a, b) => {
792
+ // Evict graph first, then context, then main, then manual (manual evicted last)
793
+ const roleOrder: Record<string, number> = { "search-graph": 0, "search-context": 1, "search-main": 2, manual: 3 }
794
+ const ra = roleOrder[a.role] ?? 3
795
+ const rb = roleOrder[b.role] ?? 3
785
796
  if (ra !== rb) return ra - rb
786
797
 
787
798
  // Within same role: lowest score first
package/cli.ts CHANGED
@@ -22,7 +22,7 @@
22
22
 
23
23
  import path from "path"
24
24
  import fs from "fs/promises"
25
- import { CodebaseIndexer } from "./vectorizer/index.ts"
25
+ import { CodebaseIndexer, disposeSharedModel } from "./vectorizer/index.ts"
26
26
 
27
27
  const args = process.argv.slice(2)
28
28
  const action = args[0]
@@ -257,7 +257,10 @@ async function main() {
257
257
  }
258
258
  }
259
259
 
260
- main().catch((e) => {
261
- console.error(`\n Fatal error: ${e.message}\n`)
262
- process.exit(1)
263
- })
260
+ main()
261
+ .then(() => disposeSharedModel())
262
+ .catch(async (e) => {
263
+ await disposeSharedModel()
264
+ console.error(`\n Fatal error: ${e.message}\n`)
265
+ process.exit(1)
266
+ })
package/file-indexer.ts CHANGED
@@ -3,7 +3,7 @@ import path from "path"
3
3
  import fs from "fs/promises"
4
4
  import fsSync from "fs"
5
5
 
6
- import { CodebaseIndexer } from "./vectorizer/index.ts"
6
+ import { CodebaseIndexer, getIndexer, releaseIndexer } from "./vectorizer/index.ts"
7
7
  import { initGraphAPI } from "./api"
8
8
 
9
9
  /**
@@ -161,6 +161,7 @@ async function loadConfig(projectRoot: string): Promise<VectorizerConfig> {
161
161
  const tempIndexer = new CodebaseIndexer(projectRoot, "code")
162
162
  // @ts-ignore - accessing internal method for config creation
163
163
  await tempIndexer.init()
164
+ await tempIndexer.unloadModel()
164
165
  try {
165
166
  content = await fs.readFile(configPath, "utf8")
166
167
  } catch {
@@ -252,7 +253,7 @@ async function ensureIndexOnSessionStart(
252
253
 
253
254
  for (const [indexName, indexConfig] of Object.entries(config.indexes)) {
254
255
  if (!indexConfig.enabled) continue
255
- const indexer = await new CodebaseIndexer(projectRoot, indexName).init()
256
+ const indexer = await getIndexer(projectRoot, indexName)
256
257
 
257
258
  try {
258
259
  // Initialize graph API for Mind plugin integration
@@ -274,7 +275,7 @@ async function ensureIndexOnSessionStart(
274
275
  needsWork = true
275
276
  }
276
277
  } finally {
277
- await indexer.unloadModel()
278
+ releaseIndexer(projectRoot, indexName)
278
279
  }
279
280
  }
280
281
 
@@ -289,13 +290,13 @@ async function ensureIndexOnSessionStart(
289
290
  const indexExists = await hasIndex(projectRoot, indexName)
290
291
  const startTime = Date.now()
291
292
 
292
- const indexer = await new CodebaseIndexer(projectRoot, indexName).init()
293
+ const indexer = await getIndexer(projectRoot, indexName)
293
294
  try {
294
295
  if (!indexExists) {
295
296
  log(`Creating "${indexName}" index...`)
296
- const stats = await indexer.indexAll((indexed: number, total: number, file: string) => {
297
+ const stats = await indexer.indexAll((indexed: number, total: number, file: string, _fileNum?: number, phase?: string) => {
297
298
  if (indexed % 10 === 0 || indexed === total) {
298
- logFile(`"${indexName}": ${indexed}/${total} - ${file}`)
299
+ logFile(`"${indexName}" [${phase || "?"}]: ${indexed}/${total} - ${file}`)
299
300
  }
300
301
  }, config.exclude)
301
302
  const elapsed = ((Date.now() - startTime) / 1000).toFixed(1)
@@ -307,9 +308,9 @@ async function ensureIndexOnSessionStart(
307
308
 
308
309
  if (health.needsReindex) {
309
310
  log(`Rebuilding "${indexName}" (${health.reason}: ${health.currentCount} vs ${health.expectedCount} files)...`)
310
- const stats = await indexer.indexAll((indexed: number, total: number, file: string) => {
311
+ const stats = await indexer.indexAll((indexed: number, total: number, file: string, _fileNum?: number, phase?: string) => {
311
312
  if (indexed % 10 === 0 || indexed === total) {
312
- logFile(`"${indexName}": ${indexed}/${total} - ${file}`)
313
+ logFile(`"${indexName}" [${phase || "?"}]: ${indexed}/${total} - ${file}`)
313
314
  }
314
315
  }, config.exclude)
315
316
  const elapsed = ((Date.now() - startTime) / 1000).toFixed(1)
@@ -329,8 +330,13 @@ async function ensureIndexOnSessionStart(
329
330
  }
330
331
  }
331
332
  }
333
+ } catch (e) {
334
+ const elapsed = ((Date.now() - startTime) / 1000).toFixed(1)
335
+ log(`ERROR indexing "${indexName}" after ${elapsed}s: ${(e as Error).message}`)
336
+ logFile(`ERROR "${indexName}" stack: ${(e as Error).stack || "no stack"}`)
337
+ // Continue with other indexes — don't let one failure stop everything
332
338
  } finally {
333
- await indexer.unloadModel()
339
+ releaseIndexer(projectRoot, indexName)
334
340
  }
335
341
  }
336
342
 
@@ -374,7 +380,7 @@ async function processPendingFiles(projectRoot: string, config: VectorizerConfig
374
380
  debug(`Processing ${filesToProcess.size} index(es)...`)
375
381
 
376
382
  for (const [indexName, files] of filesToProcess.entries()) {
377
- const indexer = await new CodebaseIndexer(projectRoot, indexName).init()
383
+ const indexer = await getIndexer(projectRoot, indexName)
378
384
  try {
379
385
  for (const filePath of files) {
380
386
  try {
@@ -389,7 +395,7 @@ async function processPendingFiles(projectRoot: string, config: VectorizerConfig
389
395
  }
390
396
  }
391
397
  } finally {
392
- await indexer.unloadModel()
398
+ releaseIndexer(projectRoot, indexName)
393
399
  }
394
400
  }
395
401
  }
@@ -420,6 +426,15 @@ export const FileIndexerPlugin: Plugin = async ({ directory, client }) => {
420
426
 
421
427
  log(`Plugin ACTIVE`)
422
428
 
429
+ // Catch unhandled errors that would silently kill indexing
430
+ process.on("uncaughtException", (err) => {
431
+ logFile(`FATAL uncaughtException: ${err.message}\n${err.stack || "no stack"}`)
432
+ })
433
+ process.on("unhandledRejection", (reason) => {
434
+ const msg = reason instanceof Error ? `${reason.message}\n${reason.stack}` : String(reason)
435
+ logFile(`FATAL unhandledRejection: ${msg}`)
436
+ })
437
+
423
438
  const lang = await getLanguage(directory)
424
439
  const messages = FUN_MESSAGES[lang]
425
440
 
@@ -444,6 +459,8 @@ export const FileIndexerPlugin: Plugin = async ({ directory, client }) => {
444
459
  toast(messages.done(result.totalFiles, duration), "success")
445
460
  }
446
461
  } catch (e: any) {
462
+ log(`ERROR ensureIndexOnSessionStart: ${e.message}`)
463
+ logFile(`ERROR stack: ${e.stack || "no stack"}`)
447
464
  toast(messages.error(e.message), "error")
448
465
  }
449
466
  }, 1000)
@@ -51,10 +51,10 @@ interface Message {
51
51
  /** Tools that return full workspace state in their output. */
52
52
  const WORKSPACE_TOOLS = new Set([
53
53
  "search",
54
- "workspace_list",
55
- "workspace_forget",
56
- "workspace_clear",
57
- "workspace_restore",
54
+ "list",
55
+ "forget",
56
+ "clear",
57
+ "explore",
58
58
  ])
59
59
 
60
60
  /** Minimum output length to consider pruning. Short outputs are kept as-is. */
@@ -80,7 +80,7 @@ export function createWorkspaceInjectionHandler(state: SessionState) {
80
80
  /**
81
81
  * Replace old workspace tool outputs with compact summaries.
82
82
  *
83
- * Workspace tools (search, workspace_list, etc.) return full workspace
83
+ * Workspace tools (search, list, forget, etc.) return full workspace
84
84
  * state in their output. Only the LAST such output is kept — all previous
85
85
  * ones are replaced with a 1-line summary.
86
86
  *
@@ -1,6 +1,10 @@
1
1
  /**
2
2
  * Tool Output Substitution Hook
3
3
  *
4
+ * STATUS: DISABLED — not registered in plugin index.ts.
5
+ * The workspace-injection approach (v6) replaced tool substitution.
6
+ * Kept for potential future re-enablement.
7
+ *
4
8
  * Intercepts read() tool outputs and replaces them with compact messages
5
9
  * when the file is in the workspace cache.
6
10
  *
@@ -278,69 +282,6 @@ function substituteReadOutput(output: { title: string; output: string; metadata:
278
282
  }
279
283
  }
280
284
 
281
- /**
282
- * Substitute grep() output if ALL matched files are in workspace.
283
- *
284
- * Input: { pattern: "auth", include?: "*.ts" }
285
- * Output: "src/auth.ts:10:export function login(...)\nsrc/types.ts:5:interface User {...}"
286
- *
287
- * Parse output to extract file paths, check if ALL are in workspace.
288
- * If yes: Replace with "[Pattern "auth" matched N files, all in workspace context:\n- file1\n- file2\n...]"
289
- * If partial: Keep original
290
- */
291
- function substituteGrepOutput(output: { title: string; output: string; metadata: any }, cache: WorkspaceCache): void {
292
- try {
293
- const pattern = output.metadata?.pattern || extractPatternFromTitle(output.title)
294
- if (!pattern) return
295
-
296
- // Parse grep output to extract file paths
297
- // Format: "path:line:content" or just "path"
298
- const filePaths = parseGrepOutput(output.output)
299
- if (filePaths.length === 0) return
300
-
301
- // Check if ALL files are in workspace
302
- const allInWorkspace = filePaths.every(fp => cache.has(fp))
303
- if (!allInWorkspace) return
304
-
305
- // Replace with compact message
306
- const fileList = filePaths.map(fp => `- ${fp}`).join("\n")
307
- output.output = `[Pattern "${pattern}" matched ${filePaths.length} files, all in workspace context:\n${fileList}]`
308
- } catch {
309
- // Silently fail
310
- }
311
- }
312
-
313
- /**
314
- * Substitute glob() output if ALL matched files are in workspace.
315
- *
316
- * Input: { pattern: "src/[glob-pattern].ts" }
317
- * Output: "src/auth.ts\nsrc/types.ts\nsrc/utils.ts"
318
- *
319
- * Parse output (newline-separated paths), check if ALL are in workspace.
320
- * If yes: Replace with "[Pattern matched N files, all in workspace context:\n- file1\n- file2\n...]"
321
- * If partial: Keep original
322
- */
323
- function substituteGlobOutput(output: { title: string; output: string; metadata: any }, cache: WorkspaceCache): void {
324
- try {
325
- const pattern = output.metadata?.pattern || extractPatternFromTitle(output.title)
326
- if (!pattern) return
327
-
328
- // Parse glob output (newline-separated file paths)
329
- const filePaths = parseGlobOutput(output.output)
330
- if (filePaths.length === 0) return
331
-
332
- // Check if ALL files are in workspace
333
- const allInWorkspace = filePaths.every(fp => cache.has(fp))
334
- if (!allInWorkspace) return
335
-
336
- // Replace with compact message
337
- const fileList = filePaths.map(fp => `- ${fp}`).join("\n")
338
- output.output = `[Pattern "${pattern}" matched ${filePaths.length} files, all in workspace context:\n${fileList}]`
339
- } catch {
340
- // Silently fail
341
- }
342
- }
343
-
344
285
  // ── Helpers ──────────────────────────────────────────────────────────────────
345
286
 
346
287
  /**
@@ -365,61 +306,4 @@ function extractFilePathFromTitle(title: string): string | null {
365
306
  return null
366
307
  }
367
308
 
368
- /**
369
- * Extract pattern from grep() or glob() title.
370
- * Title format: "Search for: auth" or "Find files: src/[pattern].ts" or similar
371
- */
372
- function extractPatternFromTitle(title: string): string | null {
373
- // Try common patterns
374
- const patterns = [
375
- /(?:search|find|pattern|glob).*?:\s*(.+?)(?:\s*\(|$)/i,
376
- /(?:search|find|pattern|glob)\s+(.+?)(?:\s*\(|$)/i,
377
- ]
378
-
379
- for (const pattern of patterns) {
380
- const match = title.match(pattern)
381
- if (match) {
382
- return match[1].trim()
383
- }
384
- }
385
309
 
386
- return null
387
- }
388
-
389
- /**
390
- * Parse grep output to extract file paths.
391
- *
392
- * Format variations:
393
- * - "path:line:content" (standard grep)
394
- * - "path:line" (grep -n without content)
395
- * - "path" (grep -l, list files only)
396
- *
397
- * Returns unique file paths.
398
- */
399
- function parseGrepOutput(output: string): string[] {
400
- const lines = output.split("\n").filter(l => l.trim())
401
- const paths = new Set<string>()
402
-
403
- for (const line of lines) {
404
- // Extract path (everything before first colon, or entire line if no colon)
405
- const colonIndex = line.indexOf(":")
406
- const path = colonIndex >= 0 ? line.substring(0, colonIndex) : line
407
-
408
- if (path.trim()) {
409
- paths.add(path.trim())
410
- }
411
- }
412
-
413
- return Array.from(paths)
414
- }
415
-
416
- /**
417
- * Parse glob output to extract file paths.
418
- *
419
- * Format: newline-separated file paths
420
- * Returns unique file paths.
421
- */
422
- function parseGlobOutput(output: string): string[] {
423
- const lines = output.split("\n").filter(l => l.trim())
424
- return Array.from(new Set(lines.map(l => l.trim())))
425
- }
package/index.ts CHANGED
@@ -1,7 +1,10 @@
1
1
  import type { Plugin } from "@opencode-ai/plugin"
2
+ import path from "path"
3
+ import fsSync from "fs"
2
4
 
3
5
  import search from "./tools/search"
4
- import { workspace_list, workspace_forget, workspace_clear, workspace_restore } from "./tools/workspace"
6
+ import { list, forget, clear } from "./tools/workspace"
7
+ import { explore } from "./tools/graph"
5
8
  import FileIndexerPlugin from "./file-indexer"
6
9
  import { workspaceCache } from "./cache/manager"
7
10
  import { createWorkspaceInjectionHandler } from "./hooks/message-before"
@@ -29,18 +32,26 @@ const UsethisSearchPlugin: Plugin = async ({ directory, client }) => {
29
32
  try {
30
33
  const hooks = await FileIndexerPlugin({ directory, client } as any)
31
34
  fileIndexerEvent = hooks?.event || null
32
- } catch {
35
+ } catch (e) {
33
36
  // file indexer init failed — tools still work, just no auto-indexing
37
+ // Log to file so the error is not lost silently
38
+ try {
39
+ const logPath = path.join(directory, ".opencode", "indexer.log")
40
+ const msg = `${new Date().toISOString()} ERROR FileIndexerPlugin init: ${(e as Error).message}\n${(e as Error).stack || ""}\n`
41
+ fsSync.appendFileSync(logPath, msg)
42
+ } catch {
43
+ // can't even write log — truly silent
44
+ }
34
45
  }
35
46
 
36
47
  return {
37
48
  // ── Tools ───────────────────────────────────────────────────────────
38
49
  tool: {
39
50
  search,
40
- workspace_list,
41
- workspace_forget,
42
- workspace_clear,
43
- workspace_restore,
51
+ list,
52
+ forget,
53
+ clear,
54
+ explore,
44
55
  },
45
56
 
46
57
  // ── Hooks ───────────────────────────────────────────────────────────
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@comfanion/usethis_search",
3
- "version": "4.4.0",
4
- "description": "OpenCode plugin: semantic search with query decomposition, RRF merge, and context-efficient workspace (v4.4.0)",
3
+ "version": "4.5.1",
4
+ "description": "OpenCode plugin: semantic search with query decomposition, RRF merge, and context-efficient workspace (v4.5.1)",
5
5
  "type": "module",
6
6
  "main": "./index.ts",
7
7
  "exports": {
@@ -27,6 +27,7 @@
27
27
  "tools/workspace.ts",
28
28
  "tools/workspace-state.ts",
29
29
  "tools/read-interceptor.ts",
30
+ "tools/graph.ts",
30
31
  "cache/manager.ts",
31
32
  "hooks/message-before.ts",
32
33
  "hooks/tool-substitution.ts",