@comfanion/usethis_search 3.0.0-dev.5 → 3.0.0-dev.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/api.ts CHANGED
@@ -6,10 +6,14 @@
6
6
  */
7
7
 
8
8
  import { GraphDB } from "./vectorizer/graph-db"
9
+ import { CodebaseIndexer } from "./vectorizer/index.js"
9
10
 
10
11
  // Global GraphDB instance (shared across the plugin)
11
12
  let graphDBInstance: GraphDB | null = null
12
13
 
14
+ // Export CodebaseIndexer for read-interceptor
15
+ export { CodebaseIndexer }
16
+
13
17
  /**
14
18
  * Initialize the API with GraphDB instance
15
19
  */
package/index.ts CHANGED
@@ -2,9 +2,11 @@ import type { Plugin } from "@opencode-ai/plugin"
2
2
 
3
3
  import search from "./tools/search"
4
4
  import codeindex from "./tools/codeindex"
5
- import readInterceptor from "./tools/read-interceptor"
6
5
  import FileIndexerPlugin from "./file-indexer"
7
6
 
7
+ // Export API for plugin-to-plugin communication
8
+ export * from "./api"
9
+
8
10
  const UsethisSearchPlugin: Plugin = async ({ directory, client }) => {
9
11
  // Start file indexer (background indexing + event handling)
10
12
  let fileIndexerEvent: ((args: any) => Promise<void>) | null = null
@@ -19,7 +21,6 @@ const UsethisSearchPlugin: Plugin = async ({ directory, client }) => {
19
21
  tool: {
20
22
  search,
21
23
  codeindex,
22
- read: readInterceptor,
23
24
  },
24
25
 
25
26
  event: async (args: any) => {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@comfanion/usethis_search",
3
- "version": "3.0.0-dev.5",
3
+ "version": "3.0.0-dev.7",
4
4
  "description": "OpenCode plugin: semantic search with graph-based context (v3: graph relations, 1-hop context, LSP + regex analyzers)",
5
5
  "type": "module",
6
6
  "main": "./index.ts",
@@ -16,7 +16,6 @@
16
16
  "file-indexer.ts",
17
17
  "tools/search.ts",
18
18
  "tools/codeindex.ts",
19
- "tools/read-interceptor.ts",
20
19
  "vectorizer/index.js",
21
20
  "vectorizer/content-cleaner.ts",
22
21
  "vectorizer/metadata-extractor.ts",
package/tools/codeindex.ts CHANGED (file header reconstructed: hunk content is TypeScript codeindex validation logic, not package.json)
@@ -313,16 +313,27 @@ Available indexes:
313
313
  }
314
314
 
315
315
  // 1. Get all triples from graph
316
- const allTriples = await graphDB.getAllTriples()
316
+ let allTriples: any[] = []
317
+ try {
318
+ allTriples = await graphDB.getAllTriples()
319
+ } catch (e: any) {
320
+ await indexer.unloadModel()
321
+ return `## Graph Validation: "${indexName}"\n\n**Error:** Failed to read graph database: ${e.message || String(e)}\n\nThe graph database may be corrupted. Run: codeindex({ action: "reindex", index: "${indexName}" })`
322
+ }
317
323
 
318
324
  // 2. Get all chunk IDs from vector DB
319
325
  const knownChunkIds = new Set<string>()
320
326
  const tables = await db.tableNames()
321
327
  if (tables.includes("chunks")) {
322
328
  const table = await db.openTable("chunks")
323
- const rows = await table.search([0]).limit(100000).execute()
324
- for (const row of rows) {
325
- if (row.chunk_id) knownChunkIds.add(row.chunk_id)
329
+ try {
330
+ const rows = await table.search([0]).limit(100000).execute()
331
+ for (const row of rows) {
332
+ if (row.chunk_id) knownChunkIds.add(row.chunk_id)
333
+ }
334
+ } catch (e: any) {
335
+ await indexer.unloadModel()
336
+ return `## Graph Validation: "${indexName}"\n\n**Error:** Failed to read vector database: ${e.message || String(e)}\n\nThe vector database may be corrupted. Run: codeindex({ action: "reindex", index: "${indexName}" })`
326
337
  }
327
338
  }
328
339
 
@@ -357,7 +368,13 @@ Available indexes:
357
368
  }
358
369
 
359
370
  // 4. Get file metadata stats
360
- const fileMeta = await graphDB.getAllFileMeta()
371
+ let fileMeta: Array<{ filePath: string; hash: string; timestamp: number }> = []
372
+ try {
373
+ fileMeta = await graphDB.getAllFileMeta()
374
+ } catch (e: any) {
375
+ // Non-fatal - continue validation without metadata
376
+ console.warn(`Warning: Failed to get file metadata: ${e.message || String(e)}`)
377
+ }
361
378
 
362
379
  await indexer.unloadModel()
363
380
 
package/tools/read-interceptor.ts DELETED (file header reconstructed: this hunk removes the read-interceptor tool, matching its removal from index.ts and from the package.json "files" list above)
@@ -1,149 +0,0 @@
1
- import { tool } from "@opencode-ai/plugin"
2
- import path from "path"
3
- import fs from "fs/promises"
4
-
5
- import { CodebaseIndexer } from "../vectorizer/index.js"
6
-
7
- // FR-043: Logging for intercepted Read() calls
8
- const DEBUG = process.env.DEBUG?.includes("vectorizer") || process.env.DEBUG === "*"
9
-
10
- interface ReadLogEntry {
11
- timestamp: number
12
- filePath: string
13
- relPath: string
14
- chunksFound: number
15
- relatedContextCount: number
16
- durationMs: number
17
- fallback: boolean
18
- }
19
-
20
- const LOG_MAX_ENTRIES = 500
21
-
22
- /**
23
- * Append a log entry to the Read() interception log file.
24
- * Non-blocking, non-fatal — errors are silently ignored.
25
- */
26
- async function logReadInterception(projectRoot: string, entry: ReadLogEntry): Promise<void> {
27
- try {
28
- const logPath = path.join(projectRoot, ".opencode", "vectors", "read-intercept.log.json")
29
- await fs.mkdir(path.dirname(logPath), { recursive: true })
30
-
31
- let entries: ReadLogEntry[] = []
32
- try {
33
- const raw = await fs.readFile(logPath, "utf-8")
34
- entries = JSON.parse(raw)
35
- } catch {
36
- // file doesn't exist or is invalid — start fresh
37
- }
38
-
39
- entries.push(entry)
40
- // Cap log size to avoid unbounded growth
41
- if (entries.length > LOG_MAX_ENTRIES) {
42
- entries = entries.slice(-LOG_MAX_ENTRIES)
43
- }
44
-
45
- await fs.writeFile(logPath, JSON.stringify(entries, null, 2), "utf-8")
46
- } catch {
47
- // non-fatal — logging must never break Read
48
- }
49
- }
50
-
51
- export default tool({
52
- description: `Read file with graph-aware context attachment. When available, this tool searches the file in the index and returns content + related context from the graph (imports, links, etc.).
53
-
54
- Use this instead of the standard Read tool for better context awareness.`,
55
-
56
- args: {
57
- filePath: tool.schema.string().describe("Path to the file to read"),
58
- },
59
-
60
- async execute(args) {
61
- const startTime = Date.now()
62
- const projectRoot = process.cwd()
63
- const filePath = path.isAbsolute(args.filePath) ? args.filePath : path.join(projectRoot, args.filePath)
64
-
65
- const relPath = path.relative(projectRoot, filePath)
66
-
67
- if (DEBUG) {
68
- console.log(`[read-interceptor] Intercepted Read("${relPath}")`)
69
- }
70
-
71
- // Resilient search: if vector index is corrupted or unavailable, fall back gracefully
72
- let fileChunks: any[] = []
73
- let allRelated: any[] = []
74
- let searchFailed = false
75
-
76
- try {
77
- const indexer = await new CodebaseIndexer(projectRoot, "code").init()
78
- try {
79
- const results = await indexer.search(relPath, 20, false, {})
80
- fileChunks = results.filter((r: any) => r.file === relPath)
81
-
82
- allRelated = fileChunks
83
- .flatMap((c: any) => c.relatedContext || [])
84
- .filter((r: any, i: number, arr: any[]) => arr.findIndex((x: any) => x.chunk_id === r.chunk_id) === i)
85
- } catch (searchErr: any) {
86
- if (DEBUG) {
87
- console.log(`[read-interceptor] Search failed for "${relPath}": ${searchErr.message}`)
88
- }
89
- searchFailed = true
90
- }
91
- await indexer.unloadModel()
92
- } catch (initErr: any) {
93
- if (DEBUG) {
94
- console.log(`[read-interceptor] Indexer init failed: ${initErr.message}`)
95
- }
96
- searchFailed = true
97
- }
98
-
99
- const durationMs = Date.now() - startTime
100
- const fallback = fileChunks.length === 0
101
-
102
- // FR-043: Log the interception asynchronously (non-blocking)
103
- logReadInterception(projectRoot, {
104
- timestamp: startTime,
105
- filePath: args.filePath,
106
- relPath,
107
- chunksFound: fileChunks.length,
108
- relatedContextCount: allRelated.length,
109
- durationMs,
110
- fallback,
111
- }).catch(() => {})
112
-
113
- if (DEBUG) {
114
- console.log(
115
- `[read-interceptor] ${relPath}: ${fileChunks.length} chunks, ${allRelated.length} related, ${durationMs}ms${fallback ? " (fallback)" : ""}${searchFailed ? " (search error)" : ""}`
116
- )
117
- }
118
-
119
- if (fallback) {
120
- const reason = searchFailed
121
- ? `Search index unavailable (possibly corrupted). Run codeindex({ action: "reindex", index: "code" }) to rebuild.`
122
- : `File "${relPath}" not indexed. Use original Read tool or run codeindex({ action: "reindex", index: "code" })`
123
- return reason
124
- }
125
-
126
- let output = `## ${relPath}\n\n`
127
-
128
- output += `### Content\n\n`
129
- for (const chunk of fileChunks) {
130
- output += chunk.content + "\n\n"
131
- }
132
-
133
- if (allRelated.length > 0) {
134
- output += `### Related Context\n\n`
135
- for (const rel of allRelated) {
136
- const snippet = rel.content.length > 300
137
- ? rel.content.substring(0, 300) + "..."
138
- : rel.content
139
- output += `**${rel.file}** (${rel.relation})\n`
140
- output += `\`\`\`\n${snippet}\n\`\`\`\n\n`
141
- }
142
- }
143
-
144
- return output
145
- },
146
- })
147
-
148
- // Export for testing
149
- export { logReadInterception, ReadLogEntry }