opencode-semantic-search 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AGENTS.md +165 -0
- package/README.md +138 -0
- package/SETUP.md +541 -0
- package/bin/opencode-semantic-search.mjs +70 -0
- package/bun.lock +61 -0
- package/index.ts +138 -0
- package/install.sh +260 -0
- package/package.json +67 -0
- package/src/chunker/fallback.ts +77 -0
- package/src/chunker/index.ts +16 -0
- package/src/chunker/treesitter.ts +119 -0
- package/src/config.ts +157 -0
- package/src/diagnostics/bundle.ts +63 -0
- package/src/diagnostics/routing.ts +37 -0
- package/src/embedder/interface.ts +62 -0
- package/src/embedder/ollama.ts +60 -0
- package/src/embedder/openai.ts +71 -0
- package/src/indexer/delta.ts +165 -0
- package/src/indexer/gc.ts +10 -0
- package/src/indexer/incremental.ts +105 -0
- package/src/indexer/pipeline.test.ts +126 -0
- package/src/indexer/pipeline.ts +394 -0
- package/src/indexer/pool.ts +25 -0
- package/src/indexer/resume.ts +14 -0
- package/src/logger.ts +121 -0
- package/src/runtime.ts +111 -0
- package/src/search/context.ts +17 -0
- package/src/search/hybrid.ts +65 -0
- package/src/store/schema.sql +31 -0
- package/src/store/sqlite.ts +269 -0
- package/src/tools/diagnostic_bundle.ts +34 -0
- package/src/tools/index_status.ts +73 -0
- package/src/tools/reindex.ts +71 -0
- package/src/tools/semantic_search.ts +91 -0
- package/src/tools/smart_grep.ts +198 -0
- package/src/tui_toast.ts +191 -0
- package/src/types.d.ts +1 -0
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import type { ToolContext } from "@opencode-ai/plugin";
|
|
2
|
+
import { tool } from "@opencode-ai/plugin";
|
|
3
|
+
import type { RuntimeContext } from "../runtime";
|
|
4
|
+
|
|
5
|
+
function formatBackgroundLine(runtime: RuntimeContext): string {
|
|
6
|
+
const p = runtime.indexingProgress;
|
|
7
|
+
if (p.phase === "idle") {
|
|
8
|
+
return "idle";
|
|
9
|
+
}
|
|
10
|
+
const parts: string[] = [p.phase];
|
|
11
|
+
if (p.total > 0) {
|
|
12
|
+
parts.push(`${p.current}/${p.total}`);
|
|
13
|
+
}
|
|
14
|
+
if (p.label) {
|
|
15
|
+
parts.push(`(${p.label})`);
|
|
16
|
+
}
|
|
17
|
+
if (p.source) {
|
|
18
|
+
parts.push(`[${p.source}]`);
|
|
19
|
+
}
|
|
20
|
+
return parts.join(" ");
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
export async function executeIndexStatus(
|
|
24
|
+
runtime: RuntimeContext,
|
|
25
|
+
ctx?: Pick<ToolContext, "metadata">
|
|
26
|
+
): Promise<string> {
|
|
27
|
+
ctx?.metadata?.({ title: "index_status", metadata: { phase: "collecting" } });
|
|
28
|
+
const stats = runtime.store.stats();
|
|
29
|
+
const providerHealthy = await runtime.embedder.healthcheck();
|
|
30
|
+
await runtime.logger.debug("tool.index_status", {
|
|
31
|
+
message: "Index status requested",
|
|
32
|
+
extra: { filesIndexed: stats.files, chunks: stats.chunks, providerHealthy }
|
|
33
|
+
});
|
|
34
|
+
const bg = formatBackgroundLine(runtime);
|
|
35
|
+
const lastSync = stats.lastSync ?? "never";
|
|
36
|
+
const endpoint = runtime.config.embedding.api_base;
|
|
37
|
+
const model = runtime.config.embedding.model;
|
|
38
|
+
const health = providerHealthy ? "healthy" : "unreachable or unhealthy";
|
|
39
|
+
const body = [
|
|
40
|
+
"## Semantic index status",
|
|
41
|
+
"",
|
|
42
|
+
"| Field | Value |",
|
|
43
|
+
"|-------|-------|",
|
|
44
|
+
`| Files indexed | ${stats.files} |`,
|
|
45
|
+
`| Chunks | ${stats.chunks} |`,
|
|
46
|
+
`| Last sync | ${lastSync} |`,
|
|
47
|
+
`| Embedding model | ${model} |`,
|
|
48
|
+
`| Embedding endpoint | ${endpoint} |`,
|
|
49
|
+
`| Embedder | ${health} |`,
|
|
50
|
+
`| Background indexing | ${bg} |`,
|
|
51
|
+
"",
|
|
52
|
+
].join("\n");
|
|
53
|
+
ctx?.metadata?.({
|
|
54
|
+
title: "index_status",
|
|
55
|
+
metadata: {
|
|
56
|
+
phase: "ready",
|
|
57
|
+
files: stats.files,
|
|
58
|
+
chunks: stats.chunks,
|
|
59
|
+
embedder_healthy: providerHealthy,
|
|
60
|
+
},
|
|
61
|
+
});
|
|
62
|
+
return body;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
export function createIndexStatusTool(runtime: RuntimeContext) {
|
|
66
|
+
return tool({
|
|
67
|
+
description: "Check semantic index health and coverage.",
|
|
68
|
+
args: {},
|
|
69
|
+
async execute(_args, toolCtx) {
|
|
70
|
+
return executeIndexStatus(runtime, toolCtx);
|
|
71
|
+
},
|
|
72
|
+
});
|
|
73
|
+
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import type { ToolContext } from "@opencode-ai/plugin";
|
|
2
|
+
import { tool } from "@opencode-ai/plugin";
|
|
3
|
+
import { deltaSync } from "../indexer/delta";
|
|
4
|
+
import { applyDeltaProgress, type RuntimeContext } from "../runtime";
|
|
5
|
+
import { showIndexingCompleteToast } from "../tui_toast";
|
|
6
|
+
|
|
7
|
+
/** How often (ms) the reindex tool republishes indexing progress into tool-call metadata. */
const metadataPollMs = 400;
|
|
8
|
+
|
|
9
|
+
/**
 * Wipes all index tables and rebuilds the semantic index from scratch via deltaSync.
 *
 * While the sync runs, the shared progress state is polled every `metadataPollMs`
 * and mirrored into the tool-call metadata; on completion a toast is emitted and
 * the final store stats are returned as JSON.
 *
 * @param runtime Shared plugin runtime (store, embedder, logger, progress state).
 * @param ctx Tool context used to stream metadata updates to the UI.
 * @returns JSON string of `runtime.store.stats()` after the rebuild.
 */
export async function executeReindex(
  runtime: RuntimeContext,
  ctx: Pick<ToolContext, "metadata">
): Promise<string> {
  ctx.metadata({ title: "reindex", metadata: { phase: "clearing_index_tables" } });
  // Clear dependent tables first (FTS and vector rows, then chunks, then files) —
  // presumably required by the schema's cross-table references; confirm against store/schema.sql.
  runtime.store.db.exec("DELETE FROM chunks_fts;");
  runtime.store.db.exec("DELETE FROM chunk_vec;");
  runtime.store.db.exec("DELETE FROM chunks;");
  runtime.store.db.exec("DELETE FROM files;");
  await runtime.logger.info("tool.reindex", { message: "Forced reindex requested" });
  const reindexStartedAt = Date.now();

  // Snapshots the shared progress state into tool metadata; invoked once up front
  // and then on the polling interval below.
  const tick = (): void => {
    const p = runtime.indexingProgress;
    ctx.metadata({
      title: "reindex",
      metadata: {
        phase: p.phase,
        file: p.label,
        progress: p.total > 0 ? `${p.current}/${p.total}` : undefined,
      },
    });
  };
  tick();
  const interval = setInterval(tick, metadataPollMs);
  try {
    await deltaSync(runtime.worktree, runtime.store, runtime.embedder, runtime.config, {
      logger: runtime.logger,
      onProgress: (update) => applyDeltaProgress(runtime, update, "reindex"),
    });
  } finally {
    // Stop polling even when the sync throws, so the timer cannot leak.
    clearInterval(interval);
  }
  const stats = runtime.store.stats();
  ctx.metadata({
    title: "reindex",
    metadata: {
      phase: "complete",
      files_indexed: stats.files,
      chunks: stats.chunks,
      last_sync: stats.lastSync,
    },
  });
  // Fire-and-forget completion toast; per-file failures (if any) change its styling.
  void showIndexingCompleteToast(runtime, {
    title: "Reindex complete",
    files: stats.files,
    chunks: stats.chunks,
    elapsedMs: Date.now() - reindexStartedAt,
    failedFiles: runtime.indexingProgress.failedFiles,
    flavor: "reindex",
  });
  return JSON.stringify(stats);
}
|
|
62
|
+
|
|
63
|
+
export function createReindexTool(runtime: RuntimeContext) {
|
|
64
|
+
return tool({
|
|
65
|
+
description: "Force a full semantic index rebuild.",
|
|
66
|
+
args: {},
|
|
67
|
+
async execute(_args, toolCtx) {
|
|
68
|
+
return executeReindex(runtime, toolCtx);
|
|
69
|
+
},
|
|
70
|
+
});
|
|
71
|
+
}
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
import type { ToolContext } from "@opencode-ai/plugin";
|
|
2
|
+
import { tool } from "@opencode-ai/plugin";
|
|
3
|
+
import { hybridSearch } from "../search/hybrid";
|
|
4
|
+
import type { RuntimeContext } from "../runtime";
|
|
5
|
+
import { buildContextPreview } from "../search/context";
|
|
6
|
+
|
|
7
|
+
/** Arguments accepted by the `semantic_search` tool. */
export interface SemanticSearchToolArgs {
  /** Natural-language query embedded and matched against indexed chunks. */
  query: string;
  /** Max number of results; defaults to `config.search.top_k`. */
  top_k?: number;
  /** Per-call override of `config.search.similarity_threshold`. */
  threshold?: number;
  /** Optional prefix filter applied to result file paths. */
  path?: string;
}
|
|
13
|
+
|
|
14
|
+
export async function executeSemanticSearch(
|
|
15
|
+
runtime: RuntimeContext,
|
|
16
|
+
args: SemanticSearchToolArgs,
|
|
17
|
+
ctx?: Pick<ToolContext, "metadata">
|
|
18
|
+
): Promise<string> {
|
|
19
|
+
const startedAt = Date.now();
|
|
20
|
+
const topK = args.top_k ?? runtime.config.search.top_k;
|
|
21
|
+
const originalThreshold = runtime.config.search.similarity_threshold;
|
|
22
|
+
if (typeof args.threshold === "number") runtime.config.search.similarity_threshold = args.threshold;
|
|
23
|
+
await runtime.logger.info("tool.semantic_search", {
|
|
24
|
+
message: "Semantic search requested",
|
|
25
|
+
extra: {
|
|
26
|
+
queryLength: args.query.length,
|
|
27
|
+
topK,
|
|
28
|
+
threshold: args.threshold ?? originalThreshold,
|
|
29
|
+
path: args.path
|
|
30
|
+
}
|
|
31
|
+
});
|
|
32
|
+
try {
|
|
33
|
+
ctx?.metadata?.({
|
|
34
|
+
title: "semantic_search",
|
|
35
|
+
metadata: { phase: "embedding_query" },
|
|
36
|
+
});
|
|
37
|
+
const chunkCount = runtime.store.stats().chunks;
|
|
38
|
+
ctx?.metadata?.({
|
|
39
|
+
title: "semantic_search",
|
|
40
|
+
metadata: { phase: "searching_index", indexed_chunks: chunkCount },
|
|
41
|
+
});
|
|
42
|
+
const ranked = await hybridSearch(runtime.store, runtime.embedder, args.query, topK, runtime.config);
|
|
43
|
+
const pathFilter = args.path;
|
|
44
|
+
const filtered = typeof pathFilter === "string" ? ranked.filter((row) => row.file.startsWith(pathFilter)) : ranked;
|
|
45
|
+
const enriched = filtered.map(async (row) => {
|
|
46
|
+
const fullText = await Bun.file(row.file).text();
|
|
47
|
+
return { row, fullText };
|
|
48
|
+
});
|
|
49
|
+
const resolved = await Promise.all(enriched);
|
|
50
|
+
const elapsedMs = Date.now() - startedAt;
|
|
51
|
+
await runtime.logger.info("tool.semantic_search", {
|
|
52
|
+
message: "Semantic search completed",
|
|
53
|
+
extra: {
|
|
54
|
+
results: resolved.length,
|
|
55
|
+
elapsedMs,
|
|
56
|
+
},
|
|
57
|
+
});
|
|
58
|
+
ctx?.metadata?.({
|
|
59
|
+
title: "semantic_search",
|
|
60
|
+
metadata: { phase: "done", results: resolved.length, elapsed_ms: elapsedMs },
|
|
61
|
+
});
|
|
62
|
+
return JSON.stringify(
|
|
63
|
+
resolved.map(({ row, fullText }) => ({
|
|
64
|
+
file: row.file,
|
|
65
|
+
lines: `${row.startLine}-${row.endLine}`,
|
|
66
|
+
score: Number(row.score.toFixed(4)),
|
|
67
|
+
vector_score: Number(row.vectorScore.toFixed(4)),
|
|
68
|
+
bm25_score: Number(row.bm25Score.toFixed(4)),
|
|
69
|
+
preview: buildContextPreview(fullText, row.startLine, row.endLine, runtime.config.search.context_lines).preview,
|
|
70
|
+
})),
|
|
71
|
+
);
|
|
72
|
+
} finally {
|
|
73
|
+
runtime.config.search.similarity_threshold = originalThreshold;
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
export function createSemanticSearchTool(runtime: RuntimeContext) {
|
|
78
|
+
return tool({
|
|
79
|
+
description:
|
|
80
|
+
"Search code by meaning and concept, not exact text. Use this for multi-word conceptual queries like authentication flow, retry logic, or payment handling. Prefer over grep for behavior-level search.",
|
|
81
|
+
args: {
|
|
82
|
+
query: tool.schema.string().describe("Natural language search query"),
|
|
83
|
+
top_k: tool.schema.number().optional().describe("Number of results"),
|
|
84
|
+
threshold: tool.schema.number().optional().describe("Minimum score threshold"),
|
|
85
|
+
path: tool.schema.string().optional().describe("Optional path filter prefix"),
|
|
86
|
+
},
|
|
87
|
+
async execute(args, toolCtx) {
|
|
88
|
+
return executeSemanticSearch(runtime, args, toolCtx);
|
|
89
|
+
},
|
|
90
|
+
});
|
|
91
|
+
}
|
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
import type { ToolContext } from "@opencode-ai/plugin";
|
|
2
|
+
import { tool } from "@opencode-ai/plugin";
|
|
3
|
+
import { appendRoutingOutcome } from "../diagnostics/routing";
|
|
4
|
+
import { hybridSearch } from "../search/hybrid";
|
|
5
|
+
import type { RuntimeContext } from "../runtime";
|
|
6
|
+
|
|
7
|
+
function looksConceptual(query: string, minWords: number): boolean {
|
|
8
|
+
if (query.trim().split(/\s+/).length < minWords) return false;
|
|
9
|
+
if (/[.*+?^${}()|[\]\\]/.test(query)) return false;
|
|
10
|
+
if (/^[A-Za-z0-9_]+$/.test(query) && !query.includes(" ")) return false;
|
|
11
|
+
return true;
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
/** Arguments accepted by the `smart_grep` tool; mirrors common ripgrep flags. */
export interface GrepToolArgs {
  /** Pattern or conceptual query; takes precedence over `query`. */
  pattern?: string;
  /** Alias for `pattern`, accepted when callers send a `query` field instead. */
  query?: string;
  /** Search root passed to ripgrep (default "."); also used as a prefix filter on semantic results. */
  path?: string;
  /** Extra ripgrep `--glob` filter. */
  glob?: string;
  /** `false` adds `-i` (case-insensitive); `true`/undefined keeps ripgrep's default. */
  case_sensitive?: boolean;
  /** Match whole words (`-w`). */
  word?: boolean;
  /** Treat the pattern as a literal string (`-F`). */
  fixed_strings?: boolean;
  /** Max matching lines per file (`-m`). */
  max_count?: number;
  /** Return only matching file names (`-l`). */
  files_with_matches?: boolean;
  /** Enable multiline matching (`-U`). */
  multiline?: boolean;
}
|
|
26
|
+
|
|
27
|
+
function pickPattern(args: GrepToolArgs): string {
|
|
28
|
+
const pattern = args.pattern ?? args.query;
|
|
29
|
+
if (!pattern || pattern.trim().length === 0) return "";
|
|
30
|
+
return pattern;
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
function buildRipgrepArgs(args: GrepToolArgs): string[] {
|
|
34
|
+
const rgArgs: string[] = ["-n", "--hidden", "--glob", "!node_modules/**", "--glob", "!.git/**"];
|
|
35
|
+
if (args.case_sensitive === false) rgArgs.push("-i");
|
|
36
|
+
if (args.word) rgArgs.push("-w");
|
|
37
|
+
if (args.fixed_strings) rgArgs.push("-F");
|
|
38
|
+
if (args.files_with_matches) rgArgs.push("-l");
|
|
39
|
+
if (args.multiline) rgArgs.push("-U");
|
|
40
|
+
if (typeof args.max_count === "number") rgArgs.push("-m", `${args.max_count}`);
|
|
41
|
+
if (typeof args.glob === "string" && args.glob.length > 0) rgArgs.push("--glob", args.glob);
|
|
42
|
+
return rgArgs;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
function toPatternPreview(pattern: string): string {
|
|
46
|
+
const normalized = pattern.replace(/\s+/g, " ").trim();
|
|
47
|
+
return normalized.length > 100 ? `${normalized.slice(0, 100)}...` : normalized;
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
async function runRipgrep(args: GrepToolArgs): Promise<string> {
|
|
51
|
+
const pattern = pickPattern(args);
|
|
52
|
+
if (!pattern) return "";
|
|
53
|
+
const pathArg = args.path && args.path.length > 0 ? args.path : ".";
|
|
54
|
+
const rgArgs = buildRipgrepArgs(args);
|
|
55
|
+
const proc = Bun.spawn({
|
|
56
|
+
cmd: ["rg", ...rgArgs, pattern, pathArg],
|
|
57
|
+
stdout: "pipe",
|
|
58
|
+
stderr: "pipe",
|
|
59
|
+
});
|
|
60
|
+
const stdout = await new Response(proc.stdout).text();
|
|
61
|
+
const stderr = await new Response(proc.stderr).text();
|
|
62
|
+
const exitCode = await proc.exited;
|
|
63
|
+
if (exitCode === 0 || exitCode === 1) return stdout.trim();
|
|
64
|
+
throw new Error(stderr || `rg failed with exit code ${exitCode}`);
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
function grepMeta(ctx: Pick<ToolContext, "metadata"> | undefined, metadata: Record<string, unknown>): void {
|
|
68
|
+
ctx?.metadata?.({ title: "grep", metadata });
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
/**
 * Grep entry point that routes between ripgrep and semantic search.
 *
 * Routing order:
 *  1. Blank pattern -> empty result (no work).
 *  2. Non-conceptual pattern, or smart_grep disabled -> ripgrep.
 *  3. Unhealthy embedder or empty index -> ripgrep fallback.
 *  4. Semantic search; optionally falls back to ripgrep on zero results.
 *
 * Every routing decision is persisted via appendRoutingOutcome for diagnostics,
 * logged, and mirrored into tool metadata via grepMeta.
 *
 * @param runtime Shared plugin runtime (store, embedder, config, logger).
 * @param args Ripgrep-style arguments; `pattern`/`query` supply the search text.
 * @param ctx Optional tool context for streaming routing/phase metadata.
 * @returns ripgrep stdout, or formatted semantic matches joined by blank lines.
 */
export async function executeSmartGrep(
  runtime: RuntimeContext,
  args: GrepToolArgs,
  ctx?: Pick<ToolContext, "metadata">
): Promise<string> {
  const pattern = pickPattern(args);
  if (!pattern) {
    grepMeta(ctx, { route: "none", reason: "empty_pattern" });
    return "";
  }
  await runtime.logger.debug("tool.smart_grep", {
    message: "Received grep request",
    extra: { patternLength: pattern.length, path: args.path, glob: args.glob }
  });
  // Route 2: exact-match / regex-looking patterns (or disabled feature) go straight to ripgrep.
  const shouldUseSemantic = runtime.config.smart_grep.enabled && looksConceptual(pattern, runtime.config.smart_grep.min_words_for_semantic);
  if (!shouldUseSemantic) {
    appendRoutingOutcome(runtime.store, {
      at: new Date().toISOString(),
      route: "ripgrep",
      reason: "non-conceptual-or-disabled",
      patternPreview: toPatternPreview(pattern),
    });
    await runtime.logger.info("tool.smart_grep", {
      message: "Routing to ripgrep",
      extra: { reason: "non-conceptual-or-disabled" }
    });
    grepMeta(ctx, { route: "ripgrep", phase: "running" });
    const out = await runRipgrep(args);
    grepMeta(ctx, {
      route: "ripgrep",
      phase: "done",
      output_chars: out.length,
      lines: out ? out.split("\n").length : 0,
    });
    return out;
  }

  // Route 3: semantic search is only viable with a reachable embedder and a non-empty index.
  const providerHealthy = await runtime.embedder.healthcheck();
  const stats = runtime.store.stats();
  if (!providerHealthy || stats.chunks === 0) {
    const reason = !providerHealthy ? "embedder-unhealthy" : "empty-index";
    appendRoutingOutcome(runtime.store, {
      at: new Date().toISOString(),
      route: "ripgrep",
      reason,
      patternPreview: toPatternPreview(pattern),
      providerHealthy,
      indexedChunks: stats.chunks,
    });
    await runtime.logger.warn("tool.smart_grep", {
      message: "Falling back to ripgrep",
      extra: {
        reason,
        providerHealthy,
        indexedChunks: stats.chunks
      }
    });
    grepMeta(ctx, { route: "ripgrep", phase: "running", fallback_reason: reason });
    const out = await runRipgrep(args);
    grepMeta(ctx, {
      route: "ripgrep",
      phase: "done",
      fallback_reason: reason,
      output_chars: out.length,
    });
    return out;
  }

  // Route 4: run hybrid semantic search, then apply the optional path-prefix filter.
  grepMeta(ctx, { route: "semantic", phase: "embedding_and_search", indexed_chunks: stats.chunks });
  const semanticResults = await hybridSearch(runtime.store, runtime.embedder, pattern, runtime.config.search.top_k, runtime.config);
  const pathFilter = args.path;
  const filtered = typeof pathFilter === "string" ? semanticResults.filter((row) => row.file.startsWith(pathFilter)) : semanticResults;
  if (filtered.length === 0 && runtime.config.smart_grep.fallback_to_grep_on_empty) {
    appendRoutingOutcome(runtime.store, {
      at: new Date().toISOString(),
      route: "ripgrep",
      reason: "semantic-empty-results",
      patternPreview: toPatternPreview(pattern),
      results: 0,
    });
    await runtime.logger.info("tool.smart_grep", {
      message: "Falling back to ripgrep",
      extra: { reason: "semantic-empty-results" }
    });
    grepMeta(ctx, { route: "ripgrep", phase: "running", fallback_reason: "semantic-empty-results" });
    const out = await runRipgrep(args);
    // NOTE(review): unlike the unhealthy/empty-index fallback above, this "done"
    // update omits fallback_reason — confirm whether that is intentional.
    grepMeta(ctx, { route: "ripgrep", phase: "done", output_chars: out.length });
    return out;
  }

  appendRoutingOutcome(runtime.store, {
    at: new Date().toISOString(),
    route: "semantic",
    reason: "conceptual-query",
    patternPreview: toPatternPreview(pattern),
    results: filtered.length,
  });
  await runtime.logger.info("tool.smart_grep", {
    message: "Routing to semantic results",
    extra: { results: filtered.length }
  });
  grepMeta(ctx, { route: "semantic", phase: "done", results: filtered.length });
  // One block per match: "file:start-end score=…" header followed by the preview text.
  return filtered
    .map((row) => `${row.file}:${row.startLine}-${row.endLine} score=${row.score.toFixed(4)}\n${row.preview}`)
    .join("\n\n");
}
|
|
177
|
+
|
|
178
|
+
export function createSmartGrepTool(runtime: RuntimeContext) {
|
|
179
|
+
return tool({
|
|
180
|
+
description:
|
|
181
|
+
"Intelligent grep wrapper. Routes conceptual multi-word queries to semantic search and exact-match/regex queries to ripgrep. Falls back to ripgrep if semantic search is unavailable.",
|
|
182
|
+
args: {
|
|
183
|
+
pattern: tool.schema.string().optional().describe("Pattern or conceptual search query"),
|
|
184
|
+
query: tool.schema.string().optional().describe("Alternative grep query field"),
|
|
185
|
+
path: tool.schema.string().optional().describe("Optional search path"),
|
|
186
|
+
glob: tool.schema.string().optional().describe("Optional glob filter"),
|
|
187
|
+
case_sensitive: tool.schema.boolean().optional().describe("Enable case-sensitive matching"),
|
|
188
|
+
word: tool.schema.boolean().optional().describe("Match whole words"),
|
|
189
|
+
fixed_strings: tool.schema.boolean().optional().describe("Treat pattern as literal string"),
|
|
190
|
+
max_count: tool.schema.number().optional().describe("Max matching lines per file"),
|
|
191
|
+
files_with_matches: tool.schema.boolean().optional().describe("Return matching file names only"),
|
|
192
|
+
multiline: tool.schema.boolean().optional().describe("Enable multiline regex"),
|
|
193
|
+
},
|
|
194
|
+
async execute(args, toolCtx) {
|
|
195
|
+
return executeSmartGrep(runtime, args, toolCtx);
|
|
196
|
+
},
|
|
197
|
+
});
|
|
198
|
+
}
|
package/src/tui_toast.ts
ADDED
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import type { IndexingProgressState, RuntimeContext } from "./runtime";
|
|
3
|
+
|
|
4
|
+
/** Minimum gap (ms) between steady-state progress toasts; phase/label transitions bypass this throttle. */
const progressToastMinIntervalMs = 2200;
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* OpenCode TUI: POST /tui/show-toast (`client.tui.showToast` on the SDK).
|
|
8
|
+
* Complements tool `context.metadata()` (inline tool-call cards) with transient toast pop-ups.
|
|
9
|
+
*/
|
|
10
|
+
export async function showTuiToast(
|
|
11
|
+
runtime: RuntimeContext,
|
|
12
|
+
input: {
|
|
13
|
+
title?: string;
|
|
14
|
+
message: string;
|
|
15
|
+
variant: "info" | "success" | "warning" | "error";
|
|
16
|
+
duration?: number;
|
|
17
|
+
}
|
|
18
|
+
): Promise<void> {
|
|
19
|
+
const client = runtime.opencodeClient;
|
|
20
|
+
if (!client?.tui?.showToast) {
|
|
21
|
+
return;
|
|
22
|
+
}
|
|
23
|
+
const directory = runtime.projectDirectory ?? runtime.worktree;
|
|
24
|
+
try {
|
|
25
|
+
await client.tui.showToast({
|
|
26
|
+
body: {
|
|
27
|
+
title: input.title,
|
|
28
|
+
message: input.message,
|
|
29
|
+
variant: input.variant,
|
|
30
|
+
duration: input.duration ?? 4500,
|
|
31
|
+
},
|
|
32
|
+
query: { directory },
|
|
33
|
+
});
|
|
34
|
+
} catch {
|
|
35
|
+
// No TUI (headless), wrong directory, or API unavailable
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
function formatElapsedSuffix(startedAt: number): string {
|
|
40
|
+
if (startedAt <= 0) return "";
|
|
41
|
+
const sec = Math.max(0, Math.round((Date.now() - startedAt) / 1000));
|
|
42
|
+
return sec > 0 ? ` · ${sec}s` : "";
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
/**
 * Maps internal progress labels to short toast text (avoid stacking file counts + batch labels).
 * The label prefixes matched below ("scan:", "embedding batch ", "saved ", "writing ") are
 * presumably produced by the indexer pipeline — confirm against src/indexer before changing.
 * Match order matters: earlier, more specific patterns shadow the generic fallbacks at the end.
 */
function formatIndexingProgressMessage(p: IndexingProgressState): string {
  const elapsed = formatElapsedSuffix(p.startedAt);

  if (p.phase === "scanning") {
    if (p.label === "listing files") {
      return `Listing files…${elapsed}`;
    }
    // Any other scanning label is shown verbatim; missing label falls back to the default text.
    return `${p.label ?? "Listing files…"}${elapsed}`;
  }
  if (p.phase === "gc") {
    return `Cleaning up…${elapsed}`;
  }
  if (p.phase === "indexing") {
    const label = p.label ?? "";
    // "scan:<file>" — per-file scan progress, with a counter when totals are known.
    if (label.startsWith("scan:")) {
      const file = label.slice("scan:".length).trim();
      if (p.total > 0) {
        return `${p.current}/${p.total} · ${file}${elapsed}`;
      }
      return file ? `${file}${elapsed}` : `Scanning…${elapsed}`;
    }
    // "<n> files to scan" — announcement before per-file progress starts.
    if (/^\d+ files to scan$/.test(label)) {
      return p.total > 0 ? `Scanning ${p.total} files…${elapsed}` : `Scanning…${elapsed}`;
    }
    if (label === "no files matched") {
      return `No files to index${elapsed}`;
    }
    if (label === "chunking files…") {
      return p.total > 0 ? `Preparing ${p.current}/${p.total}…${elapsed}` : `Preparing…${elapsed}`;
    }
    if (label === "embedding…") {
      return p.total > 0 ? `Embedding ${p.current}/${p.total}…${elapsed}` : `Embedding…${elapsed}`;
    }
    // "embedding batch i/n" — prefer the batch counter over the file counter.
    if (label.startsWith("embedding batch ")) {
      const m = /^embedding batch (\d+)\/(\d+)$/.exec(label);
      if (m?.[1] !== undefined && m[2] !== undefined) {
        return `Embedding ${m[1]}/${m[2]}…${elapsed}`;
      }
      return `Embedding…${elapsed}`;
    }
    if (label === "saving index…") {
      return p.total > 0 ? `Saving ${p.current}/${p.total}…${elapsed}` : `Saving…${elapsed}`;
    }
    // "saved <what> · <extra>" — keep only the head and strip the "saved " prefix.
    if (label.startsWith("saved ")) {
      const head = label.split(" · ")[0] ?? label;
      const rest = head.replace(/^saved\s+/u, "");
      return rest ? `Saving ${rest}${elapsed}` : `Saving…${elapsed}`;
    }
    if (label.startsWith("writing ") && label !== "writing…") {
      return p.total > 0 ? `Saving ${p.current}/${p.total}…${elapsed}` : `Saving…${elapsed}`;
    }
    // Generic fallbacks: counter + label, counter alone, then label (or "Working…").
    if (p.total > 0 && label.length > 0) {
      return `${p.current}/${p.total} · ${label}${elapsed}`;
    }
    if (p.total > 0) {
      return `${p.current}/${p.total}${elapsed}`;
    }
    return `${label || "Working…"}${elapsed}`;
  }
  // Unknown phase — generic busy text.
  return `Working…${elapsed}`;
}
|
|
107
|
+
|
|
108
|
+
export function resetIndexingToastThrottle(runtime: RuntimeContext): void {
|
|
109
|
+
runtime.indexingToastLastMs = undefined;
|
|
110
|
+
runtime.indexingToastLastPhase = undefined;
|
|
111
|
+
runtime.indexingToastLastLabel = undefined;
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
/**
|
|
115
|
+
* Shows a throttled info toast while delta sync or incremental indexing runs.
|
|
116
|
+
* Always emits on phase transitions; otherwise at most once per {@link progressToastMinIntervalMs}.
|
|
117
|
+
*/
|
|
118
|
+
export function notifyIndexingProgressToast(runtime: RuntimeContext): void {
|
|
119
|
+
const p = runtime.indexingProgress;
|
|
120
|
+
if (p.phase === "idle") {
|
|
121
|
+
return;
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
const now = Date.now();
|
|
125
|
+
const lastAt = runtime.indexingToastLastMs ?? 0;
|
|
126
|
+
const lastPhase = runtime.indexingToastLastPhase;
|
|
127
|
+
const lastLabel = runtime.indexingToastLastLabel;
|
|
128
|
+
const phaseChanged = lastPhase === undefined || lastPhase !== p.phase;
|
|
129
|
+
const labelKey = p.label ?? "";
|
|
130
|
+
const labelChanged = lastLabel === undefined || lastLabel !== labelKey;
|
|
131
|
+
|
|
132
|
+
if (!phaseChanged && !labelChanged && now - lastAt < progressToastMinIntervalMs) {
|
|
133
|
+
return;
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
runtime.indexingToastLastMs = now;
|
|
137
|
+
runtime.indexingToastLastPhase = p.phase;
|
|
138
|
+
runtime.indexingToastLastLabel = labelKey;
|
|
139
|
+
|
|
140
|
+
const title =
|
|
141
|
+
p.source === "reindex" ? "Reindex" : p.source === "background" ? "Semantic index" : "Indexing";
|
|
142
|
+
|
|
143
|
+
void showTuiToast(runtime, {
|
|
144
|
+
title,
|
|
145
|
+
message: formatIndexingProgressMessage(p),
|
|
146
|
+
variant: "info",
|
|
147
|
+
duration: 3800,
|
|
148
|
+
});
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
/** Payload for the completion toast shown after delta sync or a forced reindex. */
export interface IndexingCompleteToastInput {
  /** Toast title, e.g. "Reindex complete". */
  title: string;
  /** Number of files indexed at completion. */
  files: number;
  /** Number of chunks indexed at completion. */
  chunks: number;
  /** Wall-clock duration of the run; rendered as whole seconds. */
  elapsedMs: number;
  /** Set when the parallel pipeline reported per-file failures. */
  failedFiles?: string[];
  /** `reindex` uses success styling when fully OK; background sync uses info. */
  flavor: "background" | "reindex";
}
|
|
161
|
+
|
|
162
|
+
/**
|
|
163
|
+
* Completion popup after delta sync / reindex: warns when any files failed in the indexing pipeline.
|
|
164
|
+
*/
|
|
165
|
+
export function showIndexingCompleteToast(runtime: RuntimeContext, input: IndexingCompleteToastInput): void {
|
|
166
|
+
const failed = input.failedFiles?.filter((p) => p.length > 0) ?? [];
|
|
167
|
+
const n = failed.length;
|
|
168
|
+
const sec = Math.max(0, Math.round(input.elapsedMs / 1000));
|
|
169
|
+
if (n > 0) {
|
|
170
|
+
const examples =
|
|
171
|
+
n <= 3
|
|
172
|
+
? failed.map((fp) => path.basename(fp)).join(", ")
|
|
173
|
+
: `${failed
|
|
174
|
+
.slice(0, 3)
|
|
175
|
+
.map((fp) => path.basename(fp))
|
|
176
|
+
.join(", ")} (+${n - 3} more)`;
|
|
177
|
+
void showTuiToast(runtime, {
|
|
178
|
+
title: input.title,
|
|
179
|
+
message: `${n} failed: ${examples} · ${sec}s`,
|
|
180
|
+
variant: "warning",
|
|
181
|
+
duration: 7000,
|
|
182
|
+
});
|
|
183
|
+
return;
|
|
184
|
+
}
|
|
185
|
+
void showTuiToast(runtime, {
|
|
186
|
+
title: input.title,
|
|
187
|
+
message: `${input.files} files · ${input.chunks} chunks · ${sec}s`,
|
|
188
|
+
variant: input.flavor === "reindex" ? "success" : "info",
|
|
189
|
+
duration: 4500,
|
|
190
|
+
});
|
|
191
|
+
}
|
package/src/types.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Ambient module declaration: picomatch ships without bundled typings, so
// imports of it are typed as `any` rather than failing compilation.
declare module "picomatch";
|