kontext-engine 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +451 -0
- package/dist/cli/index.js +3034 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/index.d.ts +485 -0
- package/dist/index.js +2422 -0
- package/dist/index.js.map +1 -0
- package/package.json +78 -0
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,485 @@
|
|
|
1
|
+
/** A single search result with chunk location, content, and relevance score (0–1). */
|
|
2
|
+
interface SearchResult {
|
|
3
|
+
chunkId: number;
|
|
4
|
+
filePath: string;
|
|
5
|
+
lineStart: number;
|
|
6
|
+
lineEnd: number;
|
|
7
|
+
name: string | null;
|
|
8
|
+
type: string;
|
|
9
|
+
text: string;
|
|
10
|
+
score: number;
|
|
11
|
+
language: string;
|
|
12
|
+
}
|
|
13
|
+
/** Optional filters applied as post-processing on search results. */
|
|
14
|
+
interface SearchFilters {
|
|
15
|
+
language?: string;
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
interface VectorResult {
|
|
19
|
+
chunkId: number;
|
|
20
|
+
distance: number;
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
interface FileInput {
|
|
24
|
+
path: string;
|
|
25
|
+
language: string;
|
|
26
|
+
hash: string;
|
|
27
|
+
size: number;
|
|
28
|
+
}
|
|
29
|
+
interface FileRecord {
|
|
30
|
+
id: number;
|
|
31
|
+
path: string;
|
|
32
|
+
language: string;
|
|
33
|
+
hash: string;
|
|
34
|
+
lastIndexed: number;
|
|
35
|
+
size: number;
|
|
36
|
+
}
|
|
37
|
+
interface ChunkInput {
|
|
38
|
+
lineStart: number;
|
|
39
|
+
lineEnd: number;
|
|
40
|
+
type: string;
|
|
41
|
+
name: string | null;
|
|
42
|
+
parent: string | null;
|
|
43
|
+
text: string;
|
|
44
|
+
imports: string[];
|
|
45
|
+
exports: boolean;
|
|
46
|
+
hash: string;
|
|
47
|
+
}
|
|
48
|
+
interface ChunkRecord {
|
|
49
|
+
id: number;
|
|
50
|
+
fileId: number;
|
|
51
|
+
lineStart: number;
|
|
52
|
+
lineEnd: number;
|
|
53
|
+
type: string;
|
|
54
|
+
name: string | null;
|
|
55
|
+
parent: string | null;
|
|
56
|
+
text: string;
|
|
57
|
+
imports: string[];
|
|
58
|
+
exports: boolean;
|
|
59
|
+
hash: string;
|
|
60
|
+
}
|
|
61
|
+
interface ChunkWithFile {
|
|
62
|
+
id: number;
|
|
63
|
+
fileId: number;
|
|
64
|
+
filePath: string;
|
|
65
|
+
language: string;
|
|
66
|
+
lineStart: number;
|
|
67
|
+
lineEnd: number;
|
|
68
|
+
type: string;
|
|
69
|
+
name: string | null;
|
|
70
|
+
parent: string | null;
|
|
71
|
+
text: string;
|
|
72
|
+
}
|
|
73
|
+
interface ChunkSearchFilters {
|
|
74
|
+
name?: string;
|
|
75
|
+
nameMode?: "exact" | "prefix" | "contains";
|
|
76
|
+
type?: string;
|
|
77
|
+
parent?: string;
|
|
78
|
+
language?: string;
|
|
79
|
+
}
|
|
80
|
+
interface FTSResult {
|
|
81
|
+
chunkId: number;
|
|
82
|
+
name: string | null;
|
|
83
|
+
rank: number;
|
|
84
|
+
}
|
|
85
|
+
/** Main database interface. Provides CRUD for files, chunks, vectors, FTS, and stats. */
|
|
86
|
+
interface KontextDatabase {
|
|
87
|
+
upsertFile(file: FileInput): number;
|
|
88
|
+
getFile(filePath: string): FileRecord | null;
|
|
89
|
+
getFilesByHash(hashes: Map<string, string>): Map<string, FileRecord>;
|
|
90
|
+
deleteFile(filePath: string): void;
|
|
91
|
+
insertChunks(fileId: number, chunks: ChunkInput[]): number[];
|
|
92
|
+
getChunksByFile(fileId: number): ChunkRecord[];
|
|
93
|
+
getChunksByIds(ids: number[]): ChunkWithFile[];
|
|
94
|
+
deleteChunksByFile(fileId: number): void;
|
|
95
|
+
insertDependency(sourceChunkId: number, targetChunkId: number, type: string): void;
|
|
96
|
+
getDependencies(chunkId: number): {
|
|
97
|
+
targetChunkId: number;
|
|
98
|
+
type: string;
|
|
99
|
+
}[];
|
|
100
|
+
getReverseDependencies(chunkId: number): {
|
|
101
|
+
sourceChunkId: number;
|
|
102
|
+
type: string;
|
|
103
|
+
}[];
|
|
104
|
+
insertVector(chunkId: number, vector: Float32Array): void;
|
|
105
|
+
searchVectors(query: Float32Array, limit: number): VectorResult[];
|
|
106
|
+
searchChunks(filters: ChunkSearchFilters, limit: number): ChunkWithFile[];
|
|
107
|
+
searchFTS(query: string, limit: number): FTSResult[];
|
|
108
|
+
getAllFilePaths(): string[];
|
|
109
|
+
getFileCount(): number;
|
|
110
|
+
getChunkCount(): number;
|
|
111
|
+
getVectorCount(): number;
|
|
112
|
+
getLanguageBreakdown(): Map<string, number>;
|
|
113
|
+
getLastIndexed(): string | null;
|
|
114
|
+
transaction<T>(fn: () => T): T;
|
|
115
|
+
vacuum(): void;
|
|
116
|
+
close(): void;
|
|
117
|
+
getSchemaVersion(): number;
|
|
118
|
+
pragma(key: string): string;
|
|
119
|
+
}
|
|
120
|
+
/** Create or open a SQLite database at the given path. Initializes schema and loads sqlite-vec. */
|
|
121
|
+
declare function createDatabase(dbPath: string, dimensions?: number): KontextDatabase;
|
|
122
|
+
|
|
123
|
+
type ProgressCallback = (done: number, total: number) => void;
|
|
124
|
+
/** Embedding provider: generates vector representations of text. */
|
|
125
|
+
interface Embedder {
|
|
126
|
+
readonly name: string;
|
|
127
|
+
readonly dimensions: number;
|
|
128
|
+
embed(texts: string[], onProgress?: ProgressCallback): Promise<Float32Array[]>;
|
|
129
|
+
embedSingle(text: string): Promise<Float32Array>;
|
|
130
|
+
}
|
|
131
|
+
/** Format a chunk into embedding-friendly text with file path and name prefix. */
|
|
132
|
+
declare function prepareChunkText(filePath: string, parent: string | null, text: string): string;
|
|
133
|
+
/** Create a local embedder using Xenova/all-MiniLM-L6-v2 (384 dims, ONNX Runtime). */
|
|
134
|
+
declare function createLocalEmbedder(): Promise<Embedder>;
|
|
135
|
+
/** Create an embedder using Voyage AI's code embedding API. */
|
|
136
|
+
declare function createVoyageEmbedder(apiKey: string): Embedder;
|
|
137
|
+
/** Create an embedder using OpenAI's text-embedding-3-small API. */
|
|
138
|
+
declare function createOpenAIEmbedder(apiKey: string): Embedder;
|
|
139
|
+
|
|
140
|
+
/** Project-level configuration stored in .ctx/config.json. */
|
|
141
|
+
interface KontextConfig {
|
|
142
|
+
embedder: {
|
|
143
|
+
provider: string;
|
|
144
|
+
model: string;
|
|
145
|
+
dimensions: number;
|
|
146
|
+
};
|
|
147
|
+
search: {
|
|
148
|
+
defaultLimit: number;
|
|
149
|
+
strategies: string[];
|
|
150
|
+
weights: Record<string, number>;
|
|
151
|
+
};
|
|
152
|
+
watch: {
|
|
153
|
+
debounceMs: number;
|
|
154
|
+
ignored: string[];
|
|
155
|
+
};
|
|
156
|
+
llm: {
|
|
157
|
+
provider: string | null;
|
|
158
|
+
model: string | null;
|
|
159
|
+
};
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
/** A source file discovered during scanning, with its detected language. */
|
|
163
|
+
interface DiscoveredFile {
|
|
164
|
+
path: string;
|
|
165
|
+
absolutePath: string;
|
|
166
|
+
language: string;
|
|
167
|
+
size: number;
|
|
168
|
+
lastModified: number;
|
|
169
|
+
}
|
|
170
|
+
interface DiscoverOptions {
|
|
171
|
+
root: string;
|
|
172
|
+
extraIgnore?: string[];
|
|
173
|
+
followSymlinks?: boolean;
|
|
174
|
+
}
|
|
175
|
+
/** Maps file extensions to language names. Used for filtering and language detection. */
|
|
176
|
+
declare const LANGUAGE_MAP: Record<string, string>;
|
|
177
|
+
/** Recursively scan a directory for source files, respecting .gitignore and .ctxignore. */
|
|
178
|
+
declare function discoverFiles(options: DiscoverOptions): Promise<DiscoveredFile[]>;
|
|
179
|
+
|
|
180
|
+
/** An extracted AST node: function, class, method, type, import, or constant. */
|
|
181
|
+
interface ASTNode {
|
|
182
|
+
type: "function" | "class" | "method" | "import" | "export" | "type" | "constant";
|
|
183
|
+
name: string | null;
|
|
184
|
+
lineStart: number;
|
|
185
|
+
lineEnd: number;
|
|
186
|
+
language: string;
|
|
187
|
+
parent: string | null;
|
|
188
|
+
params?: string[];
|
|
189
|
+
returnType?: string;
|
|
190
|
+
docstring?: string;
|
|
191
|
+
imports?: string[];
|
|
192
|
+
exports?: boolean;
|
|
193
|
+
text: string;
|
|
194
|
+
}
|
|
195
|
+
/** Initialize the Tree-sitter WebAssembly parser. Must be called before parseFile. */
|
|
196
|
+
declare function initParser(): Promise<void>;
|
|
197
|
+
/** Parse a source file with Tree-sitter and extract AST nodes. */
|
|
198
|
+
declare function parseFile(filePath: string, language: string): Promise<ASTNode[]>;
|
|
199
|
+
|
|
200
|
+
/** A logical code chunk with content, location, and a deterministic content hash. */
|
|
201
|
+
interface Chunk {
|
|
202
|
+
id: string;
|
|
203
|
+
filePath: string;
|
|
204
|
+
lineStart: number;
|
|
205
|
+
lineEnd: number;
|
|
206
|
+
language: string;
|
|
207
|
+
type: "function" | "class" | "method" | "type" | "import" | "constant" | "config";
|
|
208
|
+
name: string | null;
|
|
209
|
+
parent: string | null;
|
|
210
|
+
text: string;
|
|
211
|
+
imports: string[];
|
|
212
|
+
exports: boolean;
|
|
213
|
+
hash: string;
|
|
214
|
+
}
|
|
215
|
+
interface ChunkOptions {
|
|
216
|
+
maxTokens?: number;
|
|
217
|
+
overlapLines?: number;
|
|
218
|
+
}
|
|
219
|
+
/** Rough token count estimate (~4 chars per token). */
|
|
220
|
+
declare function estimateTokens(text: string): number;
|
|
221
|
+
/** Split AST nodes into logical chunks. Merges small constants; keeps functions/classes whole. */
|
|
222
|
+
declare function chunkFile(nodes: ASTNode[], filePath: string, options?: ChunkOptions): Chunk[];
|
|
223
|
+
|
|
224
|
+
/** Names of available search strategies. */
|
|
225
|
+
type StrategyName = "vector" | "fts" | "ast" | "path" | "dependency";
|
|
226
|
+
/** Results from a single search strategy, ready for fusion. */
|
|
227
|
+
interface StrategyResult {
|
|
228
|
+
strategy: StrategyName;
|
|
229
|
+
weight: number;
|
|
230
|
+
results: SearchResult[];
|
|
231
|
+
}
|
|
232
|
+
/** Merge results from multiple strategies using Reciprocal Rank Fusion (K=60). */
|
|
233
|
+
declare function fusionMerge(strategyResults: StrategyResult[], limit: number): SearchResult[];
|
|
234
|
+
|
|
235
|
+
interface ChatMessage {
|
|
236
|
+
role: "system" | "user" | "assistant";
|
|
237
|
+
content: string;
|
|
238
|
+
}
|
|
239
|
+
/** LLM provider for the steering layer. Wraps Gemini, OpenAI, or Anthropic chat APIs. */
|
|
240
|
+
interface LLMProvider {
|
|
241
|
+
name: string;
|
|
242
|
+
chat(messages: ChatMessage[]): Promise<string>;
|
|
243
|
+
}
|
|
244
|
+
interface StrategyPlan {
|
|
245
|
+
strategy: StrategyName;
|
|
246
|
+
query: string;
|
|
247
|
+
weight: number;
|
|
248
|
+
reason: string;
|
|
249
|
+
}
|
|
250
|
+
interface SearchPlan {
|
|
251
|
+
interpretation: string;
|
|
252
|
+
strategies: StrategyPlan[];
|
|
253
|
+
}
|
|
254
|
+
/** Full result from LLM-steered search: plan, results, explanation, and cost. */
|
|
255
|
+
interface SteeringResult {
|
|
256
|
+
interpretation: string;
|
|
257
|
+
strategies: StrategyPlan[];
|
|
258
|
+
results: SearchResult[];
|
|
259
|
+
explanation: string;
|
|
260
|
+
tokensUsed: number;
|
|
261
|
+
costEstimate: number;
|
|
262
|
+
}
|
|
263
|
+
type SearchExecutor = (strategies: StrategyPlan[], limit: number) => Promise<SearchResult[]>;
|
|
264
|
+
/** Ask the LLM to interpret a query and plan which search strategies to use. */
|
|
265
|
+
declare function planSearch(provider: LLMProvider, query: string): Promise<SearchPlan>;
|
|
266
|
+
/** Full steering pipeline: plan → search → synthesize. Falls back to basic search on failure. */
|
|
267
|
+
declare function steer(provider: LLMProvider, query: string, limit: number, searchExecutor: SearchExecutor): Promise<SteeringResult>;
|
|
268
|
+
|
|
269
|
+
/** A single file change event from the watcher. */
|
|
270
|
+
interface FileChange {
|
|
271
|
+
type: "add" | "change" | "unlink";
|
|
272
|
+
path: string;
|
|
273
|
+
}
|
|
274
|
+
/** Handle returned by createWatcher. Call start() to begin, stop() to clean up. */
|
|
275
|
+
interface WatcherHandle {
|
|
276
|
+
start(): Promise<void>;
|
|
277
|
+
stop(): Promise<void>;
|
|
278
|
+
}
|
|
279
|
+
|
|
280
|
+
/** Result of incremental change detection: files categorized by status. */
|
|
281
|
+
interface IncrementalResult {
|
|
282
|
+
added: string[];
|
|
283
|
+
modified: string[];
|
|
284
|
+
deleted: string[];
|
|
285
|
+
unchanged: string[];
|
|
286
|
+
/** SHA-256 content hashes for added + modified files */
|
|
287
|
+
hashes: Map<string, string>;
|
|
288
|
+
/** Wall-clock duration in milliseconds */
|
|
289
|
+
duration: number;
|
|
290
|
+
}
|
|
291
|
+
/** Minimal DB surface needed for change detection */
|
|
292
|
+
interface ChangeDetectionDb {
|
|
293
|
+
getFile(filePath: string): {
|
|
294
|
+
hash: string;
|
|
295
|
+
} | null;
|
|
296
|
+
getAllFilePaths(): string[];
|
|
297
|
+
}
|
|
298
|
+
/** Compare discovered files against stored hashes to detect adds, modifies, and deletes. */
|
|
299
|
+
declare function computeChanges(discovered: DiscoveredFile[], db: ChangeDetectionDb): Promise<IncrementalResult>;
|
|
300
|
+
|
|
301
|
+
/** KNN vector similarity search. Scores normalized as 1/(1+distance). */
|
|
302
|
+
declare function vectorSearch(db: KontextDatabase, embedder: Embedder, query: string, limit: number, filters?: SearchFilters): Promise<SearchResult[]>;
|
|
303
|
+
|
|
304
|
+
/** Full-text search via SQLite FTS5 with BM25 ranking. Scores normalized as 1/(1+|rank|). */
|
|
305
|
+
declare function ftsSearch(db: KontextDatabase, query: string, limit: number, filters?: SearchFilters): SearchResult[];
|
|
306
|
+
|
|
307
|
+
interface ASTFilters {
|
|
308
|
+
name?: string;
|
|
309
|
+
type?: string;
|
|
310
|
+
parent?: string;
|
|
311
|
+
language?: string;
|
|
312
|
+
matchMode?: "exact" | "prefix" | "fuzzy";
|
|
313
|
+
}
|
|
314
|
+
/** AST-aware symbol search by name, type, parent, and language. Supports exact/prefix/fuzzy matching. */
|
|
315
|
+
declare function astSearch(db: KontextDatabase, filters: ASTFilters, limit: number): SearchResult[];
|
|
316
|
+
|
|
317
|
+
/** Search files by glob pattern. Converts globs to SQL LIKE clauses. */
|
|
318
|
+
declare function pathSearch(db: KontextDatabase, pattern: string, limit: number): SearchResult[];
|
|
319
|
+
/** BFS traversal of the import/dependency graph. Scores decay with depth. */
|
|
320
|
+
declare function dependencyTrace(db: KontextDatabase, chunkId: number, direction: "imports" | "importedBy", depth: number): SearchResult[];
|
|
321
|
+
|
|
322
|
+
/** Options for the init pipeline. */
|
|
323
|
+
interface InitOptions {
|
|
324
|
+
log?: (msg: string) => void;
|
|
325
|
+
skipEmbedding?: boolean;
|
|
326
|
+
}
|
|
327
|
+
interface IndexStats {
|
|
328
|
+
filesDiscovered: number;
|
|
329
|
+
filesAdded: number;
|
|
330
|
+
filesModified: number;
|
|
331
|
+
filesDeleted: number;
|
|
332
|
+
filesUnchanged: number;
|
|
333
|
+
chunksCreated: number;
|
|
334
|
+
vectorsCreated: number;
|
|
335
|
+
durationMs: number;
|
|
336
|
+
languageCounts: Map<string, number>;
|
|
337
|
+
}
|
|
338
|
+
/** Index a codebase: discover → parse → chunk → embed → store. Runs incrementally on subsequent calls. */
|
|
339
|
+
declare function runInit(projectPath: string, options?: InitOptions): Promise<IndexStats>;
|
|
340
|
+
|
|
341
|
+
/** Options for the query command. */
|
|
342
|
+
interface QueryOptions {
|
|
343
|
+
limit: number;
|
|
344
|
+
strategies: StrategyName[];
|
|
345
|
+
language?: string;
|
|
346
|
+
format: "json" | "text";
|
|
347
|
+
}
|
|
348
|
+
interface QueryOutputResult {
|
|
349
|
+
file: string;
|
|
350
|
+
lines: [number, number];
|
|
351
|
+
name: string | null;
|
|
352
|
+
type: string;
|
|
353
|
+
score: number;
|
|
354
|
+
snippet: string;
|
|
355
|
+
language: string;
|
|
356
|
+
}
|
|
357
|
+
interface QueryOutput {
|
|
358
|
+
query: string;
|
|
359
|
+
results: QueryOutputResult[];
|
|
360
|
+
stats: {
|
|
361
|
+
strategies: string[];
|
|
362
|
+
totalResults: number;
|
|
363
|
+
searchTimeMs: number;
|
|
364
|
+
};
|
|
365
|
+
text?: string;
|
|
366
|
+
}
|
|
367
|
+
/** Execute a multi-strategy search with RRF fusion. Returns ranked results. */
|
|
368
|
+
declare function runQuery(projectPath: string, query: string, options: QueryOptions): Promise<QueryOutput>;
|
|
369
|
+
|
|
370
|
+
/** Options for the LLM-steered ask command. */
|
|
371
|
+
interface AskOptions {
|
|
372
|
+
limit: number;
|
|
373
|
+
format: "json" | "text";
|
|
374
|
+
provider?: LLMProvider;
|
|
375
|
+
providerName?: string;
|
|
376
|
+
noExplain?: boolean;
|
|
377
|
+
}
|
|
378
|
+
interface AskOutputResult {
|
|
379
|
+
file: string;
|
|
380
|
+
lines: [number, number];
|
|
381
|
+
name: string | null;
|
|
382
|
+
type: string;
|
|
383
|
+
score: number;
|
|
384
|
+
snippet: string;
|
|
385
|
+
language: string;
|
|
386
|
+
}
|
|
387
|
+
interface AskOutput {
|
|
388
|
+
query: string;
|
|
389
|
+
interpretation: string;
|
|
390
|
+
results: AskOutputResult[];
|
|
391
|
+
explanation: string;
|
|
392
|
+
stats: {
|
|
393
|
+
strategies: string[];
|
|
394
|
+
tokensUsed: number;
|
|
395
|
+
costEstimate: number;
|
|
396
|
+
totalResults: number;
|
|
397
|
+
};
|
|
398
|
+
fallback?: boolean;
|
|
399
|
+
text?: string;
|
|
400
|
+
}
|
|
401
|
+
/** LLM-steered natural language search. Falls back to basic multi-strategy search without API key. */
|
|
402
|
+
declare function runAsk(projectPath: string, query: string, options: AskOptions): Promise<AskOutput>;
|
|
403
|
+
|
|
404
|
+
interface ProjectConfig {
|
|
405
|
+
model: string;
|
|
406
|
+
dimensions: number;
|
|
407
|
+
}
|
|
408
|
+
/** Structured output from the status command. */
|
|
409
|
+
interface StatusOutput {
|
|
410
|
+
initialized: boolean;
|
|
411
|
+
fileCount: number;
|
|
412
|
+
chunkCount: number;
|
|
413
|
+
vectorCount: number;
|
|
414
|
+
dbSizeBytes: number;
|
|
415
|
+
lastIndexed: string | null;
|
|
416
|
+
languages: Map<string, number>;
|
|
417
|
+
config: ProjectConfig | null;
|
|
418
|
+
text: string;
|
|
419
|
+
}
|
|
420
|
+
/** Gather index statistics: file/chunk/vector counts, languages, DB size, config. */
|
|
421
|
+
declare function runStatus(projectPath: string): Promise<StatusOutput>;
|
|
422
|
+
|
|
423
|
+
/** String constants for all Kontext error codes. */
|
|
424
|
+
declare const ErrorCode: {
|
|
425
|
+
readonly NOT_INITIALIZED: "NOT_INITIALIZED";
|
|
426
|
+
readonly INDEX_FAILED: "INDEX_FAILED";
|
|
427
|
+
readonly PARSE_FAILED: "PARSE_FAILED";
|
|
428
|
+
readonly CHUNK_FAILED: "CHUNK_FAILED";
|
|
429
|
+
readonly EMBEDDER_FAILED: "EMBEDDER_FAILED";
|
|
430
|
+
readonly SEARCH_FAILED: "SEARCH_FAILED";
|
|
431
|
+
readonly CONFIG_INVALID: "CONFIG_INVALID";
|
|
432
|
+
readonly DB_CORRUPTED: "DB_CORRUPTED";
|
|
433
|
+
readonly DB_WRITE_FAILED: "DB_WRITE_FAILED";
|
|
434
|
+
readonly WATCHER_FAILED: "WATCHER_FAILED";
|
|
435
|
+
readonly LLM_FAILED: "LLM_FAILED";
|
|
436
|
+
};
|
|
437
|
+
/** Union type of all error code string values. */
|
|
438
|
+
type ErrorCodeValue = (typeof ErrorCode)[keyof typeof ErrorCode];
|
|
439
|
+
/** Base error class for all Kontext errors. Carries a typed `code` and optional `cause`. */
|
|
440
|
+
declare class KontextError extends Error {
|
|
441
|
+
readonly code: ErrorCodeValue;
|
|
442
|
+
constructor(message: string, code: ErrorCodeValue, cause?: Error);
|
|
443
|
+
}
|
|
444
|
+
/** Error during indexing: file discovery, parsing, chunking, or embedding. */
|
|
445
|
+
declare class IndexError extends KontextError {
|
|
446
|
+
constructor(message: string, code: ErrorCodeValue, cause?: Error);
|
|
447
|
+
}
|
|
448
|
+
/** Error during search: vector, FTS, AST, path, or fusion. */
|
|
449
|
+
declare class SearchError extends KontextError {
|
|
450
|
+
constructor(message: string, code: ErrorCodeValue, cause?: Error);
|
|
451
|
+
}
|
|
452
|
+
/** Error reading, writing, or validating configuration. */
|
|
453
|
+
declare class ConfigError extends KontextError {
|
|
454
|
+
constructor(message: string, code: ErrorCodeValue, cause?: Error);
|
|
455
|
+
}
|
|
456
|
+
/** Error in SQLite storage operations. */
|
|
457
|
+
declare class DatabaseError extends KontextError {
|
|
458
|
+
constructor(message: string, code: ErrorCodeValue, cause?: Error);
|
|
459
|
+
}
|
|
460
|
+
|
|
461
|
+
/** Numeric log level constants. Lower = more verbose. */
|
|
462
|
+
declare const LogLevel: {
|
|
463
|
+
readonly DEBUG: 0;
|
|
464
|
+
readonly INFO: 1;
|
|
465
|
+
readonly WARN: 2;
|
|
466
|
+
readonly ERROR: 3;
|
|
467
|
+
readonly SILENT: 4;
|
|
468
|
+
};
|
|
469
|
+
/** Union type of all log level numeric values. */
|
|
470
|
+
type LogLevelValue = (typeof LogLevel)[keyof typeof LogLevel];
|
|
471
|
+
/** Leveled logger that writes to stderr. */
|
|
472
|
+
interface Logger {
|
|
473
|
+
debug(msg: string, ...args: unknown[]): void;
|
|
474
|
+
info(msg: string, ...args: unknown[]): void;
|
|
475
|
+
warn(msg: string, ...args: unknown[]): void;
|
|
476
|
+
error(msg: string, ...args: unknown[]): void;
|
|
477
|
+
}
|
|
478
|
+
/** Options for creating a logger instance. */
|
|
479
|
+
interface LoggerOptions {
|
|
480
|
+
level?: LogLevelValue;
|
|
481
|
+
}
|
|
482
|
+
/** Create a logger. Respects `CTX_DEBUG=1` env var for debug output. */
|
|
483
|
+
declare function createLogger(options?: LoggerOptions): Logger;
|
|
484
|
+
|
|
485
|
+
/** Public API surface of the package: types, error classes, and pipeline entry points. */
export { type ASTNode, type Chunk, ConfigError, DatabaseError, type DiscoverOptions, type DiscoveredFile, type Embedder, ErrorCode, type FileChange, IndexError, type KontextConfig, type KontextDatabase, KontextError, LANGUAGE_MAP, type LLMProvider, LogLevel, type Logger, SearchError, type SearchFilters, type SearchResult, type SteeringResult, type StrategyName, type StrategyResult, type WatcherHandle, astSearch, chunkFile, computeChanges, createDatabase, createLocalEmbedder, createLogger, createOpenAIEmbedder, createVoyageEmbedder, dependencyTrace, discoverFiles, estimateTokens, ftsSearch, fusionMerge, initParser, parseFile, pathSearch, planSearch, prepareChunkText, runAsk, runInit, runQuery, runStatus, steer, vectorSearch };
|