reasonix 0.32.0 → 0.33.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (118)
  1. package/dist/cli/chat-Q5ZCVIOO.js +39 -0
  2. package/dist/cli/chunk-2AWTGJ2C.js +110 -0
  3. package/dist/cli/chunk-2AWTGJ2C.js.map +1 -0
  4. package/dist/cli/chunk-3Q3C4W66.js +30 -0
  5. package/dist/cli/chunk-3Q3C4W66.js.map +1 -0
  6. package/dist/cli/chunk-4DCHFFEY.js +149 -0
  7. package/dist/cli/chunk-4DCHFFEY.js.map +1 -0
  8. package/dist/cli/chunk-5X7LZJDE.js +36 -0
  9. package/dist/cli/chunk-5X7LZJDE.js.map +1 -0
  10. package/dist/cli/chunk-63KAV5DX.js +106 -0
  11. package/dist/cli/chunk-63KAV5DX.js.map +1 -0
  12. package/dist/cli/chunk-6TMHAK5D.js +576 -0
  13. package/dist/cli/chunk-6TMHAK5D.js.map +1 -0
  14. package/dist/cli/chunk-APPB3ZPQ.js +43 -0
  15. package/dist/cli/chunk-APPB3ZPQ.js.map +1 -0
  16. package/dist/cli/chunk-BQNUJJN7.js +42 -0
  17. package/dist/cli/chunk-BQNUJJN7.js.map +1 -0
  18. package/dist/cli/chunk-CPOV2O73.js +39 -0
  19. package/dist/cli/chunk-CPOV2O73.js.map +1 -0
  20. package/dist/cli/chunk-D5DKXIP5.js +368 -0
  21. package/dist/cli/chunk-D5DKXIP5.js.map +1 -0
  22. package/dist/cli/chunk-DFP4YSVM.js +247 -0
  23. package/dist/cli/chunk-DFP4YSVM.js.map +1 -0
  24. package/dist/cli/chunk-DULSP7JH.js +410 -0
  25. package/dist/cli/chunk-DULSP7JH.js.map +1 -0
  26. package/dist/cli/chunk-FM57FNPJ.js +46 -0
  27. package/dist/cli/chunk-FM57FNPJ.js.map +1 -0
  28. package/dist/cli/chunk-FWGEHRB7.js +54 -0
  29. package/dist/cli/chunk-FWGEHRB7.js.map +1 -0
  30. package/dist/cli/chunk-FXGQ5NHE.js +513 -0
  31. package/dist/cli/chunk-FXGQ5NHE.js.map +1 -0
  32. package/dist/cli/chunk-G3XNWSFN.js +53 -0
  33. package/dist/cli/chunk-G3XNWSFN.js.map +1 -0
  34. package/dist/cli/chunk-I6YIAK6C.js +757 -0
  35. package/dist/cli/chunk-I6YIAK6C.js.map +1 -0
  36. package/dist/cli/chunk-J5VLP23S.js +94 -0
  37. package/dist/cli/chunk-J5VLP23S.js.map +1 -0
  38. package/dist/cli/chunk-KMWKGPFZ.js +303 -0
  39. package/dist/cli/chunk-KMWKGPFZ.js.map +1 -0
  40. package/dist/cli/chunk-MDHVWCJ4.js +14965 -0
  41. package/dist/cli/chunk-MDHVWCJ4.js.map +1 -0
  42. package/dist/cli/chunk-MHDNZXJJ.js +48 -0
  43. package/dist/cli/chunk-MHDNZXJJ.js.map +1 -0
  44. package/dist/cli/chunk-ORM6PK57.js +140 -0
  45. package/dist/cli/chunk-ORM6PK57.js.map +1 -0
  46. package/dist/cli/chunk-Q6YFXW7H.js +4986 -0
  47. package/dist/cli/chunk-Q6YFXW7H.js.map +1 -0
  48. package/dist/cli/chunk-QGE6AF76.js +1467 -0
  49. package/dist/cli/chunk-QGE6AF76.js.map +1 -0
  50. package/dist/cli/chunk-RFX7TYVV.js +28 -0
  51. package/dist/cli/chunk-RFX7TYVV.js.map +1 -0
  52. package/dist/cli/chunk-RZILUXUC.js +940 -0
  53. package/dist/cli/chunk-RZILUXUC.js.map +1 -0
  54. package/dist/cli/chunk-SDE5U32Z.js +535 -0
  55. package/dist/cli/chunk-SDE5U32Z.js.map +1 -0
  56. package/dist/cli/chunk-SOZE7V7V.js +340 -0
  57. package/dist/cli/chunk-SOZE7V7V.js.map +1 -0
  58. package/dist/cli/chunk-U3V2ZQ5J.js +479 -0
  59. package/dist/cli/chunk-U3V2ZQ5J.js.map +1 -0
  60. package/dist/cli/chunk-W4LDFAZ6.js +1544 -0
  61. package/dist/cli/chunk-W4LDFAZ6.js.map +1 -0
  62. package/dist/cli/chunk-WBDE4IRI.js +208 -0
  63. package/dist/cli/chunk-WBDE4IRI.js.map +1 -0
  64. package/dist/cli/chunk-XHQIK7B6.js +189 -0
  65. package/dist/cli/chunk-XHQIK7B6.js.map +1 -0
  66. package/dist/cli/chunk-XJLZ4HKU.js +307 -0
  67. package/dist/cli/chunk-XJLZ4HKU.js.map +1 -0
  68. package/dist/cli/chunk-ZPTSJGX5.js +88 -0
  69. package/dist/cli/chunk-ZPTSJGX5.js.map +1 -0
  70. package/dist/cli/chunk-ZTLZO42A.js +231 -0
  71. package/dist/cli/chunk-ZTLZO42A.js.map +1 -0
  72. package/dist/cli/code-DLR77NPZ.js +151 -0
  73. package/dist/cli/code-DLR77NPZ.js.map +1 -0
  74. package/dist/cli/commands-JWT2MWVH.js +352 -0
  75. package/dist/cli/commands-JWT2MWVH.js.map +1 -0
  76. package/dist/cli/commit-RPZBOZS2.js +288 -0
  77. package/dist/cli/commit-RPZBOZS2.js.map +1 -0
  78. package/dist/cli/diff-NTEHCSDW.js +145 -0
  79. package/dist/cli/diff-NTEHCSDW.js.map +1 -0
  80. package/dist/cli/doctor-3TGB2NZN.js +19 -0
  81. package/dist/cli/doctor-3TGB2NZN.js.map +1 -0
  82. package/dist/cli/events-P27CX7LN.js +338 -0
  83. package/dist/cli/events-P27CX7LN.js.map +1 -0
  84. package/dist/cli/index.js +80 -33693
  85. package/dist/cli/index.js.map +1 -1
  86. package/dist/cli/mcp-ARTNQ24O.js +266 -0
  87. package/dist/cli/mcp-ARTNQ24O.js.map +1 -0
  88. package/dist/cli/mcp-browse-HLO2ENDL.js +163 -0
  89. package/dist/cli/mcp-browse-HLO2ENDL.js.map +1 -0
  90. package/dist/cli/mcp-inspect-T2HBR22P.js +103 -0
  91. package/dist/cli/mcp-inspect-T2HBR22P.js.map +1 -0
  92. package/dist/cli/{prompt-XHICFAYN.js → prompt-V47QKSAR.js} +3 -2
  93. package/dist/cli/prompt-V47QKSAR.js.map +1 -0
  94. package/dist/cli/prune-sessions-ERL6B4G5.js +42 -0
  95. package/dist/cli/prune-sessions-ERL6B4G5.js.map +1 -0
  96. package/dist/cli/replay-Q43DSMG6.js +273 -0
  97. package/dist/cli/replay-Q43DSMG6.js.map +1 -0
  98. package/dist/cli/run-JMEOTQCG.js +215 -0
  99. package/dist/cli/run-JMEOTQCG.js.map +1 -0
  100. package/dist/cli/server-SYC3OVOP.js +2967 -0
  101. package/dist/cli/server-SYC3OVOP.js.map +1 -0
  102. package/dist/cli/sessions-MOJAALJI.js +102 -0
  103. package/dist/cli/sessions-MOJAALJI.js.map +1 -0
  104. package/dist/cli/setup-CCJZAWTY.js +404 -0
  105. package/dist/cli/setup-CCJZAWTY.js.map +1 -0
  106. package/dist/cli/stats-5RJCATCE.js +12 -0
  107. package/dist/cli/stats-5RJCATCE.js.map +1 -0
  108. package/dist/cli/update-4TJWRUIN.js +90 -0
  109. package/dist/cli/update-4TJWRUIN.js.map +1 -0
  110. package/dist/cli/version-3MYFE4G6.js +29 -0
  111. package/dist/cli/version-3MYFE4G6.js.map +1 -0
  112. package/dist/index.d.ts +13 -2
  113. package/dist/index.js +493 -89
  114. package/dist/index.js.map +1 -1
  115. package/package.json +1 -1
  116. package/dist/cli/chunk-VWFJNLIK.js +0 -1031
  117. package/dist/cli/chunk-VWFJNLIK.js.map +0 -1
  118. /package/dist/cli/{prompt-XHICFAYN.js.map → chat-Q5ZCVIOO.js.map} +0 -0
@@ -0,0 +1,940 @@
1
+ #!/usr/bin/env node
2
+ import {
3
+ ignoredByLayers,
4
+ loadGitignoreAt
5
+ } from "./chunk-5X7LZJDE.js";
6
+ import {
7
+ compileFilters,
8
+ defaultIndexConfig,
9
+ resolveSemanticEmbeddingConfig
10
+ } from "./chunk-DULSP7JH.js";
11
+
12
+ // src/index/semantic/builder.ts
13
+ import { promises as fs3 } from "fs";
14
+ import path3 from "path";
15
+
16
+ // src/index/semantic/chunker.ts
17
+ import { promises as fs } from "fs";
18
+ import path from "path";
19
// Hard upper bound on characters per emitted chunk (4 000); windows that
// exceed it are re-split line-by-line by safeSplit().
var DEFAULT_MAX_CHUNK_CHARS = 4e3;
20
// Split `text` into overlapping line windows for `filePath`.
// Windows advance by (windowLines - overlap) lines; each trimmed window is
// further bounded to `maxChunkChars` via safeSplit(). Returns [] for empty text.
function chunkText(text, filePath, windowLines, overlap, maxChunkChars = DEFAULT_MAX_CHUNK_CHARS) {
  const allLines = text.split(/\r?\n/);
  const isEmpty = allLines.length === 0 || (allLines.length === 1 && allLines[0] === "");
  if (isEmpty) return [];
  // Keep the step positive even when overlap >= windowLines.
  const step = Math.max(1, windowLines - overlap);
  const result = [];
  let startIdx = 0;
  while (startIdx < allLines.length) {
    const endIdx = Math.min(allLines.length, startIdx + windowLines);
    const body = allLines.slice(startIdx, endIdx).join("\n").trim();
    if (body.length > 0) {
      // Line numbers are 1-based; endLine is inclusive.
      result.push(
        ...safeSplit(
          { path: filePath, startLine: startIdx + 1, endLine: endIdx, text: body },
          maxChunkChars
        )
      );
    }
    if (endIdx >= allLines.length) break;
    startIdx += step;
  }
  return result;
}
43
// Re-split an oversized chunk so no piece exceeds `maxChars` characters.
// Accumulates whole lines into a pending buffer and emits it when the next
// line would overflow. A single line longer than maxChars is emitted alone,
// truncated to maxChars. Returns [chunk] unchanged when it already fits.
function safeSplit(chunk, maxChars) {
  if (chunk.text.length <= maxChars) return [chunk];
  const rows = chunk.text.split("\n");
  const pieces = [];
  let pending = [];
  let pendingStart = chunk.startLine;
  let pendingLen = 0;
  // Emit the buffered lines as one piece ending at `lastLineNo` (inclusive).
  const emit = (lastLineNo) => {
    if (pending.length === 0) return;
    pieces.push({
      path: chunk.path,
      startLine: pendingStart,
      endLine: lastLineNo,
      text: pending.join("\n")
    });
    pending = [];
    pendingLen = 0;
  };
  rows.forEach((raw, i) => {
    const row = raw ?? "";
    const cost = row.length + 1; // +1 accounts for the joining newline
    const absLine = chunk.startLine + i;
    if (cost > maxChars) {
      // Pathologically long line: flush what we have, emit it truncated.
      emit(absLine - 1);
      pieces.push({
        path: chunk.path,
        startLine: absLine,
        endLine: absLine,
        text: row.slice(0, maxChars)
      });
      pendingStart = absLine + 1;
      return;
    }
    if (pendingLen + cost > maxChars && pending.length > 0) {
      emit(absLine - 1);
      pendingStart = absLine;
    }
    pending.push(row);
    pendingLen += cost;
  });
  emit(chunk.endLine);
  return pieces;
}
85
// Relative path from `root` to `abs`, normalized to forward slashes so index
// keys are stable across platforms.
function toForwardRel(root, abs) {
  const relative = path.relative(root, abs);
  return relative.split(path.sep).join("/");
}
88
// Walk `root` and yield line-window chunks for every indexable text file.
// Skips (reported via opts.onSkip(relPath, reason)): default-ignored dirs and
// files, binary extensions, gitignored entries (layered per directory),
// user-configured patterns, oversized/unreadable files, and files containing
// a NUL byte (treated as binary). Traversal uses an explicit stack (depth-first,
// order unspecified). Unreadable directories are skipped silently.
async function* walkChunks(root, opts = {}) {
  const windowLines = opts.windowLines ?? 60;
  // Clamp overlap below windowLines so the window stride stays positive.
  const overlap = Math.min(opts.overlap ?? 12, Math.max(0, windowLines - 1));
  const maxChunkChars = opts.maxChunkChars ?? DEFAULT_MAX_CHUNK_CHARS;
  const filters = compileFilters(opts.config ?? defaultIndexConfig());
  const onSkip = opts.onSkip ?? (() => {
  });
  // Seed gitignore layers with the root .gitignore, if present.
  const initial = [];
  if (filters.respectGitignore) {
    const rootIg = await loadGitignoreAt(root);
    if (rootIg) initial.push({ dirAbs: root, ig: rootIg });
  }
  const stack = [{ dir: root, layers: initial }];
  while (stack.length > 0) {
    const frame = stack.pop();
    if (!frame) break;
    const { dir, layers } = frame;
    let entries;
    try {
      entries = await fs.readdir(dir, { withFileTypes: true });
    } catch {
      // Unreadable directory: best-effort walk, move on.
      continue;
    }
    for (const entry of entries) {
      const name = entry.name;
      const abs = path.join(dir, name);
      const rel = toForwardRel(root, abs);
      if (entry.isDirectory()) {
        if (filters.dirSet.has(name)) {
          onSkip(rel, "defaultDir");
          continue;
        }
        if (filters.respectGitignore && ignoredByLayers(layers, abs, true)) {
          onSkip(rel, "gitignore");
          continue;
        }
        // Match both "dir/" and "dir" forms against user patterns.
        if (filters.patternMatch(`${rel}/`) || filters.patternMatch(rel)) {
          onSkip(rel, "pattern");
          continue;
        }
        // Extend gitignore layers with this directory's own .gitignore.
        const childLayers = filters.respectGitignore ? await extendLayers(layers, abs) : layers;
        stack.push({ dir: abs, layers: childLayers });
        continue;
      }
      // Symlinks, sockets, etc. are ignored without an onSkip report.
      if (!entry.isFile()) continue;
      if (filters.fileSet.has(name)) {
        onSkip(rel, "defaultFile");
        continue;
      }
      const ext = path.extname(name).toLowerCase();
      if (filters.extSet.has(ext)) {
        onSkip(rel, "binaryExt");
        continue;
      }
      if (filters.respectGitignore && ignoredByLayers(layers, abs, false)) {
        onSkip(rel, "gitignore");
        continue;
      }
      if (filters.patternMatch(rel)) {
        onSkip(rel, "pattern");
        continue;
      }
      const result = await readSizeBoundedFile(abs, filters.maxFileBytes);
      if (result.kind === "skip") {
        onSkip(rel, result.reason);
        continue;
      }
      const text = result.text;
      // NUL byte => treat as binary content even if the extension looked textual.
      if (text.indexOf("\0") !== -1) {
        onSkip(rel, "binaryContent");
        continue;
      }
      for (const chunk of chunkText(text, rel, windowLines, overlap, maxChunkChars)) {
        yield chunk;
      }
    }
  }
}
166
// Return gitignore layers extended with `dirAbs`'s own .gitignore, or the
// original array unchanged when that directory has none.
async function extendLayers(layers, dirAbs) {
  const found = await loadGitignoreAt(dirAbs);
  if (!found) return layers;
  return [...layers, { dirAbs, ig: found }];
}
170
// Read `abs` as UTF-8, but only after confirming its size is <= maxBytes.
// Returns { kind: "ok", text } on success, or { kind: "skip", reason } where
// reason is "tooLarge" or "readError" (open/stat/read/close failure).
async function readSizeBoundedFile(abs, maxBytes) {
  try {
    const handle = await fs.open(abs, "r");
    try {
      const info = await handle.stat();
      if (info.size > maxBytes) return { kind: "skip", reason: "tooLarge" };
      const text = await handle.readFile("utf8");
      return { kind: "ok", text };
    } finally {
      // Always release the descriptor; a close failure also maps to readError.
      await handle.close();
    }
  } catch {
    return { kind: "skip", reason: "readError" };
  }
}
184
+
185
+ // src/index/semantic/embedding.ts
186
// Fallback Ollama endpoint, used when neither opts.baseUrl nor OLLAMA_URL is set.
var DEFAULT_OLLAMA_URL = "http://localhost:11434";
// Default embedding model; overridable via REASONIX_EMBED_MODEL or opts.model.
var DEFAULT_EMBED_MODEL = "nomic-embed-text";
// Per-request embedding timeout: 30 000 ms.
var DEFAULT_TIMEOUT_MS = 3e4;
189
// Error type for all embedding-provider failures (network, HTTP status,
// malformed responses, aborts). `cause` carries the underlying error, if any.
var EmbeddingError = class extends Error {
  cause;
  constructor(message, cause) {
    super(message);
    this.name = "EmbeddingError";
    this.cause = cause;
  }
};
197
// Embed a single text, dispatching on opts.provider: "openai-compat" goes to
// the OpenAI-compatible endpoint, anything else to Ollama.
async function embed(text, opts = {}) {
  const useOpenAI = opts.provider === "openai-compat";
  return useOpenAI ? embedOpenAICompat(text, opts) : embedOllama(text, opts);
}
201
// Embed many texts. For "openai-compat" the whole batch goes in one request;
// otherwise texts are embedded sequentially via embed(). Per-item failures are
// reported through opts.onError(i, err) and recorded as null in the output;
// aborts (signal or AbortError) are rethrown as EmbeddingError and stop the run.
// opts.onProgress(done, total) fires after every item, including failed ones.
async function embedAll(texts, opts = {}) {
  if (opts.provider === "openai-compat") return await embedAllOpenAICompat(texts, opts);
  const out = [];
  for (let i = 0; i < texts.length; i++) {
    if (opts.signal?.aborted) throw new EmbeddingError("embedding aborted");
    const text = texts[i];
    // NOTE(review): an undefined slot is skipped without pushing a null
    // placeholder, so `out` loses positional alignment with `texts` in that
    // case (and onProgress is not fired for it). Callers appear to index the
    // result by position — confirm texts never contains holes/undefined.
    if (text === void 0) continue;
    try {
      out.push(await embed(text, opts));
    } catch (err) {
      if (isAbortError(err) || opts.signal?.aborted) {
        throw new EmbeddingError("embedding aborted", err);
      }
      // Non-abort failure: report it and keep going with a null marker.
      opts.onError?.(i, err);
      out.push(null);
    }
    opts.onProgress?.(i + 1, texts.length);
  }
  return out;
}
221
// Health-check the Ollama server by listing its models via GET /api/tags.
// Never throws: returns { ok: true, models } or { ok: false, error }.
async function probeOllama(opts = {}) {
  const baseUrl = opts.baseUrl ?? process.env.OLLAMA_URL ?? DEFAULT_OLLAMA_URL;
  try {
    const res = await fetch(`${baseUrl}/api/tags`, { signal: opts.signal });
    if (!res.ok) return { ok: false, error: `Ollama returned ${res.status}` };
    const payload = await res.json();
    // Keep only well-formed string model names.
    const names = [];
    for (const entry of payload.models ?? []) {
      if (typeof entry.name === "string") names.push(entry.name);
    }
    return { ok: true, models: names };
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    return { ok: false, error: message };
  }
}
234
// Embed one text via Ollama's POST /api/embeddings. Resolves baseUrl/model
// from opts, then env (OLLAMA_URL, REASONIX_EMBED_MODEL), then defaults.
// Throws EmbeddingError with actionable messages for: unreachable server,
// model not pulled (404), other HTTP errors, malformed response, timeout.
async function embedOllama(text, opts) {
  const baseUrl = opts.baseUrl ?? process.env.OLLAMA_URL ?? DEFAULT_OLLAMA_URL;
  const model = opts.model ?? process.env.REASONIX_EMBED_MODEL ?? DEFAULT_EMBED_MODEL;
  const timeoutMs = opts.timeoutMs ?? DEFAULT_TIMEOUT_MS;
  // Combined abort: caller's signal OR the timeout, whichever fires first.
  const { controller, cleanup } = composeAbort(opts.signal, timeoutMs, "embedding timeout");
  let res;
  try {
    res = await fetch(`${baseUrl}/api/embeddings`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({ model, prompt: text }),
      signal: controller.signal
    });
  } catch (err) {
    // cleanup() also runs in the finally below; calling it twice is harmless
    // (clearTimeout/removeEventListener are idempotent).
    cleanup();
    const msg = err instanceof Error ? err.message : String(err);
    if (/ECONNREFUSED|connect ECONNREFUSED|fetch failed/i.test(msg)) {
      throw new EmbeddingError(
        `Cannot reach Ollama at ${baseUrl}. Install from https://ollama.com, then run \`ollama pull ${model}\` and \`ollama serve\`. Override the URL via OLLAMA_URL.`,
        err
      );
    }
    throw new EmbeddingError(`embedding request failed: ${msg}`, err);
  } finally {
    cleanup();
  }
  if (!res.ok) {
    const body = await res.text().catch(() => "");
    // 404 + "model not found" => the model was never pulled locally.
    if (res.status === 404 && /model.*not found/i.test(body)) {
      throw new EmbeddingError(
        `Embedding model "${model}" not pulled. Run \`ollama pull ${model}\` once, then retry.`
      );
    }
    throw new EmbeddingError(`Ollama returned ${res.status}: ${body.slice(0, 200)}`);
  }
  const json = await res.json();
  if (!json.embedding || !Array.isArray(json.embedding)) {
    throw new EmbeddingError("Ollama response missing 'embedding' array");
  }
  return toFloat32Array(json.embedding, "embedding");
}
275
// Embed one text via the OpenAI-compatible endpoint; returns the first (only)
// vector, or an empty Float32Array if the provider returned nothing.
async function embedOpenAICompat(text, opts) {
  const [first] = await requestOpenAICompatEmbeddings(text, opts);
  return first ?? new Float32Array(0);
}
279
// Batch-embed via one OpenAI-compatible request. Fires onProgress once, at
// completion, since the provider gives no incremental progress.
async function embedAllOpenAICompat(texts, opts) {
  if (texts.length === 0) return [];
  if (opts.signal?.aborted) throw new EmbeddingError("embedding aborted");
  // Pass a defensive copy so the request body cannot alias caller state.
  const result = await requestOpenAICompatEmbeddings(texts.slice(), opts);
  opts.onProgress?.(texts.length, texts.length);
  return result;
}
286
// POST `input` (one string or an array) to an OpenAI-compatible /embeddings
// endpoint and return the vectors ordered by the response's `index` field.
// opts.extraBody is spread first so model/input/encoding_format always win.
// Throws EmbeddingError for aborts, network failures, auth (401/403), bad URL
// (404), bad request (400), other HTTP errors, and any response that is
// missing, misindexes, or omits an embedding.
async function requestOpenAICompatEmbeddings(input, opts) {
  const timeoutMs = opts.timeoutMs ?? DEFAULT_TIMEOUT_MS;
  // Combined abort: caller's signal OR the timeout.
  const { controller, cleanup } = composeAbort(opts.signal, timeoutMs, "embedding timeout");
  const url = opts.baseUrl.trim();
  const body = {
    ...opts.extraBody ?? {},
    model: opts.model,
    input,
    encoding_format: "float"
  };
  let res;
  try {
    res = await fetch(url, {
      method: "POST",
      headers: {
        authorization: `Bearer ${opts.apiKey}`,
        "content-type": "application/json"
      },
      body: JSON.stringify(body),
      signal: controller.signal
    });
  } catch (err) {
    // cleanup() also runs in the finally below; double-call is harmless.
    cleanup();
    if (isAbortError(err) || opts.signal?.aborted) {
      throw new EmbeddingError("embedding aborted", err);
    }
    const msg = err instanceof Error ? err.message : String(err);
    throw new EmbeddingError(`Cannot reach OpenAI-compatible embeddings at ${url}: ${msg}`, err);
  } finally {
    cleanup();
  }
  if (!res.ok) {
    const raw = await res.text().catch(() => "");
    const bodyText = raw.slice(0, 300); // cap the echoed body in error messages
    if (res.status === 401 || res.status === 403) {
      throw new EmbeddingError(
        `OpenAI-compatible API rejected the API key for ${url}. Response ${res.status}: ${bodyText}`
      );
    }
    if (res.status === 404) {
      throw new EmbeddingError(
        `Embeddings endpoint not found at ${url}. Check the configured API URL. Response ${res.status}: ${bodyText}`
      );
    }
    if (res.status === 400) {
      throw new EmbeddingError(
        `Embedding provider returned 400: ${bodyText}. Check model and custom request body fields.`
      );
    }
    throw new EmbeddingError(`OpenAI-compatible API returned ${res.status}: ${bodyText}`);
  }
  const json = await res.json();
  if (!Array.isArray(json.data)) {
    throw new EmbeddingError("OpenAI-compatible response missing 'data' array");
  }
  // Reassemble by the provider-reported index; providers may reorder rows.
  const size = Array.isArray(input) ? input.length : 1;
  const out = new Array(size).fill(null);
  for (const row of json.data) {
    const rawIndex = row.index;
    if (typeof rawIndex !== "number" || !Number.isInteger(rawIndex) || rawIndex < 0 || rawIndex >= size) {
      throw new EmbeddingError("OpenAI-compatible response returned an invalid embedding index");
    }
    const index = rawIndex;
    if (!Array.isArray(row.embedding)) {
      throw new EmbeddingError(`OpenAI-compatible response missing embedding for index ${index}`);
    }
    out[index] = toFloat32Array(row.embedding, `data[${index}].embedding`);
  }
  // Every slot must be filled — a gap means the provider dropped an input.
  for (let i = 0; i < out.length; i++) {
    if (!out[i])
      throw new EmbeddingError(`OpenAI-compatible response missing embedding at index ${i}`);
  }
  return out;
}
360
// Convert an array of numbers into a Float32Array, rejecting any element that
// is not a finite number. `label` names the source in the error message.
function toFloat32Array(values, label) {
  const vector = new Float32Array(values.length);
  for (const [i, value] of values.entries()) {
    if (typeof value !== "number" || !Number.isFinite(value)) {
      throw new EmbeddingError(`${label}[${i}] is not a finite number`);
    }
    vector[i] = value;
  }
  return vector;
}
371
// Build an AbortController that aborts when either the caller's `signal`
// fires (forwarding its reason) or `timeoutMs` elapses (aborting with an
// Error carrying `reason`). cleanup() must be called to release the timer
// and the forwarding listener.
function composeAbort(signal, timeoutMs, reason) {
  const controller = new AbortController();
  const forwardAbort = () => controller.abort(signal?.reason);
  if (signal?.aborted) {
    // Already aborted: propagate immediately, no listener needed.
    controller.abort(signal.reason);
  } else if (signal) {
    signal.addEventListener("abort", forwardAbort, { once: true });
  }
  const timer = setTimeout(() => controller.abort(new Error(reason)), timeoutMs);
  const cleanup = () => {
    clearTimeout(timer);
    signal?.removeEventListener("abort", forwardAbort);
  };
  return { controller, cleanup };
}
387
// True when `err` is an Error representing a cancelled operation: either a
// DOM-style AbortError or any message containing "aborted" (case-insensitive).
function isAbortError(err) {
  if (!(err instanceof Error)) return false;
  return err.name === "AbortError" || /aborted/i.test(err.message);
}
394
+
395
+ // src/index/semantic/store.ts
396
+ import { promises as fs2 } from "fs";
397
+ import path2 from "path";
398
// On-disk format version; openStore() refuses stores written with another version.
var STORE_VERSION = 1;
// Metadata sidecar (version/provider/model/dim/updatedAt), JSON, pretty-printed.
var META_FILE = "index.meta.json";
// Append-only JSONL data file: one serialized embedding entry per line.
var DATA_FILE = "index.jsonl";
401
// Load and normalize the index metadata sidecar from `indexDir`.
// Returns null when the file is missing or unparseable.
async function readIndexMeta(indexDir) {
  const metaPath = path2.join(indexDir, META_FILE);
  try {
    const raw = await fs2.readFile(metaPath, "utf8");
    return normalizeMeta(JSON.parse(raw));
  } catch {
    return null;
  }
}
409
// Compare a stored meta against the current embedding identity.
// Returns the first mismatching field ("provider" or "model"), or null when
// the index is compatible.
function compareIndexIdentity(meta, identity) {
  const providerMatches = meta.provider === identity.provider;
  if (!providerMatches) return "provider";
  return meta.model === identity.model ? null : "model";
}
414
// Delete the store's data and metadata files (data first, then meta).
// `force: true` makes missing files a no-op.
async function wipeStoreFiles(indexDir) {
  for (const name of [DATA_FILE, META_FILE]) {
    await fs2.rm(path2.join(indexDir, name), { force: true });
  }
}
418
// In-memory semantic index backed by a JSONL file + JSON metadata sidecar.
// Invariants: `entries` is the flat list; `byPath` groups the same entry
// objects by file path; `dim` is fixed by the first added entry (0 = unknown).
// add() appends to disk; remove() rewrites the whole file (atomically via
// a .tmp + rename). Embeddings are assumed pre-normalized so search() can use
// a plain dot product as cosine similarity.
var SemanticStore = class {
  constructor(indexDir, identity) {
    this.indexDir = indexDir;
    this.identity = identity;
  }
  indexDir;
  identity;
  entries = [];
  byPath = /* @__PURE__ */ new Map();
  dim = 0;
  get provider() {
    return this.identity.provider;
  }
  get model() {
    return this.identity.model;
  }
  get empty() {
    return this.entries.length === 0;
  }
  get size() {
    return this.entries.length;
  }
  get all() {
    return this.entries;
  }
  // Map of path -> mtimeMs taken from each file's first entry (all chunks of a
  // file are written with the same mtime).
  fileMtimes() {
    const out = /* @__PURE__ */ new Map();
    for (const [p, group] of this.byPath) {
      const first = group[0];
      if (first) out.set(p, first.mtimeMs);
    }
    return out;
  }
  // Append entries to memory and to the JSONL file; rejects entries whose
  // embedding length differs from the established dimension.
  async add(entries) {
    if (entries.length === 0) return;
    if (this.dim === 0) this.dim = entries[0].embedding.length;
    const lines = [];
    for (const e of entries) {
      if (e.embedding.length !== this.dim) {
        throw new Error(
          `embedding dim mismatch: expected ${this.dim}, got ${e.embedding.length} for ${e.path}:${e.startLine}`
        );
      }
      this.entries.push(e);
      const list = this.byPath.get(e.path);
      if (list) list.push(e);
      else this.byPath.set(e.path, [e]);
      lines.push(serializeEntry(e));
    }
    await fs2.mkdir(this.indexDir, { recursive: true });
    await fs2.appendFile(path2.join(this.indexDir, DATA_FILE), `${lines.join("\n")}
`, "utf8");
    await this.writeMeta();
  }
  // Drop every entry belonging to the given paths; rewrites the data file
  // only when something was actually removed. Returns the removal count.
  async remove(paths) {
    if (paths.length === 0) return 0;
    const drop = new Set(paths);
    const before = this.entries.length;
    this.entries = this.entries.filter((e) => !drop.has(e.path));
    for (const p of paths) this.byPath.delete(p);
    const removed = before - this.entries.length;
    if (removed > 0) await this.flush();
    return removed;
  }
  // Top-K search by dot-product score (cosine, given normalized vectors).
  // Maintains a small ascending-sorted array as a min-"heap": heap[0] is the
  // current worst kept score; a better score replaces it and one bubble pass
  // restores order. Returns matches sorted best-first.
  search(query, topK = 8, minScore = 0) {
    if (this.entries.length === 0) return [];
    if (query.length !== this.dim && this.dim !== 0) {
      throw new Error(`query dim ${query.length} \u2260 index dim ${this.dim}`);
    }
    const heap = [];
    for (const entry of this.entries) {
      const score = dot(query, entry.embedding);
      if (score < minScore) continue;
      if (heap.length < topK) {
        heap.push({ entry, score });
        if (heap.length === topK) heap.sort((a, b) => a.score - b.score);
      } else if (score > heap[0].score) {
        heap[0] = { entry, score };
        for (let i = 0; i < heap.length - 1; i++) {
          if (heap[i].score > heap[i + 1].score) {
            const tmp = heap[i];
            heap[i] = heap[i + 1];
            heap[i + 1] = tmp;
          }
        }
      }
    }
    return heap.sort((a, b) => b.score - a.score);
  }
  // Rewrite the entire data file from memory, atomically (.tmp then rename),
  // and refresh the metadata sidecar.
  async flush() {
    await fs2.mkdir(this.indexDir, { recursive: true });
    const tmp = path2.join(this.indexDir, `${DATA_FILE}.tmp`);
    const final = path2.join(this.indexDir, DATA_FILE);
    const lines = this.entries.map(serializeEntry).join("\n");
    await fs2.writeFile(tmp, lines.length > 0 ? `${lines}
` : "", "utf8");
    await fs2.rename(tmp, final);
    await this.writeMeta();
  }
  // Persist the metadata sidecar describing this store's format and identity.
  async writeMeta() {
    const meta = {
      version: STORE_VERSION,
      provider: this.provider,
      model: this.model,
      dim: this.dim,
      updatedAt: (/* @__PURE__ */ new Date()).toISOString()
    };
    await fs2.writeFile(
      path2.join(this.indexDir, META_FILE),
      `${JSON.stringify(meta, null, 2)}
`,
      "utf8"
    );
  }
  // Clear memory state and delete both on-disk files.
  async wipe() {
    this.entries = [];
    this.byPath.clear();
    this.dim = 0;
    await wipeStoreFiles(this.indexDir);
  }
};
539
// Open (or create) a SemanticStore at `indexDir` for the given identity.
// Validates any existing metadata: a version or provider/model mismatch is a
// hard error directing the user to `reasonix index --rebuild`. A missing data
// file yields an empty store; individual corrupt JSONL lines are skipped.
async function openStore(indexDir, identity) {
  const store = new SemanticStore(indexDir, identity);
  const dataPath = path2.join(indexDir, DATA_FILE);
  const meta = await readIndexMeta(indexDir);
  if (meta) {
    if (meta.version !== STORE_VERSION) {
      throw new Error(
        `Index format version ${meta.version} does not match current ${STORE_VERSION}. Run \`reasonix index --rebuild\`.`
      );
    }
    const mismatch = compareIndexIdentity(meta, identity);
    if (mismatch !== null) {
      throw new Error(
        `Index was built with provider "${meta.provider}" model "${meta.model}" but current config is provider "${identity.provider}" model "${identity.model}". Run \`reasonix index --rebuild\`.`
      );
    }
  }
  let raw;
  try {
    raw = await fs2.readFile(dataPath, "utf8");
  } catch {
    // No data file yet: return the empty store.
    return store;
  }
  for (const line of raw.split("\n")) {
    if (line.length === 0) continue;
    try {
      const entry = deserializeEntry(line);
      // dim follows the last successfully-parsed entry; all entries are
      // expected to share one dimension for a given provider/model.
      store.dim = entry.embedding.length;
      store.entries.push(entry);
      const map = store.byPath;
      const list = map.get(entry.path);
      if (list) list.push(entry);
      else map.set(entry.path, [entry]);
    } catch {
      // Corrupt line (e.g. torn append): skipped — presumably tolerated so a
      // partial write does not invalidate the whole index.
    }
  }
  return store;
}
577
// Scale `v` in place to unit L2 length (all-zero vectors stay all-zero).
// Returns the same (mutated) array for chaining.
function normalize(v) {
  let sumSquares = 0;
  for (let i = 0; i < v.length; i++) {
    sumSquares += v[i] * v[i];
  }
  const scale = sumSquares > 0 ? 1 / Math.sqrt(sumSquares) : 0;
  for (let i = 0; i < v.length; i++) {
    v[i] *= scale;
  }
  return v;
}
584
// Dot product of two equal-length vectors, iterating over `a`'s length.
function dot(a, b) {
  let total = 0;
  a.forEach((value, i) => {
    total += value * b[i];
  });
  return total;
}
589
// Serialize one index entry to a compact JSONL line. Short keys keep the file
// small: p=path, s=startLine, e=endLine, m=mtimeMs, t=text, v=embedding bytes
// (little-endian float32, base64).
function serializeEntry(e) {
  const { embedding } = e;
  const encoded = Buffer.from(embedding.buffer, embedding.byteOffset, embedding.byteLength)
    .toString("base64");
  return JSON.stringify({
    p: e.path,
    s: e.startLine,
    e: e.endLine,
    m: e.mtimeMs,
    t: e.text,
    v: encoded
  });
}
600
// Parse one JSONL line (see serializeEntry for the short-key schema) back
// into an index entry. Throws on malformed JSON; callers treat that as a
// corrupt line and skip it.
function deserializeEntry(line) {
  const parsed = JSON.parse(line);
  const buf = Buffer.from(parsed.v, "base64");
  // Slice into a standalone ArrayBuffer before wrapping: a Node Buffer may be
  // a view into a shared pool, so wrapping buf.buffer directly could expose
  // unrelated bytes. The original code then made a SECOND copy with
  // `new Float32Array(embedding)` — that copy was redundant and is removed;
  // the slice already owns its memory.
  const embedding = new Float32Array(
    buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)
  );
  return {
    path: parsed.p,
    startLine: parsed.s,
    endLine: parsed.e,
    mtimeMs: parsed.m,
    text: parsed.t,
    embedding
  };
}
615
// Coerce arbitrary parsed JSON into a well-typed meta record, substituting
// safe defaults for any missing or mistyped field.
function normalizeMeta(meta) {
  const version = typeof meta.version === "number" ? meta.version : STORE_VERSION;
  const provider = meta.provider === "openai-compat" ? "openai-compat" : "ollama";
  const model = typeof meta.model === "string" ? meta.model : "";
  const dim = typeof meta.dim === "number" ? meta.dim : 0;
  const updatedAt = typeof meta.updatedAt === "string" ? meta.updatedAt : new Date(0).toISOString();
  return { version, provider, model, dim, updatedAt };
}
624
+
625
+ // src/index/semantic/builder.ts
626
// Index lives under <root>/.reasonix/semantic (joined with the platform separator).
var INDEX_DIR_NAME = path3.join(".reasonix", "semantic");
627
// Fresh zeroed counters, one per walkChunks skip reason.
function emptyBuckets() {
  const reasons = [
    "defaultDir",
    "defaultFile",
    "binaryExt",
    "binaryContent",
    "tooLarge",
    "gitignore",
    "pattern",
    "readError"
  ];
  return Object.fromEntries(reasons.map((reason) => [reason, 0]));
}
639
// Incrementally (re)build the semantic index for `root`.
// Pipeline: probe the embedding provider → open (or wipe, on opts.rebuild)
// the store → walk the tree collecting chunks per changed file (files whose
// mtime matches the stored one are skipped) → drop entries for deleted or
// replaced files → embed each file's chunks, normalize the vectors, and
// append them. Progress is reported through opts.onProgress with phases
// "setup" / "scan" / "embed" / "done"; per-chunk embedding failures are
// counted and logged to stderr but do not abort the build.
async function buildIndex(root, opts = {}) {
  const t0 = Date.now();
  const indexDir = path3.join(root, INDEX_DIR_NAME);
  const resolved = resolveBuildEmbeddingConfig(opts);
  opts.onProgress?.({ phase: "setup" });
  throwIfAborted(opts.signal);
  // Fail fast if the provider is unreachable before touching the store.
  await probeEmbeddingProvider(resolved, opts.signal);
  throwIfAborted(opts.signal);
  if (opts.rebuild) await wipeStoreFiles(indexDir);
  const store = await openStore(indexDir, {
    provider: resolved.provider,
    model: resolved.model
  });
  const lastMtimes = store.fileMtimes();
  const seenPaths = /* @__PURE__ */ new Set();
  // path -> { chunks, mtimeMs } for files that need (re-)embedding.
  const fileChunks = /* @__PURE__ */ new Map();
  let filesScanned = 0;
  let filesSkipped = 0;
  const skipBuckets = emptyBuckets();
  for await (const chunk of walkChunks(root, {
    windowLines: opts.windowLines,
    overlap: opts.overlap,
    config: opts.indexConfig ?? defaultIndexConfig(),
    onSkip: (_p, reason) => {
      skipBuckets[reason]++;
    }
  })) {
    throwIfAborted(opts.signal);
    seenPaths.add(chunk.path);
    let bucket = fileChunks.get(chunk.path);
    if (!bucket) {
      // First chunk of a new file: stat it and decide whether it changed.
      filesScanned++;
      const abs = path3.join(root, chunk.path);
      let mtimeMs = 0;
      try {
        const stat = await fs3.stat(abs);
        mtimeMs = stat.mtimeMs;
      } catch {
        // File vanished between walk and stat: drop its chunks.
        continue;
      }
      const last = lastMtimes.get(chunk.path);
      // Unchanged mtime => keep the stored entries (unless a full rebuild).
      if (last !== void 0 && last === mtimeMs && !opts.rebuild) {
        filesSkipped++;
        continue;
      }
      bucket = { chunks: [], mtimeMs };
      fileChunks.set(chunk.path, bucket);
    }
    bucket.chunks.push(chunk);
    opts.onProgress?.({ phase: "scan", filesScanned });
  }
  throwIfAborted(opts.signal);
  // Files present in the old index but no longer on disk.
  const deletedPaths = [];
  for (const oldPath of lastMtimes.keys()) {
    if (!seenPaths.has(oldPath)) deletedPaths.push(oldPath);
  }
  // Changed files get their old entries dropped before re-adding.
  const replacePaths = [...fileChunks.keys()].filter((p) => lastMtimes.has(p));
  throwIfAborted(opts.signal);
  const removed = await store.remove([...deletedPaths, ...replacePaths]);
  let chunksAdded = 0;
  let chunksSkipped = 0;
  const filesChanged = fileChunks.size;
  let chunksTotal = 0;
  for (const { chunks } of fileChunks.values()) chunksTotal += chunks.length;
  let chunksDone = 0;
  for (const [, bucket] of fileChunks) {
    throwIfAborted(opts.signal);
    if (bucket.chunks.length === 0) continue;
    const texts = bucket.chunks.map((c) => c.text);
    const vectors = await embedAll(texts, {
      ...resolved,
      signal: opts.signal,
      onProgress: (done, total) => {
        opts.onProgress?.({
          phase: "embed",
          filesScanned,
          filesChanged,
          chunksTotal,
          chunksDone: chunksDone + done
        });
        // Fold this file's count into the running total once it completes.
        if (done === total) chunksDone += total;
      },
      onError: (idx, err) => {
        // Best-effort: a failed chunk is logged and skipped, not fatal.
        chunksSkipped++;
        const c = bucket.chunks[idx];
        const where = c ? `${c.path}:${c.startLine}-${c.endLine}` : `chunk #${idx}`;
        const msg = err instanceof Error ? err.message : String(err);
        process.stderr.write(`
! skipped ${where}: ${msg}
`);
      }
    });
    throwIfAborted(opts.signal);
    const entries = [];
    for (let i = 0; i < bucket.chunks.length; i++) {
      const vec = vectors[i];
      if (!vec) continue; // null marks an embedding that failed
      const c = bucket.chunks[i];
      if (!c) continue;
      // Unit-normalize so search can use dot product as cosine similarity.
      normalize(vec);
      entries.push({
        path: c.path,
        startLine: c.startLine,
        endLine: c.endLine,
        text: c.text,
        embedding: vec,
        mtimeMs: bucket.mtimeMs
      });
    }
    throwIfAborted(opts.signal);
    if (entries.length > 0) await store.add(entries);
    chunksAdded += entries.length;
  }
  throwIfAborted(opts.signal);
  opts.onProgress?.({
    phase: "done",
    filesScanned,
    filesSkipped,
    filesChanged,
    chunksTotal,
    chunksDone,
    skipBuckets
  });
  return {
    filesScanned,
    filesChanged,
    chunksAdded,
    chunksRemoved: removed,
    chunksSkipped,
    skipBuckets,
    durationMs: Date.now() - t0
  };
}
772
// Runs a semantic search for `query` against the on-disk index under `root`.
// Resolves the embedding config the same way the build did, embeds the query,
// and returns the store's top matches — or null when the index is empty.
async function querySemantic(root, query, opts = {}) {
  const config = resolveQueryEmbeddingConfig(opts);
  const store = await openStore(path3.join(root, INDEX_DIR_NAME), {
    provider: config.provider,
    model: config.model
  });
  if (store.empty) return null;
  // Query vectors are normalized in place, matching how indexed chunks were
  // stored at build time.
  const queryVector = await embed(query, { ...config, signal: opts.signal });
  normalize(queryVector);
  const topK = opts.topK ?? 8;
  const minScore = opts.minScore ?? 0.3;
  return store.search(queryVector, topK, minScore);
}
784
// True when the semantic index's metadata file is present under `root`.
// Existence only — does not validate contents or identity (see indexCompatible).
async function indexExists(root) {
  const metaFile = path3.join(root, INDEX_DIR_NAME, "index.meta.json");
  return fs3.access(metaFile).then(
    () => true,
    () => false
  );
}
793
// True when the existing index under `root` was built with the same
// provider/model identity that the given options resolve to.
async function indexCompatible(root, opts = {}) {
  const indexDir = path3.join(root, INDEX_DIR_NAME);
  const meta = await readIndexMeta(indexDir);
  if (!meta) return false;
  const wanted = resolveIndexIdentity(opts);
  // compareIndexIdentity returns null when identities match.
  const mismatch = compareIndexIdentity(meta, wanted);
  return mismatch === null;
}
798
/**
 * Resolves the embedding configuration used to build the index.
 *
 * Resolution order:
 *  1. provider === "openai-compat": baseUrl, apiKey and model are mandatory.
 *  2. Any explicit baseUrl or model: treated as a local Ollama endpoint,
 *     filling gaps from OLLAMA_URL / REASONIX_EMBED_MODEL env vars.
 *  3. Otherwise: fall back to the config file.
 *
 * @throws {Error} when the openai-compat branch is missing a required field.
 */
function resolveBuildEmbeddingConfig(opts) {
  const timeoutMs = opts.timeoutMs ?? 3e4;
  if (opts.provider === "openai-compat") {
    const { baseUrl, apiKey, model } = opts;
    if (!baseUrl || !apiKey || !model) {
      throw new Error(
        "OpenAI-compatible embeddings require baseUrl, apiKey, and model when passed directly."
      );
    }
    return {
      provider: "openai-compat",
      baseUrl,
      apiKey,
      model,
      extraBody: opts.extraBody ?? {},
      timeoutMs
    };
  }
  if (opts.baseUrl || opts.model) {
    return {
      provider: "ollama",
      baseUrl: opts.baseUrl ?? process.env.OLLAMA_URL ?? "http://localhost:11434",
      model: opts.model ?? process.env.REASONIX_EMBED_MODEL ?? "nomic-embed-text",
      timeoutMs
    };
  }
  return resolveSemanticEmbeddingConfig(opts.configPath);
}
824
// Resolves the (provider, model) pair that identifies an index. Explicit
// options win; otherwise the identity comes from the resolved config file.
function resolveIndexIdentity(opts) {
  const { provider, model } = opts;
  if (provider && model) return { provider, model };
  const fromConfig = resolveSemanticEmbeddingConfig(opts.configPath);
  return { provider: fromConfig.provider, model: fromConfig.model };
}
831
// Query-time embedding config: deliberately reuses the build-time resolution
// so query vectors are produced by the same provider/model as the index.
function resolveQueryEmbeddingConfig(opts) {
  const config = resolveBuildEmbeddingConfig(opts);
  return config;
}
834
// Verifies the embedding backend is reachable before indexing starts.
// Only the local Ollama provider is probed; OpenAI-compatible endpoints
// are not checked here.
async function probeEmbeddingProvider(config, signal) {
  if (config.provider === "openai-compat") return;
  const probe = await probeOllama({ baseUrl: config.baseUrl, signal });
  if (probe.ok) return;
  throw new Error(
    `Ollama is not reachable: ${probe.error}. Install from https://ollama.com, then \`ollama serve\` and \`ollama pull ${config.model}\`.`
  );
}
843
// Throws when the (optional) AbortSignal has already fired; otherwise a no-op.
// Used as a cheap cancellation checkpoint between indexing phases.
function throwIfAborted(signal) {
  if (!signal || !signal.aborted) return;
  throw new Error("semantic indexing aborted");
}
848
+
849
+ // src/index/semantic/ollama-launcher.ts
850
+ import { spawn, spawnSync } from "child_process";
851
+ import { setTimeout as sleep } from "timers/promises";
852
// Locates the `ollama` executable on PATH via `which` (or `where` on
// Windows). Returns the first non-blank line of output, or null when the
// lookup fails or prints nothing usable.
function findOllamaBinary() {
  const locator = process.platform === "win32" ? "where" : "which";
  const result = spawnSync(locator, ["ollama"], { encoding: "utf8" });
  if (result.status !== 0) return null;
  for (const line of result.stdout.split(/\r?\n/)) {
    const trimmed = line.trim();
    if (trimmed.length > 0) return trimmed;
  }
  return null;
}
859
// Reports the local Ollama setup: binary on PATH, daemon reachable, and
// whether `modelName` is among the installed models.
async function checkOllamaStatus(modelName, baseUrl) {
  const binaryPath = findOllamaBinary();
  const probe = await probeOllama({ baseUrl });
  let installedModels = [];
  if (probe.ok) installedModels = probe.models;
  // An untagged name may appear as "<name>:latest" in the installed list,
  // so accept either spelling.
  let taggedName = modelName;
  if (!modelName.includes(":")) taggedName = `${modelName}:latest`;
  const modelPulled = installedModels.some(
    (installed) => installed === modelName || installed === taggedName
  );
  return {
    binaryFound: binaryPath !== null,
    daemonRunning: probe.ok,
    modelPulled,
    modelName,
    installedModels
  };
}
873
// Spawns `ollama serve` detached (so it outlives this process) and polls
// until the daemon answers or the timeout/abort signal wins.
// Resolves { ready, pid }; pid is null when spawn reported none.
async function startOllamaDaemon(opts = {}) {
  const timeoutMs = opts.timeoutMs ?? 15e3;
  const daemon = spawn("ollama", ["serve"], {
    detached: true,
    stdio: "ignore",
    windowsHide: true
  });
  // unref keeps the parent's event loop free to exit while the daemon runs.
  daemon.unref();
  const pid = daemon.pid ?? null;
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    if (opts.signal?.aborted) break;
    const probe = await probeOllama({ baseUrl: opts.baseUrl, signal: opts.signal });
    if (probe.ok) return { ready: true, pid };
    await sleep(500);
  }
  return { ready: false, pid };
}
891
/**
 * Runs `ollama pull <modelName>` and resolves with its exit code
 * (-1 on spawn failure or when no code was reported).
 *
 * Fixes over the previous version: the abort listener was only removed on
 * "exit", so a spawn failure (ENOENT) — which emits "error" without "exit" —
 * leaked the listener and let a later abort call kill() on a dead handle;
 * resolve could also fire twice when a killed child emitted both events.
 * A single `finish` path now settles once and always detaches the listener.
 *
 * @param {string} modelName  model to pull
 * @param {{signal?: AbortSignal, onLine?: (line: string, src: string) => void}} [opts]
 */
async function pullOllamaModel(modelName, opts = {}) {
  return new Promise((resolve) => {
    const child = spawn("ollama", ["pull", modelName], {
      stdio: ["ignore", "pipe", "pipe"],
      windowsHide: true
    });
    const onAbort = () => child.kill();
    let settled = false;
    const finish = (code) => {
      if (settled) return;
      settled = true;
      opts.signal?.removeEventListener("abort", onAbort);
      resolve(code);
    };
    if (opts.signal) {
      opts.signal.addEventListener("abort", onAbort, { once: true });
    }
    // Forward each output line to the caller, tagged with its stream.
    streamLines(child.stdout, (l) => opts.onLine?.(l, "stdout"));
    streamLines(child.stderr, (l) => opts.onLine?.(l, "stderr"));
    child.once("exit", (code) => finish(code ?? -1));
    child.once("error", () => finish(-1));
  });
}
908
/**
 * Incrementally splits a readable stream into lines and invokes `cb` per
 * non-empty line, with any trailing `\r` stripped (handles CRLF input).
 * A final unterminated line is flushed on "end". No-op when `stream` is
 * null/undefined (e.g. a child process spawned without that pipe).
 *
 * Fix: the "end" handler previously checked emptiness *before* stripping a
 * trailing `\r`, so a buffer of exactly "\r" emitted an empty line — now the
 * empty check runs after stripping, matching the in-loop behavior.
 *
 * @param {import('stream').Readable | null | undefined} stream
 * @param {(line: string) => void} cb
 */
function streamLines(stream, cb) {
  if (!stream) return;
  let buf = "";
  stream.setEncoding("utf8");
  stream.on("data", (chunk) => {
    buf += chunk;
    let nl = buf.indexOf("\n");
    while (nl !== -1) {
      const line = buf.slice(0, nl).replace(/\r$/, "");
      buf = buf.slice(nl + 1);
      if (line.length > 0) cb(line);
      nl = buf.indexOf("\n");
    }
  });
  stream.on("end", () => {
    const rest = buf.replace(/\r$/, "");
    if (rest.length > 0) cb(rest);
  });
}
926
+
927
// Public surface of this chunk: index lifecycle (build/query/exists/compatible),
// chunk walking and index-metadata helpers, and local Ollama management.
export {
  walkChunks,
  readIndexMeta,
  compareIndexIdentity,
  INDEX_DIR_NAME,
  buildIndex,
  querySemantic,
  indexExists,
  indexCompatible,
  checkOllamaStatus,
  startOllamaDaemon,
  pullOllamaModel
};
940
+ //# sourceMappingURL=chunk-RZILUXUC.js.map