kontext-engine 0.1.5 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli/index.js CHANGED
@@ -1158,6 +1158,8 @@ function searchVectors(db, query, limit) {
1158
1158
  // src/storage/db.ts
1159
1159
  var DEFAULT_DIMENSIONS = 384;
1160
1160
  var VECTOR_DIMENSIONS_META_KEY = "vector_dimensions";
1161
+ var INDEX_EMBEDDER_PROVIDER_META_KEY = "index_embedder_provider";
1162
+ var INDEX_EMBEDDER_MODEL_META_KEY = "index_embedder_model";
1161
1163
  function createDatabase(dbPath, dimensions) {
1162
1164
  const dir = path3.dirname(dbPath);
1163
1165
  if (!fs4.existsSync(dir)) {
@@ -1190,6 +1192,16 @@ function createDatabase(dbPath, dimensions) {
1190
1192
  const stmtGetChunksByFile = db.prepare(
1191
1193
  "SELECT id, file_id as fileId, line_start as lineStart, line_end as lineEnd, type, name, parent, text, imports, exports, hash FROM chunks WHERE file_id = ? ORDER BY line_start"
1192
1194
  );
1195
+ const stmtGetChunksMissingVectors = db.prepare(
1196
+ `SELECT c.id, c.file_id as fileId, f.path as filePath, f.language,
1197
+ c.line_start as lineStart, c.line_end as lineEnd,
1198
+ c.type, c.name, c.parent, c.text, c.exports as exports
1199
+ FROM chunks c
1200
+ JOIN files f ON f.id = c.file_id
1201
+ LEFT JOIN chunk_vectors v ON v.rowid = c.id
1202
+ WHERE v.rowid IS NULL
1203
+ ORDER BY c.id`
1204
+ );
1193
1205
  const stmtGetChunkIdsByFile = db.prepare(
1194
1206
  "SELECT id FROM chunks WHERE file_id = ?"
1195
1207
  );
@@ -1219,6 +1231,10 @@ function createDatabase(dbPath, dimensions) {
1219
1231
  const stmtLastIndexed = db.prepare(
1220
1232
  "SELECT MAX(last_indexed) as lastIndexed FROM files"
1221
1233
  );
1234
+ const stmtGetMeta = db.prepare("SELECT value FROM meta WHERE key = ?");
1235
+ const stmtSetMeta = db.prepare(
1236
+ "INSERT OR REPLACE INTO meta (key, value) VALUES (?, ?)"
1237
+ );
1222
1238
  return {
1223
1239
  upsertFile(file) {
1224
1240
  const row = stmtUpsertFile.get({
@@ -1332,6 +1348,13 @@ function createDatabase(dbPath, dimensions) {
1332
1348
  exports: r.exports === 1
1333
1349
  }));
1334
1350
  },
1351
+ getChunksMissingVectors() {
1352
+ const rows = stmtGetChunksMissingVectors.all();
1353
+ return rows.map((r) => ({
1354
+ ...r,
1355
+ exports: r.exports === 1
1356
+ }));
1357
+ },
1335
1358
  searchChunks(filters, limit) {
1336
1359
  const conditions = [];
1337
1360
  const params = [];
@@ -1429,6 +1452,66 @@ function createDatabase(dbPath, dimensions) {
1429
1452
  return Object.values(result[0])[0];
1430
1453
  }
1431
1454
  return String(result);
1455
+ },
1456
+ getVectorDimensions() {
1457
+ const row = stmtGetMeta.get(VECTOR_DIMENSIONS_META_KEY);
1458
+ if (!row) return null;
1459
+ const dimensions2 = Number.parseInt(row.value, 10);
1460
+ if (!Number.isInteger(dimensions2) || dimensions2 <= 0) {
1461
+ throw new DatabaseError(
1462
+ `Invalid stored vector dimensions metadata: ${row.value}`,
1463
+ ErrorCode.DB_CORRUPTED
1464
+ );
1465
+ }
1466
+ return dimensions2;
1467
+ },
1468
+ getIndexEmbedder() {
1469
+ const providerRow = stmtGetMeta.get(INDEX_EMBEDDER_PROVIDER_META_KEY);
1470
+ const modelRow = stmtGetMeta.get(INDEX_EMBEDDER_MODEL_META_KEY);
1471
+ if (!providerRow && !modelRow) return null;
1472
+ if (!providerRow || !modelRow) {
1473
+ throw new DatabaseError(
1474
+ "Corrupted index embedder metadata: provider/model keys are incomplete.",
1475
+ ErrorCode.DB_CORRUPTED
1476
+ );
1477
+ }
1478
+ const dimensions2 = this.getVectorDimensions();
1479
+ if (dimensions2 === null) {
1480
+ throw new DatabaseError(
1481
+ "Corrupted index embedder metadata: vector dimensions are missing.",
1482
+ ErrorCode.DB_CORRUPTED
1483
+ );
1484
+ }
1485
+ return {
1486
+ provider: providerRow.value,
1487
+ model: modelRow.value,
1488
+ dimensions: dimensions2
1489
+ };
1490
+ },
1491
+ setIndexEmbedder(metadata) {
1492
+ if (!metadata.provider || !metadata.model) {
1493
+ throw new DatabaseError(
1494
+ "Invalid index embedder metadata: provider and model are required.",
1495
+ ErrorCode.DB_WRITE_FAILED
1496
+ );
1497
+ }
1498
+ if (!Number.isInteger(metadata.dimensions) || metadata.dimensions <= 0) {
1499
+ throw new DatabaseError(
1500
+ `Invalid index embedder metadata dimensions: ${String(metadata.dimensions)}`,
1501
+ ErrorCode.DB_WRITE_FAILED
1502
+ );
1503
+ }
1504
+ const vectorDimensions = this.getVectorDimensions();
1505
+ if (vectorDimensions !== null && vectorDimensions !== metadata.dimensions) {
1506
+ throw new DatabaseError(
1507
+ `Index embedder metadata dimensions (${metadata.dimensions}) do not match vector table dimensions (${vectorDimensions}).`,
1508
+ ErrorCode.DB_WRITE_FAILED
1509
+ );
1510
+ }
1511
+ db.transaction(() => {
1512
+ stmtSetMeta.run(INDEX_EMBEDDER_PROVIDER_META_KEY, metadata.provider);
1513
+ stmtSetMeta.run(INDEX_EMBEDDER_MODEL_META_KEY, metadata.model);
1514
+ })();
1432
1515
  }
1433
1516
  };
1434
1517
  }
@@ -1789,10 +1872,14 @@ function validateProjectEmbedderConfig(config) {
1789
1872
  }
1790
1873
 
1791
1874
  // src/cli/commands/init.ts
1875
function isSameEmbedderConfig(a, b) {
  // An index embedder matches the configured one only when provider,
  // model, and vector dimensions are all identical.
  if (a.provider !== b.provider) return false;
  if (a.model !== b.model) return false;
  return a.dimensions === b.dimensions;
}
1792
1878
  var CTX_DIR2 = ".ctx";
1793
1879
  var DB_FILENAME = "index.db";
1794
1880
  var CONFIG_FILENAME2 = "config.json";
1795
1881
  var GITIGNORE_ENTRY = ".ctx/";
1882
+ var EMBEDDING_SAVE_BATCH_SIZE = 128;
1796
1883
  function ensureGitignore(projectRoot) {
1797
1884
  const gitignorePath = path5.join(projectRoot, ".gitignore");
1798
1885
  if (fs6.existsSync(gitignorePath)) {
@@ -1827,6 +1914,25 @@ function formatLanguageSummary(counts) {
1827
1914
  const entries = [...counts.entries()].sort((a, b) => b[1] - a[1]).map(([lang, count]) => `${lang}: ${count}`);
1828
1915
  return entries.join(", ");
1829
1916
  }
1917
async function embedAndPersistInBatches(db, embedder, chunks, log) {
  // Embed chunk texts in fixed-size batches, persisting each batch's
  // vectors before requesting the next, so a mid-run failure loses at
  // most one batch of work. Returns the number of vectors written.
  const total = chunks.length;
  let vectorsCreated = 0;
  for (let start = 0; start < total; start += EMBEDDING_SAVE_BATCH_SIZE) {
    const batch = chunks.slice(start, start + EMBEDDING_SAVE_BATCH_SIZE);
    const texts = batch.map(
      (chunk) => prepareChunkText(chunk.filePath, chunk.parent, chunk.text)
    );
    const vectors = await embedder.embed(texts);
    // NOTE(review): `db` is the wrapper returned by createDatabase; its
    // transaction() appears to run the callback directly (unlike raw
    // better-sqlite3, which returns a function) — confirm against db.ts.
    db.transaction(() => {
      batch.forEach((chunk, index) => {
        db.insertVector(chunk.id, vectors[index]);
      });
    });
    vectorsCreated += vectors.length;
    log(` Embedding... ${vectorsCreated}/${total}`);
  }
  return vectorsCreated;
}
1830
1936
  async function runInit(projectPath, options = {}) {
1831
1937
  const log = options.log ?? console.log;
1832
1938
  const absoluteRoot = path5.resolve(projectPath);
@@ -1840,6 +1946,24 @@ async function runInit(projectPath, options = {}) {
1840
1946
  const dbPath = path5.join(ctxDir, DB_FILENAME);
1841
1947
  const db = createDatabase(dbPath, embedderConfig.dimensions);
1842
1948
  try {
1949
+ const existingEmbedder = db.getIndexEmbedder();
1950
+ if (existingEmbedder) {
1951
+ if (!isSameEmbedderConfig(existingEmbedder, embedderConfig)) {
1952
+ throw new IndexError(
1953
+ `Index embedder mismatch: index uses ${existingEmbedder.provider} (${existingEmbedder.model}, ${existingEmbedder.dimensions} dims) but config requests ${embedderConfig.provider} (${embedderConfig.model}, ${embedderConfig.dimensions} dims). Rebuild the index.`,
1954
+ ErrorCode.CONFIG_INVALID
1955
+ );
1956
+ }
1957
+ } else {
1958
+ const isEmptyIndex = db.getFileCount() === 0 && db.getChunkCount() === 0 && db.getVectorCount() === 0;
1959
+ if (isEmptyIndex) {
1960
+ db.setIndexEmbedder({
1961
+ provider: embedderConfig.provider,
1962
+ model: embedderConfig.model,
1963
+ dimensions: embedderConfig.dimensions
1964
+ });
1965
+ }
1966
+ }
1843
1967
  const discovered = await discoverFiles({
1844
1968
  root: absoluteRoot,
1845
1969
  extraIgnore: [".ctx/"]
@@ -1925,21 +2049,34 @@ async function runInit(projectPath, options = {}) {
1925
2049
  }
1926
2050
  log(` ${allChunksWithMeta.length} chunks created`);
1927
2051
  let vectorsCreated = 0;
1928
- if (!options.skipEmbedding && allChunksWithMeta.length > 0) {
1929
- const embedder = await createEmbedder(absoluteRoot);
1930
- const texts = allChunksWithMeta.map(
1931
- (cm) => prepareChunkText(cm.fileRelPath, cm.chunk.parent, cm.chunk.text)
1932
- );
1933
- const vectors = await embedder.embed(texts, (done, total) => {
1934
- log(` Embedding... ${done}/${total}`);
1935
- });
1936
- db.transaction(() => {
1937
- for (let i = 0; i < allChunksWithMeta.length; i++) {
1938
- const chunkDbId = parseInt(allChunksWithMeta[i].chunk.id, 10);
1939
- db.insertVector(chunkDbId, vectors[i]);
2052
+ if (!options.skipEmbedding) {
2053
+ const chunksMissingVectors = db.getChunksMissingVectors().map((chunk) => ({
2054
+ id: chunk.id,
2055
+ filePath: chunk.filePath,
2056
+ parent: chunk.parent,
2057
+ text: chunk.text
2058
+ }));
2059
+ if (chunksMissingVectors.length > 0) {
2060
+ log(` ${chunksMissingVectors.length} chunks need embeddings`);
2061
+ }
2062
+ if (chunksMissingVectors.length > 0) {
2063
+ const embedder = await createEmbedder(absoluteRoot);
2064
+ try {
2065
+ vectorsCreated = await embedAndPersistInBatches(
2066
+ db,
2067
+ embedder,
2068
+ chunksMissingVectors,
2069
+ log
2070
+ );
2071
+ } catch (err) {
2072
+ const total = chunksMissingVectors.length;
2073
+ throw new IndexError(
2074
+ `Embedding failed after saving ${vectorsCreated}/${total} vectors. Run "ctx init" again to resume. ${err instanceof Error ? err.message : String(err)}`,
2075
+ ErrorCode.EMBEDDER_FAILED,
2076
+ err instanceof Error ? err : void 0
2077
+ );
1940
2078
  }
1941
- });
1942
- vectorsCreated = vectors.length;
2079
+ }
1943
2080
  }
1944
2081
  const durationMs = performance.now() - start;
1945
2082
  const dbSize = fs6.existsSync(dbPath) ? fs6.statSync(dbPath).size : 0;
@@ -2284,6 +2421,7 @@ var PATH_BOOST_PARTIAL = 1.2;
2284
2421
  var IMPORT_PENALTY = 0.5;
2285
2422
  var TEST_FILE_PENALTY = 0.65;
2286
2423
  var SMALL_SNIPPET_PENALTY = 0.75;
2424
+ var DATA_LITERAL_PENALTY = 0.7;
2287
2425
  var PUBLIC_API_BOOST = 1.12;
2288
2426
  var TEST_FILE_DIRECTORY_PATTERN = /(?:^|\/)(?:tests|__tests__)(?:\/|$)/;
2289
2427
  var TEST_FILE_NAME_PATTERN = /(?:^|\/)[^/]*\.(?:test|spec)\.[cm]?[jt]sx?$/;
@@ -2298,7 +2436,8 @@ function fusionMergeWithPathBoost(strategyResults, limit, pathBoostTerms) {
2298
2436
  const importAdjusted = applyImportDeprioritization(boosted);
2299
2437
  const testAdjusted = applyTestFileDeprioritization(importAdjusted);
2300
2438
  const snippetAdjusted = applySmallSnippetDeprioritization(testAdjusted);
2301
- const boostedApi = applyPublicApiBoost(snippetAdjusted);
2439
+ const dataLiteralAdjusted = applyDataLiteralDeprioritization(snippetAdjusted);
2440
+ const boostedApi = applyPublicApiBoost(dataLiteralAdjusted);
2302
2441
  const adjusted = applyFileDiversityDiminishingReturns(boostedApi);
2303
2442
  adjusted.sort((a, b) => b.score - a.score);
2304
2443
  const sliced = adjusted.slice(0, limit);
@@ -2379,6 +2518,21 @@ function applySmallSnippetDeprioritization(results) {
2379
2518
  return r;
2380
2519
  });
2381
2520
  }
2521
function applyDataLiteralDeprioritization(results) {
  // Down-rank chunks that look like pure data literals (config tables,
  // lookup maps) so executable code outranks them in fused results.
  //
  // Skipped entirely when every result is a data literal (nothing to
  // promote) or when the best non-data score is 0 — the Math.max(..., 0)
  // floor from the original is preserved, including its behavior for
  // non-positive scores.
  //
  // Improvement: each result is classified exactly once. The original
  // ran the isDataLiteralChunk() text heuristic up to three times per
  // result (in some(), filter(), and map()).
  const isData = results.map((r) => isDataLiteralChunk(r));
  if (!isData.includes(false)) return results;
  const nonDataScores = results
    .filter((_, i) => !isData[i])
    .map((r) => r.score);
  const maxNonDataScore = Math.max(...nonDataScores, 0);
  if (maxNonDataScore === 0) return results;
  return results.map((r, i) => {
    if (isData[i]) {
      return { ...r, score: r.score * DATA_LITERAL_PENALTY };
    }
    return r;
  });
}
2382
2536
  function applyPublicApiBoost(results) {
2383
2537
  return results.map((r) => {
2384
2538
  if (isPublicApiSymbol(r)) {
@@ -2413,6 +2567,23 @@ function isPublicApiSymbol(result) {
2413
2567
  const textStart = result.text.trimStart().toLowerCase();
2414
2568
  return textStart.startsWith("export ");
2415
2569
  }
2570
function isDataLiteralChunk(result) {
  // Heuristic: does this chunk read like a pure data literal (config
  // map, lookup table) rather than executable code? Only chunks typed
  // "constant" are ever classified as data.
  if (result.type !== "constant") return false;
  const text = result.text;
  const trimmedLines = [];
  for (const rawLine of text.split("\n")) {
    const line = rawLine.trim();
    if (line.length > 0) trimmedLines.push(line);
  }
  if (trimmedLines.length === 0) return false;
  // Count lines shaped like `key: value` (keys may be quoted).
  let keyValueLineCount = 0;
  for (const line of trimmedLines) {
    if (/['"`]?[A-Za-z0-9_-]+['"`]?\s*:\s*/.test(line)) {
      keyValueLineCount += 1;
    }
  }
  // Density of quotes + colons relative to non-whitespace length is a
  // rough proxy for "mostly literal data, little logic".
  const quoteCount = (text.match(/["'`]/g) ?? []).length;
  const colonCount = (text.match(/:/g) ?? []).length;
  const nonWhitespaceLength = text.replace(/\s+/g, "").length;
  const structuralDensity =
    (quoteCount + colonCount) / Math.max(nonWhitespaceLength, 1);
  const hasBracePair = text.includes("{") && text.includes("}");
  const hasBracketPair = text.includes("[") && text.includes("]");
  if (!hasBracePair && !hasBracketPair) return false;
  if (keyValueLineCount < 2) return false;
  if (keyValueLineCount / trimmedLines.length < 0.35) return false;
  return structuralDensity >= 0.04;
}
2416
2587
  function getFileDiversityFactor(fileOccurrence) {
2417
2588
  if (fileOccurrence <= 1) return 1;
2418
2589
  if (fileOccurrence === 2) return 0.9;
@@ -3679,6 +3850,7 @@ function createWatcher(options, events) {
3679
3850
  // src/cli/commands/watch.ts
3680
3851
  var CTX_DIR5 = ".ctx";
3681
3852
  var DB_FILENAME4 = "index.db";
3853
+ var EMBEDDING_SAVE_BATCH_SIZE2 = 128;
3682
3854
  function timestamp() {
3683
3855
  return (/* @__PURE__ */ new Date()).toLocaleTimeString("en-GB", { hour12: false });
3684
3856
  }
@@ -3695,6 +3867,9 @@ async function hashFile(absolutePath) {
3695
3867
  const content = fs9.readFileSync(absolutePath);
3696
3868
  return createHash3("sha256").update(content).digest("hex");
3697
3869
  }
3870
function isSameEmbedderConfig2(a, b) {
  // Embedder identity comparison: provider, model, and dimensions must
  // all agree for the stored index to be usable with the config.
  const sameProvider = a.provider === b.provider;
  const sameModel = a.model === b.model;
  const sameDims = a.dimensions === b.dimensions;
  return sameProvider && sameModel && sameDims;
}
3698
3873
  async function reindexChanges(db, changes, projectPath, options) {
3699
3874
  const start = performance.now();
3700
3875
  const log = options.log;
@@ -3763,16 +3938,30 @@ async function reindexChanges(db, changes, projectPath, options) {
3763
3938
  }
3764
3939
  if (!options.skipEmbedding && allChunksWithMeta.length > 0) {
3765
3940
  const embedder = await loadEmbedder3(projectPath);
3766
- const texts = allChunksWithMeta.map(
3767
- (cm) => prepareChunkText(cm.fileRelPath, cm.chunk.parent, cm.chunk.text)
3768
- );
3769
- const vectors = await embedder.embed(texts);
3770
- db.transaction(() => {
3771
- for (let i = 0; i < allChunksWithMeta.length; i++) {
3772
- const chunkDbId = parseInt(allChunksWithMeta[i].chunk.id, 10);
3773
- db.insertVector(chunkDbId, vectors[i]);
3941
+ let vectorsCreated = 0;
3942
+ const total = allChunksWithMeta.length;
3943
+ try {
3944
+ for (let i = 0; i < allChunksWithMeta.length; i += EMBEDDING_SAVE_BATCH_SIZE2) {
3945
+ const batch = allChunksWithMeta.slice(i, i + EMBEDDING_SAVE_BATCH_SIZE2);
3946
+ const texts = batch.map(
3947
+ (cm) => prepareChunkText(cm.fileRelPath, cm.chunk.parent, cm.chunk.text)
3948
+ );
3949
+ const vectors = await embedder.embed(texts);
3950
+ db.transaction(() => {
3951
+ for (let j = 0; j < batch.length; j++) {
3952
+ const chunkDbId = parseInt(batch[j].chunk.id, 10);
3953
+ db.insertVector(chunkDbId, vectors[j]);
3954
+ }
3955
+ });
3956
+ vectorsCreated += vectors.length;
3774
3957
  }
3775
- });
3958
+ } catch (err) {
3959
+ throw new IndexError(
3960
+ `Embedding failed after saving ${vectorsCreated}/${total} vectors. Continue watching, then run "ctx init" to backfill missing vectors. ${err instanceof Error ? err.message : String(err)}`,
3961
+ ErrorCode.EMBEDDER_FAILED,
3962
+ err instanceof Error ? err : void 0
3963
+ );
3964
+ }
3776
3965
  }
3777
3966
  const durationMs = performance.now() - start;
3778
3967
  return { filesProcessed, chunksUpdated, durationMs };
@@ -3806,6 +3995,25 @@ async function runWatch(projectPath, options = {}) {
3806
3995
  await initParser();
3807
3996
  const embedderConfig = getProjectEmbedderConfig(absoluteRoot);
3808
3997
  const db = createDatabase(dbPath, embedderConfig.dimensions);
3998
+ const existingEmbedder = db.getIndexEmbedder();
3999
+ if (existingEmbedder) {
4000
+ if (!isSameEmbedderConfig2(existingEmbedder, embedderConfig)) {
4001
+ db.close();
4002
+ throw new IndexError(
4003
+ `Index embedder mismatch: index uses ${existingEmbedder.provider} (${existingEmbedder.model}, ${existingEmbedder.dimensions} dims) but config requests ${embedderConfig.provider} (${embedderConfig.model}, ${embedderConfig.dimensions} dims). Rebuild the index.`,
4004
+ ErrorCode.CONFIG_INVALID
4005
+ );
4006
+ }
4007
+ } else {
4008
+ const isEmptyIndex = db.getFileCount() === 0 && db.getChunkCount() === 0 && db.getVectorCount() === 0;
4009
+ if (isEmptyIndex) {
4010
+ db.setIndexEmbedder({
4011
+ provider: embedderConfig.provider,
4012
+ model: embedderConfig.model,
4013
+ dimensions: embedderConfig.dimensions
4014
+ });
4015
+ }
4016
+ }
3809
4017
  let watcherHandle = null;
3810
4018
  let reindexQueue = Promise.resolve();
3811
4019
  const watcher = createWatcher(
@@ -3951,15 +4159,54 @@ function formatStatus(projectPath, output) {
3951
4159
  lines.push(` ${label}${count} file${count !== 1 ? "s" : ""}`);
3952
4160
  }
3953
4161
  }
3954
- if (output.config) {
3955
- lines.push("");
3956
- lines.push(
3957
- ` Embedder: ${output.config.provider} (${output.config.model}, ${output.config.dimensions} dims)`
3958
- );
4162
+ const hasConfig = output.config !== null;
4163
+ const hasIndexEmbedder = output.indexEmbedder !== null;
4164
+ if (hasConfig || hasIndexEmbedder) lines.push("");
4165
+ if (hasConfig && hasIndexEmbedder) {
4166
+ const config = output.config;
4167
+ const indexEmbedder = output.indexEmbedder;
4168
+ if (!config || !indexEmbedder) {
4169
+ lines.push(" Embedder: unknown");
4170
+ } else if (isSameEmbedder(config, indexEmbedder)) {
4171
+ lines.push(
4172
+ ` Embedder: ${indexEmbedder.provider} (${indexEmbedder.model}, ${indexEmbedder.dimensions} dims)`
4173
+ );
4174
+ } else {
4175
+ lines.push(
4176
+ ` Index embedder: ${indexEmbedder.provider} (${indexEmbedder.model}, ${indexEmbedder.dimensions} dims)`
4177
+ );
4178
+ lines.push(
4179
+ ` Config embedder: ${config.provider} (${config.model}, ${config.dimensions} dims)`
4180
+ );
4181
+ }
4182
+ } else if (hasIndexEmbedder) {
4183
+ const indexEmbedder = output.indexEmbedder;
4184
+ if (!indexEmbedder) {
4185
+ lines.push(" Index embedder: unknown");
4186
+ } else {
4187
+ lines.push(
4188
+ ` Index embedder: ${indexEmbedder.provider} (${indexEmbedder.model}, ${indexEmbedder.dimensions} dims)`
4189
+ );
4190
+ }
4191
+ } else if (hasConfig) {
4192
+ const config = output.config;
4193
+ if (!config) {
4194
+ lines.push(" Config embedder: unknown");
4195
+ } else {
4196
+ lines.push(
4197
+ ` Config embedder: ${config.provider} (${config.model}, ${config.dimensions} dims)`
4198
+ );
4199
+ }
4200
+ }
4201
+ if (output.embedderWarning) {
4202
+ lines.push(` Warning: ${output.embedderWarning}`);
3959
4203
  }
3960
4204
  lines.push("");
3961
4205
  return lines.join("\n");
3962
4206
  }
4207
function isSameEmbedder(a, b) {
  // True when two embedder descriptors share provider, model, and
  // vector dimensionality.
  return ["provider", "model", "dimensions"].every((key) => a[key] === b[key]);
}
3963
4210
  async function runStatus(projectPath) {
3964
4211
  const absoluteRoot = path10.resolve(projectPath);
3965
4212
  const ctxDir = path10.join(absoluteRoot, CTX_DIR6);
@@ -3974,6 +4221,8 @@ async function runStatus(projectPath) {
3974
4221
  lastIndexed: null,
3975
4222
  languages: /* @__PURE__ */ new Map(),
3976
4223
  config: null,
4224
+ indexEmbedder: null,
4225
+ embedderWarning: null,
3977
4226
  text: formatNotInitialized(absoluteRoot)
3978
4227
  };
3979
4228
  return output;
@@ -3986,7 +4235,9 @@ async function runStatus(projectPath) {
3986
4235
  const languages = db.getLanguageBreakdown();
3987
4236
  const lastIndexed = db.getLastIndexed();
3988
4237
  const config = readConfig2(ctxDir);
4238
+ const indexEmbedder = db.getIndexEmbedder();
3989
4239
  const dbSizeBytes = fs10.statSync(dbPath).size;
4240
+ const embedderWarning = config && indexEmbedder && !isSameEmbedder(config, indexEmbedder) ? `Index built with ${indexEmbedder.provider} (${indexEmbedder.dimensions} dims), config requests ${config.provider} (${config.dimensions} dims) \u2014 rebuild needed.` : null;
3990
4241
  const output = {
3991
4242
  initialized: true,
3992
4243
  fileCount,
@@ -3996,6 +4247,8 @@ async function runStatus(projectPath) {
3996
4247
  lastIndexed,
3997
4248
  languages,
3998
4249
  config,
4250
+ indexEmbedder,
4251
+ embedderWarning,
3999
4252
  text: ""
4000
4253
  };
4001
4254
  output.text = formatStatus(absoluteRoot, output);