@open-mercato/search 0.5.1-develop.2691.d8a0934b37 → 0.5.1-develop.2694.732417c5ec

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/di.js +9 -9
  2. package/dist/di.js.map +2 -2
  3. package/dist/lib/presenter-enricher.js +14 -14
  4. package/dist/lib/presenter-enricher.js.map +2 -2
  5. package/dist/modules/search/api/embeddings/reindex/cancel/route.js +2 -2
  6. package/dist/modules/search/api/embeddings/reindex/cancel/route.js.map +2 -2
  7. package/dist/modules/search/api/embeddings/reindex/route.js +3 -3
  8. package/dist/modules/search/api/embeddings/reindex/route.js.map +2 -2
  9. package/dist/modules/search/api/reindex/cancel/route.js +2 -2
  10. package/dist/modules/search/api/reindex/cancel/route.js.map +2 -2
  11. package/dist/modules/search/api/reindex/route.js +4 -4
  12. package/dist/modules/search/api/reindex/route.js.map +2 -2
  13. package/dist/modules/search/api/settings/route.js +3 -3
  14. package/dist/modules/search/api/settings/route.js.map +2 -2
  15. package/dist/modules/search/lib/reindex-lock.js +20 -17
  16. package/dist/modules/search/lib/reindex-lock.js.map +2 -2
  17. package/dist/modules/search/subscribers/fulltext_upsert.js +2 -2
  18. package/dist/modules/search/subscribers/fulltext_upsert.js.map +2 -2
  19. package/dist/modules/search/subscribers/vector_delete.js +2 -2
  20. package/dist/modules/search/subscribers/vector_delete.js.map +2 -2
  21. package/dist/modules/search/subscribers/vector_upsert.js +2 -2
  22. package/dist/modules/search/subscribers/vector_upsert.js.map +2 -2
  23. package/dist/modules/search/workers/fulltext-index.worker.js +7 -7
  24. package/dist/modules/search/workers/fulltext-index.worker.js.map +2 -2
  25. package/dist/modules/search/workers/vector-index.worker.js +7 -7
  26. package/dist/modules/search/workers/vector-index.worker.js.map +2 -2
  27. package/dist/strategies/token.strategy.js +15 -10
  28. package/dist/strategies/token.strategy.js.map +2 -2
  29. package/jest.config.cjs +4 -2
  30. package/package.json +4 -4
  31. package/src/__tests__/presenter-enricher.test.ts +17 -60
  32. package/src/__tests__/workers.test.ts +20 -21
  33. package/src/di.ts +22 -21
  34. package/src/lib/presenter-enricher.ts +21 -20
  35. package/src/modules/search/api/embeddings/reindex/cancel/route.ts +4 -3
  36. package/src/modules/search/api/embeddings/reindex/route.ts +5 -4
  37. package/src/modules/search/api/reindex/cancel/route.ts +4 -3
  38. package/src/modules/search/api/reindex/route.ts +5 -5
  39. package/src/modules/search/api/settings/route.ts +5 -4
  40. package/src/modules/search/lib/reindex-lock.ts +50 -32
  41. package/src/modules/search/subscribers/fulltext_upsert.ts +6 -2
  42. package/src/modules/search/subscribers/vector_delete.ts +6 -2
  43. package/src/modules/search/subscribers/vector_upsert.ts +6 -2
  44. package/src/modules/search/workers/fulltext-index.worker.ts +10 -9
  45. package/src/modules/search/workers/vector-index.worker.ts +10 -9
  46. package/src/strategies/token.strategy.ts +25 -19
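The common thread across these files is a migration of the search module's database access from the Knex query builder to Kysely, as the diffs below show. A minimal sketch of that API shift, assuming an untyped Kysely<any> handle as the compiled output uses; the table and column names are taken from the di.js diff, while the helper function names here are illustrative only:

import type { Knex } from 'knex'
import type { Kysely } from 'kysely'

// Before: Knex infers the "=" operator and terminates with .first()
async function loadFieldsWithKnex(knex: Knex, entityId: string) {
  const row = await knex('encryption_maps')
    .select('fields_json')
    .where('entity_id', entityId)
    .where('is_active', true)
    .whereNull('deleted_at')
    .first()
  return row?.fields_json
}

// After: Kysely names the operator explicitly ("=", "is") and terminates with executeTakeFirst()
async function loadFieldsWithKysely(db: Kysely<any>, entityId: string) {
  const row = await db
    .selectFrom('encryption_maps')
    .select(['fields_json'])
    .where('entity_id', '=', entityId)
    .where('is_active', '=', true)
    .where('deleted_at', 'is', null)
    .executeTakeFirst()
  return row?.fields_json
}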
package/dist/di.js CHANGED
@@ -10,7 +10,7 @@ function shouldExcludeEncryptedFields() {
   const raw = (process.env.SEARCH_EXCLUDE_ENCRYPTED_FIELDS ?? "").toLowerCase();
   return raw === "1" || raw === "true" || raw === "yes" || raw === "on";
 }
-function createEncryptionMapResolver(knex) {
+function createEncryptionMapResolver(db) {
   const cache = /* @__PURE__ */ new Map();
   const CACHE_TTL_MS = 5 * 60 * 1e3;
   return async (entityId) => {
@@ -19,8 +19,8 @@ function createEncryptionMapResolver(knex) {
       return cached.entries;
     }
     try {
-      const rows = await knex("encryption_maps").select("fields_json").where("entity_id", entityId).where("is_active", true).whereNull("deleted_at").first();
-      const fieldsJson = rows?.fields_json;
+      const row = await db.selectFrom("encryption_maps").select(["fields_json"]).where("entity_id", "=", entityId).where("is_active", "=", true).where("deleted_at", "is", null).executeTakeFirst();
+      const fieldsJson = row?.fields_json;
       const entries = Array.isArray(fieldsJson) ? fieldsJson.map((f) => ({
         field: f.field,
         hashField: f.hashField ?? null
@@ -37,8 +37,8 @@ function registerSearchModule(container, options) {
   if (!options?.skipTokens) {
     try {
       const em = container.resolve("em");
-      const knex = em.getConnection().getKnex();
-      strategies.push(new TokenSearchStrategy(knex));
+      const db = em.getKysely();
+      strategies.push(new TokenSearchStrategy(db));
     } catch {
     }
   }
@@ -66,8 +66,8 @@ function registerSearchModule(container, options) {
     if (shouldExcludeEncryptedFields()) {
       try {
         const em = container.resolve("em");
-        const knex = em.getConnection().getKnex();
-        encryptionMapResolver = createEncryptionMapResolver(knex);
+        const db = em.getKysely();
+        encryptionMapResolver = createEncryptionMapResolver(db);
       } catch {
       }
     }
@@ -96,8 +96,8 @@ function registerSearchModule(container, options) {
   let presenterEnricher;
   try {
     const em = container.resolve("em");
-    const knex = em.getConnection().getKnex();
-    presenterEnricher = createPresenterEnricher(knex, entityConfigMap, queryEngine, encryptionService);
+    const db = em.getKysely();
+    presenterEnricher = createPresenterEnricher(db, entityConfigMap, queryEngine, encryptionService);
   } catch {
   }
   const searchService = new SearchService({
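In all four hunks above the Kysely handle is obtained the same way: resolve the entity manager registered as 'em' and call em.getKysely(), silently skipping the feature when either step throws. A condensed sketch of that pattern, assuming getKysely() is exposed by the host application's entity manager as the compiled output implies; the tryResolveKysely helper is hypothetical:

import type { Kysely } from 'kysely'

// Minimal container shape, mirroring the SearchContainer interface in src/di.ts
interface SearchContainer {
  resolve<T = unknown>(name: string): T
}

// Resolve a Kysely handle from the DI container, or undefined when unavailable.
// Mirrors the try/catch-and-skip pattern used above for TokenSearchStrategy,
// createEncryptionMapResolver, and createPresenterEnricher.
function tryResolveKysely(container: SearchContainer): Kysely<any> | undefined {
  try {
    const em = container.resolve<{ getKysely: () => Kysely<any> }>('em')
    return em.getKysely()
  } catch {
    return undefined // 'em' or getKysely() not available; the caller skips the feature
  }
}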
package/dist/di.js.map CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../src/di.ts"],
-  "sourcesContent": ["…full pre-change src/di.ts (Knex-based implementation) embedded verbatim…"],
-  "mappings": "…",
+  "sourcesContent": ["…full post-change src/di.ts (Kysely-based implementation) embedded verbatim…"],
+  "mappings": "…",
   "names": []
 }
package/dist/lib/presenter-enricher.js CHANGED
@@ -14,7 +14,7 @@ function chunk(array, size) {
   }
   return chunks;
 }
-async function fetchDocsBatch(knex, byEntityType, tenantId, organizationId) {
+async function fetchDocsBatch(db, byEntityType, tenantId, organizationId) {
   const allDocs = [];
   const allPairs = [];
   for (const [entityType, results] of byEntityType) {
@@ -31,19 +31,19 @@ async function fetchDocsBatch(knex, byEntityType, tenantId, organizationId) {
       ids.push(recordId);
       chunkByType.set(entityType, ids);
     }
-    const query = knex("entity_indexes").select("entity_type", "entity_id", "doc").where("tenant_id", tenantId).whereNull("deleted_at").where((builder) => {
-      for (const [entityType, recordIds] of chunkByType) {
-        builder.orWhere((sub) => {
-          sub.where("entity_type", entityType).whereIn("entity_id", recordIds);
-        });
-      }
-    });
+    let query = db.selectFrom("entity_indexes").select(["entity_type", "entity_id", "doc"]).where("tenant_id", "=", tenantId).where("deleted_at", "is", null).where((eb) => eb.or(
+      Array.from(chunkByType.entries()).map(([entityType, recordIds]) => eb.and([
+        eb("entity_type", "=", entityType),
+        eb("entity_id", "in", recordIds)
+      ]))
+    ));
     if (organizationId) {
-      query.where((builder) => {
-        builder.where("organization_id", organizationId).orWhereNull("organization_id");
-      });
+      query = query.where((eb) => eb.or([
+        eb("organization_id", "=", organizationId),
+        eb("organization_id", "is", null)
+      ]));
     }
-    const rows = await query;
+    const rows = await query.execute();
     allDocs.push(...rows);
   }
   return allDocs;
@@ -100,7 +100,7 @@ async function computePresenterAndLinks(doc, entityId, recordId, config, tenantI
   }
   return { presenter, url, links };
 }
-function createPresenterEnricher(knex, entityConfigMap, queryEngine, encryptionService) {
+function createPresenterEnricher(db, entityConfigMap, queryEngine, encryptionService) {
   return async (results, tenantId, organizationId) => {
     const missingResults = results.filter(needsSearchResultEnrichment);
     if (missingResults.length === 0) return results;
@@ -110,7 +110,7 @@ function createPresenterEnricher(knex, entityConfigMap, queryEngine, encryptionS
       group.push(result);
       byEntityType.set(result.entityId, group);
     }
-    const rawDocs = await fetchDocsBatch(knex, byEntityType, tenantId, organizationId);
+    const rawDocs = await fetchDocsBatch(db, byEntityType, tenantId, organizationId);
     const dekCache = /* @__PURE__ */ new Map();
     const decryptedDocs = await Promise.all(
       rawDocs.map(async (row) => {
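The fetchDocsBatch rewrite above replaces Knex's mutating orWhere callbacks with Kysely's expression builder: OR/AND groups are built as value expressions, and the query variable is reassigned because each .where() returns a new immutable builder. A small self-contained sketch of that grouping style, using the same entity_indexes columns as the diff; only the standalone buildDocsQuery wrapper name is illustrative:

import type { Kysely } from 'kysely'

// Build "(entity_type = X AND entity_id IN (...)) OR (...)" per entity type,
// plus an optional "(organization_id = Y OR organization_id IS NULL)" filter.
function buildDocsQuery(
  db: Kysely<any>,
  chunkByType: Map<string, string[]>,
  tenantId: string,
  organizationId?: string | null,
) {
  let query = db
    .selectFrom('entity_indexes')
    .select(['entity_type', 'entity_id', 'doc'])
    .where('tenant_id', '=', tenantId)
    .where('deleted_at', 'is', null)
    .where((eb) => eb.or(
      Array.from(chunkByType.entries()).map(([entityType, recordIds]) =>
        eb.and([
          eb('entity_type', '=', entityType),
          eb('entity_id', 'in', recordIds),
        ]),
      ),
    ))

  if (organizationId) {
    // Reassign: Kysely builders are immutable, unlike the mutated Knex query they replace
    query = query.where((eb) => eb.or([
      eb('organization_id', '=', organizationId),
      eb('organization_id', 'is', null),
    ]))
  }

  return query // the caller awaits query.execute()
}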
package/dist/lib/presenter-enricher.js.map CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../src/lib/presenter-enricher.ts"],
-  "sourcesContent": ["…full pre-change src/lib/presenter-enricher.ts (Knex-based fetchDocsBatch/createPresenterEnricher) embedded verbatim…"],
-  "mappings": "…",
+  "sourcesContent": ["…full post-change src/lib/presenter-enricher.ts (Kysely-based fetchDocsBatch/createPresenterEnricher) embedded verbatim…"],
+  "mappings": "…
AAO,QAAQ;AACjD,YAAM,MAAM,OAAO,IAAI,GAAG;AAE1B,UAAI,CAAC,KAAK;AACR,mBAAW,mCAAmC,EAAE,UAAU,OAAO,UAAU,UAAU,OAAO,SAAS,CAAC;AACtG,eAAO,EAAE,KAAK,WAAW,MAAM,KAAK,QAAW,OAAO,OAAU;AAAA,MAClE;AAEA,YAAM,SAAS,gBAAgB,IAAI,OAAO,QAAoB;AAC9D,YAAM,aAAa,MAAM;AAAA,QACvB;AAAA,QACA,OAAO;AAAA,QACP,OAAO;AAAA,QACP;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,aAAO,EAAE,KAAK,GAAG,WAAW;AAAA,IAC9B,CAAC;AAED,UAAM,WAAW,MAAM,QAAQ,IAAI,kBAAkB;AAGrD,UAAM,gBAAgB,oBAAI,IAA8B;AACxD,eAAW,EAAE,KAAK,WAAW,KAAK,MAAM,KAAK,UAAU;AACrD,oBAAc,IAAI,KAAK,EAAE,WAAW,KAAK,MAAM,CAAC;AAAA,IAClD;AAGA,WAAO,QAAQ,IAAI,CAAC,WAAW;AAC7B,UAAI,CAAC,4BAA4B,MAAM,EAAG,QAAO;AACjD,YAAM,MAAM,GAAG,OAAO,QAAQ,IAAI,OAAO,QAAQ;AACjD,YAAM,WAAW,cAAc,IAAI,GAAG;AACtC,UAAI,CAAC,SAAU,QAAO;AACtB,YAAM,mBAAmB,MAAM,QAAQ,OAAO,KAAK,KAAK,OAAO,MAAM,SAAS;AAC9E,aAAO;AAAA,QACL,GAAG;AAAA,QACH,WAAW,SAAS,aAAa,OAAO;AAAA,QACxC,KAAK,OAAO,OAAO,SAAS;AAAA,QAC5B,OAAO,mBAAmB,OAAO,QAAS,SAAS,SAAS,OAAO;AAAA,MACrE;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
6
6
  "names": []
7
7
  }
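For readability, the batched lookup that the rewritten presenter enricher runs against entity_indexes (visible in the escaped sourcesContent above) can be condensed into the TypeScript sketch below. The `fetchDocs` name is shortened here for illustration; the `as any` casts mirror the original, since the table types are not exported.

```ts
import type { Kysely } from 'kysely'

// Condensed sketch of the batched entity_indexes lookup: record ids are grouped
// by entity type so a single query with OR'd (entity_type, entity_id IN ...) pairs
// serves all entity types at once, scoped to the tenant.
async function fetchDocs(
  db: Kysely<any>,
  byType: Map<string, string[]>,
  tenantId: string,
  organizationId?: string | null,
) {
  let query = db
    .selectFrom('entity_indexes' as any)
    .select(['entity_type' as any, 'entity_id' as any, 'doc' as any])
    .where('tenant_id' as any, '=', tenantId)
    .where('deleted_at' as any, 'is', null)
    .where((eb: any) =>
      eb.or(
        Array.from(byType.entries()).map(([entityType, ids]) =>
          eb.and([
            eb('entity_type' as any, '=', entityType),
            eb('entity_id' as any, 'in', ids),
          ]),
        ),
      ),
    )
  if (organizationId) {
    // Org-scoped docs and global (null organization) docs both stay visible.
    query = query.where((eb: any) =>
      eb.or([
        eb('organization_id' as any, '=', organizationId),
        eb('organization_id' as any, 'is', null),
      ]),
    )
  }
  return query.execute()
}
```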
@@ -18,7 +18,7 @@ async function POST(req) {
18
18
  const container = await createRequestContainer();
19
19
  const em = container.resolve("em");
20
20
  const progressService = container.resolve("progressService");
21
- const knex = em.getConnection().getKnex();
21
+ const db = em.getKysely();
22
22
  let queue;
23
23
  try {
24
24
  queue = container.resolve("vectorIndexQueue");
@@ -33,7 +33,7 @@ async function POST(req) {
33
33
  } catch {
34
34
  }
35
35
  }
36
- await clearReindexLock(knex, auth.tenantId, "vector", auth.orgId ?? null);
36
+ await clearReindexLock(db, auth.tenantId, "vector", auth.orgId ?? null);
37
37
  await cancelReindexProgress({
38
38
  em,
39
39
  progressService,
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../../../../../../src/modules/search/api/embeddings/reindex/cancel/route.ts"],
4
- "sourcesContent": ["import { NextResponse } from 'next/server'\nimport { createRequestContainer } from '@open-mercato/shared/lib/di/container'\nimport { getAuthFromRequest } from '@open-mercato/shared/lib/auth/server'\nimport type { Queue } from '@open-mercato/queue'\nimport type { Knex } from 'knex'\nimport type { EntityManager } from '@mikro-orm/postgresql'\nimport type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'\nimport { clearReindexLock } from '../../../../lib/reindex-lock'\nimport { cancelReindexProgress } from '../../../../lib/reindex-progress'\nimport { resolveTranslations } from '@open-mercato/shared/lib/i18n/server'\nimport { recordIndexerLog } from '@open-mercato/shared/lib/indexers/status-log'\nimport { embeddingsReindexCancelOpenApi } from '../../../openapi'\n\nexport const metadata = {\n POST: { requireAuth: true, requireFeatures: ['search.embeddings.manage'] },\n}\n\nexport async function POST(req: Request) {\n const { t } = await resolveTranslations()\n const auth = await getAuthFromRequest(req)\n if (!auth?.tenantId) {\n return NextResponse.json({ error: t('api.errors.unauthorized', 'Unauthorized') }, { status: 401 })\n }\n\n const container = await createRequestContainer()\n const em = container.resolve('em') as EntityManager\n const progressService = container.resolve('progressService') as ProgressService\n const knex = (em.getConnection() as unknown as { getKnex: () => Knex }).getKnex()\n\n let queue: Queue | undefined\n try {\n queue = container.resolve<Queue>('vectorIndexQueue')\n } catch {\n // Queue not available - just clear the lock\n }\n\n let jobsRemoved = 0\n if (queue) {\n try {\n const countsBefore = await queue.getJobCounts()\n jobsRemoved = countsBefore.waiting + countsBefore.active\n await queue.clear()\n } catch {\n // Queue clear failed - continue to clear lock\n }\n }\n\n await clearReindexLock(knex, auth.tenantId, 'vector', auth.orgId ?? null)\n await cancelReindexProgress({\n em,\n progressService,\n type: 'vector',\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n userId: auth.sub ?? null,\n })\n\n // Log the cancellation\n try {\n const em = container.resolve('em')\n await recordIndexerLog(\n { em },\n {\n source: 'vector',\n handler: 'api:search.embeddings.reindex.cancel',\n message: `Cancelled vector reindex operation (${jobsRemoved} jobs removed)`,\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n details: { jobsRemoved },\n },\n )\n } catch {\n // Logging failure should not fail the cancel operation\n }\n\n try {\n const disposable = container as unknown as { dispose?: () => Promise<void> }\n if (typeof disposable.dispose === 'function') {\n await disposable.dispose()\n }\n } catch {\n // Ignore disposal errors\n }\n\n return NextResponse.json({\n ok: true,\n jobsRemoved,\n })\n}\n\nexport const openApi = embeddingsReindexCancelOpenApi\n"],
5
- "mappings": "AAAA,SAAS,oBAAoB;AAC7B,SAAS,8BAA8B;AACvC,SAAS,0BAA0B;AAKnC,SAAS,wBAAwB;AACjC,SAAS,6BAA6B;AACtC,SAAS,2BAA2B;AACpC,SAAS,wBAAwB;AACjC,SAAS,sCAAsC;AAExC,MAAM,WAAW;AAAA,EACtB,MAAM,EAAE,aAAa,MAAM,iBAAiB,CAAC,0BAA0B,EAAE;AAC3E;AAEA,eAAsB,KAAK,KAAc;AACvC,QAAM,EAAE,EAAE,IAAI,MAAM,oBAAoB;AACxC,QAAM,OAAO,MAAM,mBAAmB,GAAG;AACzC,MAAI,CAAC,MAAM,UAAU;AACnB,WAAO,aAAa,KAAK,EAAE,OAAO,EAAE,2BAA2B,cAAc,EAAE,GAAG,EAAE,QAAQ,IAAI,CAAC;AAAA,EACnG;AAEA,QAAM,YAAY,MAAM,uBAAuB;AAC/C,QAAM,KAAK,UAAU,QAAQ,IAAI;AACjC,QAAM,kBAAkB,UAAU,QAAQ,iBAAiB;AAC3D,QAAM,OAAQ,GAAG,cAAc,EAAyC,QAAQ;AAEhF,MAAI;AACJ,MAAI;AACF,YAAQ,UAAU,QAAe,kBAAkB;AAAA,EACrD,QAAQ;AAAA,EAER;AAEA,MAAI,cAAc;AAClB,MAAI,OAAO;AACT,QAAI;AACF,YAAM,eAAe,MAAM,MAAM,aAAa;AAC9C,oBAAc,aAAa,UAAU,aAAa;AAClD,YAAM,MAAM,MAAM;AAAA,IACpB,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,QAAM,iBAAiB,MAAM,KAAK,UAAU,UAAU,KAAK,SAAS,IAAI;AACxE,QAAM,sBAAsB;AAAA,IAC1B;AAAA,IACA;AAAA,IACA,MAAM;AAAA,IACN,UAAU,KAAK;AAAA,IACf,gBAAgB,KAAK,SAAS;AAAA,IAC9B,QAAQ,KAAK,OAAO;AAAA,EACtB,CAAC;AAGD,MAAI;AACF,UAAMA,MAAK,UAAU,QAAQ,IAAI;AACjC,UAAM;AAAA,MACJ,EAAE,IAAAA,IAAG;AAAA,MACL;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,SAAS,uCAAuC,WAAW;AAAA,QAC3D,UAAU,KAAK;AAAA,QACf,gBAAgB,KAAK,SAAS;AAAA,QAC9B,SAAS,EAAE,YAAY;AAAA,MACzB;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,MAAI;AACF,UAAM,aAAa;AACnB,QAAI,OAAO,WAAW,YAAY,YAAY;AAC5C,YAAM,WAAW,QAAQ;AAAA,IAC3B;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO,aAAa,KAAK;AAAA,IACvB,IAAI;AAAA,IACJ;AAAA,EACF,CAAC;AACH;AAEO,MAAM,UAAU;",
4
+ "sourcesContent": ["import { NextResponse } from 'next/server'\nimport { createRequestContainer } from '@open-mercato/shared/lib/di/container'\nimport { getAuthFromRequest } from '@open-mercato/shared/lib/auth/server'\nimport type { Queue } from '@open-mercato/queue'\n\nimport type { EntityManager } from '@mikro-orm/postgresql'\nimport type { Kysely } from 'kysely'\nimport type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'\nimport { clearReindexLock } from '../../../../lib/reindex-lock'\nimport { cancelReindexProgress } from '../../../../lib/reindex-progress'\nimport { resolveTranslations } from '@open-mercato/shared/lib/i18n/server'\nimport { recordIndexerLog } from '@open-mercato/shared/lib/indexers/status-log'\nimport { embeddingsReindexCancelOpenApi } from '../../../openapi'\n\nexport const metadata = {\n POST: { requireAuth: true, requireFeatures: ['search.embeddings.manage'] },\n}\n\nexport async function POST(req: Request) {\n const { t } = await resolveTranslations()\n const auth = await getAuthFromRequest(req)\n if (!auth?.tenantId) {\n return NextResponse.json({ error: t('api.errors.unauthorized', 'Unauthorized') }, { status: 401 })\n }\n\n const container = await createRequestContainer()\n const em = container.resolve('em') as EntityManager\n const progressService = container.resolve('progressService') as ProgressService\n const db = (em as unknown as { getKysely: () => Kysely<any> }).getKysely()\n\n let queue: Queue | undefined\n try {\n queue = container.resolve<Queue>('vectorIndexQueue')\n } catch {\n // Queue not available - just clear the lock\n }\n\n let jobsRemoved = 0\n if (queue) {\n try {\n const countsBefore = await queue.getJobCounts()\n jobsRemoved = countsBefore.waiting + countsBefore.active\n await queue.clear()\n } catch {\n // Queue clear failed - continue to clear lock\n }\n }\n\n await clearReindexLock(db, auth.tenantId, 'vector', auth.orgId ?? null)\n await cancelReindexProgress({\n em,\n progressService,\n type: 'vector',\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n userId: auth.sub ?? null,\n })\n\n // Log the cancellation\n try {\n const em = container.resolve('em')\n await recordIndexerLog(\n { em },\n {\n source: 'vector',\n handler: 'api:search.embeddings.reindex.cancel',\n message: `Cancelled vector reindex operation (${jobsRemoved} jobs removed)`,\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n details: { jobsRemoved },\n },\n )\n } catch {\n // Logging failure should not fail the cancel operation\n }\n\n try {\n const disposable = container as unknown as { dispose?: () => Promise<void> }\n if (typeof disposable.dispose === 'function') {\n await disposable.dispose()\n }\n } catch {\n // Ignore disposal errors\n }\n\n return NextResponse.json({\n ok: true,\n jobsRemoved,\n })\n}\n\nexport const openApi = embeddingsReindexCancelOpenApi\n"],
5
+ "mappings": "AAAA,SAAS,oBAAoB;AAC7B,SAAS,8BAA8B;AACvC,SAAS,0BAA0B;AAMnC,SAAS,wBAAwB;AACjC,SAAS,6BAA6B;AACtC,SAAS,2BAA2B;AACpC,SAAS,wBAAwB;AACjC,SAAS,sCAAsC;AAExC,MAAM,WAAW;AAAA,EACtB,MAAM,EAAE,aAAa,MAAM,iBAAiB,CAAC,0BAA0B,EAAE;AAC3E;AAEA,eAAsB,KAAK,KAAc;AACvC,QAAM,EAAE,EAAE,IAAI,MAAM,oBAAoB;AACxC,QAAM,OAAO,MAAM,mBAAmB,GAAG;AACzC,MAAI,CAAC,MAAM,UAAU;AACnB,WAAO,aAAa,KAAK,EAAE,OAAO,EAAE,2BAA2B,cAAc,EAAE,GAAG,EAAE,QAAQ,IAAI,CAAC;AAAA,EACnG;AAEA,QAAM,YAAY,MAAM,uBAAuB;AAC/C,QAAM,KAAK,UAAU,QAAQ,IAAI;AACjC,QAAM,kBAAkB,UAAU,QAAQ,iBAAiB;AAC3D,QAAM,KAAM,GAAmD,UAAU;AAEzE,MAAI;AACJ,MAAI;AACF,YAAQ,UAAU,QAAe,kBAAkB;AAAA,EACrD,QAAQ;AAAA,EAER;AAEA,MAAI,cAAc;AAClB,MAAI,OAAO;AACT,QAAI;AACF,YAAM,eAAe,MAAM,MAAM,aAAa;AAC9C,oBAAc,aAAa,UAAU,aAAa;AAClD,YAAM,MAAM,MAAM;AAAA,IACpB,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,QAAM,iBAAiB,IAAI,KAAK,UAAU,UAAU,KAAK,SAAS,IAAI;AACtE,QAAM,sBAAsB;AAAA,IAC1B;AAAA,IACA;AAAA,IACA,MAAM;AAAA,IACN,UAAU,KAAK;AAAA,IACf,gBAAgB,KAAK,SAAS;AAAA,IAC9B,QAAQ,KAAK,OAAO;AAAA,EACtB,CAAC;AAGD,MAAI;AACF,UAAMA,MAAK,UAAU,QAAQ,IAAI;AACjC,UAAM;AAAA,MACJ,EAAE,IAAAA,IAAG;AAAA,MACL;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,SAAS,uCAAuC,WAAW;AAAA,QAC3D,UAAU,KAAK;AAAA,QACf,gBAAgB,KAAK,SAAS;AAAA,QAC9B,SAAS,EAAE,YAAY;AAAA,MACzB;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,MAAI;AACF,UAAM,aAAa;AACnB,QAAI,OAAO,WAAW,YAAY,YAAY;AAC5C,YAAM,WAAW,QAAQ;AAAA,IAC3B;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO,aAAa,KAAK;AAAA,IACvB,IAAI;AAAA,IACJ;AAAA,EACF,CAAC;AACH;AAEO,MAAM,UAAU;",
6
6
  "names": ["em"]
7
7
  }
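The change that repeats across every route in this diff is the database handle: `em.getConnection().getKnex()` is replaced by `em.getKysely()`, and the reindex-lock helpers now receive the Kysely instance as their first argument. A minimal sketch of the new pattern; the wrapper name `cancelVectorReindex` is invented here for illustration.

```ts
import type { EntityManager } from '@mikro-orm/postgresql'
import type { Kysely } from 'kysely'
import { clearReindexLock } from '../../../../lib/reindex-lock'

// Before: const knex = (em.getConnection() as unknown as { getKnex: () => Knex }).getKnex()
// After:  the EntityManager is expected to expose a Kysely instance directly.
async function cancelVectorReindex(em: EntityManager, tenantId: string, orgId: string | null) {
  const db = (em as unknown as { getKysely: () => Kysely<any> }).getKysely()
  // clearReindexLock (and the other lock helpers) now take the Kysely handle.
  await clearReindexLock(db, tenantId, 'vector', orgId)
}
```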
@@ -30,8 +30,8 @@ async function POST(req) {
30
30
  const container = await createRequestContainer();
31
31
  const em = container.resolve("em");
32
32
  const progressService = container.resolve("progressService");
33
- const knex = em.getConnection().getKnex();
34
- const existingLock = await getReindexLockStatus(knex, auth.tenantId, { type: "vector" });
33
+ const db = em.getKysely();
34
+ const existingLock = await getReindexLockStatus(db, auth.tenantId, { type: "vector" });
35
35
  if (existingLock) {
36
36
  const startedAt = new Date(existingLock.startedAt);
37
37
  return NextResponse.json(
@@ -49,7 +49,7 @@ async function POST(req) {
49
49
  { status: 409 }
50
50
  );
51
51
  }
52
- const { acquired: lockAcquired } = await acquireReindexLock(knex, {
52
+ const { acquired: lockAcquired } = await acquireReindexLock(db, {
53
53
  type: "vector",
54
54
  action: entityId ? `reindex:${entityId}` : "reindex:all",
55
55
  tenantId: auth.tenantId,
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../../../../../src/modules/search/api/embeddings/reindex/route.ts"],
4
- "sourcesContent": ["import { NextResponse } from 'next/server'\nimport { createRequestContainer } from '@open-mercato/shared/lib/di/container'\nimport { getAuthFromRequest } from '@open-mercato/shared/lib/auth/server'\nimport type { SearchIndexer } from '../../../../../indexer/search-indexer'\nimport type { EmbeddingService } from '../../../../../vector'\nimport type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'\nimport type { Knex } from 'knex'\nimport type { EntityManager } from '@mikro-orm/postgresql'\nimport { recordIndexerLog } from '@open-mercato/shared/lib/indexers/status-log'\nimport { resolveTranslations } from '@open-mercato/shared/lib/i18n/server'\nimport { resolveEmbeddingConfig } from '../../../lib/embedding-config'\nimport type { EntityId } from '@open-mercato/shared/modules/entities'\nimport { searchDebug, searchDebugWarn, searchError } from '../../../../../lib/debug'\nimport { acquireReindexLock, clearReindexLock, getReindexLockStatus } from '../../../lib/reindex-lock'\nimport {\n ensureReindexProgressJob,\n failReindexProgress,\n} from '../../../lib/reindex-progress'\nimport { embeddingsReindexOpenApi } from '../../openapi'\n\nexport const metadata = {\n POST: { requireAuth: true, requireFeatures: ['search.embeddings.manage'] },\n}\n\nexport async function POST(req: Request) {\n const { t } = await resolveTranslations()\n const auth = await getAuthFromRequest(req)\n if (!auth?.tenantId) {\n return NextResponse.json({ error: t('api.errors.unauthorized', 'Unauthorized') }, { status: 401 })\n }\n\n let payload: { entityId?: string; purgeFirst?: boolean } = {}\n try {\n payload = await req.json()\n } catch {\n // Default values\n }\n\n const entityId = typeof payload?.entityId === 'string' ? payload.entityId : undefined\n const purgeFirst = payload?.purgeFirst === true\n\n const container = await createRequestContainer()\n const em = container.resolve('em') as EntityManager\n const progressService = container.resolve('progressService') as ProgressService\n const knex = (em.getConnection() as unknown as { getKnex: () => Knex }).getKnex()\n\n // Check if another vector reindex operation is already in progress\n const existingLock = await getReindexLockStatus(knex, auth.tenantId, { type: 'vector' })\n if (existingLock) {\n const startedAt = new Date(existingLock.startedAt)\n return NextResponse.json(\n {\n error: t('search.api.errors.reindexInProgress', 'A reindex operation is already in progress'),\n lock: {\n type: existingLock.type,\n action: existingLock.action,\n startedAt: existingLock.startedAt,\n elapsedMinutes: Math.round((Date.now() - startedAt.getTime()) / 60000),\n processedCount: existingLock.processedCount,\n totalCount: existingLock.totalCount,\n },\n },\n { status: 409 }\n )\n }\n\n // Acquire lock before starting the operation\n const { acquired: lockAcquired } = await acquireReindexLock(knex, {\n type: 'vector',\n action: entityId ? `reindex:${entityId}` : 'reindex:all',\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? 
null,\n })\n\n if (!lockAcquired) {\n return NextResponse.json(\n { error: t('search.api.errors.lockFailed', 'Failed to acquire reindex lock') },\n { status: 409 }\n )\n }\n\n try {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let em: any = null\n try {\n em = container.resolve('em')\n } catch {\n // em not available\n }\n\n let searchIndexer: SearchIndexer\n try {\n searchIndexer = container.resolve('searchIndexer') as SearchIndexer\n } catch {\n return NextResponse.json(\n { error: t('search.api.errors.indexUnavailable', 'Search indexer unavailable') },\n { status: 503 }\n )\n }\n\n // Load saved embedding config and update the embedding service\n try {\n const embeddingConfig = await resolveEmbeddingConfig(container, { defaultValue: null })\n if (embeddingConfig) {\n const embeddingService = container.resolve<EmbeddingService>('vectorEmbeddingService')\n embeddingService.updateConfig(embeddingConfig)\n searchDebug('search.embeddings.reindex', 'using embedding config', {\n providerId: embeddingConfig.providerId,\n model: embeddingConfig.model,\n dimension: embeddingConfig.dimension,\n })\n }\n } catch (err) {\n searchDebugWarn('search.embeddings.reindex', 'failed to load embedding config, using defaults', {\n error: err instanceof Error ? err.message : err,\n })\n }\n\n await recordIndexerLog(\n { em: em ?? undefined },\n {\n source: 'vector',\n handler: 'api:search.embeddings.reindex',\n message: entityId\n ? `Vector reindex requested for ${entityId}`\n : 'Vector reindex requested for all entities',\n entityType: entityId ?? null,\n tenantId: auth.tenantId ?? null,\n organizationId: auth.orgId ?? null,\n details: { purgeFirst },\n },\n ).catch(() => undefined)\n\n // Use queue-based vector reindexing (similar to fulltext)\n // This enqueues batches for background processing by workers\n let result\n if (entityId) {\n result = await searchIndexer.reindexEntityToVector({\n entityId: entityId as EntityId,\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n purgeFirst,\n useQueue: true,\n })\n } else {\n result = await searchIndexer.reindexAllToVector({\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n purgeFirst,\n useQueue: true,\n })\n }\n\n await ensureReindexProgressJob({\n em,\n progressService,\n type: 'vector',\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n userId: auth.sub ?? null,\n totalCount: result.recordsIndexed,\n description: entityId\n ? `Vector reindex ${entityId} (queued)`\n : 'Vector reindex all entities (queued)',\n })\n\n await recordIndexerLog(\n { em: em ?? undefined },\n {\n source: 'vector',\n handler: 'api:search.embeddings.reindex',\n message: result.jobsEnqueued\n ? `Vector reindex enqueued ${result.jobsEnqueued} jobs for ${entityId ?? 'all entities'}`\n : `Vector reindex completed for ${entityId ?? 'all entities'}`,\n entityType: entityId ?? null,\n tenantId: auth.tenantId ?? null,\n organizationId: auth.orgId ?? null,\n details: {\n purgeFirst,\n recordsIndexed: result.recordsIndexed,\n jobsEnqueued: result.jobsEnqueued,\n success: result.success,\n },\n },\n ).catch(() => undefined)\n\n return NextResponse.json({\n ok: result.success,\n recordsIndexed: result.recordsIndexed,\n jobsEnqueued: result.jobsEnqueued,\n entitiesProcessed: result.entitiesProcessed,\n errors: result.errors.length > 0 ? result.errors : undefined,\n })\n } catch (error: unknown) {\n const err = error as { message?: string; status?: number; statusCode?: number }\n const status = typeof err?.status === 'number'\n ? 
err.status\n : (typeof err?.statusCode === 'number' ? err.statusCode : 500)\n searchError('search.embeddings.reindex', 'failed', {\n error: error instanceof Error ? error.message : error,\n stack: error instanceof Error ? error.stack : undefined,\n status,\n })\n await failReindexProgress({\n em,\n progressService,\n type: 'vector',\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n errorMessage: error instanceof Error ? error.message : 'Vector reindex failed',\n })\n return NextResponse.json(\n { error: t('search.api.errors.reindexFailed', 'Vector reindex failed. Please try again or contact support.') },\n { status: status >= 400 ? status : 500 }\n )\n } finally {\n // Do NOT clear lock here - vector reindex always uses queue mode\n // Workers update heartbeat and stale detection handles cleanup when done\n\n const disposable = container as unknown as { dispose?: () => Promise<void> }\n if (typeof disposable.dispose === 'function') {\n await disposable.dispose()\n }\n }\n}\n\nexport const openApi = embeddingsReindexOpenApi\n"],
5
- "mappings": "AAAA,SAAS,oBAAoB;AAC7B,SAAS,8BAA8B;AACvC,SAAS,0BAA0B;AAMnC,SAAS,wBAAwB;AACjC,SAAS,2BAA2B;AACpC,SAAS,8BAA8B;AAEvC,SAAS,aAAa,iBAAiB,mBAAmB;AAC1D,SAAS,oBAAsC,4BAA4B;AAC3E;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP,SAAS,gCAAgC;AAElC,MAAM,WAAW;AAAA,EACtB,MAAM,EAAE,aAAa,MAAM,iBAAiB,CAAC,0BAA0B,EAAE;AAC3E;AAEA,eAAsB,KAAK,KAAc;AACvC,QAAM,EAAE,EAAE,IAAI,MAAM,oBAAoB;AACxC,QAAM,OAAO,MAAM,mBAAmB,GAAG;AACzC,MAAI,CAAC,MAAM,UAAU;AACnB,WAAO,aAAa,KAAK,EAAE,OAAO,EAAE,2BAA2B,cAAc,EAAE,GAAG,EAAE,QAAQ,IAAI,CAAC;AAAA,EACnG;AAEA,MAAI,UAAuD,CAAC;AAC5D,MAAI;AACF,cAAU,MAAM,IAAI,KAAK;AAAA,EAC3B,QAAQ;AAAA,EAER;AAEA,QAAM,WAAW,OAAO,SAAS,aAAa,WAAW,QAAQ,WAAW;AAC5E,QAAM,aAAa,SAAS,eAAe;AAE3C,QAAM,YAAY,MAAM,uBAAuB;AAC/C,QAAM,KAAK,UAAU,QAAQ,IAAI;AACjC,QAAM,kBAAkB,UAAU,QAAQ,iBAAiB;AAC3D,QAAM,OAAQ,GAAG,cAAc,EAAyC,QAAQ;AAGhF,QAAM,eAAe,MAAM,qBAAqB,MAAM,KAAK,UAAU,EAAE,MAAM,SAAS,CAAC;AACvF,MAAI,cAAc;AAChB,UAAM,YAAY,IAAI,KAAK,aAAa,SAAS;AACjD,WAAO,aAAa;AAAA,MAClB;AAAA,QACE,OAAO,EAAE,uCAAuC,4CAA4C;AAAA,QAC5F,MAAM;AAAA,UACJ,MAAM,aAAa;AAAA,UACnB,QAAQ,aAAa;AAAA,UACrB,WAAW,aAAa;AAAA,UACxB,gBAAgB,KAAK,OAAO,KAAK,IAAI,IAAI,UAAU,QAAQ,KAAK,GAAK;AAAA,UACrE,gBAAgB,aAAa;AAAA,UAC7B,YAAY,aAAa;AAAA,QAC3B;AAAA,MACF;AAAA,MACA,EAAE,QAAQ,IAAI;AAAA,IAChB;AAAA,EACF;AAGA,QAAM,EAAE,UAAU,aAAa,IAAI,MAAM,mBAAmB,MAAM;AAAA,IAChE,MAAM;AAAA,IACN,QAAQ,WAAW,WAAW,QAAQ,KAAK;AAAA,IAC3C,UAAU,KAAK;AAAA,IACf,gBAAgB,KAAK,SAAS;AAAA,EAChC,CAAC;AAED,MAAI,CAAC,cAAc;AACjB,WAAO,aAAa;AAAA,MAClB,EAAE,OAAO,EAAE,gCAAgC,gCAAgC,EAAE;AAAA,MAC7E,EAAE,QAAQ,IAAI;AAAA,IAChB;AAAA,EACF;AAEA,MAAI;AAEF,QAAIA,MAAU;AACd,QAAI;AACF,MAAAA,MAAK,UAAU,QAAQ,IAAI;AAAA,IAC7B,QAAQ;AAAA,IAER;AAEA,QAAI;AACJ,QAAI;AACF,sBAAgB,UAAU,QAAQ,eAAe;AAAA,IACnD,QAAQ;AACN,aAAO,aAAa;AAAA,QAClB,EAAE,OAAO,EAAE,sCAAsC,4BAA4B,EAAE;AAAA,QAC/E,EAAE,QAAQ,IAAI;AAAA,MAChB;AAAA,IACF;AAGA,QAAI;AACF,YAAM,kBAAkB,MAAM,uBAAuB,WAAW,EAAE,cAAc,KAAK,CAAC;AACtF,UAAI,iBAAiB;AACnB,cAAM,mBAAmB,UAAU,QAA0B,wBAAwB;AACrF,yBAAiB,aAAa,eAAe;AAC7C,oBAAY,6BAA6B,0BAA0B;AAAA,UACjE,YAAY,gBAAgB;AAAA,UAC5B,OAAO,gBAAgB;AAAA,UACvB,WAAW,gBAAgB;AAAA,QAC7B,CAAC;AAAA,MACH;AAAA,IACF,SAAS,KAAK;AACZ,sBAAgB,6BAA6B,mDAAmD;AAAA,QAC9F,OAAO,eAAe,QAAQ,IAAI,UAAU;AAAA,MAC9C,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,EAAE,IAAIA,OAAM,OAAU;AAAA,MACtB;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,SAAS,WACL,gCAAgC,QAAQ,KACxC;AAAA,QACJ,YAAY,YAAY;AAAA,QACxB,UAAU,KAAK,YAAY;AAAA,QAC3B,gBAAgB,KAAK,SAAS;AAAA,QAC9B,SAAS,EAAE,WAAW;AAAA,MACxB;AAAA,IACF,EAAE,MAAM,MAAM,MAAS;AAIvB,QAAI;AACJ,QAAI,UAAU;AACZ,eAAS,MAAM,cAAc,sBAAsB;AAAA,QACjD;AAAA,QACA,UAAU,KAAK;AAAA,QACf,gBAAgB,KAAK,SAAS;AAAA,QAC9B;AAAA,QACA,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,OAAO;AACL,eAAS,MAAM,cAAc,mBAAmB;AAAA,QAC9C,UAAU,KAAK;AAAA,QACf,gBAAgB,KAAK,SAAS;AAAA,QAC9B;AAAA,QACA,UAAU;AAAA,MACZ,CAAC;AAAA,IACH;AAEA,UAAM,yBAAyB;AAAA,MAC7B,IAAAA;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,UAAU,KAAK;AAAA,MACf,gBAAgB,KAAK,SAAS;AAAA,MAC9B,QAAQ,KAAK,OAAO;AAAA,MACpB,YAAY,OAAO;AAAA,MACnB,aAAa,WACT,kBAAkB,QAAQ,cAC1B;AAAA,IACN,CAAC;AAED,UAAM;AAAA,MACJ,EAAE,IAAIA,OAAM,OAAU;AAAA,MACtB;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,SAAS,OAAO,eACZ,2BAA2B,OAAO,YAAY,aAAa,YAAY,cAAc,KACrF,gCAAgC,YAAY,cAAc;AAAA,QAC9D,YAAY,YAAY;AAAA,QACxB,UAAU,KAAK,YAAY;AAAA,QAC3B,gBAAgB,KAAK,SAAS;AAAA,QAC9B,SAAS;AAAA,UACP;AAAA,UACA,gBAAgB,OAAO;AAAA,UACvB,cAAc,OAAO;AAAA,UACrB,SAAS,OAAO;AAAA,QAClB;AAAA,MACF;AAAA,IACF,EAAE,MAAM,MAAM,MAAS;AAEvB,WAAO,aAAa,KAAK;AAAA,MACvB,IAAI,OAAO;AAAA,MACX,gBAAgB,OAAO;AAAA,MACvB,cAAc,OAAO;AAAA,MACrB,mBAAmB,OAAO;AAAA,MAC1B,QAAQ,OAAO,OAAO,SAAS,IAAI,OAAO,SAAS;AAAA,IACrD,CAAC;AAAA,EACH,SAAS,OAAgB;AACvB,UAAM,MAAM;AACZ,UAAM,SAAS,OAAO,KAAK,WAAW,WAClC,IAAI,SACH,OAAO,KAAK,eAA
e,WAAW,IAAI,aAAa;AAC5D,gBAAY,6BAA6B,UAAU;AAAA,MACjD,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,MAChD,OAAO,iBAAiB,QAAQ,MAAM,QAAQ;AAAA,MAC9C;AAAA,IACF,CAAC;AACD,UAAM,oBAAoB;AAAA,MACxB;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,UAAU,KAAK;AAAA,MACf,gBAAgB,KAAK,SAAS;AAAA,MAC9B,cAAc,iBAAiB,QAAQ,MAAM,UAAU;AAAA,IACzD,CAAC;AACD,WAAO,aAAa;AAAA,MAClB,EAAE,OAAO,EAAE,mCAAmC,6DAA6D,EAAE;AAAA,MAC7G,EAAE,QAAQ,UAAU,MAAM,SAAS,IAAI;AAAA,IACzC;AAAA,EACF,UAAE;AAIA,UAAM,aAAa;AACnB,QAAI,OAAO,WAAW,YAAY,YAAY;AAC5C,YAAM,WAAW,QAAQ;AAAA,IAC3B;AAAA,EACF;AACF;AAEO,MAAM,UAAU;",
4
+ "sourcesContent": ["import { NextResponse } from 'next/server'\nimport { createRequestContainer } from '@open-mercato/shared/lib/di/container'\nimport { getAuthFromRequest } from '@open-mercato/shared/lib/auth/server'\nimport type { SearchIndexer } from '../../../../../indexer/search-indexer'\nimport type { EmbeddingService } from '../../../../../vector'\nimport type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'\n\nimport type { EntityManager } from '@mikro-orm/postgresql'\nimport type { Kysely } from 'kysely'\nimport { recordIndexerLog } from '@open-mercato/shared/lib/indexers/status-log'\nimport { resolveTranslations } from '@open-mercato/shared/lib/i18n/server'\nimport { resolveEmbeddingConfig } from '../../../lib/embedding-config'\nimport type { EntityId } from '@open-mercato/shared/modules/entities'\nimport { searchDebug, searchDebugWarn, searchError } from '../../../../../lib/debug'\nimport { acquireReindexLock, clearReindexLock, getReindexLockStatus } from '../../../lib/reindex-lock'\nimport {\n ensureReindexProgressJob,\n failReindexProgress,\n} from '../../../lib/reindex-progress'\nimport { embeddingsReindexOpenApi } from '../../openapi'\n\nexport const metadata = {\n POST: { requireAuth: true, requireFeatures: ['search.embeddings.manage'] },\n}\n\nexport async function POST(req: Request) {\n const { t } = await resolveTranslations()\n const auth = await getAuthFromRequest(req)\n if (!auth?.tenantId) {\n return NextResponse.json({ error: t('api.errors.unauthorized', 'Unauthorized') }, { status: 401 })\n }\n\n let payload: { entityId?: string; purgeFirst?: boolean } = {}\n try {\n payload = await req.json()\n } catch {\n // Default values\n }\n\n const entityId = typeof payload?.entityId === 'string' ? payload.entityId : undefined\n const purgeFirst = payload?.purgeFirst === true\n\n const container = await createRequestContainer()\n const em = container.resolve('em') as EntityManager\n const progressService = container.resolve('progressService') as ProgressService\n const db = (em as unknown as { getKysely: () => Kysely<any> }).getKysely()\n\n // Check if another vector reindex operation is already in progress\n const existingLock = await getReindexLockStatus(db, auth.tenantId, { type: 'vector' })\n if (existingLock) {\n const startedAt = new Date(existingLock.startedAt)\n return NextResponse.json(\n {\n error: t('search.api.errors.reindexInProgress', 'A reindex operation is already in progress'),\n lock: {\n type: existingLock.type,\n action: existingLock.action,\n startedAt: existingLock.startedAt,\n elapsedMinutes: Math.round((Date.now() - startedAt.getTime()) / 60000),\n processedCount: existingLock.processedCount,\n totalCount: existingLock.totalCount,\n },\n },\n { status: 409 }\n )\n }\n\n // Acquire lock before starting the operation\n const { acquired: lockAcquired } = await acquireReindexLock(db, {\n type: 'vector',\n action: entityId ? `reindex:${entityId}` : 'reindex:all',\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? 
null,\n })\n\n if (!lockAcquired) {\n return NextResponse.json(\n { error: t('search.api.errors.lockFailed', 'Failed to acquire reindex lock') },\n { status: 409 }\n )\n }\n\n try {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let em: any = null\n try {\n em = container.resolve('em')\n } catch {\n // em not available\n }\n\n let searchIndexer: SearchIndexer\n try {\n searchIndexer = container.resolve('searchIndexer') as SearchIndexer\n } catch {\n return NextResponse.json(\n { error: t('search.api.errors.indexUnavailable', 'Search indexer unavailable') },\n { status: 503 }\n )\n }\n\n // Load saved embedding config and update the embedding service\n try {\n const embeddingConfig = await resolveEmbeddingConfig(container, { defaultValue: null })\n if (embeddingConfig) {\n const embeddingService = container.resolve<EmbeddingService>('vectorEmbeddingService')\n embeddingService.updateConfig(embeddingConfig)\n searchDebug('search.embeddings.reindex', 'using embedding config', {\n providerId: embeddingConfig.providerId,\n model: embeddingConfig.model,\n dimension: embeddingConfig.dimension,\n })\n }\n } catch (err) {\n searchDebugWarn('search.embeddings.reindex', 'failed to load embedding config, using defaults', {\n error: err instanceof Error ? err.message : err,\n })\n }\n\n await recordIndexerLog(\n { em: em ?? undefined },\n {\n source: 'vector',\n handler: 'api:search.embeddings.reindex',\n message: entityId\n ? `Vector reindex requested for ${entityId}`\n : 'Vector reindex requested for all entities',\n entityType: entityId ?? null,\n tenantId: auth.tenantId ?? null,\n organizationId: auth.orgId ?? null,\n details: { purgeFirst },\n },\n ).catch(() => undefined)\n\n // Use queue-based vector reindexing (similar to fulltext)\n // This enqueues batches for background processing by workers\n let result\n if (entityId) {\n result = await searchIndexer.reindexEntityToVector({\n entityId: entityId as EntityId,\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n purgeFirst,\n useQueue: true,\n })\n } else {\n result = await searchIndexer.reindexAllToVector({\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n purgeFirst,\n useQueue: true,\n })\n }\n\n await ensureReindexProgressJob({\n em,\n progressService,\n type: 'vector',\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n userId: auth.sub ?? null,\n totalCount: result.recordsIndexed,\n description: entityId\n ? `Vector reindex ${entityId} (queued)`\n : 'Vector reindex all entities (queued)',\n })\n\n await recordIndexerLog(\n { em: em ?? undefined },\n {\n source: 'vector',\n handler: 'api:search.embeddings.reindex',\n message: result.jobsEnqueued\n ? `Vector reindex enqueued ${result.jobsEnqueued} jobs for ${entityId ?? 'all entities'}`\n : `Vector reindex completed for ${entityId ?? 'all entities'}`,\n entityType: entityId ?? null,\n tenantId: auth.tenantId ?? null,\n organizationId: auth.orgId ?? null,\n details: {\n purgeFirst,\n recordsIndexed: result.recordsIndexed,\n jobsEnqueued: result.jobsEnqueued,\n success: result.success,\n },\n },\n ).catch(() => undefined)\n\n return NextResponse.json({\n ok: result.success,\n recordsIndexed: result.recordsIndexed,\n jobsEnqueued: result.jobsEnqueued,\n entitiesProcessed: result.entitiesProcessed,\n errors: result.errors.length > 0 ? result.errors : undefined,\n })\n } catch (error: unknown) {\n const err = error as { message?: string; status?: number; statusCode?: number }\n const status = typeof err?.status === 'number'\n ? 
err.status\n : (typeof err?.statusCode === 'number' ? err.statusCode : 500)\n searchError('search.embeddings.reindex', 'failed', {\n error: error instanceof Error ? error.message : error,\n stack: error instanceof Error ? error.stack : undefined,\n status,\n })\n await failReindexProgress({\n em,\n progressService,\n type: 'vector',\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n errorMessage: error instanceof Error ? error.message : 'Vector reindex failed',\n })\n return NextResponse.json(\n { error: t('search.api.errors.reindexFailed', 'Vector reindex failed. Please try again or contact support.') },\n { status: status >= 400 ? status : 500 }\n )\n } finally {\n // Do NOT clear lock here - vector reindex always uses queue mode\n // Workers update heartbeat and stale detection handles cleanup when done\n\n const disposable = container as unknown as { dispose?: () => Promise<void> }\n if (typeof disposable.dispose === 'function') {\n await disposable.dispose()\n }\n }\n}\n\nexport const openApi = embeddingsReindexOpenApi\n"],
5
+ "mappings": "AAAA,SAAS,oBAAoB;AAC7B,SAAS,8BAA8B;AACvC,SAAS,0BAA0B;AAOnC,SAAS,wBAAwB;AACjC,SAAS,2BAA2B;AACpC,SAAS,8BAA8B;AAEvC,SAAS,aAAa,iBAAiB,mBAAmB;AAC1D,SAAS,oBAAsC,4BAA4B;AAC3E;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP,SAAS,gCAAgC;AAElC,MAAM,WAAW;AAAA,EACtB,MAAM,EAAE,aAAa,MAAM,iBAAiB,CAAC,0BAA0B,EAAE;AAC3E;AAEA,eAAsB,KAAK,KAAc;AACvC,QAAM,EAAE,EAAE,IAAI,MAAM,oBAAoB;AACxC,QAAM,OAAO,MAAM,mBAAmB,GAAG;AACzC,MAAI,CAAC,MAAM,UAAU;AACnB,WAAO,aAAa,KAAK,EAAE,OAAO,EAAE,2BAA2B,cAAc,EAAE,GAAG,EAAE,QAAQ,IAAI,CAAC;AAAA,EACnG;AAEA,MAAI,UAAuD,CAAC;AAC5D,MAAI;AACF,cAAU,MAAM,IAAI,KAAK;AAAA,EAC3B,QAAQ;AAAA,EAER;AAEA,QAAM,WAAW,OAAO,SAAS,aAAa,WAAW,QAAQ,WAAW;AAC5E,QAAM,aAAa,SAAS,eAAe;AAE3C,QAAM,YAAY,MAAM,uBAAuB;AAC/C,QAAM,KAAK,UAAU,QAAQ,IAAI;AACjC,QAAM,kBAAkB,UAAU,QAAQ,iBAAiB;AAC3D,QAAM,KAAM,GAAmD,UAAU;AAGzE,QAAM,eAAe,MAAM,qBAAqB,IAAI,KAAK,UAAU,EAAE,MAAM,SAAS,CAAC;AACrF,MAAI,cAAc;AAChB,UAAM,YAAY,IAAI,KAAK,aAAa,SAAS;AACjD,WAAO,aAAa;AAAA,MAClB;AAAA,QACE,OAAO,EAAE,uCAAuC,4CAA4C;AAAA,QAC5F,MAAM;AAAA,UACJ,MAAM,aAAa;AAAA,UACnB,QAAQ,aAAa;AAAA,UACrB,WAAW,aAAa;AAAA,UACxB,gBAAgB,KAAK,OAAO,KAAK,IAAI,IAAI,UAAU,QAAQ,KAAK,GAAK;AAAA,UACrE,gBAAgB,aAAa;AAAA,UAC7B,YAAY,aAAa;AAAA,QAC3B;AAAA,MACF;AAAA,MACA,EAAE,QAAQ,IAAI;AAAA,IAChB;AAAA,EACF;AAGA,QAAM,EAAE,UAAU,aAAa,IAAI,MAAM,mBAAmB,IAAI;AAAA,IAC9D,MAAM;AAAA,IACN,QAAQ,WAAW,WAAW,QAAQ,KAAK;AAAA,IAC3C,UAAU,KAAK;AAAA,IACf,gBAAgB,KAAK,SAAS;AAAA,EAChC,CAAC;AAED,MAAI,CAAC,cAAc;AACjB,WAAO,aAAa;AAAA,MAClB,EAAE,OAAO,EAAE,gCAAgC,gCAAgC,EAAE;AAAA,MAC7E,EAAE,QAAQ,IAAI;AAAA,IAChB;AAAA,EACF;AAEA,MAAI;AAEF,QAAIA,MAAU;AACd,QAAI;AACF,MAAAA,MAAK,UAAU,QAAQ,IAAI;AAAA,IAC7B,QAAQ;AAAA,IAER;AAEA,QAAI;AACJ,QAAI;AACF,sBAAgB,UAAU,QAAQ,eAAe;AAAA,IACnD,QAAQ;AACN,aAAO,aAAa;AAAA,QAClB,EAAE,OAAO,EAAE,sCAAsC,4BAA4B,EAAE;AAAA,QAC/E,EAAE,QAAQ,IAAI;AAAA,MAChB;AAAA,IACF;AAGA,QAAI;AACF,YAAM,kBAAkB,MAAM,uBAAuB,WAAW,EAAE,cAAc,KAAK,CAAC;AACtF,UAAI,iBAAiB;AACnB,cAAM,mBAAmB,UAAU,QAA0B,wBAAwB;AACrF,yBAAiB,aAAa,eAAe;AAC7C,oBAAY,6BAA6B,0BAA0B;AAAA,UACjE,YAAY,gBAAgB;AAAA,UAC5B,OAAO,gBAAgB;AAAA,UACvB,WAAW,gBAAgB;AAAA,QAC7B,CAAC;AAAA,MACH;AAAA,IACF,SAAS,KAAK;AACZ,sBAAgB,6BAA6B,mDAAmD;AAAA,QAC9F,OAAO,eAAe,QAAQ,IAAI,UAAU;AAAA,MAC9C,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,EAAE,IAAIA,OAAM,OAAU;AAAA,MACtB;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,SAAS,WACL,gCAAgC,QAAQ,KACxC;AAAA,QACJ,YAAY,YAAY;AAAA,QACxB,UAAU,KAAK,YAAY;AAAA,QAC3B,gBAAgB,KAAK,SAAS;AAAA,QAC9B,SAAS,EAAE,WAAW;AAAA,MACxB;AAAA,IACF,EAAE,MAAM,MAAM,MAAS;AAIvB,QAAI;AACJ,QAAI,UAAU;AACZ,eAAS,MAAM,cAAc,sBAAsB;AAAA,QACjD;AAAA,QACA,UAAU,KAAK;AAAA,QACf,gBAAgB,KAAK,SAAS;AAAA,QAC9B;AAAA,QACA,UAAU;AAAA,MACZ,CAAC;AAAA,IACH,OAAO;AACL,eAAS,MAAM,cAAc,mBAAmB;AAAA,QAC9C,UAAU,KAAK;AAAA,QACf,gBAAgB,KAAK,SAAS;AAAA,QAC9B;AAAA,QACA,UAAU;AAAA,MACZ,CAAC;AAAA,IACH;AAEA,UAAM,yBAAyB;AAAA,MAC7B,IAAAA;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,UAAU,KAAK;AAAA,MACf,gBAAgB,KAAK,SAAS;AAAA,MAC9B,QAAQ,KAAK,OAAO;AAAA,MACpB,YAAY,OAAO;AAAA,MACnB,aAAa,WACT,kBAAkB,QAAQ,cAC1B;AAAA,IACN,CAAC;AAED,UAAM;AAAA,MACJ,EAAE,IAAIA,OAAM,OAAU;AAAA,MACtB;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,SAAS,OAAO,eACZ,2BAA2B,OAAO,YAAY,aAAa,YAAY,cAAc,KACrF,gCAAgC,YAAY,cAAc;AAAA,QAC9D,YAAY,YAAY;AAAA,QACxB,UAAU,KAAK,YAAY;AAAA,QAC3B,gBAAgB,KAAK,SAAS;AAAA,QAC9B,SAAS;AAAA,UACP;AAAA,UACA,gBAAgB,OAAO;AAAA,UACvB,cAAc,OAAO;AAAA,UACrB,SAAS,OAAO;AAAA,QAClB;AAAA,MACF;AAAA,IACF,EAAE,MAAM,MAAM,MAAS;AAEvB,WAAO,aAAa,KAAK;AAAA,MACvB,IAAI,OAAO;AAAA,MACX,gBAAgB,OAAO;AAAA,MACvB,cAAc,OAAO;AAAA,MACrB,mBAAmB,OAAO;AAAA,MAC1B,QAAQ,OAAO,OAAO,SAAS,IAAI,OAAO,SAAS;AAAA,IACrD,CAAC;AAAA,EACH,SAAS,OAAgB;AACvB,UAAM,MAAM;AACZ,UAAM,SAAS,OAAO,KAAK,WAAW,WAClC,IAAI,SACH,OAAO,KAAK,eAAe,WAAW,IAA
I,aAAa;AAC5D,gBAAY,6BAA6B,UAAU;AAAA,MACjD,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,MAChD,OAAO,iBAAiB,QAAQ,MAAM,QAAQ;AAAA,MAC9C;AAAA,IACF,CAAC;AACD,UAAM,oBAAoB;AAAA,MACxB;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,UAAU,KAAK;AAAA,MACf,gBAAgB,KAAK,SAAS;AAAA,MAC9B,cAAc,iBAAiB,QAAQ,MAAM,UAAU;AAAA,IACzD,CAAC;AACD,WAAO,aAAa;AAAA,MAClB,EAAE,OAAO,EAAE,mCAAmC,6DAA6D,EAAE;AAAA,MAC7G,EAAE,QAAQ,UAAU,MAAM,SAAS,IAAI;AAAA,IACzC;AAAA,EACF,UAAE;AAIA,UAAM,aAAa;AACnB,QAAI,OAAO,WAAW,YAAY,YAAY;AAC5C,YAAM,WAAW,QAAQ;AAAA,IAC3B;AAAA,EACF;AACF;AAEO,MAAM,UAAU;",
6
6
  "names": ["em"]
7
7
  }
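The reindex route guards against concurrent runs before enqueueing any work: it checks `getReindexLockStatus` and then attempts `acquireReindexLock`, answering 409 in either failure case. A hedged condensation of that guard follows; `ensureVectorReindexLock` is a hypothetical helper name, not part of the package.

```ts
import type { Kysely } from 'kysely'
import { acquireReindexLock, getReindexLockStatus } from '../../../lib/reindex-lock'

// Sketch of the pre-flight lock guard used by the vector reindex route:
// refuse (409-style) when a lock already exists or cannot be acquired.
async function ensureVectorReindexLock(
  db: Kysely<any>,
  tenantId: string,
  organizationId: string | null,
  entityId?: string,
) {
  const existingLock = await getReindexLockStatus(db, tenantId, { type: 'vector' })
  if (existingLock) {
    return { ok: false as const, status: 409, lock: existingLock }
  }
  const { acquired } = await acquireReindexLock(db, {
    type: 'vector',
    action: entityId ? `reindex:${entityId}` : 'reindex:all',
    tenantId,
    organizationId,
  })
  return acquired ? { ok: true as const } : { ok: false as const, status: 409 }
}
```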
@@ -18,7 +18,7 @@ async function POST(req) {
18
18
  const container = await createRequestContainer();
19
19
  const em = container.resolve("em");
20
20
  const progressService = container.resolve("progressService");
21
- const knex = em.getConnection().getKnex();
21
+ const db = em.getKysely();
22
22
  let queue;
23
23
  try {
24
24
  queue = container.resolve("fulltextIndexQueue");
@@ -33,7 +33,7 @@ async function POST(req) {
33
33
  } catch {
34
34
  }
35
35
  }
36
- await clearReindexLock(knex, auth.tenantId, "fulltext", auth.orgId ?? null);
36
+ await clearReindexLock(db, auth.tenantId, "fulltext", auth.orgId ?? null);
37
37
  await cancelReindexProgress({
38
38
  em,
39
39
  progressService,
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../../../../../src/modules/search/api/reindex/cancel/route.ts"],
4
- "sourcesContent": ["import { NextResponse } from 'next/server'\nimport { createRequestContainer } from '@open-mercato/shared/lib/di/container'\nimport { getAuthFromRequest } from '@open-mercato/shared/lib/auth/server'\nimport type { Queue } from '@open-mercato/queue'\nimport type { Knex } from 'knex'\nimport type { EntityManager } from '@mikro-orm/postgresql'\nimport type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'\nimport { clearReindexLock } from '../../../lib/reindex-lock'\nimport { cancelReindexProgress } from '../../../lib/reindex-progress'\nimport { resolveTranslations } from '@open-mercato/shared/lib/i18n/server'\nimport { recordIndexerLog } from '@open-mercato/shared/lib/indexers/status-log'\nimport { reindexCancelOpenApi } from '../../openapi'\n\nexport const metadata = {\n POST: { requireAuth: true, requireFeatures: ['search.reindex'] },\n}\n\nexport async function POST(req: Request) {\n const { t } = await resolveTranslations()\n const auth = await getAuthFromRequest(req)\n if (!auth?.tenantId) {\n return NextResponse.json({ error: t('api.errors.unauthorized', 'Unauthorized') }, { status: 401 })\n }\n\n const container = await createRequestContainer()\n const em = container.resolve('em') as EntityManager\n const progressService = container.resolve('progressService') as ProgressService\n const knex = (em.getConnection() as unknown as { getKnex: () => Knex }).getKnex()\n\n let queue: Queue | undefined\n try {\n queue = container.resolve<Queue>('fulltextIndexQueue')\n } catch {\n // Queue not available - just clear the lock\n }\n\n let jobsRemoved = 0\n if (queue) {\n try {\n const countsBefore = await queue.getJobCounts()\n jobsRemoved = countsBefore.waiting + countsBefore.active\n await queue.clear()\n } catch {\n // Queue clear failed - continue to clear lock\n }\n }\n\n await clearReindexLock(knex, auth.tenantId, 'fulltext', auth.orgId ?? null)\n await cancelReindexProgress({\n em,\n progressService,\n type: 'fulltext',\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n userId: auth.sub ?? null,\n })\n\n // Log the cancellation\n try {\n const em = container.resolve('em')\n await recordIndexerLog(\n { em },\n {\n source: 'fulltext',\n handler: 'api:search.reindex.cancel',\n message: `Cancelled fulltext reindex operation (${jobsRemoved} jobs removed)`,\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n details: { jobsRemoved },\n },\n )\n } catch {\n // Logging failure should not fail the cancel operation\n }\n\n try {\n const disposable = container as unknown as { dispose?: () => Promise<void> }\n if (typeof disposable.dispose === 'function') {\n await disposable.dispose()\n }\n } catch {\n // Ignore disposal errors\n }\n\n return NextResponse.json({\n ok: true,\n jobsRemoved,\n })\n}\n\nexport const openApi = reindexCancelOpenApi\n"],
5
- "mappings": "AAAA,SAAS,oBAAoB;AAC7B,SAAS,8BAA8B;AACvC,SAAS,0BAA0B;AAKnC,SAAS,wBAAwB;AACjC,SAAS,6BAA6B;AACtC,SAAS,2BAA2B;AACpC,SAAS,wBAAwB;AACjC,SAAS,4BAA4B;AAE9B,MAAM,WAAW;AAAA,EACtB,MAAM,EAAE,aAAa,MAAM,iBAAiB,CAAC,gBAAgB,EAAE;AACjE;AAEA,eAAsB,KAAK,KAAc;AACvC,QAAM,EAAE,EAAE,IAAI,MAAM,oBAAoB;AACxC,QAAM,OAAO,MAAM,mBAAmB,GAAG;AACzC,MAAI,CAAC,MAAM,UAAU;AACnB,WAAO,aAAa,KAAK,EAAE,OAAO,EAAE,2BAA2B,cAAc,EAAE,GAAG,EAAE,QAAQ,IAAI,CAAC;AAAA,EACnG;AAEA,QAAM,YAAY,MAAM,uBAAuB;AAC/C,QAAM,KAAK,UAAU,QAAQ,IAAI;AACjC,QAAM,kBAAkB,UAAU,QAAQ,iBAAiB;AAC3D,QAAM,OAAQ,GAAG,cAAc,EAAyC,QAAQ;AAEhF,MAAI;AACJ,MAAI;AACF,YAAQ,UAAU,QAAe,oBAAoB;AAAA,EACvD,QAAQ;AAAA,EAER;AAEA,MAAI,cAAc;AAClB,MAAI,OAAO;AACT,QAAI;AACF,YAAM,eAAe,MAAM,MAAM,aAAa;AAC9C,oBAAc,aAAa,UAAU,aAAa;AAClD,YAAM,MAAM,MAAM;AAAA,IACpB,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,QAAM,iBAAiB,MAAM,KAAK,UAAU,YAAY,KAAK,SAAS,IAAI;AAC1E,QAAM,sBAAsB;AAAA,IAC1B;AAAA,IACA;AAAA,IACA,MAAM;AAAA,IACN,UAAU,KAAK;AAAA,IACf,gBAAgB,KAAK,SAAS;AAAA,IAC9B,QAAQ,KAAK,OAAO;AAAA,EACtB,CAAC;AAGD,MAAI;AACF,UAAMA,MAAK,UAAU,QAAQ,IAAI;AACjC,UAAM;AAAA,MACJ,EAAE,IAAAA,IAAG;AAAA,MACL;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,SAAS,yCAAyC,WAAW;AAAA,QAC7D,UAAU,KAAK;AAAA,QACf,gBAAgB,KAAK,SAAS;AAAA,QAC9B,SAAS,EAAE,YAAY;AAAA,MACzB;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,MAAI;AACF,UAAM,aAAa;AACnB,QAAI,OAAO,WAAW,YAAY,YAAY;AAC5C,YAAM,WAAW,QAAQ;AAAA,IAC3B;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO,aAAa,KAAK;AAAA,IACvB,IAAI;AAAA,IACJ;AAAA,EACF,CAAC;AACH;AAEO,MAAM,UAAU;",
4
+ "sourcesContent": ["import { NextResponse } from 'next/server'\nimport { createRequestContainer } from '@open-mercato/shared/lib/di/container'\nimport { getAuthFromRequest } from '@open-mercato/shared/lib/auth/server'\nimport type { Queue } from '@open-mercato/queue'\n\nimport type { EntityManager } from '@mikro-orm/postgresql'\nimport type { Kysely } from 'kysely'\nimport type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'\nimport { clearReindexLock } from '../../../lib/reindex-lock'\nimport { cancelReindexProgress } from '../../../lib/reindex-progress'\nimport { resolveTranslations } from '@open-mercato/shared/lib/i18n/server'\nimport { recordIndexerLog } from '@open-mercato/shared/lib/indexers/status-log'\nimport { reindexCancelOpenApi } from '../../openapi'\n\nexport const metadata = {\n POST: { requireAuth: true, requireFeatures: ['search.reindex'] },\n}\n\nexport async function POST(req: Request) {\n const { t } = await resolveTranslations()\n const auth = await getAuthFromRequest(req)\n if (!auth?.tenantId) {\n return NextResponse.json({ error: t('api.errors.unauthorized', 'Unauthorized') }, { status: 401 })\n }\n\n const container = await createRequestContainer()\n const em = container.resolve('em') as EntityManager\n const progressService = container.resolve('progressService') as ProgressService\n const db = (em as unknown as { getKysely: () => Kysely<any> }).getKysely()\n\n let queue: Queue | undefined\n try {\n queue = container.resolve<Queue>('fulltextIndexQueue')\n } catch {\n // Queue not available - just clear the lock\n }\n\n let jobsRemoved = 0\n if (queue) {\n try {\n const countsBefore = await queue.getJobCounts()\n jobsRemoved = countsBefore.waiting + countsBefore.active\n await queue.clear()\n } catch {\n // Queue clear failed - continue to clear lock\n }\n }\n\n await clearReindexLock(db, auth.tenantId, 'fulltext', auth.orgId ?? null)\n await cancelReindexProgress({\n em,\n progressService,\n type: 'fulltext',\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n userId: auth.sub ?? null,\n })\n\n // Log the cancellation\n try {\n const em = container.resolve('em')\n await recordIndexerLog(\n { em },\n {\n source: 'fulltext',\n handler: 'api:search.reindex.cancel',\n message: `Cancelled fulltext reindex operation (${jobsRemoved} jobs removed)`,\n tenantId: auth.tenantId,\n organizationId: auth.orgId ?? null,\n details: { jobsRemoved },\n },\n )\n } catch {\n // Logging failure should not fail the cancel operation\n }\n\n try {\n const disposable = container as unknown as { dispose?: () => Promise<void> }\n if (typeof disposable.dispose === 'function') {\n await disposable.dispose()\n }\n } catch {\n // Ignore disposal errors\n }\n\n return NextResponse.json({\n ok: true,\n jobsRemoved,\n })\n}\n\nexport const openApi = reindexCancelOpenApi\n"],
5
+ "mappings": "AAAA,SAAS,oBAAoB;AAC7B,SAAS,8BAA8B;AACvC,SAAS,0BAA0B;AAMnC,SAAS,wBAAwB;AACjC,SAAS,6BAA6B;AACtC,SAAS,2BAA2B;AACpC,SAAS,wBAAwB;AACjC,SAAS,4BAA4B;AAE9B,MAAM,WAAW;AAAA,EACtB,MAAM,EAAE,aAAa,MAAM,iBAAiB,CAAC,gBAAgB,EAAE;AACjE;AAEA,eAAsB,KAAK,KAAc;AACvC,QAAM,EAAE,EAAE,IAAI,MAAM,oBAAoB;AACxC,QAAM,OAAO,MAAM,mBAAmB,GAAG;AACzC,MAAI,CAAC,MAAM,UAAU;AACnB,WAAO,aAAa,KAAK,EAAE,OAAO,EAAE,2BAA2B,cAAc,EAAE,GAAG,EAAE,QAAQ,IAAI,CAAC;AAAA,EACnG;AAEA,QAAM,YAAY,MAAM,uBAAuB;AAC/C,QAAM,KAAK,UAAU,QAAQ,IAAI;AACjC,QAAM,kBAAkB,UAAU,QAAQ,iBAAiB;AAC3D,QAAM,KAAM,GAAmD,UAAU;AAEzE,MAAI;AACJ,MAAI;AACF,YAAQ,UAAU,QAAe,oBAAoB;AAAA,EACvD,QAAQ;AAAA,EAER;AAEA,MAAI,cAAc;AAClB,MAAI,OAAO;AACT,QAAI;AACF,YAAM,eAAe,MAAM,MAAM,aAAa;AAC9C,oBAAc,aAAa,UAAU,aAAa;AAClD,YAAM,MAAM,MAAM;AAAA,IACpB,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,QAAM,iBAAiB,IAAI,KAAK,UAAU,YAAY,KAAK,SAAS,IAAI;AACxE,QAAM,sBAAsB;AAAA,IAC1B;AAAA,IACA;AAAA,IACA,MAAM;AAAA,IACN,UAAU,KAAK;AAAA,IACf,gBAAgB,KAAK,SAAS;AAAA,IAC9B,QAAQ,KAAK,OAAO;AAAA,EACtB,CAAC;AAGD,MAAI;AACF,UAAMA,MAAK,UAAU,QAAQ,IAAI;AACjC,UAAM;AAAA,MACJ,EAAE,IAAAA,IAAG;AAAA,MACL;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,SAAS,yCAAyC,WAAW;AAAA,QAC7D,UAAU,KAAK;AAAA,QACf,gBAAgB,KAAK,SAAS;AAAA,QAC9B,SAAS,EAAE,YAAY;AAAA,MACzB;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,MAAI;AACF,UAAM,aAAa;AACnB,QAAI,OAAO,WAAW,YAAY,YAAY;AAC5C,YAAM,WAAW,QAAQ;AAAA,IAC3B;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO,aAAa,KAAK;AAAA,IACvB,IAAI;AAAA,IACJ;AAAA,EACF,CAAC;AACH;AAEO,MAAM,UAAU;",
6
6
  "names": ["em"]
7
7
  }
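The fulltext cancel route follows the same drain-then-unlock shape as the vector one: count and clear pending queue jobs when a queue is resolvable, then release the lock regardless of whether the drain succeeded. A sketch under those assumptions; the `cancelFulltextReindex` wrapper is invented for illustration.

```ts
import type { Queue } from '@open-mercato/queue'
import type { Kysely } from 'kysely'
import { clearReindexLock } from '../../../lib/reindex-lock'

// Sketch of the cancel flow: drain the queue if available, then clear the lock.
async function cancelFulltextReindex(
  db: Kysely<any>,
  tenantId: string,
  orgId: string | null,
  queue?: Queue,
) {
  let jobsRemoved = 0
  if (queue) {
    try {
      const counts = await queue.getJobCounts()
      jobsRemoved = counts.waiting + counts.active
      await queue.clear()
    } catch {
      // Queue clear failed - continue so the lock is still released.
    }
  }
  await clearReindexLock(db, tenantId, 'fulltext', orgId)
  return jobsRemoved
}
```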
@@ -57,8 +57,8 @@ async function POST(req) {
57
57
  const container = await createRequestContainer();
58
58
  const em = container.resolve("em");
59
59
  const progressService = container.resolve("progressService");
60
- const knex = em.getConnection().getKnex();
61
- const existingLock = await getReindexLockStatus(knex, tenantId, { type: "fulltext" });
60
+ const db = em.getKysely();
61
+ const existingLock = await getReindexLockStatus(db, tenantId, { type: "fulltext" });
62
62
  if (existingLock) {
63
63
  const startedAt = new Date(existingLock.startedAt);
64
64
  return NextResponse.json(
@@ -76,7 +76,7 @@ async function POST(req) {
76
76
  { status: 409 }
77
77
  );
78
78
  }
79
- const { acquired: lockAcquired } = await acquireReindexLock(knex, {
79
+ const { acquired: lockAcquired } = await acquireReindexLock(db, {
80
80
  type: "fulltext",
81
81
  action,
82
82
  tenantId,
@@ -357,7 +357,7 @@ async function POST(req) {
357
357
  );
358
358
  } finally {
359
359
  if (!useQueue) {
360
- await clearReindexLock(knex, tenantId, "fulltext", auth.orgId ?? null);
360
+ await clearReindexLock(db, tenantId, "fulltext", auth.orgId ?? null);
361
361
  }
362
362
  const disposable = container;
363
363
  if (typeof disposable.dispose === "function") {