@open-mercato/search 0.5.1-develop.2691.d8a0934b37 → 0.5.1-develop.2694.732417c5ec

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/di.js +9 -9
  2. package/dist/di.js.map +2 -2
  3. package/dist/lib/presenter-enricher.js +14 -14
  4. package/dist/lib/presenter-enricher.js.map +2 -2
  5. package/dist/modules/search/api/embeddings/reindex/cancel/route.js +2 -2
  6. package/dist/modules/search/api/embeddings/reindex/cancel/route.js.map +2 -2
  7. package/dist/modules/search/api/embeddings/reindex/route.js +3 -3
  8. package/dist/modules/search/api/embeddings/reindex/route.js.map +2 -2
  9. package/dist/modules/search/api/reindex/cancel/route.js +2 -2
  10. package/dist/modules/search/api/reindex/cancel/route.js.map +2 -2
  11. package/dist/modules/search/api/reindex/route.js +4 -4
  12. package/dist/modules/search/api/reindex/route.js.map +2 -2
  13. package/dist/modules/search/api/settings/route.js +3 -3
  14. package/dist/modules/search/api/settings/route.js.map +2 -2
  15. package/dist/modules/search/lib/reindex-lock.js +20 -17
  16. package/dist/modules/search/lib/reindex-lock.js.map +2 -2
  17. package/dist/modules/search/subscribers/fulltext_upsert.js +2 -2
  18. package/dist/modules/search/subscribers/fulltext_upsert.js.map +2 -2
  19. package/dist/modules/search/subscribers/vector_delete.js +2 -2
  20. package/dist/modules/search/subscribers/vector_delete.js.map +2 -2
  21. package/dist/modules/search/subscribers/vector_upsert.js +2 -2
  22. package/dist/modules/search/subscribers/vector_upsert.js.map +2 -2
  23. package/dist/modules/search/workers/fulltext-index.worker.js +7 -7
  24. package/dist/modules/search/workers/fulltext-index.worker.js.map +2 -2
  25. package/dist/modules/search/workers/vector-index.worker.js +7 -7
  26. package/dist/modules/search/workers/vector-index.worker.js.map +2 -2
  27. package/dist/strategies/token.strategy.js +15 -10
  28. package/dist/strategies/token.strategy.js.map +2 -2
  29. package/jest.config.cjs +4 -2
  30. package/package.json +4 -4
  31. package/src/__tests__/presenter-enricher.test.ts +17 -60
  32. package/src/__tests__/workers.test.ts +20 -21
  33. package/src/di.ts +22 -21
  34. package/src/lib/presenter-enricher.ts +21 -20
  35. package/src/modules/search/api/embeddings/reindex/cancel/route.ts +4 -3
  36. package/src/modules/search/api/embeddings/reindex/route.ts +5 -4
  37. package/src/modules/search/api/reindex/cancel/route.ts +4 -3
  38. package/src/modules/search/api/reindex/route.ts +5 -5
  39. package/src/modules/search/api/settings/route.ts +5 -4
  40. package/src/modules/search/lib/reindex-lock.ts +50 -32
  41. package/src/modules/search/subscribers/fulltext_upsert.ts +6 -2
  42. package/src/modules/search/subscribers/vector_delete.ts +6 -2
  43. package/src/modules/search/subscribers/vector_upsert.ts +6 -2
  44. package/src/modules/search/workers/fulltext-index.worker.ts +10 -9
  45. package/src/modules/search/workers/vector-index.worker.ts +10 -9
  46. package/src/strategies/token.strategy.ts +25 -19
package/dist/modules/search/workers/fulltext-index.worker.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../src/modules/search/workers/fulltext-index.worker.ts"],
- "sourcesContent": ["import type { QueuedJob, JobContext, WorkerMeta } from '@open-mercato/queue'\nimport { FULLTEXT_INDEXING_QUEUE_NAME, type FulltextIndexJobPayload } from '../../../queue/fulltext-indexing'\nimport type { FullTextSearchStrategy } from '../../../strategies/fulltext.strategy'\nimport type { SearchIndexer } from '../../../indexer/search-indexer'\nimport type { EntityManager } from '@mikro-orm/postgresql'\nimport type { Knex } from 'knex'\nimport type { EntityId } from '@open-mercato/shared/modules/entities'\nimport { recordIndexerLog } from '@open-mercato/shared/lib/indexers/status-log'\nimport { recordIndexerError } from '@open-mercato/shared/lib/indexers/error-log'\nimport type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'\nimport { searchDebug, searchDebugWarn, searchError } from '../../../lib/debug'\nimport { clearReindexLock, updateReindexProgress } from '../lib/reindex-lock'\nimport { incrementReindexProgress } from '../lib/reindex-progress'\n\n// Worker metadata for auto-discovery\nconst DEFAULT_CONCURRENCY = 2\nconst envConcurrency = process.env.WORKERS_FULLTEXT_INDEXING_CONCURRENCY\n\nexport const metadata: WorkerMeta = {\n queue: FULLTEXT_INDEXING_QUEUE_NAME,\n concurrency: envConcurrency ? parseInt(envConcurrency, 10) : DEFAULT_CONCURRENCY,\n}\n\ntype HandlerContext = { resolve: <T = unknown>(name: string) => T }\n\n/**\n * Process a fulltext indexing job.\n *\n * This handler processes single record indexing, batch indexing, deletion, and purge\n * operations for the fulltext search strategy.\n *\n * All indexing operations (single and batch) use searchIndexer.indexRecordById() to load\n * fresh data, ensuring consistency with the vector worker pattern.\n *\n * @param job - The queued job containing payload\n * @param jobCtx - Queue job context with job ID and attempt info\n * @param ctx - DI container context for resolving services\n */\nexport async function handleFulltextIndexJob(\n job: QueuedJob<FulltextIndexJobPayload>,\n jobCtx: JobContext,\n ctx: HandlerContext,\n): Promise<void> {\n const { jobType, tenantId } = job.payload\n\n if (!tenantId) {\n searchDebugWarn('fulltext-index.worker', 'Skipping job with missing tenantId', {\n jobId: jobCtx.jobId,\n jobType,\n })\n return\n }\n\n // Resolve EntityManager for logging and knex for database queries\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let em: any | null = null\n let knex: Knex | null = null\n try {\n em = ctx.resolve('em') as EntityManager\n knex = (em.getConnection() as unknown as { getKnex: () => Knex }).getKnex()\n } catch {\n em = null\n knex = null\n }\n\n // Resolve searchIndexer for loading fresh data\n let searchIndexer: SearchIndexer | undefined\n try {\n searchIndexer = ctx.resolve<SearchIndexer>('searchIndexer')\n } catch {\n searchDebugWarn('fulltext-index.worker', 'searchIndexer not available')\n }\n\n // Resolve fulltext strategy\n let fulltextStrategy: FullTextSearchStrategy | undefined\n try {\n const searchStrategies = ctx.resolve<unknown[]>('searchStrategies')\n fulltextStrategy = searchStrategies?.find(\n (s: unknown) => (s as { id?: string })?.id === 'fulltext',\n ) as FullTextSearchStrategy | undefined\n } catch {\n searchDebugWarn('fulltext-index.worker', 'searchStrategies not available')\n return\n }\n\n if (!fulltextStrategy) {\n searchDebugWarn('fulltext-index.worker', 'Fulltext strategy not configured')\n return\n }\n\n // Check if fulltext is available\n const isAvailable = await fulltextStrategy.isAvailable()\n 
if (!isAvailable) {\n throw new Error('Fulltext search is not available') // Will trigger retry\n }\n\n try {\n let progressService: ProgressService | null = null\n try {\n progressService = ctx.resolve<ProgressService>('progressService')\n } catch {\n progressService = null\n }\n\n // ========== SINGLE INDEX: Use searchIndexer.indexRecordById() for fresh data ==========\n if (jobType === 'index') {\n const { entityType, recordId, organizationId } = job.payload as {\n entityType: string\n recordId: string\n organizationId?: string | null\n }\n\n if (!entityType || !recordId) {\n searchDebugWarn('fulltext-index.worker', 'Skipping index with missing fields', {\n jobId: jobCtx.jobId,\n entityType,\n recordId,\n })\n return\n }\n\n if (!searchIndexer) {\n throw new Error('searchIndexer not available for single-record index')\n }\n\n const result = await searchIndexer.indexRecordById({\n entityId: entityType as EntityId,\n recordId,\n tenantId,\n organizationId,\n })\n\n searchDebug('fulltext-index.worker', 'Indexed single record to fulltext', {\n jobId: jobCtx.jobId,\n tenantId,\n entityType,\n recordId,\n action: result.action,\n })\n\n await recordIndexerLog(\n { em: em ?? undefined },\n {\n source: 'fulltext',\n handler: 'worker:fulltext:index',\n message: `Indexed record to fulltext (${result.action})`,\n entityType,\n recordId,\n tenantId,\n details: { jobId: jobCtx.jobId },\n },\n )\n return\n }\n\n // ========== BATCH-INDEX: Use searchIndexer.indexRecordById() for fresh data ==========\n if (jobType === 'batch-index') {\n const { records, organizationId } = job.payload\n if (!records || records.length === 0) {\n searchDebugWarn('fulltext-index.worker', 'Skipping batch-index with no records', {\n jobId: jobCtx.jobId,\n })\n return\n }\n\n if (!searchIndexer) {\n throw new Error('searchIndexer not available for batch indexing')\n }\n\n // Process each record using indexRecordById (same pattern as vector worker)\n let successCount = 0\n let failCount = 0\n\n for (const { entityId, recordId } of records) {\n try {\n const result = await searchIndexer.indexRecordById({\n entityId: entityId as EntityId,\n recordId,\n tenantId,\n organizationId,\n })\n if (result.action === 'indexed') {\n successCount++\n }\n } catch (error) {\n failCount++\n searchDebugWarn('fulltext-index.worker', 'Failed to index record in batch', {\n entityId,\n recordId,\n error: error instanceof Error ? error.message : error,\n })\n }\n }\n\n // Update heartbeat to signal worker is still processing\n if (knex && records.length > 0) {\n await updateReindexProgress(knex, tenantId, 'fulltext', successCount, organizationId ?? null)\n }\n if (progressService && em && records.length > 0) {\n const completed = await incrementReindexProgress({\n em,\n progressService,\n type: 'fulltext',\n tenantId,\n organizationId: organizationId ?? null,\n delta: successCount,\n })\n if (completed && knex) {\n await clearReindexLock(knex, tenantId, 'fulltext', organizationId ?? null)\n }\n }\n\n searchDebug('fulltext-index.worker', 'Batch indexed to fulltext', {\n jobId: jobCtx.jobId,\n tenantId,\n requestedCount: records.length,\n successCount,\n failCount,\n })\n\n await recordIndexerLog(\n { em: em ?? 
undefined },\n {\n source: 'fulltext',\n handler: 'worker:fulltext:batch-index',\n message: `Indexed ${successCount}/${records.length} records to fulltext`,\n tenantId,\n details: { jobId: jobCtx.jobId, requestedCount: records.length, successCount, failCount },\n },\n )\n return\n }\n\n // ========== DELETE ==========\n if (jobType === 'delete') {\n const { entityId, recordId } = job.payload\n if (!entityId || !recordId) {\n searchDebugWarn('fulltext-index.worker', 'Skipping delete with missing fields', {\n jobId: jobCtx.jobId,\n entityId,\n recordId,\n })\n return\n }\n\n await fulltextStrategy.delete(entityId, recordId, tenantId)\n\n searchDebug('fulltext-index.worker', 'Deleted from fulltext', {\n jobId: jobCtx.jobId,\n tenantId,\n entityId,\n recordId,\n })\n\n await recordIndexerLog(\n { em: em ?? undefined },\n {\n source: 'fulltext',\n handler: 'worker:fulltext:delete',\n message: `Deleted record from fulltext`,\n entityType: entityId,\n recordId,\n tenantId,\n details: { jobId: jobCtx.jobId },\n },\n )\n return\n }\n\n // ========== PURGE ==========\n if (jobType === 'purge') {\n const { entityId } = job.payload\n if (!entityId) {\n searchDebugWarn('fulltext-index.worker', 'Skipping purge with missing entityId', {\n jobId: jobCtx.jobId,\n })\n return\n }\n\n await fulltextStrategy.purge(entityId, tenantId)\n\n searchDebug('fulltext-index.worker', 'Purged entity from fulltext', {\n jobId: jobCtx.jobId,\n tenantId,\n entityId,\n })\n\n await recordIndexerLog(\n { em: em ?? undefined },\n {\n source: 'fulltext',\n handler: 'worker:fulltext:purge',\n message: `Purged entity from fulltext`,\n entityType: entityId,\n tenantId,\n details: { jobId: jobCtx.jobId },\n },\n )\n return\n }\n } catch (error) {\n searchError('fulltext-index.worker', `Failed to ${jobType}`, {\n jobId: jobCtx.jobId,\n tenantId,\n error: error instanceof Error ? error.message : error,\n attemptNumber: jobCtx.attemptNumber,\n })\n\n const entityId = 'entityId' in job.payload ? job.payload.entityId :\n 'entityType' in job.payload ? (job.payload as { entityType?: string }).entityType : undefined\n const recordId = 'recordId' in job.payload ? job.payload.recordId : undefined\n\n await recordIndexerError(\n { em: em ?? undefined },\n {\n source: 'fulltext',\n handler: `worker:fulltext:${jobType}`,\n error,\n entityType: entityId,\n recordId,\n tenantId,\n payload: job.payload,\n },\n )\n\n // Re-throw to let the queue handle retry logic\n throw error\n }\n}\n\n/**\n * Default export for worker auto-discovery.\n * Wraps handleFulltextIndexJob to match the expected handler signature.\n */\nexport default async function handle(\n job: QueuedJob<FulltextIndexJobPayload>,\n ctx: JobContext & HandlerContext\n): Promise<void> {\n return handleFulltextIndexJob(job, ctx, ctx)\n}\n"],
- "mappings": "AACA,SAAS,oCAAkE;AAM3E,SAAS,wBAAwB;AACjC,SAAS,0BAA0B;AAEnC,SAAS,aAAa,iBAAiB,mBAAmB;AAC1D,SAAS,kBAAkB,6BAA6B;AACxD,SAAS,gCAAgC;AAGzC,MAAM,sBAAsB;AAC5B,MAAM,iBAAiB,QAAQ,IAAI;AAE5B,MAAM,WAAuB;AAAA,EAClC,OAAO;AAAA,EACP,aAAa,iBAAiB,SAAS,gBAAgB,EAAE,IAAI;AAC/D;AAiBA,eAAsB,uBACpB,KACA,QACA,KACe;AACf,QAAM,EAAE,SAAS,SAAS,IAAI,IAAI;AAElC,MAAI,CAAC,UAAU;AACb,oBAAgB,yBAAyB,sCAAsC;AAAA,MAC7E,OAAO,OAAO;AAAA,MACd;AAAA,IACF,CAAC;AACD;AAAA,EACF;AAIA,MAAI,KAAiB;AACrB,MAAI,OAAoB;AACxB,MAAI;AACF,SAAK,IAAI,QAAQ,IAAI;AACrB,WAAQ,GAAG,cAAc,EAAyC,QAAQ;AAAA,EAC5E,QAAQ;AACN,SAAK;AACL,WAAO;AAAA,EACT;AAGA,MAAI;AACJ,MAAI;AACF,oBAAgB,IAAI,QAAuB,eAAe;AAAA,EAC5D,QAAQ;AACN,oBAAgB,yBAAyB,6BAA6B;AAAA,EACxE;AAGA,MAAI;AACJ,MAAI;AACF,UAAM,mBAAmB,IAAI,QAAmB,kBAAkB;AAClE,uBAAmB,kBAAkB;AAAA,MACnC,CAAC,MAAgB,GAAuB,OAAO;AAAA,IACjD;AAAA,EACF,QAAQ;AACN,oBAAgB,yBAAyB,gCAAgC;AACzE;AAAA,EACF;AAEA,MAAI,CAAC,kBAAkB;AACrB,oBAAgB,yBAAyB,kCAAkC;AAC3E;AAAA,EACF;AAGA,QAAM,cAAc,MAAM,iBAAiB,YAAY;AACvD,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,kCAAkC;AAAA,EACpD;AAEA,MAAI;AACF,QAAI,kBAA0C;AAC9C,QAAI;AACF,wBAAkB,IAAI,QAAyB,iBAAiB;AAAA,IAClE,QAAQ;AACN,wBAAkB;AAAA,IACpB;AAGA,QAAI,YAAY,SAAS;AACvB,YAAM,EAAE,YAAY,UAAU,eAAe,IAAI,IAAI;AAMrD,UAAI,CAAC,cAAc,CAAC,UAAU;AAC5B,wBAAgB,yBAAyB,sCAAsC;AAAA,UAC7E,OAAO,OAAO;AAAA,UACd;AAAA,UACA;AAAA,QACF,CAAC;AACD;AAAA,MACF;AAEA,UAAI,CAAC,eAAe;AAClB,cAAM,IAAI,MAAM,qDAAqD;AAAA,MACvE;AAEA,YAAM,SAAS,MAAM,cAAc,gBAAgB;AAAA,QACjD,UAAU;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAED,kBAAY,yBAAyB,qCAAqC;AAAA,QACxE,OAAO,OAAO;AAAA,QACd;AAAA,QACA;AAAA,QACA;AAAA,QACA,QAAQ,OAAO;AAAA,MACjB,CAAC;AAED,YAAM;AAAA,QACJ,EAAE,IAAI,MAAM,OAAU;AAAA,QACtB;AAAA,UACE,QAAQ;AAAA,UACR,SAAS;AAAA,UACT,SAAS,+BAA+B,OAAO,MAAM;AAAA,UACrD;AAAA,UACA;AAAA,UACA;AAAA,UACA,SAAS,EAAE,OAAO,OAAO,MAAM;AAAA,QACjC;AAAA,MACF;AACA;AAAA,IACF;AAGA,QAAI,YAAY,eAAe;AAC7B,YAAM,EAAE,SAAS,eAAe,IAAI,IAAI;AACxC,UAAI,CAAC,WAAW,QAAQ,WAAW,GAAG;AACpC,wBAAgB,yBAAyB,wCAAwC;AAAA,UAC/E,OAAO,OAAO;AAAA,QAChB,CAAC;AACD;AAAA,MACF;AAEA,UAAI,CAAC,eAAe;AAClB,cAAM,IAAI,MAAM,gDAAgD;AAAA,MAClE;AAGA,UAAI,eAAe;AACnB,UAAI,YAAY;AAEhB,iBAAW,EAAE,UAAU,SAAS,KAAK,SAAS;AAC5C,YAAI;AACF,gBAAM,SAAS,MAAM,cAAc,gBAAgB;AAAA,YACjD;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AACD,cAAI,OAAO,WAAW,WAAW;AAC/B;AAAA,UACF;AAAA,QACF,SAAS,OAAO;AACd;AACA,0BAAgB,yBAAyB,mCAAmC;AAAA,YAC1E;AAAA,YACA;AAAA,YACA,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,UAClD,CAAC;AAAA,QACH;AAAA,MACF;AAGA,UAAI,QAAQ,QAAQ,SAAS,GAAG;AAC9B,cAAM,sBAAsB,MAAM,UAAU,YAAY,cAAc,kBAAkB,IAAI;AAAA,MAC9F;AACA,UAAI,mBAAmB,MAAM,QAAQ,SAAS,GAAG;AAC/C,cAAM,YAAY,MAAM,yBAAyB;AAAA,UAC/C;AAAA,UACA;AAAA,UACA,MAAM;AAAA,UACN;AAAA,UACA,gBAAgB,kBAAkB;AAAA,UAClC,OAAO;AAAA,QACT,CAAC;AACD,YAAI,aAAa,MAAM;AACrB,gBAAM,iBAAiB,MAAM,UAAU,YAAY,kBAAkB,IAAI;AAAA,QAC3E;AAAA,MACF;AAEA,kBAAY,yBAAyB,6BAA6B;AAAA,QAChE,OAAO,OAAO;AAAA,QACd;AAAA,QACA,gBAAgB,QAAQ;AAAA,QACxB;AAAA,QACA;AAAA,MACF,CAAC;AAED,YAAM;AAAA,QACJ,EAAE,IAAI,MAAM,OAAU;AAAA,QACtB;AAAA,UACE,QAAQ;AAAA,UACR,SAAS;AAAA,UACT,SAAS,WAAW,YAAY,IAAI,QAAQ,MAAM;AAAA,UAClD;AAAA,UACA,SAAS,EAAE,OAAO,OAAO,OAAO,gBAAgB,QAAQ,QAAQ,cAAc,UAAU;AAAA,QAC1F;AAAA,MACF;AACA;AAAA,IACF;AAGA,QAAI,YAAY,UAAU;AACxB,YAAM,EAAE,UAAU,SAAS,IAAI,IAAI;AACnC,UAAI,CAAC,YAAY,CAAC,UAAU;AAC1B,wBAAgB,yBAAyB,uCAAuC;AAAA,UAC9E,OAAO,OAAO;AAAA,UACd;AAAA,UACA;AAAA,QACF,CAAC;AACD;AAAA,MACF;AAEA,YAAM,iBAAiB,OAAO,UAAU,UAAU,QAAQ;AAE1D,kBAAY,yBAAyB,yBAAyB;AAAA,QAC5D,OAAO,OAAO;AAAA,QACd;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAED,YAAM;AAAA,QACJ,EAAE,IAAI,MAAM,OAAU;AAAA,QACtB;AAAA,UACE,QAAQ;AAAA,UACR,SAAS;AAAA,UACT,SAAS;AAAA,UACT,YAAY;AAAA,UACZ;AAAA,UACA;AAAA,UACA,SAAS,EAAE,OAAO,OAAO,MAAM
;AAAA,QACjC;AAAA,MACF;AACA;AAAA,IACF;AAGA,QAAI,YAAY,SAAS;AACvB,YAAM,EAAE,SAAS,IAAI,IAAI;AACzB,UAAI,CAAC,UAAU;AACb,wBAAgB,yBAAyB,wCAAwC;AAAA,UAC/E,OAAO,OAAO;AAAA,QAChB,CAAC;AACD;AAAA,MACF;AAEA,YAAM,iBAAiB,MAAM,UAAU,QAAQ;AAE/C,kBAAY,yBAAyB,+BAA+B;AAAA,QAClE,OAAO,OAAO;AAAA,QACd;AAAA,QACA;AAAA,MACF,CAAC;AAED,YAAM;AAAA,QACJ,EAAE,IAAI,MAAM,OAAU;AAAA,QACtB;AAAA,UACE,QAAQ;AAAA,UACR,SAAS;AAAA,UACT,SAAS;AAAA,UACT,YAAY;AAAA,UACZ;AAAA,UACA,SAAS,EAAE,OAAO,OAAO,MAAM;AAAA,QACjC;AAAA,MACF;AACA;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,gBAAY,yBAAyB,aAAa,OAAO,IAAI;AAAA,MAC3D,OAAO,OAAO;AAAA,MACd;AAAA,MACA,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,MAChD,eAAe,OAAO;AAAA,IACxB,CAAC;AAED,UAAM,WAAW,cAAc,IAAI,UAAU,IAAI,QAAQ,WACxC,gBAAgB,IAAI,UAAW,IAAI,QAAoC,aAAa;AACrG,UAAM,WAAW,cAAc,IAAI,UAAU,IAAI,QAAQ,WAAW;AAEpE,UAAM;AAAA,MACJ,EAAE,IAAI,MAAM,OAAU;AAAA,MACtB;AAAA,QACE,QAAQ;AAAA,QACR,SAAS,mBAAmB,OAAO;AAAA,QACnC;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA;AAAA,QACA,SAAS,IAAI;AAAA,MACf;AAAA,IACF;AAGA,UAAM;AAAA,EACR;AACF;AAMA,eAAO,OACL,KACA,KACe;AACf,SAAO,uBAAuB,KAAK,KAAK,GAAG;AAC7C;",
+ "sourcesContent": ["import type { QueuedJob, JobContext, WorkerMeta } from '@open-mercato/queue'\nimport type { Kysely } from 'kysely'\nimport { FULLTEXT_INDEXING_QUEUE_NAME, type FulltextIndexJobPayload } from '../../../queue/fulltext-indexing'\nimport type { FullTextSearchStrategy } from '../../../strategies/fulltext.strategy'\nimport type { SearchIndexer } from '../../../indexer/search-indexer'\nimport type { EntityManager } from '@mikro-orm/postgresql'\n\nimport type { EntityId } from '@open-mercato/shared/modules/entities'\nimport { recordIndexerLog } from '@open-mercato/shared/lib/indexers/status-log'\nimport { recordIndexerError } from '@open-mercato/shared/lib/indexers/error-log'\nimport type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'\nimport { searchDebug, searchDebugWarn, searchError } from '../../../lib/debug'\nimport { clearReindexLock, updateReindexProgress } from '../lib/reindex-lock'\nimport { incrementReindexProgress } from '../lib/reindex-progress'\n\n// Worker metadata for auto-discovery\nconst DEFAULT_CONCURRENCY = 2\nconst envConcurrency = process.env.WORKERS_FULLTEXT_INDEXING_CONCURRENCY\n\nexport const metadata: WorkerMeta = {\n queue: FULLTEXT_INDEXING_QUEUE_NAME,\n concurrency: envConcurrency ? parseInt(envConcurrency, 10) : DEFAULT_CONCURRENCY,\n}\n\ntype HandlerContext = { resolve: <T = unknown>(name: string) => T }\n\n/**\n * Process a fulltext indexing job.\n *\n * This handler processes single record indexing, batch indexing, deletion, and purge\n * operations for the fulltext search strategy.\n *\n * All indexing operations (single and batch) use searchIndexer.indexRecordById() to load\n * fresh data, ensuring consistency with the vector worker pattern.\n *\n * @param job - The queued job containing payload\n * @param jobCtx - Queue job context with job ID and attempt info\n * @param ctx - DI container context for resolving services\n */\nexport async function handleFulltextIndexJob(\n job: QueuedJob<FulltextIndexJobPayload>,\n jobCtx: JobContext,\n ctx: HandlerContext,\n): Promise<void> {\n const { jobType, tenantId } = job.payload\n\n if (!tenantId) {\n searchDebugWarn('fulltext-index.worker', 'Skipping job with missing tenantId', {\n jobId: jobCtx.jobId,\n jobType,\n })\n return\n }\n\n // Resolve EntityManager for logging and Kysely for database queries\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let em: any | null = null\n let db: Kysely<any> | null = null\n try {\n em = ctx.resolve('em') as EntityManager\n db = (em as unknown as { getKysely: () => Kysely<any> }).getKysely()\n } catch {\n em = null\n db = null\n }\n\n // Resolve searchIndexer for loading fresh data\n let searchIndexer: SearchIndexer | undefined\n try {\n searchIndexer = ctx.resolve<SearchIndexer>('searchIndexer')\n } catch {\n searchDebugWarn('fulltext-index.worker', 'searchIndexer not available')\n }\n\n // Resolve fulltext strategy\n let fulltextStrategy: FullTextSearchStrategy | undefined\n try {\n const searchStrategies = ctx.resolve<unknown[]>('searchStrategies')\n fulltextStrategy = searchStrategies?.find(\n (s: unknown) => (s as { id?: string })?.id === 'fulltext',\n ) as FullTextSearchStrategy | undefined\n } catch {\n searchDebugWarn('fulltext-index.worker', 'searchStrategies not available')\n return\n }\n\n if (!fulltextStrategy) {\n searchDebugWarn('fulltext-index.worker', 'Fulltext strategy not configured')\n return\n }\n\n // Check if fulltext is available\n const isAvailable = await 
fulltextStrategy.isAvailable()\n if (!isAvailable) {\n throw new Error('Fulltext search is not available') // Will trigger retry\n }\n\n try {\n let progressService: ProgressService | null = null\n try {\n progressService = ctx.resolve<ProgressService>('progressService')\n } catch {\n progressService = null\n }\n\n // ========== SINGLE INDEX: Use searchIndexer.indexRecordById() for fresh data ==========\n if (jobType === 'index') {\n const { entityType, recordId, organizationId } = job.payload as {\n entityType: string\n recordId: string\n organizationId?: string | null\n }\n\n if (!entityType || !recordId) {\n searchDebugWarn('fulltext-index.worker', 'Skipping index with missing fields', {\n jobId: jobCtx.jobId,\n entityType,\n recordId,\n })\n return\n }\n\n if (!searchIndexer) {\n throw new Error('searchIndexer not available for single-record index')\n }\n\n const result = await searchIndexer.indexRecordById({\n entityId: entityType as EntityId,\n recordId,\n tenantId,\n organizationId,\n })\n\n searchDebug('fulltext-index.worker', 'Indexed single record to fulltext', {\n jobId: jobCtx.jobId,\n tenantId,\n entityType,\n recordId,\n action: result.action,\n })\n\n await recordIndexerLog(\n { em: em ?? undefined },\n {\n source: 'fulltext',\n handler: 'worker:fulltext:index',\n message: `Indexed record to fulltext (${result.action})`,\n entityType,\n recordId,\n tenantId,\n details: { jobId: jobCtx.jobId },\n },\n )\n return\n }\n\n // ========== BATCH-INDEX: Use searchIndexer.indexRecordById() for fresh data ==========\n if (jobType === 'batch-index') {\n const { records, organizationId } = job.payload\n if (!records || records.length === 0) {\n searchDebugWarn('fulltext-index.worker', 'Skipping batch-index with no records', {\n jobId: jobCtx.jobId,\n })\n return\n }\n\n if (!searchIndexer) {\n throw new Error('searchIndexer not available for batch indexing')\n }\n\n // Process each record using indexRecordById (same pattern as vector worker)\n let successCount = 0\n let failCount = 0\n\n for (const { entityId, recordId } of records) {\n try {\n const result = await searchIndexer.indexRecordById({\n entityId: entityId as EntityId,\n recordId,\n tenantId,\n organizationId,\n })\n if (result.action === 'indexed') {\n successCount++\n }\n } catch (error) {\n failCount++\n searchDebugWarn('fulltext-index.worker', 'Failed to index record in batch', {\n entityId,\n recordId,\n error: error instanceof Error ? error.message : error,\n })\n }\n }\n\n // Update heartbeat to signal worker is still processing\n if (db && records.length > 0) {\n await updateReindexProgress(db, tenantId, 'fulltext', successCount, organizationId ?? null)\n }\n if (progressService && em && records.length > 0) {\n const completed = await incrementReindexProgress({\n em,\n progressService,\n type: 'fulltext',\n tenantId,\n organizationId: organizationId ?? null,\n delta: successCount,\n })\n if (completed && db) {\n await clearReindexLock(db, tenantId, 'fulltext', organizationId ?? null)\n }\n }\n\n searchDebug('fulltext-index.worker', 'Batch indexed to fulltext', {\n jobId: jobCtx.jobId,\n tenantId,\n requestedCount: records.length,\n successCount,\n failCount,\n })\n\n await recordIndexerLog(\n { em: em ?? 
undefined },\n {\n source: 'fulltext',\n handler: 'worker:fulltext:batch-index',\n message: `Indexed ${successCount}/${records.length} records to fulltext`,\n tenantId,\n details: { jobId: jobCtx.jobId, requestedCount: records.length, successCount, failCount },\n },\n )\n return\n }\n\n // ========== DELETE ==========\n if (jobType === 'delete') {\n const { entityId, recordId } = job.payload\n if (!entityId || !recordId) {\n searchDebugWarn('fulltext-index.worker', 'Skipping delete with missing fields', {\n jobId: jobCtx.jobId,\n entityId,\n recordId,\n })\n return\n }\n\n await fulltextStrategy.delete(entityId, recordId, tenantId)\n\n searchDebug('fulltext-index.worker', 'Deleted from fulltext', {\n jobId: jobCtx.jobId,\n tenantId,\n entityId,\n recordId,\n })\n\n await recordIndexerLog(\n { em: em ?? undefined },\n {\n source: 'fulltext',\n handler: 'worker:fulltext:delete',\n message: `Deleted record from fulltext`,\n entityType: entityId,\n recordId,\n tenantId,\n details: { jobId: jobCtx.jobId },\n },\n )\n return\n }\n\n // ========== PURGE ==========\n if (jobType === 'purge') {\n const { entityId } = job.payload\n if (!entityId) {\n searchDebugWarn('fulltext-index.worker', 'Skipping purge with missing entityId', {\n jobId: jobCtx.jobId,\n })\n return\n }\n\n await fulltextStrategy.purge(entityId, tenantId)\n\n searchDebug('fulltext-index.worker', 'Purged entity from fulltext', {\n jobId: jobCtx.jobId,\n tenantId,\n entityId,\n })\n\n await recordIndexerLog(\n { em: em ?? undefined },\n {\n source: 'fulltext',\n handler: 'worker:fulltext:purge',\n message: `Purged entity from fulltext`,\n entityType: entityId,\n tenantId,\n details: { jobId: jobCtx.jobId },\n },\n )\n return\n }\n } catch (error) {\n searchError('fulltext-index.worker', `Failed to ${jobType}`, {\n jobId: jobCtx.jobId,\n tenantId,\n error: error instanceof Error ? error.message : error,\n attemptNumber: jobCtx.attemptNumber,\n })\n\n const entityId = 'entityId' in job.payload ? job.payload.entityId :\n 'entityType' in job.payload ? (job.payload as { entityType?: string }).entityType : undefined\n const recordId = 'recordId' in job.payload ? job.payload.recordId : undefined\n\n await recordIndexerError(\n { em: em ?? undefined },\n {\n source: 'fulltext',\n handler: `worker:fulltext:${jobType}`,\n error,\n entityType: entityId,\n recordId,\n tenantId,\n payload: job.payload,\n },\n )\n\n // Re-throw to let the queue handle retry logic\n throw error\n }\n}\n\n/**\n * Default export for worker auto-discovery.\n * Wraps handleFulltextIndexJob to match the expected handler signature.\n */\nexport default async function handle(\n job: QueuedJob<FulltextIndexJobPayload>,\n ctx: JobContext & HandlerContext\n): Promise<void> {\n return handleFulltextIndexJob(job, ctx, ctx)\n}\n"],
+ "mappings": "AAEA,SAAS,oCAAkE;AAM3E,SAAS,wBAAwB;AACjC,SAAS,0BAA0B;AAEnC,SAAS,aAAa,iBAAiB,mBAAmB;AAC1D,SAAS,kBAAkB,6BAA6B;AACxD,SAAS,gCAAgC;AAGzC,MAAM,sBAAsB;AAC5B,MAAM,iBAAiB,QAAQ,IAAI;AAE5B,MAAM,WAAuB;AAAA,EAClC,OAAO;AAAA,EACP,aAAa,iBAAiB,SAAS,gBAAgB,EAAE,IAAI;AAC/D;AAiBA,eAAsB,uBACpB,KACA,QACA,KACe;AACf,QAAM,EAAE,SAAS,SAAS,IAAI,IAAI;AAElC,MAAI,CAAC,UAAU;AACb,oBAAgB,yBAAyB,sCAAsC;AAAA,MAC7E,OAAO,OAAO;AAAA,MACd;AAAA,IACF,CAAC;AACD;AAAA,EACF;AAIA,MAAI,KAAiB;AACrB,MAAI,KAAyB;AAC7B,MAAI;AACF,SAAK,IAAI,QAAQ,IAAI;AACrB,SAAM,GAAmD,UAAU;AAAA,EACrE,QAAQ;AACN,SAAK;AACL,SAAK;AAAA,EACP;AAGA,MAAI;AACJ,MAAI;AACF,oBAAgB,IAAI,QAAuB,eAAe;AAAA,EAC5D,QAAQ;AACN,oBAAgB,yBAAyB,6BAA6B;AAAA,EACxE;AAGA,MAAI;AACJ,MAAI;AACF,UAAM,mBAAmB,IAAI,QAAmB,kBAAkB;AAClE,uBAAmB,kBAAkB;AAAA,MACnC,CAAC,MAAgB,GAAuB,OAAO;AAAA,IACjD;AAAA,EACF,QAAQ;AACN,oBAAgB,yBAAyB,gCAAgC;AACzE;AAAA,EACF;AAEA,MAAI,CAAC,kBAAkB;AACrB,oBAAgB,yBAAyB,kCAAkC;AAC3E;AAAA,EACF;AAGA,QAAM,cAAc,MAAM,iBAAiB,YAAY;AACvD,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,kCAAkC;AAAA,EACpD;AAEA,MAAI;AACF,QAAI,kBAA0C;AAC9C,QAAI;AACF,wBAAkB,IAAI,QAAyB,iBAAiB;AAAA,IAClE,QAAQ;AACN,wBAAkB;AAAA,IACpB;AAGA,QAAI,YAAY,SAAS;AACvB,YAAM,EAAE,YAAY,UAAU,eAAe,IAAI,IAAI;AAMrD,UAAI,CAAC,cAAc,CAAC,UAAU;AAC5B,wBAAgB,yBAAyB,sCAAsC;AAAA,UAC7E,OAAO,OAAO;AAAA,UACd;AAAA,UACA;AAAA,QACF,CAAC;AACD;AAAA,MACF;AAEA,UAAI,CAAC,eAAe;AAClB,cAAM,IAAI,MAAM,qDAAqD;AAAA,MACvE;AAEA,YAAM,SAAS,MAAM,cAAc,gBAAgB;AAAA,QACjD,UAAU;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAED,kBAAY,yBAAyB,qCAAqC;AAAA,QACxE,OAAO,OAAO;AAAA,QACd;AAAA,QACA;AAAA,QACA;AAAA,QACA,QAAQ,OAAO;AAAA,MACjB,CAAC;AAED,YAAM;AAAA,QACJ,EAAE,IAAI,MAAM,OAAU;AAAA,QACtB;AAAA,UACE,QAAQ;AAAA,UACR,SAAS;AAAA,UACT,SAAS,+BAA+B,OAAO,MAAM;AAAA,UACrD;AAAA,UACA;AAAA,UACA;AAAA,UACA,SAAS,EAAE,OAAO,OAAO,MAAM;AAAA,QACjC;AAAA,MACF;AACA;AAAA,IACF;AAGA,QAAI,YAAY,eAAe;AAC7B,YAAM,EAAE,SAAS,eAAe,IAAI,IAAI;AACxC,UAAI,CAAC,WAAW,QAAQ,WAAW,GAAG;AACpC,wBAAgB,yBAAyB,wCAAwC;AAAA,UAC/E,OAAO,OAAO;AAAA,QAChB,CAAC;AACD;AAAA,MACF;AAEA,UAAI,CAAC,eAAe;AAClB,cAAM,IAAI,MAAM,gDAAgD;AAAA,MAClE;AAGA,UAAI,eAAe;AACnB,UAAI,YAAY;AAEhB,iBAAW,EAAE,UAAU,SAAS,KAAK,SAAS;AAC5C,YAAI;AACF,gBAAM,SAAS,MAAM,cAAc,gBAAgB;AAAA,YACjD;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AACD,cAAI,OAAO,WAAW,WAAW;AAC/B;AAAA,UACF;AAAA,QACF,SAAS,OAAO;AACd;AACA,0BAAgB,yBAAyB,mCAAmC;AAAA,YAC1E;AAAA,YACA;AAAA,YACA,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,UAClD,CAAC;AAAA,QACH;AAAA,MACF;AAGA,UAAI,MAAM,QAAQ,SAAS,GAAG;AAC5B,cAAM,sBAAsB,IAAI,UAAU,YAAY,cAAc,kBAAkB,IAAI;AAAA,MAC5F;AACA,UAAI,mBAAmB,MAAM,QAAQ,SAAS,GAAG;AAC/C,cAAM,YAAY,MAAM,yBAAyB;AAAA,UAC/C;AAAA,UACA;AAAA,UACA,MAAM;AAAA,UACN;AAAA,UACA,gBAAgB,kBAAkB;AAAA,UAClC,OAAO;AAAA,QACT,CAAC;AACD,YAAI,aAAa,IAAI;AACnB,gBAAM,iBAAiB,IAAI,UAAU,YAAY,kBAAkB,IAAI;AAAA,QACzE;AAAA,MACF;AAEA,kBAAY,yBAAyB,6BAA6B;AAAA,QAChE,OAAO,OAAO;AAAA,QACd;AAAA,QACA,gBAAgB,QAAQ;AAAA,QACxB;AAAA,QACA;AAAA,MACF,CAAC;AAED,YAAM;AAAA,QACJ,EAAE,IAAI,MAAM,OAAU;AAAA,QACtB;AAAA,UACE,QAAQ;AAAA,UACR,SAAS;AAAA,UACT,SAAS,WAAW,YAAY,IAAI,QAAQ,MAAM;AAAA,UAClD;AAAA,UACA,SAAS,EAAE,OAAO,OAAO,OAAO,gBAAgB,QAAQ,QAAQ,cAAc,UAAU;AAAA,QAC1F;AAAA,MACF;AACA;AAAA,IACF;AAGA,QAAI,YAAY,UAAU;AACxB,YAAM,EAAE,UAAU,SAAS,IAAI,IAAI;AACnC,UAAI,CAAC,YAAY,CAAC,UAAU;AAC1B,wBAAgB,yBAAyB,uCAAuC;AAAA,UAC9E,OAAO,OAAO;AAAA,UACd;AAAA,UACA;AAAA,QACF,CAAC;AACD;AAAA,MACF;AAEA,YAAM,iBAAiB,OAAO,UAAU,UAAU,QAAQ;AAE1D,kBAAY,yBAAyB,yBAAyB;AAAA,QAC5D,OAAO,OAAO;AAAA,QACd;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAED,YAAM;AAAA,QACJ,EAAE,IAAI,MAAM,OAAU;AAAA,QACtB;AAAA,UACE,QAAQ;AAAA,UACR,SAAS;AAAA,UACT,SAAS;AAAA,UACT,YAAY;AAAA,UACZ;AAAA,UACA;AAAA,UACA,SAAS,EAAE,OAAO,OAAO,MAAM;AAAA,QACj
C;AAAA,MACF;AACA;AAAA,IACF;AAGA,QAAI,YAAY,SAAS;AACvB,YAAM,EAAE,SAAS,IAAI,IAAI;AACzB,UAAI,CAAC,UAAU;AACb,wBAAgB,yBAAyB,wCAAwC;AAAA,UAC/E,OAAO,OAAO;AAAA,QAChB,CAAC;AACD;AAAA,MACF;AAEA,YAAM,iBAAiB,MAAM,UAAU,QAAQ;AAE/C,kBAAY,yBAAyB,+BAA+B;AAAA,QAClE,OAAO,OAAO;AAAA,QACd;AAAA,QACA;AAAA,MACF,CAAC;AAED,YAAM;AAAA,QACJ,EAAE,IAAI,MAAM,OAAU;AAAA,QACtB;AAAA,UACE,QAAQ;AAAA,UACR,SAAS;AAAA,UACT,SAAS;AAAA,UACT,YAAY;AAAA,UACZ;AAAA,UACA,SAAS,EAAE,OAAO,OAAO,MAAM;AAAA,QACjC;AAAA,MACF;AACA;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,gBAAY,yBAAyB,aAAa,OAAO,IAAI;AAAA,MAC3D,OAAO,OAAO;AAAA,MACd;AAAA,MACA,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,MAChD,eAAe,OAAO;AAAA,IACxB,CAAC;AAED,UAAM,WAAW,cAAc,IAAI,UAAU,IAAI,QAAQ,WACxC,gBAAgB,IAAI,UAAW,IAAI,QAAoC,aAAa;AACrG,UAAM,WAAW,cAAc,IAAI,UAAU,IAAI,QAAQ,WAAW;AAEpE,UAAM;AAAA,MACJ,EAAE,IAAI,MAAM,OAAU;AAAA,MACtB;AAAA,QACE,QAAQ;AAAA,QACR,SAAS,mBAAmB,OAAO;AAAA,QACnC;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,QACA;AAAA,QACA,SAAS,IAAI;AAAA,MACf;AAAA,IACF;AAGA,UAAM;AAAA,EACR;AACF;AAMA,eAAO,OACL,KACA,KACe;AACf,SAAO,uBAAuB,KAAK,KAAK,GAAG;AAC7C;",
  "names": []
  }
package/dist/modules/search/workers/vector-index.worker.js
@@ -31,13 +31,13 @@ async function handleVectorIndexJob(job, jobCtx, ctx) {
  searchDebugWarn("vector-index.worker", "searchIndexer not available");
  return;
  }
- let knex = null;
+ let db = null;
  let em2 = null;
  try {
  em2 = ctx.resolve("em");
- knex = em2.getConnection().getKnex();
+ db = em2.getKysely();
  } catch {
- knex = null;
+ db = null;
  em2 = null;
  }
  let progressService = null;
@@ -79,8 +79,8 @@ async function handleVectorIndexJob(job, jobCtx, ctx) {
  });
  }
  }
- if (knex && records.length > 0) {
- await updateReindexProgress(knex, tenantId, "vector", successCount, organizationId ?? null);
+ if (db && records.length > 0) {
+ await updateReindexProgress(db, tenantId, "vector", successCount, organizationId ?? null);
  }
  if (progressService && em2 && records.length > 0) {
  const completed = await incrementReindexProgress({
@@ -91,8 +91,8 @@ async function handleVectorIndexJob(job, jobCtx, ctx) {
  organizationId: organizationId ?? null,
  delta: successCount
  });
- if (completed && knex) {
- await clearReindexLock(knex, tenantId, "vector", organizationId ?? null);
+ if (completed && db) {
+ await clearReindexLock(db, tenantId, "vector", organizationId ?? null);
  }
  }
  searchDebugWarn("vector-index.worker", "Batch-index job completed", {
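
Across the worker diffs the database handle changes in the same way: the Knex connection obtained through em.getConnection().getKnex() is replaced by a Kysely handle obtained through em.getKysely(), and that handle is what updateReindexProgress and clearReindexLock now receive. A minimal sketch of that resolution step, assuming the DI resolve signature shown in the embedded source (the resolveDb name itself is illustrative):

    import type { Kysely } from 'kysely'
    import type { EntityManager } from '@mikro-orm/postgresql'

    // Mirrors the worker pattern: resolve 'em' from the DI container and cast it to
    // expose getKysely(); fall back to null when the container cannot provide it,
    // as the workers' catch blocks do.
    function resolveDb(resolve: <T = unknown>(name: string) => T): Kysely<any> | null {
      try {
        const em = resolve<EntityManager>('em')
        return (em as unknown as { getKysely: () => Kysely<any> }).getKysely()
      } catch {
        return null
      }
    }
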
package/dist/modules/search/workers/vector-index.worker.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../src/modules/search/workers/vector-index.worker.ts"],
- "sourcesContent": ["import type { QueuedJob, JobContext, WorkerMeta } from '@open-mercato/queue'\nimport { VECTOR_INDEXING_QUEUE_NAME, type VectorIndexJobPayload } from '../../../queue/vector-indexing'\nimport type { SearchIndexer } from '../../../indexer/search-indexer'\nimport type { EmbeddingService } from '../../../vector'\nimport type { EntityManager } from '@mikro-orm/postgresql'\nimport type { Knex } from 'knex'\nimport type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'\nimport { recordIndexerError } from '@open-mercato/shared/lib/indexers/error-log'\nimport { applyCoverageAdjustments, createCoverageAdjustments } from '@open-mercato/core/modules/query_index/lib/coverage'\nimport { logVectorOperation } from '../../../vector/lib/vector-logs'\nimport { resolveAutoIndexingEnabled } from '../lib/auto-indexing'\nimport { resolveEmbeddingConfig } from '../lib/embedding-config'\nimport { searchDebugWarn } from '../../../lib/debug'\nimport { clearReindexLock, updateReindexProgress } from '../lib/reindex-lock'\nimport { incrementReindexProgress } from '../lib/reindex-progress'\n\n// Worker metadata for auto-discovery\nconst DEFAULT_CONCURRENCY = 2\nconst envConcurrency = process.env.WORKERS_VECTOR_INDEXING_CONCURRENCY\n\nexport const metadata: WorkerMeta = {\n queue: VECTOR_INDEXING_QUEUE_NAME,\n concurrency: envConcurrency ? parseInt(envConcurrency, 10) : DEFAULT_CONCURRENCY,\n}\n\ntype HandlerContext = { resolve: <T = unknown>(name: string) => T }\n\n/**\n * Process a vector index job.\n *\n * This handler is called by the queue worker to process indexing and deletion jobs.\n * It uses SearchIndexer to load records and index them via SearchService.\n *\n * @param job - The queued job containing payload\n * @param jobCtx - Queue job context with job ID and attempt info\n * @param ctx - DI container context for resolving services\n */\nexport async function handleVectorIndexJob(\n job: QueuedJob<VectorIndexJobPayload>,\n jobCtx: JobContext,\n ctx: HandlerContext,\n): Promise<void> {\n const { jobType, entityType, recordId, tenantId, organizationId, records } = job.payload\n\n // Handle batch-index jobs (from reindex operations)\n if (jobType === 'batch-index') {\n if (!records?.length || !tenantId) {\n searchDebugWarn('vector-index.worker', 'Skipping batch-index job with missing required fields', {\n jobId: jobCtx.jobId,\n recordCount: records?.length ?? 
0,\n tenantId,\n })\n return\n }\n\n let searchIndexer: SearchIndexer\n try {\n searchIndexer = ctx.resolve<SearchIndexer>('searchIndexer')\n } catch {\n searchDebugWarn('vector-index.worker', 'searchIndexer not available')\n return\n }\n\n // Get knex for heartbeat updates\n let knex: Knex | null = null\n let em: EntityManager | null = null\n try {\n em = ctx.resolve('em') as EntityManager\n knex = (em.getConnection() as unknown as { getKnex: () => Knex }).getKnex()\n } catch {\n knex = null\n em = null\n }\n\n let progressService: ProgressService | null = null\n try {\n progressService = ctx.resolve<ProgressService>('progressService')\n } catch {\n progressService = null\n }\n\n // Load saved embedding config to use the correct provider/model\n try {\n const embeddingConfig = await resolveEmbeddingConfig(ctx, { defaultValue: null })\n if (embeddingConfig) {\n const embeddingService = ctx.resolve<EmbeddingService>('vectorEmbeddingService')\n embeddingService.updateConfig(embeddingConfig)\n }\n } catch (configErr) {\n searchDebugWarn('vector-index.worker', 'Failed to load embedding config for batch, using defaults', {\n error: configErr instanceof Error ? configErr.message : configErr,\n })\n }\n\n // Process each record in the batch\n let successCount = 0\n let failCount = 0\n for (const { entityId, recordId: recId } of records) {\n try {\n const result = await searchIndexer.indexRecordById({\n entityId: entityId as Parameters<typeof searchIndexer.indexRecordById>[0]['entityId'],\n recordId: recId,\n tenantId,\n organizationId,\n })\n if (result.action === 'indexed') {\n successCount++\n }\n } catch (error) {\n failCount++\n searchDebugWarn('vector-index.worker', 'Failed to index record in batch', {\n entityId,\n recordId: recId,\n error: error instanceof Error ? error.message : error,\n })\n }\n }\n\n // Update heartbeat to signal worker is still processing\n if (knex && records.length > 0) {\n await updateReindexProgress(knex, tenantId, 'vector', successCount, organizationId ?? null)\n }\n if (progressService && em && records.length > 0) {\n const completed = await incrementReindexProgress({\n em,\n progressService,\n type: 'vector',\n tenantId,\n organizationId: organizationId ?? null,\n delta: successCount,\n })\n if (completed && knex) {\n await clearReindexLock(knex, tenantId, 'vector', organizationId ?? 
null)\n }\n }\n\n searchDebugWarn('vector-index.worker', 'Batch-index job completed', {\n jobId: jobCtx.jobId,\n totalRecords: records.length,\n successCount,\n failCount,\n })\n return\n }\n\n // Handle single record jobs (index/delete)\n if (!entityType || !recordId || !tenantId) {\n searchDebugWarn('vector-index.worker', 'Skipping job with missing required fields', {\n jobId: jobCtx.jobId,\n entityType,\n recordId,\n tenantId,\n })\n return\n }\n\n const autoIndexingEnabled = await resolveAutoIndexingEnabled(ctx, { defaultValue: true })\n if (!autoIndexingEnabled) {\n return\n }\n\n let searchIndexer: SearchIndexer\n try {\n searchIndexer = ctx.resolve<SearchIndexer>('searchIndexer')\n } catch {\n searchDebugWarn('vector-index.worker', 'searchIndexer not available')\n return\n }\n\n // Load saved embedding config to use the correct provider/model\n try {\n const embeddingConfig = await resolveEmbeddingConfig(ctx, { defaultValue: null })\n if (embeddingConfig) {\n const embeddingService = ctx.resolve<EmbeddingService>('vectorEmbeddingService')\n embeddingService.updateConfig(embeddingConfig)\n }\n } catch (configErr) {\n // Delete operations don't require embedding, only warn for index operations\n if (jobType === 'index') {\n searchDebugWarn('vector-index.worker', 'Failed to load embedding config, using defaults', {\n error: configErr instanceof Error ? configErr.message : configErr,\n })\n }\n }\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let em: any | null = null\n try {\n em = ctx.resolve('em')\n } catch {\n em = null\n }\n\n let eventBus: { emitEvent(event: string, payload: unknown, options?: unknown): Promise<void> } | null = null\n try {\n eventBus = ctx.resolve('eventBus')\n } catch {\n eventBus = null\n }\n\n const handlerName = jobType === 'delete'\n ? 'worker:vector-indexing:delete'\n : 'worker:vector-indexing:index'\n\n try {\n let action: 'indexed' | 'deleted' | 'skipped' = 'skipped'\n let delta = 0\n\n if (jobType === 'delete') {\n await searchIndexer.deleteRecord({\n entityId: entityType,\n recordId,\n tenantId,\n })\n action = 'deleted'\n delta = -1\n } else {\n const result = await searchIndexer.indexRecordById({\n entityId: entityType,\n recordId,\n tenantId,\n organizationId,\n })\n action = result.action\n if (result.action === 'indexed') {\n delta = 1\n }\n }\n\n if (delta !== 0) {\n let adjustmentsApplied = false\n if (em) {\n try {\n const adjustments = createCoverageAdjustments({\n entityType,\n tenantId,\n organizationId,\n baseDelta: 0,\n indexDelta: 0,\n vectorDelta: delta,\n })\n if (adjustments.length) {\n await applyCoverageAdjustments(em, adjustments)\n adjustmentsApplied = true\n }\n } catch (coverageError) {\n searchDebugWarn('vector-index.worker', 'Failed to adjust vector coverage', {\n error: coverageError instanceof Error ? coverageError.message : coverageError,\n })\n }\n }\n\n if (!adjustmentsApplied && eventBus) {\n try {\n await eventBus.emitEvent('query_index.coverage.refresh', {\n entityType,\n tenantId,\n organizationId,\n withDeleted: false,\n delayMs: 1000,\n })\n } catch (emitError) {\n searchDebugWarn('vector-index.worker', 'Failed to enqueue coverage refresh', {\n error: emitError instanceof Error ? emitError.message : emitError,\n })\n }\n }\n }\n\n await logVectorOperation({\n em,\n handler: handlerName,\n entityType,\n recordId,\n result: {\n action,\n tenantId,\n organizationId: organizationId ?? 
null,\n created: action === 'indexed',\n existed: action === 'deleted',\n },\n })\n } catch (error) {\n searchDebugWarn('vector-index.worker', `Failed to ${jobType} vector index`, {\n entityType,\n recordId,\n error: error instanceof Error ? error.message : error,\n })\n await recordIndexerError(\n { em: em ?? undefined },\n {\n source: 'vector',\n handler: handlerName,\n error,\n entityType,\n recordId,\n tenantId,\n organizationId,\n payload: job.payload,\n },\n )\n // Re-throw to let the queue handle retry logic\n throw error\n }\n}\n\n/**\n * Default export for worker auto-discovery.\n * Wraps handleVectorIndexJob to match the expected handler signature.\n */\nexport default async function handle(\n job: QueuedJob<VectorIndexJobPayload>,\n ctx: JobContext & HandlerContext\n): Promise<void> {\n return handleVectorIndexJob(job, ctx, ctx)\n}\n"],
- "mappings": "AACA,SAAS,kCAA8D;AAMvE,SAAS,0BAA0B;AACnC,SAAS,0BAA0B,iCAAiC;AACpE,SAAS,0BAA0B;AACnC,SAAS,kCAAkC;AAC3C,SAAS,8BAA8B;AACvC,SAAS,uBAAuB;AAChC,SAAS,kBAAkB,6BAA6B;AACxD,SAAS,gCAAgC;AAGzC,MAAM,sBAAsB;AAC5B,MAAM,iBAAiB,QAAQ,IAAI;AAE5B,MAAM,WAAuB;AAAA,EAClC,OAAO;AAAA,EACP,aAAa,iBAAiB,SAAS,gBAAgB,EAAE,IAAI;AAC/D;AAcA,eAAsB,qBACpB,KACA,QACA,KACe;AACf,QAAM,EAAE,SAAS,YAAY,UAAU,UAAU,gBAAgB,QAAQ,IAAI,IAAI;AAGjF,MAAI,YAAY,eAAe;AAC7B,QAAI,CAAC,SAAS,UAAU,CAAC,UAAU;AACjC,sBAAgB,uBAAuB,yDAAyD;AAAA,QAC9F,OAAO,OAAO;AAAA,QACd,aAAa,SAAS,UAAU;AAAA,QAChC;AAAA,MACF,CAAC;AACD;AAAA,IACF;AAEA,QAAIA;AACJ,QAAI;AACF,MAAAA,iBAAgB,IAAI,QAAuB,eAAe;AAAA,IAC5D,QAAQ;AACN,sBAAgB,uBAAuB,6BAA6B;AACpE;AAAA,IACF;AAGA,QAAI,OAAoB;AACxB,QAAIC,MAA2B;AAC/B,QAAI;AACF,MAAAA,MAAK,IAAI,QAAQ,IAAI;AACrB,aAAQA,IAAG,cAAc,EAAyC,QAAQ;AAAA,IAC5E,QAAQ;AACN,aAAO;AACP,MAAAA,MAAK;AAAA,IACP;AAEA,QAAI,kBAA0C;AAC9C,QAAI;AACF,wBAAkB,IAAI,QAAyB,iBAAiB;AAAA,IAClE,QAAQ;AACN,wBAAkB;AAAA,IACpB;AAGA,QAAI;AACF,YAAM,kBAAkB,MAAM,uBAAuB,KAAK,EAAE,cAAc,KAAK,CAAC;AAChF,UAAI,iBAAiB;AACnB,cAAM,mBAAmB,IAAI,QAA0B,wBAAwB;AAC/E,yBAAiB,aAAa,eAAe;AAAA,MAC/C;AAAA,IACF,SAAS,WAAW;AAClB,sBAAgB,uBAAuB,6DAA6D;AAAA,QAClG,OAAO,qBAAqB,QAAQ,UAAU,UAAU;AAAA,MAC1D,CAAC;AAAA,IACH;AAGA,QAAI,eAAe;AACnB,QAAI,YAAY;AAChB,eAAW,EAAE,UAAU,UAAU,MAAM,KAAK,SAAS;AACnD,UAAI;AACF,cAAM,SAAS,MAAMD,eAAc,gBAAgB;AAAA,UACjD;AAAA,UACA,UAAU;AAAA,UACV;AAAA,UACA;AAAA,QACF,CAAC;AACD,YAAI,OAAO,WAAW,WAAW;AAC/B;AAAA,QACF;AAAA,MACF,SAAS,OAAO;AACd;AACA,wBAAgB,uBAAuB,mCAAmC;AAAA,UACxE;AAAA,UACA,UAAU;AAAA,UACV,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,QAClD,CAAC;AAAA,MACH;AAAA,IACF;AAGA,QAAI,QAAQ,QAAQ,SAAS,GAAG;AAC9B,YAAM,sBAAsB,MAAM,UAAU,UAAU,cAAc,kBAAkB,IAAI;AAAA,IAC5F;AACA,QAAI,mBAAmBC,OAAM,QAAQ,SAAS,GAAG;AAC/C,YAAM,YAAY,MAAM,yBAAyB;AAAA,QAC/C,IAAAA;AAAA,QACA;AAAA,QACA,MAAM;AAAA,QACN;AAAA,QACA,gBAAgB,kBAAkB;AAAA,QAClC,OAAO;AAAA,MACT,CAAC;AACD,UAAI,aAAa,MAAM;AACrB,cAAM,iBAAiB,MAAM,UAAU,UAAU,kBAAkB,IAAI;AAAA,MACzE;AAAA,IACF;AAEA,oBAAgB,uBAAuB,6BAA6B;AAAA,MAClE,OAAO,OAAO;AAAA,MACd,cAAc,QAAQ;AAAA,MACtB;AAAA,MACA;AAAA,IACF,CAAC;AACD;AAAA,EACF;AAGA,MAAI,CAAC,cAAc,CAAC,YAAY,CAAC,UAAU;AACzC,oBAAgB,uBAAuB,6CAA6C;AAAA,MAClF,OAAO,OAAO;AAAA,MACd;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AACD;AAAA,EACF;AAEA,QAAM,sBAAsB,MAAM,2BAA2B,KAAK,EAAE,cAAc,KAAK,CAAC;AACxF,MAAI,CAAC,qBAAqB;AACxB;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,oBAAgB,IAAI,QAAuB,eAAe;AAAA,EAC5D,QAAQ;AACN,oBAAgB,uBAAuB,6BAA6B;AACpE;AAAA,EACF;AAGA,MAAI;AACF,UAAM,kBAAkB,MAAM,uBAAuB,KAAK,EAAE,cAAc,KAAK,CAAC;AAChF,QAAI,iBAAiB;AACnB,YAAM,mBAAmB,IAAI,QAA0B,wBAAwB;AAC/E,uBAAiB,aAAa,eAAe;AAAA,IAC/C;AAAA,EACF,SAAS,WAAW;AAElB,QAAI,YAAY,SAAS;AACvB,sBAAgB,uBAAuB,mDAAmD;AAAA,QACxF,OAAO,qBAAqB,QAAQ,UAAU,UAAU;AAAA,MAC1D,CAAC;AAAA,IACH;AAAA,EACF;AAGA,MAAI,KAAiB;AACrB,MAAI;AACF,SAAK,IAAI,QAAQ,IAAI;AAAA,EACvB,QAAQ;AACN,SAAK;AAAA,EACP;AAEA,MAAI,WAAoG;AACxG,MAAI;AACF,eAAW,IAAI,QAAQ,UAAU;AAAA,EACnC,QAAQ;AACN,eAAW;AAAA,EACb;AAEA,QAAM,cAAc,YAAY,WAC5B,kCACA;AAEJ,MAAI;AACF,QAAI,SAA4C;AAChD,QAAI,QAAQ;AAEZ,QAAI,YAAY,UAAU;AACxB,YAAM,cAAc,aAAa;AAAA,QAC/B,UAAU;AAAA,QACV;AAAA,QACA;AAAA,MACF,CAAC;AACD,eAAS;AACT,cAAQ;AAAA,IACV,OAAO;AACL,YAAM,SAAS,MAAM,cAAc,gBAAgB;AAAA,QACjD,UAAU;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AACD,eAAS,OAAO;AAChB,UAAI,OAAO,WAAW,WAAW;AAC/B,gBAAQ;AAAA,MACV;AAAA,IACF;AAEA,QAAI,UAAU,GAAG;AACf,UAAI,qBAAqB;AACzB,UAAI,IAAI;AACN,YAAI;AACF,gBAAM,cAAc,0BAA0B;AAAA,YAC5C;AAAA,YACA;AAAA,YACA;AAAA,YACA,WAAW;AAAA,YACX,YAAY;AAAA,YACZ,aAAa;AAAA,UACf,CAAC;AACD,cAAI,YAAY,QAAQ;AACtB,kBAAM,yBAAyB,IAAI,WAAW;AAC9C,iCAAqB;AAAA,UACvB;AAAA,QACF,SAAS,eAAe;AACtB,0BAAgB,uBAAuB,oCAAoC;AAAA,YACzE,OAAO,yBAAyB,Q
AAQ,cAAc,UAAU;AAAA,UAClE,CAAC;AAAA,QACH;AAAA,MACF;AAEA,UAAI,CAAC,sBAAsB,UAAU;AACnC,YAAI;AACF,gBAAM,SAAS,UAAU,gCAAgC;AAAA,YACvD;AAAA,YACA;AAAA,YACA;AAAA,YACA,aAAa;AAAA,YACb,SAAS;AAAA,UACX,CAAC;AAAA,QACH,SAAS,WAAW;AAClB,0BAAgB,uBAAuB,sCAAsC;AAAA,YAC3E,OAAO,qBAAqB,QAAQ,UAAU,UAAU;AAAA,UAC1D,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAEA,UAAM,mBAAmB;AAAA,MACvB;AAAA,MACA,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,QACN;AAAA,QACA;AAAA,QACA,gBAAgB,kBAAkB;AAAA,QAClC,SAAS,WAAW;AAAA,QACpB,SAAS,WAAW;AAAA,MACtB;AAAA,IACF,CAAC;AAAA,EACH,SAAS,OAAO;AACd,oBAAgB,uBAAuB,aAAa,OAAO,iBAAiB;AAAA,MAC1E;AAAA,MACA;AAAA,MACA,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,IAClD,CAAC;AACD,UAAM;AAAA,MACJ,EAAE,IAAI,MAAM,OAAU;AAAA,MACtB;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,QACT;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS,IAAI;AAAA,MACf;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAMA,eAAO,OACL,KACA,KACe;AACf,SAAO,qBAAqB,KAAK,KAAK,GAAG;AAC3C;",
+ "sourcesContent": ["import type { QueuedJob, JobContext, WorkerMeta } from '@open-mercato/queue'\nimport type { Kysely } from 'kysely'\nimport { VECTOR_INDEXING_QUEUE_NAME, type VectorIndexJobPayload } from '../../../queue/vector-indexing'\nimport type { SearchIndexer } from '../../../indexer/search-indexer'\nimport type { EmbeddingService } from '../../../vector'\nimport type { EntityManager } from '@mikro-orm/postgresql'\n\nimport type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'\nimport { recordIndexerError } from '@open-mercato/shared/lib/indexers/error-log'\nimport { applyCoverageAdjustments, createCoverageAdjustments } from '@open-mercato/core/modules/query_index/lib/coverage'\nimport { logVectorOperation } from '../../../vector/lib/vector-logs'\nimport { resolveAutoIndexingEnabled } from '../lib/auto-indexing'\nimport { resolveEmbeddingConfig } from '../lib/embedding-config'\nimport { searchDebugWarn } from '../../../lib/debug'\nimport { clearReindexLock, updateReindexProgress } from '../lib/reindex-lock'\nimport { incrementReindexProgress } from '../lib/reindex-progress'\n\n// Worker metadata for auto-discovery\nconst DEFAULT_CONCURRENCY = 2\nconst envConcurrency = process.env.WORKERS_VECTOR_INDEXING_CONCURRENCY\n\nexport const metadata: WorkerMeta = {\n queue: VECTOR_INDEXING_QUEUE_NAME,\n concurrency: envConcurrency ? parseInt(envConcurrency, 10) : DEFAULT_CONCURRENCY,\n}\n\ntype HandlerContext = { resolve: <T = unknown>(name: string) => T }\n\n/**\n * Process a vector index job.\n *\n * This handler is called by the queue worker to process indexing and deletion jobs.\n * It uses SearchIndexer to load records and index them via SearchService.\n *\n * @param job - The queued job containing payload\n * @param jobCtx - Queue job context with job ID and attempt info\n * @param ctx - DI container context for resolving services\n */\nexport async function handleVectorIndexJob(\n job: QueuedJob<VectorIndexJobPayload>,\n jobCtx: JobContext,\n ctx: HandlerContext,\n): Promise<void> {\n const { jobType, entityType, recordId, tenantId, organizationId, records } = job.payload\n\n // Handle batch-index jobs (from reindex operations)\n if (jobType === 'batch-index') {\n if (!records?.length || !tenantId) {\n searchDebugWarn('vector-index.worker', 'Skipping batch-index job with missing required fields', {\n jobId: jobCtx.jobId,\n recordCount: records?.length ?? 
0,\n tenantId,\n })\n return\n }\n\n let searchIndexer: SearchIndexer\n try {\n searchIndexer = ctx.resolve<SearchIndexer>('searchIndexer')\n } catch {\n searchDebugWarn('vector-index.worker', 'searchIndexer not available')\n return\n }\n\n // Get Kysely for heartbeat updates\n let db: Kysely<any> | null = null\n let em: EntityManager | null = null\n try {\n em = ctx.resolve('em') as EntityManager\n db = (em as unknown as { getKysely: () => Kysely<any> }).getKysely()\n } catch {\n db = null\n em = null\n }\n\n let progressService: ProgressService | null = null\n try {\n progressService = ctx.resolve<ProgressService>('progressService')\n } catch {\n progressService = null\n }\n\n // Load saved embedding config to use the correct provider/model\n try {\n const embeddingConfig = await resolveEmbeddingConfig(ctx, { defaultValue: null })\n if (embeddingConfig) {\n const embeddingService = ctx.resolve<EmbeddingService>('vectorEmbeddingService')\n embeddingService.updateConfig(embeddingConfig)\n }\n } catch (configErr) {\n searchDebugWarn('vector-index.worker', 'Failed to load embedding config for batch, using defaults', {\n error: configErr instanceof Error ? configErr.message : configErr,\n })\n }\n\n // Process each record in the batch\n let successCount = 0\n let failCount = 0\n for (const { entityId, recordId: recId } of records) {\n try {\n const result = await searchIndexer.indexRecordById({\n entityId: entityId as Parameters<typeof searchIndexer.indexRecordById>[0]['entityId'],\n recordId: recId,\n tenantId,\n organizationId,\n })\n if (result.action === 'indexed') {\n successCount++\n }\n } catch (error) {\n failCount++\n searchDebugWarn('vector-index.worker', 'Failed to index record in batch', {\n entityId,\n recordId: recId,\n error: error instanceof Error ? error.message : error,\n })\n }\n }\n\n // Update heartbeat to signal worker is still processing\n if (db && records.length > 0) {\n await updateReindexProgress(db, tenantId, 'vector', successCount, organizationId ?? null)\n }\n if (progressService && em && records.length > 0) {\n const completed = await incrementReindexProgress({\n em,\n progressService,\n type: 'vector',\n tenantId,\n organizationId: organizationId ?? null,\n delta: successCount,\n })\n if (completed && db) {\n await clearReindexLock(db, tenantId, 'vector', organizationId ?? 
null)\n }\n }\n\n searchDebugWarn('vector-index.worker', 'Batch-index job completed', {\n jobId: jobCtx.jobId,\n totalRecords: records.length,\n successCount,\n failCount,\n })\n return\n }\n\n // Handle single record jobs (index/delete)\n if (!entityType || !recordId || !tenantId) {\n searchDebugWarn('vector-index.worker', 'Skipping job with missing required fields', {\n jobId: jobCtx.jobId,\n entityType,\n recordId,\n tenantId,\n })\n return\n }\n\n const autoIndexingEnabled = await resolveAutoIndexingEnabled(ctx, { defaultValue: true })\n if (!autoIndexingEnabled) {\n return\n }\n\n let searchIndexer: SearchIndexer\n try {\n searchIndexer = ctx.resolve<SearchIndexer>('searchIndexer')\n } catch {\n searchDebugWarn('vector-index.worker', 'searchIndexer not available')\n return\n }\n\n // Load saved embedding config to use the correct provider/model\n try {\n const embeddingConfig = await resolveEmbeddingConfig(ctx, { defaultValue: null })\n if (embeddingConfig) {\n const embeddingService = ctx.resolve<EmbeddingService>('vectorEmbeddingService')\n embeddingService.updateConfig(embeddingConfig)\n }\n } catch (configErr) {\n // Delete operations don't require embedding, only warn for index operations\n if (jobType === 'index') {\n searchDebugWarn('vector-index.worker', 'Failed to load embedding config, using defaults', {\n error: configErr instanceof Error ? configErr.message : configErr,\n })\n }\n }\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let em: any | null = null\n try {\n em = ctx.resolve('em')\n } catch {\n em = null\n }\n\n let eventBus: { emitEvent(event: string, payload: unknown, options?: unknown): Promise<void> } | null = null\n try {\n eventBus = ctx.resolve('eventBus')\n } catch {\n eventBus = null\n }\n\n const handlerName = jobType === 'delete'\n ? 'worker:vector-indexing:delete'\n : 'worker:vector-indexing:index'\n\n try {\n let action: 'indexed' | 'deleted' | 'skipped' = 'skipped'\n let delta = 0\n\n if (jobType === 'delete') {\n await searchIndexer.deleteRecord({\n entityId: entityType,\n recordId,\n tenantId,\n })\n action = 'deleted'\n delta = -1\n } else {\n const result = await searchIndexer.indexRecordById({\n entityId: entityType,\n recordId,\n tenantId,\n organizationId,\n })\n action = result.action\n if (result.action === 'indexed') {\n delta = 1\n }\n }\n\n if (delta !== 0) {\n let adjustmentsApplied = false\n if (em) {\n try {\n const adjustments = createCoverageAdjustments({\n entityType,\n tenantId,\n organizationId,\n baseDelta: 0,\n indexDelta: 0,\n vectorDelta: delta,\n })\n if (adjustments.length) {\n await applyCoverageAdjustments(em, adjustments)\n adjustmentsApplied = true\n }\n } catch (coverageError) {\n searchDebugWarn('vector-index.worker', 'Failed to adjust vector coverage', {\n error: coverageError instanceof Error ? coverageError.message : coverageError,\n })\n }\n }\n\n if (!adjustmentsApplied && eventBus) {\n try {\n await eventBus.emitEvent('query_index.coverage.refresh', {\n entityType,\n tenantId,\n organizationId,\n withDeleted: false,\n delayMs: 1000,\n })\n } catch (emitError) {\n searchDebugWarn('vector-index.worker', 'Failed to enqueue coverage refresh', {\n error: emitError instanceof Error ? emitError.message : emitError,\n })\n }\n }\n }\n\n await logVectorOperation({\n em,\n handler: handlerName,\n entityType,\n recordId,\n result: {\n action,\n tenantId,\n organizationId: organizationId ?? 
null,\n created: action === 'indexed',\n existed: action === 'deleted',\n },\n })\n } catch (error) {\n searchDebugWarn('vector-index.worker', `Failed to ${jobType} vector index`, {\n entityType,\n recordId,\n error: error instanceof Error ? error.message : error,\n })\n await recordIndexerError(\n { em: em ?? undefined },\n {\n source: 'vector',\n handler: handlerName,\n error,\n entityType,\n recordId,\n tenantId,\n organizationId,\n payload: job.payload,\n },\n )\n // Re-throw to let the queue handle retry logic\n throw error\n }\n}\n\n/**\n * Default export for worker auto-discovery.\n * Wraps handleVectorIndexJob to match the expected handler signature.\n */\nexport default async function handle(\n job: QueuedJob<VectorIndexJobPayload>,\n ctx: JobContext & HandlerContext\n): Promise<void> {\n return handleVectorIndexJob(job, ctx, ctx)\n}\n"],
+ "mappings": "AAEA,SAAS,kCAA8D;AAMvE,SAAS,0BAA0B;AACnC,SAAS,0BAA0B,iCAAiC;AACpE,SAAS,0BAA0B;AACnC,SAAS,kCAAkC;AAC3C,SAAS,8BAA8B;AACvC,SAAS,uBAAuB;AAChC,SAAS,kBAAkB,6BAA6B;AACxD,SAAS,gCAAgC;AAGzC,MAAM,sBAAsB;AAC5B,MAAM,iBAAiB,QAAQ,IAAI;AAE5B,MAAM,WAAuB;AAAA,EAClC,OAAO;AAAA,EACP,aAAa,iBAAiB,SAAS,gBAAgB,EAAE,IAAI;AAC/D;AAcA,eAAsB,qBACpB,KACA,QACA,KACe;AACf,QAAM,EAAE,SAAS,YAAY,UAAU,UAAU,gBAAgB,QAAQ,IAAI,IAAI;AAGjF,MAAI,YAAY,eAAe;AAC7B,QAAI,CAAC,SAAS,UAAU,CAAC,UAAU;AACjC,sBAAgB,uBAAuB,yDAAyD;AAAA,QAC9F,OAAO,OAAO;AAAA,QACd,aAAa,SAAS,UAAU;AAAA,QAChC;AAAA,MACF,CAAC;AACD;AAAA,IACF;AAEA,QAAIA;AACJ,QAAI;AACF,MAAAA,iBAAgB,IAAI,QAAuB,eAAe;AAAA,IAC5D,QAAQ;AACN,sBAAgB,uBAAuB,6BAA6B;AACpE;AAAA,IACF;AAGA,QAAI,KAAyB;AAC7B,QAAIC,MAA2B;AAC/B,QAAI;AACF,MAAAA,MAAK,IAAI,QAAQ,IAAI;AACrB,WAAMA,IAAmD,UAAU;AAAA,IACrE,QAAQ;AACN,WAAK;AACL,MAAAA,MAAK;AAAA,IACP;AAEA,QAAI,kBAA0C;AAC9C,QAAI;AACF,wBAAkB,IAAI,QAAyB,iBAAiB;AAAA,IAClE,QAAQ;AACN,wBAAkB;AAAA,IACpB;AAGA,QAAI;AACF,YAAM,kBAAkB,MAAM,uBAAuB,KAAK,EAAE,cAAc,KAAK,CAAC;AAChF,UAAI,iBAAiB;AACnB,cAAM,mBAAmB,IAAI,QAA0B,wBAAwB;AAC/E,yBAAiB,aAAa,eAAe;AAAA,MAC/C;AAAA,IACF,SAAS,WAAW;AAClB,sBAAgB,uBAAuB,6DAA6D;AAAA,QAClG,OAAO,qBAAqB,QAAQ,UAAU,UAAU;AAAA,MAC1D,CAAC;AAAA,IACH;AAGA,QAAI,eAAe;AACnB,QAAI,YAAY;AAChB,eAAW,EAAE,UAAU,UAAU,MAAM,KAAK,SAAS;AACnD,UAAI;AACF,cAAM,SAAS,MAAMD,eAAc,gBAAgB;AAAA,UACjD;AAAA,UACA,UAAU;AAAA,UACV;AAAA,UACA;AAAA,QACF,CAAC;AACD,YAAI,OAAO,WAAW,WAAW;AAC/B;AAAA,QACF;AAAA,MACF,SAAS,OAAO;AACd;AACA,wBAAgB,uBAAuB,mCAAmC;AAAA,UACxE;AAAA,UACA,UAAU;AAAA,UACV,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,QAClD,CAAC;AAAA,MACH;AAAA,IACF;AAGA,QAAI,MAAM,QAAQ,SAAS,GAAG;AAC5B,YAAM,sBAAsB,IAAI,UAAU,UAAU,cAAc,kBAAkB,IAAI;AAAA,IAC1F;AACA,QAAI,mBAAmBC,OAAM,QAAQ,SAAS,GAAG;AAC/C,YAAM,YAAY,MAAM,yBAAyB;AAAA,QAC/C,IAAAA;AAAA,QACA;AAAA,QACA,MAAM;AAAA,QACN;AAAA,QACA,gBAAgB,kBAAkB;AAAA,QAClC,OAAO;AAAA,MACT,CAAC;AACD,UAAI,aAAa,IAAI;AACnB,cAAM,iBAAiB,IAAI,UAAU,UAAU,kBAAkB,IAAI;AAAA,MACvE;AAAA,IACF;AAEA,oBAAgB,uBAAuB,6BAA6B;AAAA,MAClE,OAAO,OAAO;AAAA,MACd,cAAc,QAAQ;AAAA,MACtB;AAAA,MACA;AAAA,IACF,CAAC;AACD;AAAA,EACF;AAGA,MAAI,CAAC,cAAc,CAAC,YAAY,CAAC,UAAU;AACzC,oBAAgB,uBAAuB,6CAA6C;AAAA,MAClF,OAAO,OAAO;AAAA,MACd;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AACD;AAAA,EACF;AAEA,QAAM,sBAAsB,MAAM,2BAA2B,KAAK,EAAE,cAAc,KAAK,CAAC;AACxF,MAAI,CAAC,qBAAqB;AACxB;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACF,oBAAgB,IAAI,QAAuB,eAAe;AAAA,EAC5D,QAAQ;AACN,oBAAgB,uBAAuB,6BAA6B;AACpE;AAAA,EACF;AAGA,MAAI;AACF,UAAM,kBAAkB,MAAM,uBAAuB,KAAK,EAAE,cAAc,KAAK,CAAC;AAChF,QAAI,iBAAiB;AACnB,YAAM,mBAAmB,IAAI,QAA0B,wBAAwB;AAC/E,uBAAiB,aAAa,eAAe;AAAA,IAC/C;AAAA,EACF,SAAS,WAAW;AAElB,QAAI,YAAY,SAAS;AACvB,sBAAgB,uBAAuB,mDAAmD;AAAA,QACxF,OAAO,qBAAqB,QAAQ,UAAU,UAAU;AAAA,MAC1D,CAAC;AAAA,IACH;AAAA,EACF;AAGA,MAAI,KAAiB;AACrB,MAAI;AACF,SAAK,IAAI,QAAQ,IAAI;AAAA,EACvB,QAAQ;AACN,SAAK;AAAA,EACP;AAEA,MAAI,WAAoG;AACxG,MAAI;AACF,eAAW,IAAI,QAAQ,UAAU;AAAA,EACnC,QAAQ;AACN,eAAW;AAAA,EACb;AAEA,QAAM,cAAc,YAAY,WAC5B,kCACA;AAEJ,MAAI;AACF,QAAI,SAA4C;AAChD,QAAI,QAAQ;AAEZ,QAAI,YAAY,UAAU;AACxB,YAAM,cAAc,aAAa;AAAA,QAC/B,UAAU;AAAA,QACV;AAAA,QACA;AAAA,MACF,CAAC;AACD,eAAS;AACT,cAAQ;AAAA,IACV,OAAO;AACL,YAAM,SAAS,MAAM,cAAc,gBAAgB;AAAA,QACjD,UAAU;AAAA,QACV;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AACD,eAAS,OAAO;AAChB,UAAI,OAAO,WAAW,WAAW;AAC/B,gBAAQ;AAAA,MACV;AAAA,IACF;AAEA,QAAI,UAAU,GAAG;AACf,UAAI,qBAAqB;AACzB,UAAI,IAAI;AACN,YAAI;AACF,gBAAM,cAAc,0BAA0B;AAAA,YAC5C;AAAA,YACA;AAAA,YACA;AAAA,YACA,WAAW;AAAA,YACX,YAAY;AAAA,YACZ,aAAa;AAAA,UACf,CAAC;AACD,cAAI,YAAY,QAAQ;AACtB,kBAAM,yBAAyB,IAAI,WAAW;AAC9C,iCAAqB;AAAA,UACvB;AAAA,QACF,SAAS,eAAe;AACtB,0BAAgB,uBAAuB,oCAAoC;AAAA,YACzE,OAAO,yBAAyB,QAAQ,cAAc,U
AAU;AAAA,UAClE,CAAC;AAAA,QACH;AAAA,MACF;AAEA,UAAI,CAAC,sBAAsB,UAAU;AACnC,YAAI;AACF,gBAAM,SAAS,UAAU,gCAAgC;AAAA,YACvD;AAAA,YACA;AAAA,YACA;AAAA,YACA,aAAa;AAAA,YACb,SAAS;AAAA,UACX,CAAC;AAAA,QACH,SAAS,WAAW;AAClB,0BAAgB,uBAAuB,sCAAsC;AAAA,YAC3E,OAAO,qBAAqB,QAAQ,UAAU,UAAU;AAAA,UAC1D,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAEA,UAAM,mBAAmB;AAAA,MACvB;AAAA,MACA,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,QACN;AAAA,QACA;AAAA,QACA,gBAAgB,kBAAkB;AAAA,QAClC,SAAS,WAAW;AAAA,QACpB,SAAS,WAAW;AAAA,MACtB;AAAA,IACF,CAAC;AAAA,EACH,SAAS,OAAO;AACd,oBAAgB,uBAAuB,aAAa,OAAO,iBAAiB;AAAA,MAC1E;AAAA,MACA;AAAA,MACA,OAAO,iBAAiB,QAAQ,MAAM,UAAU;AAAA,IAClD,CAAC;AACD,UAAM;AAAA,MACJ,EAAE,IAAI,MAAM,OAAU;AAAA,MACtB;AAAA,QACE,QAAQ;AAAA,QACR,SAAS;AAAA,QACT;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS,IAAI;AAAA,MACf;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAMA,eAAO,OACL,KACA,KACe;AACf,SAAO,qBAAqB,KAAK,KAAK,GAAG;AAC3C;",
  "names": ["searchIndexer", "em"]
  }
package/dist/strategies/token.strategy.js CHANGED
@@ -1,6 +1,7 @@
+ import { sql } from "kysely";
  class TokenSearchStrategy {
- constructor(knex, config) {
- this.knex = knex;
+ constructor(db, config) {
+ this.db = db;
  this.id = "tokens";
  this.name = "Token Search";
  this.priority = 10;
@@ -21,14 +22,18 @@ class TokenSearchStrategy {
  if (hashes.length === 0) return [];
  const minMatches = Math.max(1, Math.ceil(hashes.length * this.minMatchRatio));
  const limit = options.limit ?? this.defaultLimit;
- let queryBuilder = this.knex("search_tokens").select("entity_type", "entity_id").count("* as match_count").whereIn("token_hash", hashes).where("tenant_id", options.tenantId).groupBy("entity_type", "entity_id").havingRaw("COUNT(DISTINCT token_hash) >= ?", [minMatches]).orderByRaw("COUNT(DISTINCT token_hash) DESC").limit(limit);
+ let queryBuilder = this.db.selectFrom("search_tokens").select([
+ "entity_type",
+ "entity_id",
+ sql`count(*)`.as("match_count")
+ ]).where("token_hash", "in", hashes).where("tenant_id", "=", options.tenantId).groupBy(["entity_type", "entity_id"]).having(sql`count(distinct token_hash) >= ${minMatches}`).orderBy(sql`count(distinct token_hash) desc`).limit(limit);
  if (options.organizationId) {
- queryBuilder = queryBuilder.where("organization_id", options.organizationId);
+ queryBuilder = queryBuilder.where("organization_id", "=", options.organizationId);
  }
  if (options.entityTypes?.length) {
- queryBuilder = queryBuilder.whereIn("entity_type", options.entityTypes);
+ queryBuilder = queryBuilder.where("entity_type", "in", options.entityTypes);
  }
- const rows = await queryBuilder;
+ const rows = await queryBuilder.execute();
  return rows.map((row) => {
  const matchCount = typeof row.match_count === "string" ? parseInt(row.match_count, 10) : row.match_count;
  const score = matchCount / hashes.length;
@@ -42,7 +47,7 @@
  }
  async index(record) {
  const { replaceSearchTokensForRecord } = await import("@open-mercato/core/modules/query_index/lib/search-tokens");
- await replaceSearchTokensForRecord(this.knex, {
+ await replaceSearchTokensForRecord(this.db, {
  entityType: record.entityId,
  recordId: record.recordId,
  tenantId: record.tenantId,
@@ -52,7 +57,7 @@
  }
  async delete(entityId, recordId, tenantId) {
  const { deleteSearchTokensForRecord } = await import("@open-mercato/core/modules/query_index/lib/search-tokens");
- await deleteSearchTokensForRecord(this.knex, {
+ await deleteSearchTokensForRecord(this.db, {
  entityType: entityId,
  recordId,
  tenantId
@@ -68,10 +73,10 @@
  organizationId: record.organizationId,
  doc: record.fields
  }));
- await replaceSearchTokensForBatch(this.knex, payloads);
+ await replaceSearchTokensForBatch(this.db, payloads);
  }
  async purge(entityId, tenantId) {
- await this.knex("search_tokens").where({ entity_type: entityId, tenant_id: tenantId }).del();
+ await this.db.deleteFrom("search_tokens").where("entity_type", "=", entityId).where("tenant_id", "=", tenantId).execute();
  }
  }
  export {
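
Note: the change above swaps Knex's havingRaw/orderByRaw string interpolation for Kysely's sql template tag, which binds the minMatches value as a parameter. A minimal sketch of the same query shape in isolation (assuming an untyped Kysely<any> handle and the search_tokens columns shown in this diff; an illustration, not the shipped source):

    import { type Kysely, sql, type SqlBool } from 'kysely'

    // Returns one row per (entity_type, entity_id) with how many distinct query-token hashes matched.
    async function matchTokenHashes(
      db: Kysely<any>,
      hashes: string[],
      tenantId: string,
      minMatches: number,
      limit: number,
    ) {
      return db
        .selectFrom('search_tokens')
        .select(['entity_type', 'entity_id', sql<string>`count(*)`.as('match_count')])
        .where('token_hash', 'in', hashes)
        .where('tenant_id', '=', tenantId)
        .groupBy(['entity_type', 'entity_id'])
        .having(sql<SqlBool>`count(distinct token_hash) >= ${minMatches}`) // parameterized by Kysely
        .orderBy(sql`count(distinct token_hash) desc`)
        .limit(limit)
        .execute()
    }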
package/dist/strategies/token.strategy.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/strategies/token.strategy.ts"],
- "sourcesContent": ["import type { Knex } from 'knex'\nimport type {\n SearchStrategy,\n SearchStrategyId,\n SearchOptions,\n SearchResult,\n IndexableRecord,\n} from '../types'\nimport type { EntityId } from '@open-mercato/shared/modules/entities'\n\n/**\n * Configuration for TokenSearchStrategy.\n */\nexport type TokenStrategyConfig = {\n /** Minimum number of query tokens that must match (0-1 ratio, default 0.5) */\n minMatchRatio?: number\n /** Default limit for search results */\n defaultLimit?: number\n}\n\n/**\n * TokenSearchStrategy provides hash-based search using the existing search_tokens table.\n * This strategy is always available and serves as a fallback when other strategies fail.\n *\n * It tokenizes queries into hashes and matches against pre-indexed token hashes,\n * enabling search on encrypted fields without exposing plaintext to external services.\n */\nexport class TokenSearchStrategy implements SearchStrategy {\n readonly id: SearchStrategyId = 'tokens'\n readonly name = 'Token Search'\n readonly priority = 10 // Lowest priority, always available as fallback\n\n private readonly minMatchRatio: number\n private readonly defaultLimit: number\n\n constructor(\n private readonly knex: Knex,\n config?: TokenStrategyConfig,\n ) {\n this.minMatchRatio = config?.minMatchRatio ?? 0.5\n this.defaultLimit = config?.defaultLimit ?? 50\n }\n\n async isAvailable(): Promise<boolean> {\n return true // Always available\n }\n\n async ensureReady(): Promise<void> {\n // No initialization needed\n }\n\n async search(query: string, options: SearchOptions): Promise<SearchResult[]> {\n // Dynamically import tokenization to avoid circular dependencies\n const { tokenizeText } = await import('@open-mercato/shared/lib/search/tokenize')\n const { resolveSearchConfig } = await import('@open-mercato/shared/lib/search/config')\n\n const config = resolveSearchConfig()\n if (!config.enabled) return []\n\n const { hashes } = tokenizeText(query, config)\n if (hashes.length === 0) return []\n\n const minMatches = Math.max(1, Math.ceil(hashes.length * this.minMatchRatio))\n const limit = options.limit ?? this.defaultLimit\n\n let queryBuilder = this.knex('search_tokens')\n .select('entity_type', 'entity_id')\n .count('* as match_count')\n .whereIn('token_hash', hashes)\n .where('tenant_id', options.tenantId)\n .groupBy('entity_type', 'entity_id')\n .havingRaw('COUNT(DISTINCT token_hash) >= ?', [minMatches])\n .orderByRaw('COUNT(DISTINCT token_hash) DESC')\n .limit(limit)\n\n if (options.organizationId) {\n queryBuilder = queryBuilder.where('organization_id', options.organizationId)\n }\n\n if (options.entityTypes?.length) {\n queryBuilder = queryBuilder.whereIn('entity_type', options.entityTypes)\n }\n\n const rows = await queryBuilder as Array<{ entity_type: string; entity_id: string; match_count: string | number }>\n\n return rows.map((row) => {\n const matchCount = typeof row.match_count === 'string'\n ? 
parseInt(row.match_count, 10)\n : row.match_count\n // Calculate score based on match ratio\n const score = matchCount / hashes.length\n\n return {\n entityId: row.entity_type as EntityId,\n recordId: row.entity_id,\n score,\n source: this.id,\n }\n })\n }\n\n async index(record: IndexableRecord): Promise<void> {\n // Dynamically import to avoid circular dependencies\n const { replaceSearchTokensForRecord } = await import(\n '@open-mercato/core/modules/query_index/lib/search-tokens'\n )\n\n await replaceSearchTokensForRecord(this.knex, {\n entityType: record.entityId,\n recordId: record.recordId,\n tenantId: record.tenantId,\n organizationId: record.organizationId,\n doc: record.fields,\n })\n }\n\n async delete(entityId: EntityId, recordId: string, tenantId: string): Promise<void> {\n // Dynamically import to avoid circular dependencies\n const { deleteSearchTokensForRecord } = await import(\n '@open-mercato/core/modules/query_index/lib/search-tokens'\n )\n\n await deleteSearchTokensForRecord(this.knex, {\n entityType: entityId,\n recordId,\n tenantId,\n })\n }\n\n async bulkIndex(records: IndexableRecord[]): Promise<void> {\n if (records.length === 0) return\n\n const { replaceSearchTokensForBatch } = await import(\n '@open-mercato/core/modules/query_index/lib/search-tokens'\n )\n\n const payloads = records.map((record) => ({\n entityType: record.entityId,\n recordId: record.recordId,\n tenantId: record.tenantId,\n organizationId: record.organizationId,\n doc: record.fields as Record<string, unknown>,\n }))\n\n await replaceSearchTokensForBatch(this.knex, payloads)\n }\n\n async purge(entityId: EntityId, tenantId: string): Promise<void> {\n await this.knex('search_tokens')\n .where({ entity_type: entityId, tenant_id: tenantId })\n .del()\n }\n}\n"],
- "mappings": "AA2BO,MAAM,oBAA8C;AAAA,EAQzD,YACmB,MACjB,QACA;AAFiB;AARnB,SAAS,KAAuB;AAChC,SAAS,OAAO;AAChB,SAAS,WAAW;AASlB,SAAK,gBAAgB,QAAQ,iBAAiB;AAC9C,SAAK,eAAe,QAAQ,gBAAgB;AAAA,EAC9C;AAAA,EAEA,MAAM,cAAgC;AACpC,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,cAA6B;AAAA,EAEnC;AAAA,EAEA,MAAM,OAAO,OAAe,SAAiD;AAE3E,UAAM,EAAE,aAAa,IAAI,MAAM,OAAO,0CAA0C;AAChF,UAAM,EAAE,oBAAoB,IAAI,MAAM,OAAO,wCAAwC;AAErF,UAAM,SAAS,oBAAoB;AACnC,QAAI,CAAC,OAAO,QAAS,QAAO,CAAC;AAE7B,UAAM,EAAE,OAAO,IAAI,aAAa,OAAO,MAAM;AAC7C,QAAI,OAAO,WAAW,EAAG,QAAO,CAAC;AAEjC,UAAM,aAAa,KAAK,IAAI,GAAG,KAAK,KAAK,OAAO,SAAS,KAAK,aAAa,CAAC;AAC5E,UAAM,QAAQ,QAAQ,SAAS,KAAK;AAEpC,QAAI,eAAe,KAAK,KAAK,eAAe,EACzC,OAAO,eAAe,WAAW,EACjC,MAAM,kBAAkB,EACxB,QAAQ,cAAc,MAAM,EAC5B,MAAM,aAAa,QAAQ,QAAQ,EACnC,QAAQ,eAAe,WAAW,EAClC,UAAU,mCAAmC,CAAC,UAAU,CAAC,EACzD,WAAW,iCAAiC,EAC5C,MAAM,KAAK;AAEd,QAAI,QAAQ,gBAAgB;AAC1B,qBAAe,aAAa,MAAM,mBAAmB,QAAQ,cAAc;AAAA,IAC7E;AAEA,QAAI,QAAQ,aAAa,QAAQ;AAC/B,qBAAe,aAAa,QAAQ,eAAe,QAAQ,WAAW;AAAA,IACxE;AAEA,UAAM,OAAO,MAAM;AAEnB,WAAO,KAAK,IAAI,CAAC,QAAQ;AACvB,YAAM,aAAa,OAAO,IAAI,gBAAgB,WAC1C,SAAS,IAAI,aAAa,EAAE,IAC5B,IAAI;AAER,YAAM,QAAQ,aAAa,OAAO;AAElC,aAAO;AAAA,QACL,UAAU,IAAI;AAAA,QACd,UAAU,IAAI;AAAA,QACd;AAAA,QACA,QAAQ,KAAK;AAAA,MACf;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,MAAM,QAAwC;AAElD,UAAM,EAAE,6BAA6B,IAAI,MAAM,OAC7C,0DACF;AAEA,UAAM,6BAA6B,KAAK,MAAM;AAAA,MAC5C,YAAY,OAAO;AAAA,MACnB,UAAU,OAAO;AAAA,MACjB,UAAU,OAAO;AAAA,MACjB,gBAAgB,OAAO;AAAA,MACvB,KAAK,OAAO;AAAA,IACd,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,OAAO,UAAoB,UAAkB,UAAiC;AAElF,UAAM,EAAE,4BAA4B,IAAI,MAAM,OAC5C,0DACF;AAEA,UAAM,4BAA4B,KAAK,MAAM;AAAA,MAC3C,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,UAAU,SAA2C;AACzD,QAAI,QAAQ,WAAW,EAAG;AAE1B,UAAM,EAAE,4BAA4B,IAAI,MAAM,OAC5C,0DACF;AAEA,UAAM,WAAW,QAAQ,IAAI,CAAC,YAAY;AAAA,MACxC,YAAY,OAAO;AAAA,MACnB,UAAU,OAAO;AAAA,MACjB,UAAU,OAAO;AAAA,MACjB,gBAAgB,OAAO;AAAA,MACvB,KAAK,OAAO;AAAA,IACd,EAAE;AAEF,UAAM,4BAA4B,KAAK,MAAM,QAAQ;AAAA,EACvD;AAAA,EAEA,MAAM,MAAM,UAAoB,UAAiC;AAC/D,UAAM,KAAK,KAAK,eAAe,EAC5B,MAAM,EAAE,aAAa,UAAU,WAAW,SAAS,CAAC,EACpD,IAAI;AAAA,EACT;AACF;",
+ "sourcesContent": ["import { type Kysely, sql, type SqlBool } from 'kysely'\nimport type {\n SearchStrategy,\n SearchStrategyId,\n SearchOptions,\n SearchResult,\n IndexableRecord,\n} from '../types'\nimport type { EntityId } from '@open-mercato/shared/modules/entities'\n\n/**\n * Configuration for TokenSearchStrategy.\n */\nexport type TokenStrategyConfig = {\n /** Minimum number of query tokens that must match (0-1 ratio, default 0.5) */\n minMatchRatio?: number\n /** Default limit for search results */\n defaultLimit?: number\n}\n\n/**\n * TokenSearchStrategy provides hash-based search using the existing search_tokens table.\n * This strategy is always available and serves as a fallback when other strategies fail.\n *\n * It tokenizes queries into hashes and matches against pre-indexed token hashes,\n * enabling search on encrypted fields without exposing plaintext to external services.\n */\nexport class TokenSearchStrategy implements SearchStrategy {\n readonly id: SearchStrategyId = 'tokens'\n readonly name = 'Token Search'\n readonly priority = 10 // Lowest priority, always available as fallback\n\n private readonly minMatchRatio: number\n private readonly defaultLimit: number\n\n constructor(\n private readonly db: Kysely<any>,\n config?: TokenStrategyConfig,\n ) {\n this.minMatchRatio = config?.minMatchRatio ?? 0.5\n this.defaultLimit = config?.defaultLimit ?? 50\n }\n\n async isAvailable(): Promise<boolean> {\n return true // Always available\n }\n\n async ensureReady(): Promise<void> {\n // No initialization needed\n }\n\n async search(query: string, options: SearchOptions): Promise<SearchResult[]> {\n // Dynamically import tokenization to avoid circular dependencies\n const { tokenizeText } = await import('@open-mercato/shared/lib/search/tokenize')\n const { resolveSearchConfig } = await import('@open-mercato/shared/lib/search/config')\n\n const config = resolveSearchConfig()\n if (!config.enabled) return []\n\n const { hashes } = tokenizeText(query, config)\n if (hashes.length === 0) return []\n\n const minMatches = Math.max(1, Math.ceil(hashes.length * this.minMatchRatio))\n const limit = options.limit ?? this.defaultLimit\n\n let queryBuilder = this.db\n .selectFrom('search_tokens' as any)\n .select([\n 'entity_type' as any,\n 'entity_id' as any,\n sql<string>`count(*)`.as('match_count'),\n ])\n .where('token_hash' as any, 'in', hashes)\n .where('tenant_id' as any, '=', options.tenantId)\n .groupBy(['entity_type' as any, 'entity_id' as any])\n .having(sql<SqlBool>`count(distinct token_hash) >= ${minMatches}`)\n .orderBy(sql`count(distinct token_hash) desc`)\n .limit(limit)\n\n if (options.organizationId) {\n queryBuilder = queryBuilder.where('organization_id' as any, '=', options.organizationId)\n }\n\n if (options.entityTypes?.length) {\n queryBuilder = queryBuilder.where('entity_type' as any, 'in', options.entityTypes)\n }\n\n const rows = await queryBuilder.execute() as Array<{ entity_type: string; entity_id: string; match_count: string | number }>\n\n return rows.map((row) => {\n const matchCount = typeof row.match_count === 'string'\n ? 
parseInt(row.match_count, 10)\n : row.match_count\n // Calculate score based on match ratio\n const score = matchCount / hashes.length\n\n return {\n entityId: row.entity_type as EntityId,\n recordId: row.entity_id,\n score,\n source: this.id,\n }\n })\n }\n\n async index(record: IndexableRecord): Promise<void> {\n // Dynamically import to avoid circular dependencies\n const { replaceSearchTokensForRecord } = await import(\n '@open-mercato/core/modules/query_index/lib/search-tokens'\n )\n\n await replaceSearchTokensForRecord(this.db, {\n entityType: record.entityId,\n recordId: record.recordId,\n tenantId: record.tenantId,\n organizationId: record.organizationId,\n doc: record.fields,\n })\n }\n\n async delete(entityId: EntityId, recordId: string, tenantId: string): Promise<void> {\n // Dynamically import to avoid circular dependencies\n const { deleteSearchTokensForRecord } = await import(\n '@open-mercato/core/modules/query_index/lib/search-tokens'\n )\n\n await deleteSearchTokensForRecord(this.db, {\n entityType: entityId,\n recordId,\n tenantId,\n })\n }\n\n async bulkIndex(records: IndexableRecord[]): Promise<void> {\n if (records.length === 0) return\n\n const { replaceSearchTokensForBatch } = await import(\n '@open-mercato/core/modules/query_index/lib/search-tokens'\n )\n\n const payloads = records.map((record) => ({\n entityType: record.entityId,\n recordId: record.recordId,\n tenantId: record.tenantId,\n organizationId: record.organizationId,\n doc: record.fields as Record<string, unknown>,\n }))\n\n await replaceSearchTokensForBatch(this.db, payloads)\n }\n\n async purge(entityId: EntityId, tenantId: string): Promise<void> {\n await this.db\n .deleteFrom('search_tokens' as any)\n .where('entity_type' as any, '=', entityId)\n .where('tenant_id' as any, '=', tenantId)\n .execute()\n }\n}\n"],
+ "mappings": "AAAA,SAAsB,WAAyB;AA2BxC,MAAM,oBAA8C;AAAA,EAQzD,YACmB,IACjB,QACA;AAFiB;AARnB,SAAS,KAAuB;AAChC,SAAS,OAAO;AAChB,SAAS,WAAW;AASlB,SAAK,gBAAgB,QAAQ,iBAAiB;AAC9C,SAAK,eAAe,QAAQ,gBAAgB;AAAA,EAC9C;AAAA,EAEA,MAAM,cAAgC;AACpC,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,cAA6B;AAAA,EAEnC;AAAA,EAEA,MAAM,OAAO,OAAe,SAAiD;AAE3E,UAAM,EAAE,aAAa,IAAI,MAAM,OAAO,0CAA0C;AAChF,UAAM,EAAE,oBAAoB,IAAI,MAAM,OAAO,wCAAwC;AAErF,UAAM,SAAS,oBAAoB;AACnC,QAAI,CAAC,OAAO,QAAS,QAAO,CAAC;AAE7B,UAAM,EAAE,OAAO,IAAI,aAAa,OAAO,MAAM;AAC7C,QAAI,OAAO,WAAW,EAAG,QAAO,CAAC;AAEjC,UAAM,aAAa,KAAK,IAAI,GAAG,KAAK,KAAK,OAAO,SAAS,KAAK,aAAa,CAAC;AAC5E,UAAM,QAAQ,QAAQ,SAAS,KAAK;AAEpC,QAAI,eAAe,KAAK,GACrB,WAAW,eAAsB,EACjC,OAAO;AAAA,MACN;AAAA,MACA;AAAA,MACA,cAAsB,GAAG,aAAa;AAAA,IACxC,CAAC,EACA,MAAM,cAAqB,MAAM,MAAM,EACvC,MAAM,aAAoB,KAAK,QAAQ,QAAQ,EAC/C,QAAQ,CAAC,eAAsB,WAAkB,CAAC,EAClD,OAAO,oCAA6C,UAAU,EAAE,EAChE,QAAQ,oCAAoC,EAC5C,MAAM,KAAK;AAEd,QAAI,QAAQ,gBAAgB;AAC1B,qBAAe,aAAa,MAAM,mBAA0B,KAAK,QAAQ,cAAc;AAAA,IACzF;AAEA,QAAI,QAAQ,aAAa,QAAQ;AAC/B,qBAAe,aAAa,MAAM,eAAsB,MAAM,QAAQ,WAAW;AAAA,IACnF;AAEA,UAAM,OAAO,MAAM,aAAa,QAAQ;AAExC,WAAO,KAAK,IAAI,CAAC,QAAQ;AACvB,YAAM,aAAa,OAAO,IAAI,gBAAgB,WAC1C,SAAS,IAAI,aAAa,EAAE,IAC5B,IAAI;AAER,YAAM,QAAQ,aAAa,OAAO;AAElC,aAAO;AAAA,QACL,UAAU,IAAI;AAAA,QACd,UAAU,IAAI;AAAA,QACd;AAAA,QACA,QAAQ,KAAK;AAAA,MACf;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,MAAM,QAAwC;AAElD,UAAM,EAAE,6BAA6B,IAAI,MAAM,OAC7C,0DACF;AAEA,UAAM,6BAA6B,KAAK,IAAI;AAAA,MAC1C,YAAY,OAAO;AAAA,MACnB,UAAU,OAAO;AAAA,MACjB,UAAU,OAAO;AAAA,MACjB,gBAAgB,OAAO;AAAA,MACvB,KAAK,OAAO;AAAA,IACd,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,OAAO,UAAoB,UAAkB,UAAiC;AAElF,UAAM,EAAE,4BAA4B,IAAI,MAAM,OAC5C,0DACF;AAEA,UAAM,4BAA4B,KAAK,IAAI;AAAA,MACzC,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,UAAU,SAA2C;AACzD,QAAI,QAAQ,WAAW,EAAG;AAE1B,UAAM,EAAE,4BAA4B,IAAI,MAAM,OAC5C,0DACF;AAEA,UAAM,WAAW,QAAQ,IAAI,CAAC,YAAY;AAAA,MACxC,YAAY,OAAO;AAAA,MACnB,UAAU,OAAO;AAAA,MACjB,UAAU,OAAO;AAAA,MACjB,gBAAgB,OAAO;AAAA,MACvB,KAAK,OAAO;AAAA,IACd,EAAE;AAEF,UAAM,4BAA4B,KAAK,IAAI,QAAQ;AAAA,EACrD;AAAA,EAEA,MAAM,MAAM,UAAoB,UAAiC;AAC/D,UAAM,KAAK,GACR,WAAW,eAAsB,EACjC,MAAM,eAAsB,KAAK,QAAQ,EACzC,MAAM,aAAoB,KAAK,QAAQ,EACvC,QAAQ;AAAA,EACb;AACF;",
  "names": []
  }
package/jest.config.cjs CHANGED
@@ -1,6 +1,5 @@
  /** @type {import('jest').Config} */
  module.exports = {
- preset: 'ts-jest',
  testEnvironment: 'node',
  watchman: false,
  rootDir: '.',
@@ -15,7 +14,7 @@ module.exports = {
  },
  transform: {
  '^.+\\.(t|j)sx?$': [
- 'ts-jest',
+ '<rootDir>/../../scripts/jest-mikroorm-transformer.cjs',
  {
  tsconfig: {
  jsx: 'react-jsx',
@@ -23,6 +22,9 @@
  },
  ],
  },
+ transformIgnorePatterns: [
+ 'node_modules/(?!(@mikro-orm)/)',
+ ],
  testMatch: ['<rootDir>/src/**/__tests__/**/*.test.(ts|tsx)'],
  passWithNoTests: true,
  }
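
Note: Jest does not transform anything under node_modules by default, so pointing transform at the workspace's MikroORM-aware transformer only takes effect together with the new transformIgnorePatterns entry, whose negative lookahead opts @mikro-orm packages back into transformation. A quick standalone check of how that regex behaves (illustrative only; Jest tests module paths against these patterns):

    const ignore = /node_modules\/(?!(@mikro-orm)\/)/

    ignore.test('node_modules/@mikro-orm/core/index.js') // false -> not ignored, so Jest transforms it
    ignore.test('node_modules/lodash/index.js')          // true  -> ignored, loaded as published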
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@open-mercato/search",
- "version": "0.5.1-develop.2691.d8a0934b37",
+ "version": "0.5.1-develop.2694.732417c5ec",
  "type": "module",
  "main": "./dist/index.js",
  "exports": {
@@ -126,9 +126,9 @@
  "zod": "^4.3.6"
  },
  "peerDependencies": {
- "@open-mercato/core": "0.5.1-develop.2691.d8a0934b37",
- "@open-mercato/queue": "0.5.1-develop.2691.d8a0934b37",
- "@open-mercato/shared": "0.5.1-develop.2691.d8a0934b37"
+ "@open-mercato/core": "0.5.1-develop.2694.732417c5ec",
+ "@open-mercato/queue": "0.5.1-develop.2694.732417c5ec",
+ "@open-mercato/shared": "0.5.1-develop.2694.732417c5ec"
  },
  "devDependencies": {
  "@types/jest": "^30.0.0",
package/src/__tests__/presenter-enricher.test.ts CHANGED
@@ -1,4 +1,5 @@
- import type { Knex } from 'knex'
+
+ import type { Kysely } from 'kysely'
  import type { SearchEntityConfig } from '../types'
  import type { QueryEngine } from '@open-mercato/shared/lib/query/types'
  import type { SearchResult } from '@open-mercato/shared/modules/search'
@@ -15,67 +16,23 @@ type IndexRow = {
  doc: Record<string, unknown>
  }

- type ConditionBuilder = {
- where: (fieldOrCallback: unknown, value?: unknown) => ConditionBuilder
- whereIn: (field: string, values: string[]) => ConditionBuilder
- whereNull: (field: string) => ConditionBuilder
- orWhere: (callback: (builder: ConditionBuilder) => void) => ConditionBuilder
- orWhereNull: (field: string) => ConditionBuilder
- }
-
- type QueryBuilder = ConditionBuilder & {
- select: (...fields: string[]) => QueryBuilder
- then: Promise<IndexRow[]>['then']
- }
-
  const mockedDecryptIndexDocForSearch = jest.mocked(decryptIndexDocForSearch)

- function createConditionBuilder(): ConditionBuilder {
- const builder: ConditionBuilder = {
- where: (fieldOrCallback) => {
- if (typeof fieldOrCallback === 'function') {
- fieldOrCallback(createConditionBuilder())
- }
- return builder
- },
- whereIn: () => builder,
- whereNull: () => builder,
- orWhere: (callback) => {
- callback(createConditionBuilder())
- return builder
- },
- orWhereNull: () => builder,
+ /**
+ * Build a minimal Kysely-like mock for `db.selectFrom(...).select(...).where(...).execute()` chains.
+ * The presenter enricher only uses selectFrom/select/where/execute on the resolved Kysely instance,
+ * so we don't need full coverage here.
+ */
+ function createKyselyMock(rows: IndexRow[]): Kysely<any> {
+ const chain: any = {
+ select: jest.fn(() => chain),
+ where: jest.fn(() => chain),
+ execute: jest.fn().mockResolvedValue(rows),
  }
-
- return builder
- }
-
- function createQueryBuilder(rows: IndexRow[]): QueryBuilder {
- let query: QueryBuilder
-
- query = {
- where: (fieldOrCallback) => {
- if (typeof fieldOrCallback === 'function') {
- fieldOrCallback(createConditionBuilder())
- }
- return query
- },
- whereIn: () => query,
- whereNull: () => query,
- orWhere: (callback) => {
- callback(createConditionBuilder())
- return query
- },
- orWhereNull: () => query,
- select: () => query,
- then: (onFulfilled, onRejected) => Promise.resolve(rows).then(onFulfilled, onRejected),
+ const db: any = {
+ selectFrom: jest.fn(() => chain),
  }
-
- return query
- }
-
- function createKnex(rows: IndexRow[]): Knex {
- return jest.fn((_tableName: string) => createQueryBuilder(rows)) as unknown as Knex
+ return db as Kysely<any>
  }

  function createConfig(config: Omit<SearchEntityConfig, 'entityId'> & { entityId?: SearchEntityConfig['entityId'] }): SearchEntityConfig {
@@ -123,7 +80,7 @@ describe('createPresenterEnricher', () => {
  const config = createConfig({ buildSource, resolveUrl })

  const enrich = createPresenterEnricher(
- createKnex([{ entity_type: 'customers:person', entity_id: 'person-1', doc: decryptedDoc }]),
+ createKyselyMock([{ entity_type: 'customers:person', entity_id: 'person-1', doc: decryptedDoc }]),
  new Map([[config.entityId, config]]),
  queryEngine,
  {} as never,
@@ -171,7 +128,7 @@ describe('createPresenterEnricher', () => {
  const config = createConfig({ resolveLinks })

  const enrich = createPresenterEnricher(
- createKnex([{ entity_type: 'customers:person', entity_id: 'person-1', doc }]),
+ createKyselyMock([{ entity_type: 'customers:person', entity_id: 'person-1', doc }]),
  new Map([[config.entityId, config]]),
  )

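Note: the hand-rolled Knex fakes could be deleted because Kysely's builder is fluent, so a single chain object whose methods return themselves is enough as long as execute() resolves the fixture rows. A slightly more generic variant of the same test double, for reuse beyond the enricher tests (a sketch only; assumes jest globals are available and that the code under test stops at execute()):

    import type { Kysely } from 'kysely'

    // Every named builder method returns the same chain; execute() resolves the canned rows.
    function stubKysely<Row>(rows: Row[], methods: string[] = ['select', 'where']): Kysely<any> {
      const chain: Record<string, jest.Mock> = { execute: jest.fn(async () => rows) }
      for (const name of methods) chain[name] = jest.fn(() => chain)
      return { selectFrom: jest.fn(() => chain) } as unknown as Kysely<any>
    }

If the production query grows (joins, ordering, limits), the stub only needs the extra method names added to the list.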
package/src/__tests__/workers.test.ts CHANGED
@@ -159,17 +159,26 @@ describe('Fulltext Index Worker', () => {
  purge: jest.fn().mockResolvedValue(undefined),
  }

- // Mock knex query builder for batch-index tests
- const mockKnexQuery = {
- select: jest.fn().mockReturnThis(),
- where: jest.fn().mockReturnThis(),
- whereIn: jest.fn().mockReturnThis(),
- whereNull: jest.fn().mockResolvedValue([
- { entity_id: 'rec-1', doc: { name: 'Test 1' } },
- { entity_id: 'rec-2', doc: { name: 'Test 2' } },
- ]),
+ // Mock Kysely query builder for batch-index tests
+ const createKyselyChain = () => {
+ const chain: any = {
+ set: jest.fn(() => chain),
+ where: jest.fn(() => chain),
+ values: jest.fn(() => chain),
+ select: jest.fn(() => chain),
+ selectAll: jest.fn(() => chain),
+ from: jest.fn(() => chain),
+ execute: jest.fn().mockResolvedValue([]),
+ executeTakeFirst: jest.fn().mockResolvedValue(undefined),
+ }
+ return chain
+ }
+ const mockDb = {
+ selectFrom: jest.fn(() => createKyselyChain()),
+ updateTable: jest.fn(() => createKyselyChain()),
+ insertInto: jest.fn(() => createKyselyChain()),
+ deleteFrom: jest.fn(() => createKyselyChain()),
  }
- const mockKnex = jest.fn(() => mockKnexQuery)

  const mockSearchIndexer = {
  getEntityConfig: jest.fn().mockReturnValue(null),
@@ -177,9 +186,7 @@
  }

  const mockEm = {
- getConnection: jest.fn().mockReturnValue({
- getKnex: jest.fn().mockReturnValue(mockKnex),
- }),
+ getKysely: jest.fn().mockReturnValue(mockDb),
  }

  const mockContainer: HandlerContext = {
@@ -193,14 +200,6 @@

  beforeEach(() => {
  jest.clearAllMocks()
- // Reset knex mock chain
- mockKnexQuery.select.mockReturnThis()
- mockKnexQuery.where.mockReturnThis()
- mockKnexQuery.whereIn.mockReturnThis()
- mockKnexQuery.whereNull.mockResolvedValue([
- { entity_id: 'rec-1', doc: { name: 'Test 1' } },
- { entity_id: 'rec-2', doc: { name: 'Test 2' } },
- ])
  })

  it('should skip job with missing tenantId', async () => {
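
Note: the mocks above reflect that the workers now obtain their database handle from em.getKysely() rather than em.getConnection().getKnex(). A defensive resolution helper in the same spirit as the workers' existing try/catch pattern might look like this (a sketch; the HandlerContext shape matches the worker tests, and getKysely() is the accessor these mocks stub):

    import type { Kysely } from 'kysely'

    type HandlerContext = { resolve: <T = unknown>(name: string) => T }

    // Resolve the Kysely handle from the DI-registered EntityManager, or null if unavailable.
    function resolveDb(ctx: HandlerContext): Kysely<any> | null {
      try {
        const em = ctx.resolve<any>('em')
        return typeof em?.getKysely === 'function' ? (em.getKysely() as Kysely<any>) : null
      } catch {
        return null // em not registered; callers skip db-dependent steps
      }
    }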
package/src/di.ts CHANGED
@@ -1,5 +1,5 @@
  import { asValue } from 'awilix'
- import type { Knex } from 'knex'
+ import type { Kysely } from 'kysely'
  import { SearchService } from './service'
  import { TokenSearchStrategy } from './strategies/token.strategy'
  import { VectorSearchStrategy, type EmbeddingService } from './strategies/vector.strategy'
@@ -40,7 +40,7 @@ function shouldExcludeEncryptedFields(): boolean {
  * Falls back to empty array if query fails.
  */
  function createEncryptionMapResolver(
- knex: Knex,
+ db: Kysely<any>,
  ): (entityId: EntityId) => Promise<EncryptionMapEntry[]> {
  // Cache encryption maps per entity to avoid repeated queries
  const cache = new Map<string, { entries: EncryptionMapEntry[]; expiresAt: number }>()
@@ -53,14 +53,15 @@
  }

  try {
- const rows = await knex('encryption_maps')
- .select('fields_json')
- .where('entity_id', entityId)
- .where('is_active', true)
- .whereNull('deleted_at')
- .first()
+ const row = await db
+ .selectFrom('encryption_maps' as any)
+ .select(['fields_json' as any])
+ .where('entity_id' as any, '=', entityId)
+ .where('is_active' as any, '=', true)
+ .where('deleted_at' as any, 'is', null)
+ .executeTakeFirst() as { fields_json?: unknown } | undefined

- const fieldsJson = rows?.fields_json
+ const fieldsJson = row?.fields_json
  const entries: EncryptionMapEntry[] = Array.isArray(fieldsJson)
  ? fieldsJson.map((f: { field: string; hashField?: string | null }) => ({
  field: f.field,
@@ -122,11 +123,11 @@ export function registerSearchModule(
  // Token strategy (always available unless explicitly skipped)
  if (!options?.skipTokens) {
  try {
- const em = container.resolve<{ getConnection: () => { getKnex: () => Knex } }>('em')
- const knex = em.getConnection().getKnex()
- strategies.push(new TokenSearchStrategy(knex))
+ const em = container.resolve<any>('em')
+ const db = em.getKysely() as Kysely<any>
+ strategies.push(new TokenSearchStrategy(db))
  } catch {
- // knex not available via em, skipping TokenSearchStrategy
+ // Kysely not available via em, skipping TokenSearchStrategy
  }
  }

@@ -163,11 +164,11 @@ export function registerSearchModule(
  let encryptionMapResolver: ((entityId: EntityId) => Promise<EncryptionMapEntry[]>) | undefined
  if (shouldExcludeEncryptedFields()) {
  try {
- const em = container.resolve<{ getConnection: () => { getKnex: () => Knex } }>('em')
- const knex = em.getConnection().getKnex()
- encryptionMapResolver = createEncryptionMapResolver(knex)
+ const em = container.resolve<any>('em')
+ const db = em.getKysely() as Kysely<any>
+ encryptionMapResolver = createEncryptionMapResolver(db)
  } catch {
- // Knex not available, encrypted field filtering disabled
+ // Kysely not available, encrypted field filtering disabled
  }
  }

@@ -206,11 +207,11 @@ export function registerSearchModule(
  // Create presenter enricher for database-based presenter resolution
  let presenterEnricher: PresenterEnricherFn | undefined
  try {
- const em = container.resolve<{ getConnection: () => { getKnex: () => Knex } }>('em')
- const knex = em.getConnection().getKnex()
- presenterEnricher = createPresenterEnricher(knex, entityConfigMap, queryEngine, encryptionService)
+ const em = container.resolve<any>('em')
+ const db = em.getKysely() as Kysely<any>
+ presenterEnricher = createPresenterEnricher(db, entityConfigMap, queryEngine, encryptionService)
  } catch {
- // knex not available, presenter enrichment disabled
+ // Kysely not available, presenter enrichment disabled
  }

  // Create search service
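
Note: in the rewritten encryption-map resolver, Knex's .where(col, value), .whereNull(col) and .first() map onto Kysely's explicit operator form and executeTakeFirst(). A standalone version of the same lookup, stripped of the caching layer (a sketch; assumes an untyped Kysely<any> handle, and the EncryptionMapRow alias is introduced here purely for illustration):

    import type { Kysely } from 'kysely'

    type EncryptionMapRow = { fields_json?: unknown }

    // Fetch the active, non-deleted encryption map row for a single entity.
    async function loadEncryptionMap(db: Kysely<any>, entityId: string): Promise<EncryptionMapRow | undefined> {
      return await db
        .selectFrom('encryption_maps')
        .select(['fields_json'])
        .where('entity_id', '=', entityId)
        .where('is_active', '=', true)
        .where('deleted_at', 'is', null) // Kysely's spelling of Knex's whereNull('deleted_at')
        .executeTakeFirst() as EncryptionMapRow | undefined
    }

The shipped resolver additionally memoizes the parsed entries per entity with an expiry, so repeated indexing runs do not hit encryption_maps on every record.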