@open-mercato/search 0.5.1-develop.2691.d8a0934b37 → 0.5.1-develop.2694.732417c5ec

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/di.js +9 -9
  2. package/dist/di.js.map +2 -2
  3. package/dist/lib/presenter-enricher.js +14 -14
  4. package/dist/lib/presenter-enricher.js.map +2 -2
  5. package/dist/modules/search/api/embeddings/reindex/cancel/route.js +2 -2
  6. package/dist/modules/search/api/embeddings/reindex/cancel/route.js.map +2 -2
  7. package/dist/modules/search/api/embeddings/reindex/route.js +3 -3
  8. package/dist/modules/search/api/embeddings/reindex/route.js.map +2 -2
  9. package/dist/modules/search/api/reindex/cancel/route.js +2 -2
  10. package/dist/modules/search/api/reindex/cancel/route.js.map +2 -2
  11. package/dist/modules/search/api/reindex/route.js +4 -4
  12. package/dist/modules/search/api/reindex/route.js.map +2 -2
  13. package/dist/modules/search/api/settings/route.js +3 -3
  14. package/dist/modules/search/api/settings/route.js.map +2 -2
  15. package/dist/modules/search/lib/reindex-lock.js +20 -17
  16. package/dist/modules/search/lib/reindex-lock.js.map +2 -2
  17. package/dist/modules/search/subscribers/fulltext_upsert.js +2 -2
  18. package/dist/modules/search/subscribers/fulltext_upsert.js.map +2 -2
  19. package/dist/modules/search/subscribers/vector_delete.js +2 -2
  20. package/dist/modules/search/subscribers/vector_delete.js.map +2 -2
  21. package/dist/modules/search/subscribers/vector_upsert.js +2 -2
  22. package/dist/modules/search/subscribers/vector_upsert.js.map +2 -2
  23. package/dist/modules/search/workers/fulltext-index.worker.js +7 -7
  24. package/dist/modules/search/workers/fulltext-index.worker.js.map +2 -2
  25. package/dist/modules/search/workers/vector-index.worker.js +7 -7
  26. package/dist/modules/search/workers/vector-index.worker.js.map +2 -2
  27. package/dist/strategies/token.strategy.js +15 -10
  28. package/dist/strategies/token.strategy.js.map +2 -2
  29. package/jest.config.cjs +4 -2
  30. package/package.json +4 -4
  31. package/src/__tests__/presenter-enricher.test.ts +17 -60
  32. package/src/__tests__/workers.test.ts +20 -21
  33. package/src/di.ts +22 -21
  34. package/src/lib/presenter-enricher.ts +21 -20
  35. package/src/modules/search/api/embeddings/reindex/cancel/route.ts +4 -3
  36. package/src/modules/search/api/embeddings/reindex/route.ts +5 -4
  37. package/src/modules/search/api/reindex/cancel/route.ts +4 -3
  38. package/src/modules/search/api/reindex/route.ts +5 -5
  39. package/src/modules/search/api/settings/route.ts +5 -4
  40. package/src/modules/search/lib/reindex-lock.ts +50 -32
  41. package/src/modules/search/subscribers/fulltext_upsert.ts +6 -2
  42. package/src/modules/search/subscribers/vector_delete.ts +6 -2
  43. package/src/modules/search/subscribers/vector_upsert.ts +6 -2
  44. package/src/modules/search/workers/fulltext-index.worker.ts +10 -9
  45. package/src/modules/search/workers/vector-index.worker.ts +10 -9
  46. package/src/strategies/token.strategy.ts +25 -19
@@ -1,4 +1,4 @@
1
- import type { Knex } from 'knex'
1
+ import type { Kysely } from 'kysely'
2
2
  import type {
3
3
  SearchBuildContext,
4
4
  SearchResult,
@@ -40,7 +40,7 @@ function chunk<T>(array: T[], size: number): T[][] {
40
40
  * Uses OR conditions to fetch all needed docs in one round trip.
41
41
  */
42
42
  async function fetchDocsBatch(
43
- knex: Knex,
43
+ db: Kysely<any>,
44
44
  byEntityType: Map<string, SearchResult[]>,
45
45
  tenantId: string,
46
46
  organizationId?: string | null,
@@ -70,27 +70,28 @@ async function fetchDocsBatch(
70
70
  }
71
71
 
72
72
  // Build query with OR conditions per entity type
73
- const query = knex('entity_indexes')
74
- .select('entity_type', 'entity_id', 'doc')
75
- .where('tenant_id', tenantId)
76
- .whereNull('deleted_at')
77
- .where((builder) => {
78
- for (const [entityType, recordIds] of chunkByType) {
79
- builder.orWhere((sub) => {
80
- sub.where('entity_type', entityType).whereIn('entity_id', recordIds)
81
- })
82
- }
83
- })
73
+ let query = db
74
+ .selectFrom('entity_indexes' as any)
75
+ .select(['entity_type' as any, 'entity_id' as any, 'doc' as any])
76
+ .where('tenant_id' as any, '=', tenantId)
77
+ .where('deleted_at' as any, 'is', null)
78
+ .where((eb: any) => eb.or(
79
+ Array.from(chunkByType.entries()).map(([entityType, recordIds]) => eb.and([
80
+ eb('entity_type' as any, '=', entityType),
81
+ eb('entity_id' as any, 'in', recordIds),
82
+ ])),
83
+ ))
84
84
 
85
85
  // Add organization filter if provided
86
86
  if (organizationId) {
87
- query.where((builder) => {
88
- builder.where('organization_id', organizationId).orWhereNull('organization_id')
89
- })
87
+ query = query.where((eb: any) => eb.or([
88
+ eb('organization_id' as any, '=', organizationId),
89
+ eb('organization_id' as any, 'is', null),
90
+ ]))
90
91
  }
91
92
 
92
- const rows = await query
93
- allDocs.push(...(rows as typeof allDocs))
93
+ const rows = await query.execute() as Array<{ entity_type: string; entity_id: string; doc: Record<string, unknown> }>
94
+ allDocs.push(...rows)
94
95
  }
95
96
 
96
97
  return allDocs
@@ -196,7 +197,7 @@ async function computePresenterAndLinks(
196
197
  * - Automatic decryption of encrypted fields when encryption service is provided
197
198
  */
198
199
  export function createPresenterEnricher(
199
- knex: Knex,
200
+ db: Kysely<any>,
200
201
  entityConfigMap: Map<EntityId, SearchEntityConfig>,
201
202
  queryEngine?: QueryEngine,
202
203
  encryptionService?: TenantDataEncryptionService | null,
@@ -215,7 +216,7 @@ export function createPresenterEnricher(
215
216
  }
216
217
 
217
218
  // Single batch query for all docs across all entity types
218
- const rawDocs = await fetchDocsBatch(knex, byEntityType, tenantId, organizationId)
219
+ const rawDocs = await fetchDocsBatch(db, byEntityType, tenantId, organizationId)
219
220
 
220
221
  // Decrypt docs in parallel using DEK cache for efficiency
221
222
  const dekCache = new Map<string | null, string | null>()
@@ -2,8 +2,9 @@ import { NextResponse } from 'next/server'
2
2
  import { createRequestContainer } from '@open-mercato/shared/lib/di/container'
3
3
  import { getAuthFromRequest } from '@open-mercato/shared/lib/auth/server'
4
4
  import type { Queue } from '@open-mercato/queue'
5
- import type { Knex } from 'knex'
5
+
6
6
  import type { EntityManager } from '@mikro-orm/postgresql'
7
+ import type { Kysely } from 'kysely'
7
8
  import type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'
8
9
  import { clearReindexLock } from '../../../../lib/reindex-lock'
9
10
  import { cancelReindexProgress } from '../../../../lib/reindex-progress'
@@ -25,7 +26,7 @@ export async function POST(req: Request) {
25
26
  const container = await createRequestContainer()
26
27
  const em = container.resolve('em') as EntityManager
27
28
  const progressService = container.resolve('progressService') as ProgressService
28
- const knex = (em.getConnection() as unknown as { getKnex: () => Knex }).getKnex()
29
+ const db = (em as unknown as { getKysely: () => Kysely<any> }).getKysely()
29
30
 
30
31
  let queue: Queue | undefined
31
32
  try {
@@ -45,7 +46,7 @@ export async function POST(req: Request) {
45
46
  }
46
47
  }
47
48
 
48
- await clearReindexLock(knex, auth.tenantId, 'vector', auth.orgId ?? null)
49
+ await clearReindexLock(db, auth.tenantId, 'vector', auth.orgId ?? null)
49
50
  await cancelReindexProgress({
50
51
  em,
51
52
  progressService,
@@ -4,8 +4,9 @@ import { getAuthFromRequest } from '@open-mercato/shared/lib/auth/server'
4
4
  import type { SearchIndexer } from '../../../../../indexer/search-indexer'
5
5
  import type { EmbeddingService } from '../../../../../vector'
6
6
  import type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'
7
- import type { Knex } from 'knex'
7
+
8
8
  import type { EntityManager } from '@mikro-orm/postgresql'
9
+ import type { Kysely } from 'kysely'
9
10
  import { recordIndexerLog } from '@open-mercato/shared/lib/indexers/status-log'
10
11
  import { resolveTranslations } from '@open-mercato/shared/lib/i18n/server'
11
12
  import { resolveEmbeddingConfig } from '../../../lib/embedding-config'
@@ -42,10 +43,10 @@ export async function POST(req: Request) {
42
43
  const container = await createRequestContainer()
43
44
  const em = container.resolve('em') as EntityManager
44
45
  const progressService = container.resolve('progressService') as ProgressService
45
- const knex = (em.getConnection() as unknown as { getKnex: () => Knex }).getKnex()
46
+ const db = (em as unknown as { getKysely: () => Kysely<any> }).getKysely()
46
47
 
47
48
  // Check if another vector reindex operation is already in progress
48
- const existingLock = await getReindexLockStatus(knex, auth.tenantId, { type: 'vector' })
49
+ const existingLock = await getReindexLockStatus(db, auth.tenantId, { type: 'vector' })
49
50
  if (existingLock) {
50
51
  const startedAt = new Date(existingLock.startedAt)
51
52
  return NextResponse.json(
@@ -65,7 +66,7 @@ export async function POST(req: Request) {
65
66
  }
66
67
 
67
68
  // Acquire lock before starting the operation
68
- const { acquired: lockAcquired } = await acquireReindexLock(knex, {
69
+ const { acquired: lockAcquired } = await acquireReindexLock(db, {
69
70
  type: 'vector',
70
71
  action: entityId ? `reindex:${entityId}` : 'reindex:all',
71
72
  tenantId: auth.tenantId,
@@ -2,8 +2,9 @@ import { NextResponse } from 'next/server'
2
2
  import { createRequestContainer } from '@open-mercato/shared/lib/di/container'
3
3
  import { getAuthFromRequest } from '@open-mercato/shared/lib/auth/server'
4
4
  import type { Queue } from '@open-mercato/queue'
5
- import type { Knex } from 'knex'
5
+
6
6
  import type { EntityManager } from '@mikro-orm/postgresql'
7
+ import type { Kysely } from 'kysely'
7
8
  import type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'
8
9
  import { clearReindexLock } from '../../../lib/reindex-lock'
9
10
  import { cancelReindexProgress } from '../../../lib/reindex-progress'
@@ -25,7 +26,7 @@ export async function POST(req: Request) {
25
26
  const container = await createRequestContainer()
26
27
  const em = container.resolve('em') as EntityManager
27
28
  const progressService = container.resolve('progressService') as ProgressService
28
- const knex = (em.getConnection() as unknown as { getKnex: () => Knex }).getKnex()
29
+ const db = (em as unknown as { getKysely: () => Kysely<any> }).getKysely()
29
30
 
30
31
  let queue: Queue | undefined
31
32
  try {
@@ -45,7 +46,7 @@ export async function POST(req: Request) {
45
46
  }
46
47
  }
47
48
 
48
- await clearReindexLock(knex, auth.tenantId, 'fulltext', auth.orgId ?? null)
49
+ await clearReindexLock(db, auth.tenantId, 'fulltext', auth.orgId ?? null)
49
50
  await cancelReindexProgress({
50
51
  em,
51
52
  progressService,
@@ -9,7 +9,7 @@ import { recordIndexerLog } from '@open-mercato/shared/lib/indexers/status-log'
9
9
  import { recordIndexerError } from '@open-mercato/shared/lib/indexers/error-log'
10
10
  import type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'
11
11
  import type { EntityManager } from '@mikro-orm/postgresql'
12
- import type { Knex } from 'knex'
12
+ import type { Kysely } from 'kysely'
13
13
  import { searchDebug, searchError } from '../../../../lib/debug'
14
14
  import {
15
15
  acquireReindexLock,
@@ -91,10 +91,10 @@ export async function POST(req: Request) {
91
91
  const container = await createRequestContainer()
92
92
  const em = container.resolve('em') as EntityManager
93
93
  const progressService = container.resolve('progressService') as ProgressService
94
- const knex = (em.getConnection() as unknown as { getKnex: () => Knex }).getKnex()
94
+ const db = (em as unknown as { getKysely: () => Kysely<any> }).getKysely()
95
95
 
96
96
  // Check if another fulltext reindex operation is already in progress
97
- const existingLock = await getReindexLockStatus(knex, tenantId, { type: 'fulltext' })
97
+ const existingLock = await getReindexLockStatus(db, tenantId, { type: 'fulltext' })
98
98
  if (existingLock) {
99
99
  const startedAt = new Date(existingLock.startedAt)
100
100
  return NextResponse.json(
@@ -114,7 +114,7 @@ export async function POST(req: Request) {
114
114
  }
115
115
 
116
116
  // Acquire lock before starting the operation
117
- const { acquired: lockAcquired } = await acquireReindexLock(knex, {
117
+ const { acquired: lockAcquired } = await acquireReindexLock(db, {
118
118
  type: 'fulltext',
119
119
  action,
120
120
  tenantId: tenantId,
@@ -453,7 +453,7 @@ export async function POST(req: Request) {
453
453
  // Only clear lock immediately if NOT using queue mode
454
454
  // When using queue mode, workers update heartbeat and stale detection handles cleanup
455
455
  if (!useQueue) {
456
- await clearReindexLock(knex, tenantId, 'fulltext', auth.orgId ?? null)
456
+ await clearReindexLock(db, tenantId, 'fulltext', auth.orgId ?? null)
457
457
  }
458
458
 
459
459
  const disposable = container as unknown as { dispose?: () => Promise<void> }
@@ -4,8 +4,9 @@ import { getAuthFromRequest } from '@open-mercato/shared/lib/auth/server'
4
4
  import { resolveTranslations } from '@open-mercato/shared/lib/i18n/server'
5
5
  import type { SearchService } from '@open-mercato/search'
6
6
  import type { FullTextSearchStrategy } from '@open-mercato/search/strategies'
7
- import type { Knex } from 'knex'
7
+
8
8
  import type { EntityManager } from '@mikro-orm/postgresql'
9
+ import type { Kysely } from 'kysely'
9
10
  import { getReindexLockStatus } from '../../lib/reindex-lock'
10
11
  import { settingsOpenApi } from '../openapi'
11
12
 
@@ -132,10 +133,10 @@ export async function GET(req: Request) {
132
133
 
133
134
  if (auth.tenantId) {
134
135
  const em = container.resolve('em') as EntityManager
135
- const knex = (em.getConnection() as unknown as { getKnex: () => Knex }).getKnex()
136
+ const db = (em as unknown as { getKysely: () => Kysely<any> }).getKysely()
136
137
 
137
138
  // Check fulltext lock (auto-cleans stale locks based on heartbeat)
138
- const fulltextLockStatus = await getReindexLockStatus(knex, auth.tenantId, { type: 'fulltext' })
139
+ const fulltextLockStatus = await getReindexLockStatus(db, auth.tenantId, { type: 'fulltext' })
139
140
  if (fulltextLockStatus) {
140
141
  const startedAt = new Date(fulltextLockStatus.startedAt)
141
142
  fulltextReindexLock = {
@@ -149,7 +150,7 @@ export async function GET(req: Request) {
149
150
  }
150
151
 
151
152
  // Check vector lock (auto-cleans stale locks based on heartbeat)
152
- const vectorLockStatus = await getReindexLockStatus(knex, auth.tenantId, { type: 'vector' })
153
+ const vectorLockStatus = await getReindexLockStatus(db, auth.tenantId, { type: 'vector' })
153
154
  if (vectorLockStatus) {
154
155
  const startedAt = new Date(vectorLockStatus.startedAt)
155
156
  vectorReindexLock = {
@@ -1,7 +1,7 @@
1
- import type { Knex } from 'knex'
1
+
2
+ import { type Kysely, sql } from 'kysely'
2
3
  import {
3
4
  prepareJob,
4
- updateJobProgress,
5
5
  finalizeJob,
6
6
  type JobScope,
7
7
  } from '@open-mercato/core/modules/query_index/lib/jobs'
@@ -50,7 +50,7 @@ function buildScope(
50
50
  * Automatically cleans up stale locks (heartbeat older than 60 seconds).
51
51
  */
52
52
  export async function getReindexLockStatus(
53
- knex: Knex,
53
+ db: Kysely<any>,
54
54
  tenantId: string,
55
55
  options?: { type?: ReindexLockType },
56
56
  ): Promise<ReindexLockStatus | null> {
@@ -62,29 +62,41 @@ export async function getReindexLockStatus(
62
62
  const entityType = LOCK_ENTITY_TYPES[lockType]
63
63
 
64
64
  try {
65
- const job = await knex('entity_index_jobs')
66
- .where('entity_type', entityType)
67
- .whereRaw('tenant_id is not distinct from ?', [tenantId])
68
- .whereNull('finished_at')
69
- .first()
65
+ const job = await db
66
+ .selectFrom('entity_index_jobs' as any)
67
+ .selectAll()
68
+ .where('entity_type' as any, '=', entityType)
69
+ .where(sql<boolean>`tenant_id is not distinct from ${tenantId}`)
70
+ .where('finished_at' as any, 'is', null)
71
+ .executeTakeFirst() as {
72
+ id: string
73
+ status?: string | null
74
+ started_at?: Date | string | null
75
+ heartbeat_at?: Date | string | null
76
+ organization_id?: string | null
77
+ processed_count?: number | null
78
+ total_count?: number | null
79
+ } | undefined
70
80
 
71
81
  if (!job) continue
72
82
 
73
83
  // Check heartbeat staleness
74
84
  const heartbeatAt = job.heartbeat_at
75
- ? new Date(job.heartbeat_at).getTime()
85
+ ? new Date(job.heartbeat_at as string | Date).getTime()
76
86
  : 0
77
87
  const elapsed = Date.now() - heartbeatAt
78
88
 
79
89
  if (elapsed > HEARTBEAT_STALE_MS) {
80
90
  // Auto-cleanup stale lock
81
- await knex('entity_index_jobs')
82
- .where('id', job.id)
83
- .update({ finished_at: knex.fn.now() })
91
+ await db
92
+ .updateTable('entity_index_jobs' as any)
93
+ .set({ finished_at: sql`now()` } as any)
94
+ .where('id' as any, '=', job.id)
95
+ .execute()
84
96
  continue
85
97
  }
86
98
 
87
- // started_at comes as string from knex, convert if needed
99
+ // started_at comes as string from Kysely, convert if needed
88
100
  const startedAtStr = job.started_at
89
101
  ? (typeof job.started_at === 'string' ? job.started_at : new Date(job.started_at).toISOString())
90
102
  : new Date().toISOString()
@@ -94,9 +106,9 @@ export async function getReindexLockStatus(
94
106
  action: job.status || 'reindexing',
95
107
  startedAt: startedAtStr,
96
108
  tenantId,
97
- organizationId: job.organization_id,
98
- processedCount: job.processed_count,
99
- totalCount: job.total_count,
109
+ organizationId: job.organization_id ?? null,
110
+ processedCount: job.processed_count ?? null,
111
+ totalCount: job.total_count ?? null,
100
112
  }
101
113
  return result
102
114
  } catch {
@@ -112,7 +124,7 @@ export async function getReindexLockStatus(
112
124
  * Fulltext and vector locks are independent - they don't block each other.
113
125
  */
114
126
  export async function acquireReindexLock(
115
- knex: Knex,
127
+ db: Kysely<any>,
116
128
  options: {
117
129
  type: ReindexLockType
118
130
  action: string
@@ -122,7 +134,7 @@ export async function acquireReindexLock(
122
134
  },
123
135
  ): Promise<{ acquired: boolean; jobId?: string }> {
124
136
  // Check existing active lock
125
- const existing = await getReindexLockStatus(knex, options.tenantId, {
137
+ const existing = await getReindexLockStatus(db, options.tenantId, {
126
138
  type: options.type,
127
139
  })
128
140
  if (existing) {
@@ -135,7 +147,7 @@ export async function acquireReindexLock(
135
147
  options.tenantId,
136
148
  options.organizationId,
137
149
  )
138
- const jobId = await prepareJob(knex, scope, 'reindexing', {
150
+ const jobId = await prepareJob(db, scope, 'reindexing', {
139
151
  totalCount: options.totalCount,
140
152
  })
141
153
 
@@ -149,14 +161,14 @@ export async function acquireReindexLock(
149
161
  * Release the reindex lock for a specific type.
150
162
  */
151
163
  export async function clearReindexLock(
152
- knex: Knex,
164
+ db: Kysely<any>,
153
165
  tenantId: string,
154
166
  type: ReindexLockType,
155
167
  organizationId?: string | null,
156
168
  ): Promise<void> {
157
169
  try {
158
170
  const scope = buildScope(type, tenantId, organizationId)
159
- await finalizeJob(knex, scope)
171
+ await finalizeJob(db, scope)
160
172
  } catch {
161
173
  // Ignore errors when clearing lock
162
174
  }
@@ -170,7 +182,7 @@ export async function clearReindexLock(
170
182
  * recreate the lock so the reindex button stays disabled while processing.
171
183
  */
172
184
  export async function updateReindexProgress(
173
- knex: Knex,
185
+ db: Kysely<any>,
174
186
  tenantId: string,
175
187
  type: ReindexLockType,
176
188
  processedDelta: number,
@@ -179,21 +191,27 @@ export async function updateReindexProgress(
179
191
  try {
180
192
  const scope = buildScope(type, tenantId, organizationId)
181
193
  const entityType = LOCK_ENTITY_TYPES[type]
194
+ const delta = Math.max(0, processedDelta)
182
195
 
183
196
  // Try to update existing active job first
184
- const updated = await knex('entity_index_jobs')
185
- .where('entity_type', entityType)
186
- .whereRaw('tenant_id is not distinct from ?', [tenantId])
187
- .whereRaw('organization_id is not distinct from ?', [organizationId ?? null])
188
- .whereNull('finished_at')
189
- .update({
190
- processed_count: knex.raw('coalesce(processed_count, 0) + ?', [Math.max(0, processedDelta)]),
191
- heartbeat_at: knex.fn.now(),
192
- })
197
+ const result = await db
198
+ .updateTable('entity_index_jobs' as any)
199
+ .set({
200
+ processed_count: sql`coalesce(processed_count, 0) + ${delta}`,
201
+ heartbeat_at: sql`now()`,
202
+ } as any)
203
+ .where('entity_type' as any, '=', entityType)
204
+ .where(sql<boolean>`tenant_id is not distinct from ${tenantId}`)
205
+ .where(sql<boolean>`organization_id is not distinct from ${organizationId ?? null}`)
206
+ .where('finished_at' as any, 'is', null)
207
+ .executeTakeFirst()
208
+
209
+ // Kysely returns numUpdatedRows as bigint; coerce
210
+ const updated = Number(result?.numUpdatedRows ?? 0n)
193
211
 
194
212
  // If no active lock exists, recreate it
195
213
  if (updated === 0) {
196
- await prepareJob(knex, scope, 'reindexing')
214
+ await prepareJob(db, scope, 'reindexing')
197
215
  }
198
216
  } catch {
199
217
  // Ignore errors when updating progress
@@ -37,9 +37,13 @@ export default async function handle(payload: Payload, ctx: HandlerContext) {
37
37
  // Resolve missing scope from DB if needed (same pattern as vector_upsert.ts)
38
38
  if ((organizationId == null || tenantId == null) && em) {
39
39
  try {
40
- const knex = em.getConnection().getKnex()
40
+ const db = em.getKysely()
41
41
  const table = resolveEntityTableName(em, entityType)
42
- const row = await knex(table).select(['organization_id', 'tenant_id']).where({ id: recordId }).first()
42
+ const row = await db
43
+ .selectFrom(table as any)
44
+ .select(['organization_id' as any, 'tenant_id' as any])
45
+ .where('id' as any, '=', recordId)
46
+ .executeTakeFirst() as { organization_id?: string | null; tenant_id?: string | null } | undefined
43
47
  if (organizationId == null) organizationId = row?.organization_id ?? organizationId
44
48
  if (tenantId == null) tenantId = row?.tenant_id ?? tenantId
45
49
  } catch {
@@ -33,9 +33,13 @@ export default async function handle(payload: Payload, ctx: HandlerContext) {
33
33
 
34
34
  if ((organizationId == null || tenantId == null) && em) {
35
35
  try {
36
- const knex = em.getConnection().getKnex()
36
+ const db = em.getKysely()
37
37
  const table = resolveEntityTableName(em, entityType)
38
- const row = await knex(table).select(['organization_id', 'tenant_id']).where({ id: recordId }).first()
38
+ const row = await db
39
+ .selectFrom(table as any)
40
+ .select(['organization_id' as any, 'tenant_id' as any])
41
+ .where('id' as any, '=', recordId)
42
+ .executeTakeFirst() as { organization_id?: string | null; tenant_id?: string | null } | undefined
39
43
  if (organizationId == null) organizationId = row?.organization_id ?? organizationId
40
44
  if (tenantId == null) tenantId = row?.tenant_id ?? tenantId
41
45
  } catch {
@@ -33,9 +33,13 @@ export default async function handle(payload: Payload, ctx: HandlerContext) {
33
33
 
34
34
  if ((organizationId == null || tenantId == null) && em) {
35
35
  try {
36
- const knex = em.getConnection().getKnex()
36
+ const db = em.getKysely()
37
37
  const table = resolveEntityTableName(em, entityType)
38
- const row = await knex(table).select(['organization_id', 'tenant_id']).where({ id: recordId }).first()
38
+ const row = await db
39
+ .selectFrom(table as any)
40
+ .select(['organization_id' as any, 'tenant_id' as any])
41
+ .where('id' as any, '=', recordId)
42
+ .executeTakeFirst() as { organization_id?: string | null; tenant_id?: string | null } | undefined
39
43
  if (organizationId == null) organizationId = row?.organization_id ?? organizationId
40
44
  if (tenantId == null) tenantId = row?.tenant_id ?? tenantId
41
45
  } catch {
@@ -1,9 +1,10 @@
1
1
  import type { QueuedJob, JobContext, WorkerMeta } from '@open-mercato/queue'
2
+ import type { Kysely } from 'kysely'
2
3
  import { FULLTEXT_INDEXING_QUEUE_NAME, type FulltextIndexJobPayload } from '../../../queue/fulltext-indexing'
3
4
  import type { FullTextSearchStrategy } from '../../../strategies/fulltext.strategy'
4
5
  import type { SearchIndexer } from '../../../indexer/search-indexer'
5
6
  import type { EntityManager } from '@mikro-orm/postgresql'
6
- import type { Knex } from 'knex'
7
+
7
8
  import type { EntityId } from '@open-mercato/shared/modules/entities'
8
9
  import { recordIndexerLog } from '@open-mercato/shared/lib/indexers/status-log'
9
10
  import { recordIndexerError } from '@open-mercato/shared/lib/indexers/error-log'
@@ -51,16 +52,16 @@ export async function handleFulltextIndexJob(
51
52
  return
52
53
  }
53
54
 
54
- // Resolve EntityManager for logging and knex for database queries
55
+ // Resolve EntityManager for logging and Kysely for database queries
55
56
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
56
57
  let em: any | null = null
57
- let knex: Knex | null = null
58
+ let db: Kysely<any> | null = null
58
59
  try {
59
60
  em = ctx.resolve('em') as EntityManager
60
- knex = (em.getConnection() as unknown as { getKnex: () => Knex }).getKnex()
61
+ db = (em as unknown as { getKysely: () => Kysely<any> }).getKysely()
61
62
  } catch {
62
63
  em = null
63
- knex = null
64
+ db = null
64
65
  }
65
66
 
66
67
  // Resolve searchIndexer for loading fresh data
@@ -193,8 +194,8 @@ export async function handleFulltextIndexJob(
193
194
  }
194
195
 
195
196
  // Update heartbeat to signal worker is still processing
196
- if (knex && records.length > 0) {
197
- await updateReindexProgress(knex, tenantId, 'fulltext', successCount, organizationId ?? null)
197
+ if (db && records.length > 0) {
198
+ await updateReindexProgress(db, tenantId, 'fulltext', successCount, organizationId ?? null)
198
199
  }
199
200
  if (progressService && em && records.length > 0) {
200
201
  const completed = await incrementReindexProgress({
@@ -205,8 +206,8 @@ export async function handleFulltextIndexJob(
205
206
  organizationId: organizationId ?? null,
206
207
  delta: successCount,
207
208
  })
208
- if (completed && knex) {
209
- await clearReindexLock(knex, tenantId, 'fulltext', organizationId ?? null)
209
+ if (completed && db) {
210
+ await clearReindexLock(db, tenantId, 'fulltext', organizationId ?? null)
210
211
  }
211
212
  }
212
213
 
@@ -1,9 +1,10 @@
1
1
  import type { QueuedJob, JobContext, WorkerMeta } from '@open-mercato/queue'
2
+ import type { Kysely } from 'kysely'
2
3
  import { VECTOR_INDEXING_QUEUE_NAME, type VectorIndexJobPayload } from '../../../queue/vector-indexing'
3
4
  import type { SearchIndexer } from '../../../indexer/search-indexer'
4
5
  import type { EmbeddingService } from '../../../vector'
5
6
  import type { EntityManager } from '@mikro-orm/postgresql'
6
- import type { Knex } from 'knex'
7
+
7
8
  import type { ProgressService } from '@open-mercato/core/modules/progress/lib/progressService'
8
9
  import { recordIndexerError } from '@open-mercato/shared/lib/indexers/error-log'
9
10
  import { applyCoverageAdjustments, createCoverageAdjustments } from '@open-mercato/core/modules/query_index/lib/coverage'
@@ -61,14 +62,14 @@ export async function handleVectorIndexJob(
61
62
  return
62
63
  }
63
64
 
64
- // Get knex for heartbeat updates
65
- let knex: Knex | null = null
65
+ // Get Kysely for heartbeat updates
66
+ let db: Kysely<any> | null = null
66
67
  let em: EntityManager | null = null
67
68
  try {
68
69
  em = ctx.resolve('em') as EntityManager
69
- knex = (em.getConnection() as unknown as { getKnex: () => Knex }).getKnex()
70
+ db = (em as unknown as { getKysely: () => Kysely<any> }).getKysely()
70
71
  } catch {
71
- knex = null
72
+ db = null
72
73
  em = null
73
74
  }
74
75
 
@@ -117,8 +118,8 @@ export async function handleVectorIndexJob(
117
118
  }
118
119
 
119
120
  // Update heartbeat to signal worker is still processing
120
- if (knex && records.length > 0) {
121
- await updateReindexProgress(knex, tenantId, 'vector', successCount, organizationId ?? null)
121
+ if (db && records.length > 0) {
122
+ await updateReindexProgress(db, tenantId, 'vector', successCount, organizationId ?? null)
122
123
  }
123
124
  if (progressService && em && records.length > 0) {
124
125
  const completed = await incrementReindexProgress({
@@ -129,8 +130,8 @@ export async function handleVectorIndexJob(
129
130
  organizationId: organizationId ?? null,
130
131
  delta: successCount,
131
132
  })
132
- if (completed && knex) {
133
- await clearReindexLock(knex, tenantId, 'vector', organizationId ?? null)
133
+ if (completed && db) {
134
+ await clearReindexLock(db, tenantId, 'vector', organizationId ?? null)
134
135
  }
135
136
  }
136
137