@stonyx/orm 0.2.1-beta.80 → 0.2.1-beta.82

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,524 @@
1
+ import { getPool, closePool } from './connection.js';
2
+ import { ensureMigrationsTable, getAppliedMigrations, getMigrationFiles, applyMigration, parseMigrationFile } from './migration-runner.js';
3
+ import { introspectModels, introspectViews, getTopologicalOrder, schemasToSnapshot } from './schema-introspector.js';
4
+ import { loadLatestSnapshot, detectSchemaDrift } from './migration-generator.js';
5
+ import { buildInsert, buildUpdate, buildDelete, buildSelect, buildVectorSearch, buildHybridSearch } from './query-builder.js';
6
+ import { createRecord, store } from '@stonyx/orm';
7
+ import { confirm } from '@stonyx/utils/prompt';
8
+ import { readFile } from '@stonyx/utils/file';
9
+ import { getPluralName } from '../plural-registry.js';
10
+ import config from 'stonyx/config';
11
+ import log from 'stonyx/log';
12
+ import path from 'path';
13
+
14
// Default collaborator set for PostgresDB. Every entry can be overridden
// through the PostgresDB constructor's `deps` argument (useful for tests).
const defaultDeps = {
  // connection
  getPool,
  closePool,
  // migrations
  ensureMigrationsTable,
  getAppliedMigrations,
  getMigrationFiles,
  applyMigration,
  parseMigrationFile,
  // schema introspection
  introspectModels,
  introspectViews,
  getTopologicalOrder,
  schemasToSnapshot,
  // migration generation
  loadLatestSnapshot,
  detectSchemaDrift,
  // SQL builders
  buildInsert,
  buildUpdate,
  buildDelete,
  buildSelect,
  buildVectorSearch,
  buildHybridSearch,
  // ORM / utilities
  createRecord,
  store,
  confirm,
  readFile,
  getPluralName,
  config,
  log,
  path,
};
22
+
23
/**
 * PostgreSQL persistence adapter for the stonyx ORM.
 *
 * Responsibilities:
 *  - connection-pool lifecycle (init/shutdown),
 *  - migration bootstrap and schema-drift warnings on startup,
 *  - preloading `memory: true` models/views into the in-memory store,
 *  - on-demand reads (findRecord/findAll, vector and hybrid search),
 *  - write-through persistence for create/update/delete operations.
 *
 * Singleton: constructing a second instance returns the first one.
 */
export default class PostgresDB {
  /**
   * @param {Object} [deps] - Partial overrides for the module-level
   *   `defaultDeps`; any collaborator can be injected for testing.
   */
  constructor(deps = {}) {
    if (PostgresDB.instance) return PostgresDB.instance;
    PostgresDB.instance = this;

    this.deps = { ...defaultDeps, ...deps };
    this.pool = null;
    this.pgConfig = this.deps.config.orm.postgres;
  }

  /**
   * Acquire the pool, make sure the migrations bookkeeping table exists,
   * and preload memory-resident records.
   */
  async init() {
    this.pool = await this.deps.getPool(this.pgConfig);
    await this.deps.ensureMigrationsTable(this.pool, this.pgConfig.migrationsTable);
    await this.loadMemoryRecords();
  }

  /**
   * Interactive startup flow:
   *  1. apply pending migrations (after user confirmation),
   *  2. offer to generate+apply an initial migration when none exist but models do,
   *  3. warn about schema drift against the latest snapshot.
   */
  async startup() {
    const migrationsPath = this.deps.path.resolve(this.deps.config.rootPath, this.pgConfig.migrationsDir);

    // Check for pending migrations
    const applied = await this.deps.getAppliedMigrations(this.pool, this.pgConfig.migrationsTable);
    const files = await this.deps.getMigrationFiles(migrationsPath);
    const pending = files.filter(f => !applied.includes(f));

    if (pending.length > 0) {
      this.deps.log.db(`${pending.length} pending migration(s) found.`);

      const shouldApply = await this.deps.confirm(`${pending.length} pending migration(s) found. Apply now?`);

      if (shouldApply) {
        for (const filename of pending) {
          const content = await this.deps.readFile(this.deps.path.join(migrationsPath, filename));
          const { up } = this.deps.parseMigrationFile(content);

          await this.deps.applyMigration(this.pool, filename, up, this.pgConfig.migrationsTable);
          // FIX: previously logged a broken placeholder ("$(unknown)") instead
          // of interpolating the migration filename.
          this.deps.log.db(`Applied migration: ${filename}`);
        }

        // Reload records after applying migrations
        await this.loadMemoryRecords();
      } else {
        this.deps.log.warn('Skipping pending migrations. Schema may be outdated.');
      }
    } else if (files.length === 0) {
      // No migration files at all: offer to bootstrap from the current models.
      const schemas = this.deps.introspectModels();
      const modelCount = Object.keys(schemas).length;

      if (modelCount > 0) {
        const shouldGenerate = await this.deps.confirm(
          `No migrations found but ${modelCount} model(s) detected. Generate and apply initial migration?`
        );

        if (shouldGenerate) {
          // Lazy import to avoid a static circular dependency with the generator.
          const { generateMigration } = await import('./migration-generator.js');
          const result = await generateMigration('initial_setup');

          if (result) {
            const { up } = this.deps.parseMigrationFile(result.content);
            await this.deps.applyMigration(this.pool, result.filename, up, this.pgConfig.migrationsTable);
            this.deps.log.db(`Applied migration: ${result.filename}`);
            await this.loadMemoryRecords();
          }
        } else {
          this.deps.log.warn('Skipping initial migration. Tables may not exist.');
        }
      }
    }

    // Check for schema drift
    const schemas = this.deps.introspectModels();
    const snapshot = await this.deps.loadLatestSnapshot(this.deps.path.resolve(this.deps.config.rootPath, this.pgConfig.migrationsDir));

    if (Object.keys(snapshot).length > 0) {
      const drift = this.deps.detectSchemaDrift(schemas, snapshot);

      if (drift.hasChanges) {
        this.deps.log.warn('Schema drift detected: models have changed since the last migration.');
        this.deps.log.warn('Run `stonyx db:generate-migration` to create a new migration.');
      }
    }
  }

  /** Close the pool and drop the reference. */
  async shutdown() {
    await this.deps.closePool();
    this.pool = null;
  }

  async save() {
    // No-op: PostgreSQL persists data immediately via persist()
  }

  /**
   * Loads only models with memory: true into the in-memory store on startup.
   * Models with memory: false are skipped — accessed on-demand via find()/findAll().
   */
  async loadMemoryRecords() {
    const schemas = this.deps.introspectModels();
    const order = this.deps.getTopologicalOrder(schemas);
    const Orm = (await import('@stonyx/orm')).default;

    // Models load by default unless they opt out with `memory: false`.
    for (const modelName of order) {
      const { modelClass } = Orm.instance.getRecordClasses(modelName);
      if (modelClass?.memory === false) {
        this.deps.log.db(`Skipping memory load for '${modelName}' (memory: false)`);
        continue;
      }

      await this._loadIntoStore(modelName, schemas[modelName], 'Table');
    }

    // Views are opt-in: only loaded when they declare `memory: true`.
    const viewSchemas = this.deps.introspectViews();

    for (const [viewName, viewSchema] of Object.entries(viewSchemas)) {
      const { modelClass: viewClass } = Orm.instance.getRecordClasses(viewName);
      if (viewClass?.memory !== true) {
        this.deps.log.db(`Skipping memory load for view '${viewName}' (memory: false)`);
        continue;
      }

      await this._loadIntoStore(viewName, this._viewToSchema(viewSchema), 'View');
    }
  }

  /**
   * SELECT every row of one table/view into the in-memory store.
   * A missing relation (PG 42P01, undefined_table) is logged and skipped,
   * so startup survives not-yet-migrated schemas.
   * @param {string} name - Model or view name used for record creation.
   * @param {Object} schema - Normalized schema ({ table, columns, foreignKeys }).
   * @param {string} label - 'Table' or 'View', only used in log messages.
   * @private
   */
  async _loadIntoStore(name, schema, label) {
    const { sql, values } = this.deps.buildSelect(schema.table);

    try {
      const result = await this.pool.query(sql, values);

      for (const row of result.rows) {
        const rawData = this._rowToRawData(row, schema);
        this.deps.createRecord(name, rawData, { isDbRecord: true, serialize: false, transform: false });
      }
    } catch (error) {
      // 42P01 = undefined_table (PG equivalent of ER_NO_SUCH_TABLE)
      if (error.code === '42P01') {
        this.deps.log.db(`${label} '${schema.table}' does not exist yet. Skipping load for '${name}'.`);
        return;
      }

      throw error;
    }
  }

  /**
   * Normalize a view schema to the model-schema shape the query builder expects.
   * @private
   */
  _viewToSchema(viewSchema) {
    return { table: viewSchema.viewName, columns: viewSchema.columns || {}, foreignKeys: viewSchema.foreignKeys || {} };
  }

  /**
   * Look up a schema by model name, falling back to views.
   * @param {string} modelName
   * @returns {Object|undefined} Normalized schema, or undefined when unknown.
   * @private
   */
  _resolveSchema(modelName) {
    const schema = this.deps.introspectModels()[modelName];
    if (schema) return schema;

    const viewSchema = this.deps.introspectViews()[modelName];
    return viewSchema ? this._viewToSchema(viewSchema) : undefined;
  }

  /**
   * @deprecated Use loadMemoryRecords() instead. Kept for backward compatibility.
   */
  async loadAllRecords() {
    return this.loadMemoryRecords();
  }

  /**
   * Find a single record by ID from PostgreSQL.
   * Does NOT cache the result in the store for memory: false models.
   * @param {string} modelName
   * @param {string|number} id
   * @returns {Promise<Record|undefined>}
   */
  async findRecord(modelName, id) {
    const schema = this._resolveSchema(modelName);
    if (!schema) return undefined;

    const { sql, values } = this.deps.buildSelect(schema.table, { id });

    try {
      const result = await this.pool.query(sql, values);

      if (result.rows.length === 0) return undefined;

      const rawData = this._rowToRawData(result.rows[0], schema);
      const record = this.deps.createRecord(modelName, rawData, { isDbRecord: true, serialize: false, transform: false });

      this._evictIfNotMemory(modelName, record);

      return record;
    } catch (error) {
      // Treat a missing table like a missing record.
      if (error.code === '42P01') return undefined;
      throw error;
    }
  }

  /**
   * Find all records of a model from PostgreSQL, with optional conditions.
   * @param {string} modelName
   * @param {Object} [conditions] - Optional WHERE conditions (key-value pairs)
   * @returns {Promise<Record[]>}
   */
  async findAll(modelName, conditions) {
    const schema = this._resolveSchema(modelName);
    if (!schema) return [];

    const { sql, values } = this.deps.buildSelect(schema.table, conditions);

    try {
      const result = await this.pool.query(sql, values);

      const records = result.rows.map(row => {
        const rawData = this._rowToRawData(row, schema);
        return this.deps.createRecord(modelName, rawData, { isDbRecord: true, serialize: false, transform: false });
      });

      for (const record of records) {
        this._evictIfNotMemory(modelName, record);
      }

      return records;
    } catch (error) {
      if (error.code === '42P01') return [];
      throw error;
    }
  }

  /**
   * Perform a vector similarity search using cosine distance.
   * @param {string} modelName
   * @param {string} vectorColumn - Name of the vector column
   * @param {number[]} queryVector - The query vector
   * @param {Object} [options]
   * @param {number} [options.limit=10]
   * @param {Object} [options.where] - Additional conditions
   * @returns {Promise<{ record: Record, distance: number }[]>}
   */
  async vectorSearch(modelName, vectorColumn, queryVector, options = {}) {
    // Note: vector search is model-only; views are intentionally not consulted.
    const schema = this.deps.introspectModels()[modelName];
    if (!schema) return [];

    const { sql, values } = this.deps.buildVectorSearch(schema.table, vectorColumn, queryVector, options);
    return this._runScoredQuery(modelName, schema, sql, values);
  }

  /**
   * Perform a hybrid search combining vector similarity with text filtering.
   * @param {string} modelName
   * @param {string} vectorColumn
   * @param {number[]} queryVector
   * @param {string} textColumn
   * @param {string} textQuery
   * @param {Object} [options]
   * @returns {Promise<{ record: Record, distance: number }[]>}
   */
  async hybridSearch(modelName, vectorColumn, queryVector, textColumn, textQuery, options = {}) {
    const schema = this.deps.introspectModels()[modelName];
    if (!schema) return [];

    const { sql, values } = this.deps.buildHybridSearch(schema.table, vectorColumn, queryVector, textColumn, textQuery, options);
    return this._runScoredQuery(modelName, schema, sql, values);
  }

  /**
   * Execute a search whose rows carry a synthetic `distance` column and map
   * each row to { record, distance }. Returns [] when the table is missing.
   * @private
   */
  async _runScoredQuery(modelName, schema, sql, values) {
    try {
      const result = await this.pool.query(sql, values);

      return result.rows.map(row => {
        const distance = row.distance;
        // Strip the search artifact before hydrating the record.
        delete row.distance;
        const rawData = this._rowToRawData(row, schema);
        const record = this.deps.createRecord(modelName, rawData, { isDbRecord: true, serialize: false, transform: false });
        this._evictIfNotMemory(modelName, record);
        return { record, distance };
      });
    } catch (error) {
      if (error.code === '42P01') return [];
      throw error;
    }
  }

  /**
   * Remove a record from the in-memory store if its model has memory: false.
   * The record object itself survives — the caller retains the reference.
   * @private
   */
  _evictIfNotMemory(modelName, record) {
    const store = this.deps.store;

    if (store._memoryResolver && !store._memoryResolver(modelName)) {
      // Support both store APIs: get(modelName) or the raw data Map.
      const modelStore = store.get?.(modelName) ?? store.data?.get(modelName);
      if (modelStore) modelStore.delete(record.id);
    }
  }

  /**
   * Convert a raw PG row into the raw-data shape createRecord() expects:
   * FK columns are remapped to relationship keys and DB-managed timestamp
   * columns are removed.
   * @private
   */
  _rowToRawData(row, schema) {
    const rawData = { ...row };

    // PostgreSQL returns native booleans and parsed JSONB — no manual conversion needed.
    // Only FK remapping and timestamp stripping required.

    // Map FK columns back to relationship keys
    for (const [fkCol] of Object.entries(schema.foreignKeys)) {
      const relName = fkCol.replace(/_id$/, '');

      if (rawData[fkCol] !== undefined) {
        rawData[relName] = rawData[fkCol];
        delete rawData[fkCol];
      }
    }

    // Remove timestamp columns — managed by PostgreSQL
    delete rawData.created_at;
    delete rawData.updated_at;

    return rawData;
  }

  /**
   * Write-through entry point, dispatched by operation name.
   * @param {'create'|'update'|'delete'} operation
   * @param {string} modelName
   * @param {Object} context - Operation context (record, recordId, oldState…).
   * @param {Object} response - Mutable response object (create may rewrite its id).
   */
  async persist(operation, modelName, context, response) {
    // Views are read-only — no-op for all write operations
    const Orm = (await import('@stonyx/orm')).default;
    if (Orm.instance?.isView?.(modelName)) return;

    switch (operation) {
      case 'create':
        return this._persistCreate(modelName, context, response);
      case 'update':
        return this._persistUpdate(modelName, context, response);
      case 'delete':
        return this._persistDelete(modelName, context);
    }
  }

  /**
   * INSERT a freshly created record; for auto-increment models, re-key the
   * in-memory record with the database-generated id (via RETURNING).
   * @private
   */
  async _persistCreate(modelName, context, response) {
    const schemas = this.deps.introspectModels();
    const schema = schemas[modelName];

    if (!schema) return;

    const recordId = response?.data?.id;
    // Store keys are numeric for numeric-looking ids; non-numeric ids pass through.
    // FIX: explicit radix and non-coercing NaN check (was bare parseInt/isNaN).
    const record = recordId != null
      ? this.deps.store.get(modelName, Number.isNaN(Number(recordId)) ? recordId : Number.parseInt(recordId, 10))
      : null;

    if (!record) return;

    const insertData = this._recordToRow(record, schema);

    // For auto-increment models, remove the pending ID
    const isPendingId = record.__data.__pendingSqlId;

    if (isPendingId) {
      delete insertData.id;
    }

    const { sql, values } = this.deps.buildInsert(schema.table, insertData);

    const result = await this.pool.query(sql, values);

    // Re-key the record in the store if PostgreSQL generated the ID (via RETURNING)
    if (isPendingId && result.rows.length > 0) {
      const pendingId = record.id;
      const realId = result.rows[0].id;
      const modelStore = this.deps.store.get(modelName);

      modelStore.delete(pendingId);
      record.__data.id = realId;
      record.id = realId;
      modelStore.set(realId, record);

      // Update the response data with the real ID
      if (response?.data) {
        response.data.id = realId;
      }

      delete record.__data.__pendingSqlId;
    }
  }

  /**
   * UPDATE only the columns that changed since context.oldState, including
   * remapped FK columns. No query is issued when nothing changed.
   * @private
   */
  async _persistUpdate(modelName, context, response) {
    const schemas = this.deps.introspectModels();
    const schema = schemas[modelName];

    if (!schema) return;

    const record = context.record;
    if (!record) return;

    const id = record.id;
    const oldState = context.oldState || {};
    const currentData = record.__data;

    // Build a diff of changed columns
    const changedData = {};

    for (const [col] of Object.entries(schema.columns)) {
      if (currentData[col] !== oldState[col]) {
        changedData[col] = currentData[col] ?? null;
      }
    }

    // Check FK changes too
    for (const fkCol of Object.keys(schema.foreignKeys)) {
      const relName = fkCol.replace(/_id$/, '');
      const currentFkValue = record.__relationships[relName]?.id ?? null;
      const oldFkValue = oldState[relName] ?? null;

      if (currentFkValue !== oldFkValue) {
        changedData[fkCol] = currentFkValue;
      }
    }

    if (Object.keys(changedData).length === 0) return;

    // PostgreSQL doesn't have ON UPDATE CURRENT_TIMESTAMP — set updated_at manually
    changedData.updated_at = new Date();

    const { sql, values } = this.deps.buildUpdate(schema.table, id, changedData);
    await this.pool.query(sql, values);
  }

  /**
   * DELETE a row by context.recordId; silently ignores a missing id.
   * @private
   */
  async _persistDelete(modelName, context) {
    const schemas = this.deps.introspectModels();
    const schema = schemas[modelName];

    if (!schema) return;

    const id = context.recordId;
    if (id == null) return;

    const { sql, values } = this.deps.buildDelete(schema.table, id);
    await this.pool.query(sql, values);
  }

  /**
   * Convert a record into a flat column→value row for INSERT:
   * id, attribute columns (JSONB values stringified), and FK columns derived
   * from loaded relationships or raw FK values in __data.
   * @private
   */
  _recordToRow(record, schema) {
    const row = {};
    const data = record.__data;

    // ID
    if (data.id !== undefined) {
      row.id = data.id;
    }

    // Attribute columns
    for (const [col, pgType] of Object.entries(schema.columns)) {
      if (data[col] !== undefined) {
        // JSONB columns: stringify non-string values for PostgreSQL JSONB storage
        row[col] = pgType === 'JSONB' && typeof data[col] !== 'string'
          ? JSON.stringify(data[col])
          : data[col];
      }
    }

    // FK columns from relationships
    for (const fkCol of Object.keys(schema.foreignKeys)) {
      const relName = fkCol.replace(/_id$/, '');
      const related = record.__relationships[relName];

      if (related) {
        row[fkCol] = related.id;
      } else if (data[relName] !== undefined) {
        // Raw FK value (e.g., from create payload)
        row[fkCol] = data[relName];
      }
    }

    return row;
  }
}
@@ -0,0 +1,149 @@
1
// Allow-list pattern for identifiers we are willing to interpolate into SQL.
const SAFE_IDENTIFIER = /^[a-zA-Z_][a-zA-Z0-9_-]*$/;

/**
 * Guard a table/column name before it is embedded in generated SQL.
 * @param {string} name - Candidate identifier.
 * @param {string} [context='identifier'] - Label used in the error message.
 * @returns {string} The validated name, unchanged.
 * @throws {Error} When the name is missing, not a string, or fails the pattern.
 */
export function validateIdentifier(name, context = 'identifier') {
  const isValid = typeof name === 'string' && name.length > 0 && SAFE_IDENTIFIER.test(name);

  if (!isValid) {
    throw new Error(`Invalid SQL ${context}: "${name}". Identifiers must match ${SAFE_IDENTIFIER}`);
  }

  return name;
}
10
+
11
/**
 * Build a parameterized INSERT for one row; RETURNING "id" surfaces
 * database-generated primary keys.
 * @param {string} table
 * @param {Object} data - column → value map.
 * @returns {{ sql: string, values: any[] }}
 */
export function buildInsert(table, data) {
  validateIdentifier(table, 'table name');

  const columnList = [];
  const placeholderList = [];
  const values = [];

  Object.entries(data).forEach(([column, value], index) => {
    validateIdentifier(column, 'column name');
    columnList.push(`"${column}"`);
    placeholderList.push(`$${index + 1}`);
    values.push(value);
  });

  const sql = `INSERT INTO "${table}" (${columnList.join(', ')}) VALUES (${placeholderList.join(', ')}) RETURNING "id"`;

  return { sql, values };
}
24
+
25
/**
 * Build a parameterized UPDATE for a single row keyed by "id".
 * @param {string} table
 * @param {string|number} id - Primary key of the target row (last parameter).
 * @param {Object} data - column → new value map.
 * @returns {{ sql: string, values: any[] }}
 */
export function buildUpdate(table, id, data) {
  validateIdentifier(table, 'table name');

  const columns = Object.keys(data);
  for (const column of columns) {
    validateIdentifier(column, 'column name');
  }

  const assignments = columns
    .map((column, index) => `"${column}" = $${index + 1}`)
    .join(', ');

  const sql = `UPDATE "${table}" SET ${assignments} WHERE "id" = $${columns.length + 1}`;

  return { sql, values: [...columns.map((column) => data[column]), id] };
}
38
+
39
/**
 * Build a parameterized DELETE for a single row keyed by "id".
 * @param {string} table
 * @param {string|number} id
 * @returns {{ sql: string, values: any[] }}
 */
export function buildDelete(table, id) {
  validateIdentifier(table, 'table name');

  const sql = `DELETE FROM "${table}" WHERE "id" = $1`;

  return { sql, values: [id] };
}
47
+
48
/**
 * Build a SELECT * query with optional equality conditions ANDed together.
 * With no (or empty) conditions, selects the whole table.
 * @param {string} table
 * @param {Object} [conditions] - column → value map.
 * @returns {{ sql: string, values: any[] }}
 */
export function buildSelect(table, conditions) {
  validateIdentifier(table, 'table name');

  const entries = conditions ? Object.entries(conditions) : [];

  if (entries.length === 0) {
    return { sql: `SELECT * FROM "${table}"`, values: [] };
  }

  const clauses = [];
  const values = [];

  entries.forEach(([column, value], index) => {
    validateIdentifier(column, 'column name');
    clauses.push(`"${column}" = $${index + 1}`);
    values.push(value);
  });

  return { sql: `SELECT * FROM "${table}" WHERE ${clauses.join(' AND ')}`, values };
}
65
+
66
/**
 * Build a vector similarity search query ranked by cosine distance (<=>).
 * The query vector becomes parameter $1 (pgvector bracket-literal string) and
 * is referenced twice in the SQL (projection and ORDER BY); LIMIT is the last
 * parameter.
 * @param {string} table - Table name
 * @param {string} vectorColumn - Name of the vector column
 * @param {number[]} queryVector - The query vector
 * @param {Object} [options]
 * @param {number} [options.limit=10] - Number of results to return
 * @param {Object} [options.where] - Additional equality WHERE conditions
 * @returns {{ sql: string, values: any[] }}
 */
export function buildVectorSearch(table, vectorColumn, queryVector, options = {}) {
  validateIdentifier(table, 'table name');
  validateIdentifier(vectorColumn, 'column name');

  const { limit = 10, where } = options;

  // pgvector accepts the vector as a bracketed text literal, cast to ::vector.
  const values = [`[${queryVector.join(',')}]`];
  let nextParam = 2; // $1 is the vector parameter

  const clauses = [];
  for (const [column, value] of Object.entries(where ?? {})) {
    validateIdentifier(column, 'column name');
    clauses.push(`"${column}" = $${nextParam}`);
    nextParam += 1;
    values.push(value);
  }

  const whereStr = clauses.length > 0 ? ` WHERE ${clauses.join(' AND ')}` : '';
  values.push(limit);

  const sql = `SELECT *, ("${vectorColumn}" <=> $1::vector) AS distance FROM "${table}"${whereStr} ORDER BY "${vectorColumn}" <=> $1::vector LIMIT $${nextParam}`;

  return { sql, values };
}
105
+
106
/**
 * Build a hybrid search query combining vector similarity with text filtering.
 * Ranks by cosine distance (<=>) while requiring an ILIKE match on the text
 * column. Parameters: $1 = vector literal, $2 = %textQuery% pattern, then any
 * extra equality conditions, and LIMIT last.
 * NOTE(review): `%`/`_` in textQuery are not escaped, so they act as ILIKE
 * wildcards — presumably intentional; confirm with callers.
 * @param {string} table - Table name
 * @param {string} vectorColumn - Vector column name
 * @param {number[]} queryVector - The query vector
 * @param {string} textColumn - Column to search text in
 * @param {string} textQuery - Text to search for
 * @param {Object} [options]
 * @param {number} [options.limit=10]
 * @param {Object} [options.where] - Additional equality WHERE conditions
 * @returns {{ sql: string, values: any[] }}
 */
export function buildHybridSearch(table, vectorColumn, queryVector, textColumn, textQuery, options = {}) {
  validateIdentifier(table, 'table name');
  validateIdentifier(vectorColumn, 'column name');
  validateIdentifier(textColumn, 'column name');

  const { limit = 10, where } = options;

  const values = [`[${queryVector.join(',')}]`, `%${textQuery}%`];
  let nextParam = 3; // $1 = vector, $2 = text pattern

  const clauses = [`"${textColumn}" ILIKE $2`];
  for (const [column, value] of Object.entries(where ?? {})) {
    validateIdentifier(column, 'column name');
    clauses.push(`"${column}" = $${nextParam}`);
    nextParam += 1;
    values.push(value);
  }

  values.push(limit);

  const sql = `SELECT *, ("${vectorColumn}" <=> $1::vector) AS distance FROM "${table}" WHERE ${clauses.join(' AND ')} ORDER BY "${vectorColumn}" <=> $1::vector LIMIT $${nextParam}`;

  return { sql, values };
}