@kernl-sdk/pg 0.1.25 → 0.1.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. package/.turbo/turbo-build.log +1 -1
  2. package/CHANGELOG.md +9 -0
  3. package/dist/__tests__/memory-integration.test.js +1 -1
  4. package/dist/pgvector/__tests__/handle.test.js +13 -13
  5. package/dist/pgvector/__tests__/integration/document.integration.test.js +21 -21
  6. package/dist/pgvector/__tests__/integration/edge.integration.test.js +32 -32
  7. package/dist/pgvector/__tests__/integration/filters.integration.test.js +5 -5
  8. package/dist/pgvector/__tests__/integration/lifecycle.integration.test.js +12 -12
  9. package/dist/pgvector/__tests__/integration/query.integration.test.d.ts +1 -1
  10. package/dist/pgvector/__tests__/integration/query.integration.test.js +51 -51
  11. package/dist/pgvector/__tests__/search.test.js +1 -1
  12. package/dist/pgvector/sql/__tests__/limit.test.js +20 -20
  13. package/dist/pgvector/sql/__tests__/query.test.js +17 -17
  14. package/dist/pgvector/sql/limit.js +2 -2
  15. package/dist/pgvector/sql/query.d.ts +1 -1
  16. package/dist/pgvector/sql/query.d.ts.map +1 -1
  17. package/dist/pgvector/sql/query.js +1 -1
  18. package/package.json +6 -6
  19. package/src/__tests__/memory-integration.test.ts +1 -1
  20. package/src/pgvector/__tests__/handle.test.ts +13 -13
  21. package/src/pgvector/__tests__/integration/document.integration.test.ts +21 -21
  22. package/src/pgvector/__tests__/integration/edge.integration.test.ts +32 -32
  23. package/src/pgvector/__tests__/integration/filters.integration.test.ts +5 -5
  24. package/src/pgvector/__tests__/integration/lifecycle.integration.test.ts +12 -12
  25. package/src/pgvector/__tests__/integration/query.integration.test.ts +51 -51
  26. package/src/pgvector/__tests__/search.test.ts +1 -1
  27. package/src/pgvector/sql/__tests__/limit.test.ts +20 -20
  28. package/src/pgvector/sql/__tests__/query.test.ts +17 -17
  29. package/src/pgvector/sql/limit.ts +2 -2
  30. package/src/pgvector/sql/query.ts +2 -2
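The substantive change in this release is a rename of the pgvector query option `topK` to `limit`; the hunks below track that rename through the test suites, and the SQL builders in `limit.ts` and `query.ts` pick up the same field name. A minimal migration sketch, assuming a handle obtained from the package's pgvector search index and purely illustrative embedding and filter values:

```ts
// Sketch only: `searchHandle` stands in for a pgvector search-index handle from
// @kernl-sdk/pg; the option and hit shapes mirror the test calls in this diff,
// not a documented API surface.
type Hit = { id: string; score: number; index?: number; document?: Record<string, unknown> };

async function migrated(searchHandle: { query(req: object): Promise<Hit[]> }): Promise<Hit[]> {
  // 0.1.25 and earlier capped results with `topK`:
  //   await searchHandle.query({ query: [{ embedding: [1, 0, 0, 0] }], topK: 10 });

  // 0.1.26 spells the same cap `limit`; `offset`, `filter`, and `orderBy` are unchanged.
  return searchHandle.query({
    query: [{ embedding: [1, 0, 0, 0] }],
    limit: 10,
    offset: 0,
    filter: { category: "db" },
    orderBy: { field: "priority", direction: "desc" },
  });
}
```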
@@ -1,7 +1,7 @@
  /**
  * Query behavior integration tests for pgvector.
  *
- * Tests vector search, topK behavior, offset pagination, orderBy,
+ * Tests vector search, limit behavior, offset pagination, orderBy,
  * and result structure against real PostgreSQL.
  */
 
@@ -135,7 +135,7 @@ describe.sequential("pgvector query integration tests", () => {
  // Query with basis vector 1 - should match vec-1 best
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 10,
+ limit: 10,
  });
 
  expect(hits.length).toBeGreaterThan(0);
@@ -146,7 +146,7 @@ describe.sequential("pgvector query integration tests", () => {
  // Query with basis vector 2
  const hits = await handle.query({
  query: [{ embedding: [0.0, 1.0, 0.0, 0.0] }],
- topK: 10,
+ limit: 10,
  });
 
  expect(hits.length).toBeGreaterThan(0);
@@ -162,7 +162,7 @@ describe.sequential("pgvector query integration tests", () => {
  // Query with mix of basis 1 and 2 - should prefer vec-5 which has [0.5, 0.5, 0, 0]
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.0, 0.0] }],
- topK: 10,
+ limit: 10,
  });
 
  expect(hits.length).toBeGreaterThan(0);
@@ -172,7 +172,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("returns high similarity score for exact match", async () => {
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 1,
+ limit: 1,
  });
 
  // Cosine similarity of identical vectors should be very close to 1
@@ -182,7 +182,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("returns lower similarity for orthogonal vectors", async () => {
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 10,
+ limit: 10,
  });
 
  // Find vec-4 which is orthogonal to query
@@ -196,7 +196,7 @@ describe.sequential("pgvector query integration tests", () => {
  // Already normalized
  const hits = await handle.query({
  query: [{ embedding: [0.707, 0.707, 0.0, 0.0] }],
- topK: 3,
+ limit: 3,
  });
 
  // Should still find vec-5 as best match
@@ -207,7 +207,7 @@ describe.sequential("pgvector query integration tests", () => {
  // Not normalized (should still work with cosine similarity)
  const hits = await handle.query({
  query: [{ embedding: [2.0, 2.0, 0.0, 0.0] }],
- topK: 3,
+ limit: 3,
  });
 
  // Should still find vec-5 as best match
@@ -216,42 +216,42 @@ describe.sequential("pgvector query integration tests", () => {
  });
 
  // ============================================================
- // TOPK BEHAVIOR
+ // LIMIT BEHAVIOR
  // ============================================================
 
- describe("topK behavior", () => {
- it("topK smaller than doc count returns exactly topK", async () => {
+ describe("limit behavior", () => {
+ it("limit smaller than doc count returns exactly limit", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 3,
+ limit: 3,
  });
 
  expect(hits.length).toBe(3);
  });
 
- it("topK larger than doc count returns all docs", async () => {
+ it("limit larger than doc count returns all docs", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 100,
+ limit: 100,
  });
 
  expect(hits.length).toBe(6);
  });
 
- it("topK of 1 returns single best match", async () => {
+ it("limit of 1 returns single best match", async () => {
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 1,
+ limit: 1,
  });
 
  expect(hits.length).toBe(1);
  expect(hits[0].id).toBe("vec-1");
  });
 
- it("topK with filter returns limited filtered results", async () => {
+ it("limit with filter returns limited filtered results", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 1,
+ limit: 1,
  filter: { category: "db" },
  });
 
@@ -259,10 +259,10 @@ describe.sequential("pgvector query integration tests", () => {
  expect(hits[0].document?.category).toBe("db");
  });
 
- it("topK of 0 returns empty array", async () => {
+ it("limit of 0 returns empty array", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 0,
+ limit: 0,
  });
 
  expect(hits.length).toBe(0);
@@ -278,13 +278,13 @@ describe.sequential("pgvector query integration tests", () => {
  // Get all results
  const allHits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 10,
+ limit: 10,
  });
 
  // Get results with offset
  const offsetHits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 10,
+ limit: 10,
  offset: 2,
  });
 
@@ -292,10 +292,10 @@ describe.sequential("pgvector query integration tests", () => {
  expect(offsetHits[0].id).toBe(allHits[2].id);
  });
 
- it("offset with topK limits correctly", async () => {
+ it("offset with limit limits correctly", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 2,
+ limit: 2,
  offset: 2,
  });
 
@@ -305,7 +305,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("offset beyond result count returns empty", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 10,
+ limit: 10,
  offset: 100,
  });
 
@@ -316,21 +316,21 @@ describe.sequential("pgvector query integration tests", () => {
  // Page 1
  const page1 = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 2,
+ limit: 2,
  offset: 0,
  });
 
  // Page 2
  const page2 = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 2,
+ limit: 2,
  offset: 2,
  });
 
  // Page 3
  const page3 = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 2,
+ limit: 2,
  offset: 4,
  });
 
@@ -348,13 +348,13 @@ describe.sequential("pgvector query integration tests", () => {
  // 2 docs in "ml" category
  const allMl = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 10,
+ limit: 10,
  filter: { category: "ml" },
  });
 
  const offsetMl = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 10,
+ limit: 10,
  offset: 1,
  filter: { category: "ml" },
  });
@@ -373,7 +373,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("orders by integer field ascending", async () => {
  const hits = await handle.query({
  orderBy: { field: "priority", direction: "asc" },
- topK: 10,
+ limit: 10,
  });
 
  expect(hits.length).toBe(6);
@@ -391,7 +391,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("orders by integer field descending", async () => {
  const hits = await handle.query({
  orderBy: { field: "priority", direction: "desc" },
- topK: 10,
+ limit: 10,
  });
 
  expect(hits[0].document?.priority).toBe(6);
@@ -402,7 +402,7 @@ describe.sequential("pgvector query integration tests", () => {
  // Note: We can still order by the "score" field even though it's excluded from the result
  const hits = await handle.query({
  orderBy: { field: "priority", direction: "desc" },
- topK: 10,
+ limit: 10,
  });
 
  // Verify descending order by priority
@@ -416,7 +416,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("orders by string field", async () => {
  const hits = await handle.query({
  orderBy: { field: "title", direction: "asc" },
- topK: 10,
+ limit: 10,
  });
 
  // Verify alphabetical order
@@ -431,7 +431,7 @@ describe.sequential("pgvector query integration tests", () => {
  const hits = await handle.query({
  filter: { category: "ml" },
  orderBy: { field: "priority", direction: "desc" },
- topK: 10,
+ limit: 10,
  });
 
  expect(hits.length).toBe(2);
@@ -439,10 +439,10 @@ describe.sequential("pgvector query integration tests", () => {
  expect(hits[1].document?.priority).toBe(1);
  });
 
- it("orderBy with topK limits after ordering", async () => {
+ it("orderBy with limit limits after ordering", async () => {
  const hits = await handle.query({
  orderBy: { field: "priority", direction: "asc" },
- topK: 3,
+ limit: 3,
  });
 
  expect(hits.length).toBe(3);
@@ -454,7 +454,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("orderBy with offset", async () => {
  const hits = await handle.query({
  orderBy: { field: "priority", direction: "asc" },
- topK: 2,
+ limit: 2,
  offset: 2,
  });
 
@@ -472,7 +472,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("filter-only query returns all matching docs", async () => {
  const hits = await handle.query({
  filter: { category: "db" },
- topK: 10,
+ limit: 10,
  });
 
  expect(hits.length).toBe(2);
@@ -484,7 +484,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("empty query with orderBy returns ordered docs", async () => {
  const hits = await handle.query({
  orderBy: { field: "priority", direction: "asc" },
- topK: 10,
+ limit: 10,
  });
 
  expect(hits.length).toBe(6);
@@ -495,7 +495,7 @@ describe.sequential("pgvector query integration tests", () => {
  const hits = await handle.query({
  filter: { category: "search" },
  orderBy: { field: "priority", direction: "desc" },
- topK: 10,
+ limit: 10,
  });
 
  expect(hits.length).toBe(2);
@@ -513,7 +513,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("results have required fields", async () => {
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 1,
+ limit: 1,
  });
 
  expect(hits.length).toBe(1);
@@ -527,7 +527,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("score is a valid number", async () => {
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 5,
+ limit: 5,
  });
 
  for (const hit of hits) {
@@ -539,7 +539,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("document fields are included by default", async () => {
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 1,
+ limit: 1,
  });
 
  expect(hits[0].document).toBeDefined();
@@ -556,7 +556,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("index field matches query index", async () => {
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 5,
+ limit: 5,
  });
 
  for (const hit of hits) {
@@ -574,7 +574,7 @@ describe.sequential("pgvector query integration tests", () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
  filter: { category: "nonexistent" },
- topK: 10,
+ limit: 10,
  });
 
  expect(hits).toEqual([]);
@@ -583,17 +583,17 @@ describe.sequential("pgvector query integration tests", () => {
  it("offset beyond result count returns empty array", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 10,
+ limit: 10,
  offset: 1000,
  });
 
  expect(hits).toEqual([]);
  });
 
- it("topK 0 returns empty array", async () => {
+ it("limit 0 returns empty array", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 0,
+ limit: 0,
  });
 
  expect(hits).toEqual([]);
@@ -625,7 +625,7 @@ describe.sequential("pgvector query integration tests", () => {
 
  const hits = await eucHandle.query({
  query: [{ embedding: [1, 0, 0, 0] }],
- topK: 3,
+ limit: 3,
  });
 
  expect(hits[0].id).toBe("e1"); // Exact match
@@ -656,7 +656,7 @@ describe.sequential("pgvector query integration tests", () => {
 
  const hits = await dotHandle.query({
  query: [{ embedding: [1, 1, 0, 0] }],
- topK: 3,
+ limit: 3,
  });
 
  // Dot product: d1=1, d2=1, d3=1
@@ -210,7 +210,7 @@ describe.sequential("PGSearchIndex", () => {
 
  const results = await handle.query({
  query: [{ embedding: [0.1, 0.2, 0.3] }],
- topK: 1,
+ limit: 1,
  });
 
  expect(results).toHaveLength(1);
@@ -5,7 +5,7 @@ describe("SQL_LIMIT", () => {
  describe("encode", () => {
  it("builds LIMIT clause", () => {
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 0,
  startIdx: 1,
  });
@@ -15,7 +15,7 @@ describe("SQL_LIMIT", () => {
 
  it("respects startIdx for parameter numbering", () => {
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 0,
  startIdx: 5,
  });
@@ -25,7 +25,7 @@ describe("SQL_LIMIT", () => {
 
  it("includes OFFSET when offset > 0", () => {
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 20,
  startIdx: 1,
  });
@@ -35,7 +35,7 @@ describe("SQL_LIMIT", () => {
 
  it("skips OFFSET when offset is 0", () => {
  const result = SQL_LIMIT.encode({
- topK: 25,
+ limit: 25,
  offset: 0,
  startIdx: 3,
  });
@@ -46,7 +46,7 @@ describe("SQL_LIMIT", () => {
  it("handles pagination correctly", () => {
  // Page 1: offset 0
  const page1 = SQL_LIMIT.encode({
- topK: 20,
+ limit: 20,
  offset: 0,
  startIdx: 1,
  });
@@ -55,7 +55,7 @@ describe("SQL_LIMIT", () => {
 
  // Page 2: offset 20
  const page2 = SQL_LIMIT.encode({
- topK: 20,
+ limit: 20,
  offset: 20,
  startIdx: 1,
  });
@@ -64,7 +64,7 @@ describe("SQL_LIMIT", () => {
 
  // Page 3: offset 40
  const page3 = SQL_LIMIT.encode({
- topK: 20,
+ limit: 20,
  offset: 40,
  startIdx: 1,
  });
@@ -76,7 +76,7 @@ describe("SQL_LIMIT", () => {
  // Simulating: SELECT uses $1, WHERE uses $2-$4
  // LIMIT should start at $5
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 50,
  startIdx: 5,
  });
@@ -85,9 +85,9 @@ describe("SQL_LIMIT", () => {
  });
 
  describe("edge values", () => {
- it("handles topK: 0", () => {
+ it("handles limit: 0", () => {
  const result = SQL_LIMIT.encode({
- topK: 0,
+ limit: 0,
  offset: 0,
  startIdx: 1,
  });
@@ -96,9 +96,9 @@ describe("SQL_LIMIT", () => {
  expect(result.params).toEqual([0]);
  });
 
- it("handles topK: 1", () => {
+ it("handles limit: 1", () => {
  const result = SQL_LIMIT.encode({
- topK: 1,
+ limit: 1,
  offset: 0,
  startIdx: 1,
  });
@@ -106,9 +106,9 @@ describe("SQL_LIMIT", () => {
  expect(result.params).toEqual([1]);
  });
 
- it("handles very large topK", () => {
+ it("handles very large limit", () => {
  const result = SQL_LIMIT.encode({
- topK: 1000000,
+ limit: 1000000,
  offset: 0,
  startIdx: 1,
  });
@@ -118,7 +118,7 @@ describe("SQL_LIMIT", () => {
 
  it("handles very large offset", () => {
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 999999,
  startIdx: 1,
  });
@@ -128,7 +128,7 @@ describe("SQL_LIMIT", () => {
 
  it("handles very large startIdx", () => {
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 20,
  startIdx: 50,
  });
@@ -136,9 +136,9 @@ describe("SQL_LIMIT", () => {
  expect(result.params).toEqual([10, 20]);
  });
 
- it("handles startIdx: 1 with both topK and offset", () => {
+ it("handles startIdx: 1 with both limit and offset", () => {
  const result = SQL_LIMIT.encode({
- topK: 25,
+ limit: 25,
  offset: 100,
  startIdx: 1,
  });
@@ -150,7 +150,7 @@ describe("SQL_LIMIT", () => {
  describe("offset boundary", () => {
  it("includes OFFSET when offset is exactly 1", () => {
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 1,
  startIdx: 1,
  });
@@ -160,7 +160,7 @@ describe("SQL_LIMIT", () => {
 
  it("does not include OFFSET when offset is exactly 0", () => {
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 0,
  startIdx: 1,
  });
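Taken together, the SQL_LIMIT tests above pin down the builder's contract: `encode({ limit, offset, startIdx })` binds `limit` at the `$startIdx` placeholder, appends an OFFSET bound at the next placeholder only when `offset > 0`, and returns the bound values in `params`. A minimal sketch of that behavior, reconstructed from the expectations above rather than from the shipped `limit.ts` (the `sql` field name is an assumption):

```ts
// Hedged reconstruction of SQL_LIMIT.encode's observable behavior, inferred
// from the test expectations above; not the package's actual implementation.
interface LimitArgs {
  limit: number;    // row cap (formerly `topK`)
  offset: number;   // rows to skip; emitted only when > 0
  startIdx: number; // first $-placeholder index to use
}

function encodeLimit({ limit, offset, startIdx }: LimitArgs): { sql: string; params: number[] } {
  let sql = `LIMIT $${startIdx}`;
  const params = [limit];
  if (offset > 0) {
    sql += ` OFFSET $${startIdx + 1}`;
    params.push(offset);
  }
  return { sql, params };
}

// encodeLimit({ limit: 10, offset: 20, startIdx: 5 })
//   -> { sql: "LIMIT $5 OFFSET $6", params: [10, 20] }
// encodeLimit({ limit: 25, offset: 0, startIdx: 3 })
//   -> { sql: "LIMIT $3", params: [25] }
```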
@@ -27,7 +27,7 @@ describe("sqlize", () => {
  schema: "public",
  table: "docs",
  });
- expect(result.limit).toEqual({ topK: 10, offset: 0 });
+ expect(result.limit).toEqual({ limit: 10, offset: 0 });
  });
 
  it("throws on multi-signal fusion (not supported by pgvector)", () => {
@@ -78,19 +78,19 @@ describe("sqlize", () => {
  const result = sqlize(
  {
  query: [{ embedding: [0.1, 0.2] }],
- topK: 25,
+ limit: 25,
  offset: 50,
  },
  { pkey: "id", schema: "public", table: "docs" },
  );
 
- expect(result.limit).toEqual({ topK: 25, offset: 50 });
+ expect(result.limit).toEqual({ limit: 25, offset: 50 });
  });
 
  it("uses default pagination values", () => {
  const result = sqlize({ query: [{ embedding: [0.1, 0.2] }] }, { pkey: "id", schema: "public", table: "docs" });
 
- expect(result.limit).toEqual({ topK: 10, offset: 0 });
+ expect(result.limit).toEqual({ limit: 10, offset: 0 });
  });
 
  it("passes binding through to all inputs", () => {
@@ -131,7 +131,7 @@ describe("sqlize", () => {
  {
  filter: { status: "active" },
  orderBy: { field: "created_at", direction: "desc" },
- topK: 100,
+ limit: 100,
  },
  { pkey: "id", schema: "public", table: "docs" },
  );
@@ -196,7 +196,7 @@ describe("sqlize", () => {
  };
 
  const result = sqlize(
- { filter: { status: "active" }, topK: 50 },
+ { filter: { status: "active" }, limit: 50 },
  { pkey: "id", schema: "public", table: "docs", binding },
  );
 
@@ -211,7 +211,7 @@ describe("sqlize", () => {
  const result = sqlize(
  {
  orderBy: { field: "created_at", direction: "asc" },
- topK: 20,
+ limit: 20,
  },
  { pkey: "id", schema: "public", table: "docs" },
  );
@@ -222,7 +222,7 @@ describe("sqlize", () => {
  field: "created_at",
  direction: "asc",
  });
- expect(result.limit).toEqual({ topK: 20, offset: 0 });
+ expect(result.limit).toEqual({ limit: 20, offset: 0 });
  });
  });
 
@@ -257,7 +257,7 @@ describe("sqlize", () => {
  query: [{ embedding: [0.1, 0.2, 0.3] }],
  filter: { status: "active", views: { $gt: 100 } },
  orderBy: { field: "created_at", direction: "desc" },
- topK: 50,
+ limit: 50,
  offset: 25,
  },
  { pkey: "doc_id", schema: "public", table: "documents", binding },
@@ -275,7 +275,7 @@ describe("sqlize", () => {
  direction: "desc",
  });
  expect(result.order.binding).toBe(binding);
- expect(result.limit).toEqual({ topK: 50, offset: 25 });
+ expect(result.limit).toEqual({ limit: 50, offset: 25 });
  });
 
  it("handles undefined filter", () => {
@@ -324,7 +324,7 @@ describe("full pipeline integration", () => {
  {
  query: [{ embedding: vector }],
  filter: { status: "active", views: { $gt: 100 } },
- topK: 25,
+ limit: 25,
  offset: 50,
  },
  { pkey: "doc_id", schema: "public", table: "documents", binding },
@@ -375,7 +375,7 @@ describe("full pipeline integration", () => {
  $or: [{ featured: true }, { views: { $gte: 1000 } }],
  },
  orderBy: { field: "created_at", direction: "desc" },
- topK: 10,
+ limit: 10,
  },
  { pkey: "id", schema: "public", table: "docs" },
  );
@@ -423,7 +423,7 @@ describe("full pipeline integration", () => {
  ],
  deleted_at: null,
  },
- topK: 5,
+ limit: 5,
  },
  { pkey: "id", schema: "public", table: "docs" },
  );
@@ -465,7 +465,7 @@ describe("full pipeline integration", () => {
  };
 
  const query = sqlize(
- { query: [{ embedding: vector }], topK: 10 },
+ { query: [{ embedding: vector }], limit: 10 },
  { pkey: "id", schema: "public", table: "docs", binding },
  );
 
@@ -496,7 +496,7 @@ describe("full pipeline integration", () => {
  };
 
  const query = sqlize(
- { query: [{ embedding: vector }], topK: 10 },
+ { query: [{ embedding: vector }], limit: 10 },
  { pkey: "id", schema: "public", table: "docs", binding },
  );
 
@@ -518,7 +518,7 @@ describe("full pipeline integration", () => {
  {
  query: [{ embedding: [0.1, 0.2] }],
  filter: { a: 1, b: 2, c: 3 },
- topK: 10,
+ limit: 10,
  offset: 20,
  },
  { pkey: "id", schema: "public", table: "docs" },
@@ -533,7 +533,7 @@ describe("full pipeline integration", () => {
  // Verify no gaps in indices
  // SELECT: $1 (vector)
  // WHERE: $2, $3, $4 (a, b, c)
- // LIMIT: $5, $6 (topK, offset)
+ // LIMIT: $5, $6 (limit, offset)
 
  expect(select.params).toHaveLength(1); // $1
  expect(whereStartIdx).toBe(2);
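For reference, the sqlize hunks above show the same rename at the planning layer: the request's `limit` and `offset` are carried through to `result.limit` under the new field name, and placeholder numbering stays contiguous across the SELECT, WHERE, and LIMIT fragments. A minimal usage sketch, with the import path and result shape inferred from the tests rather than from documentation:

```ts
// `sqlize` is the planner exercised by the tests above; this import path is an
// assumption, as the package's public entry points are not shown in the diff.
import { sqlize } from "./pgvector/sql/query";

const plan = sqlize(
  {
    query: [{ embedding: [0.1, 0.2] }],
    filter: { a: 1, b: 2, c: 3 },
    limit: 10,
    offset: 20,
  },
  { pkey: "id", schema: "public", table: "docs" },
);

// Per the expectations above, pagination survives under the new names
// (e.g. `{ limit: 10, offset: 20 }`), and parameter indices chain without
// gaps: SELECT takes $1, WHERE takes $2-$4, LIMIT/OFFSET take $5 and $6.
console.log(plan.limit);
```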