@kernl-sdk/pg 0.1.24 → 0.1.26

Files changed (30)
  1. package/.turbo/turbo-build.log +1 -1
  2. package/CHANGELOG.md +19 -0
  3. package/dist/__tests__/memory-integration.test.js +1 -1
  4. package/dist/pgvector/__tests__/handle.test.js +13 -13
  5. package/dist/pgvector/__tests__/integration/document.integration.test.js +21 -21
  6. package/dist/pgvector/__tests__/integration/edge.integration.test.js +32 -32
  7. package/dist/pgvector/__tests__/integration/filters.integration.test.js +5 -5
  8. package/dist/pgvector/__tests__/integration/lifecycle.integration.test.js +12 -12
  9. package/dist/pgvector/__tests__/integration/query.integration.test.d.ts +1 -1
  10. package/dist/pgvector/__tests__/integration/query.integration.test.js +51 -51
  11. package/dist/pgvector/__tests__/search.test.js +1 -1
  12. package/dist/pgvector/sql/__tests__/limit.test.js +20 -20
  13. package/dist/pgvector/sql/__tests__/query.test.js +17 -17
  14. package/dist/pgvector/sql/limit.js +2 -2
  15. package/dist/pgvector/sql/query.d.ts +1 -1
  16. package/dist/pgvector/sql/query.d.ts.map +1 -1
  17. package/dist/pgvector/sql/query.js +1 -1
  18. package/package.json +7 -7
  19. package/src/__tests__/memory-integration.test.ts +1 -1
  20. package/src/pgvector/__tests__/handle.test.ts +13 -13
  21. package/src/pgvector/__tests__/integration/document.integration.test.ts +21 -21
  22. package/src/pgvector/__tests__/integration/edge.integration.test.ts +32 -32
  23. package/src/pgvector/__tests__/integration/filters.integration.test.ts +5 -5
  24. package/src/pgvector/__tests__/integration/lifecycle.integration.test.ts +12 -12
  25. package/src/pgvector/__tests__/integration/query.integration.test.ts +51 -51
  26. package/src/pgvector/__tests__/search.test.ts +1 -1
  27. package/src/pgvector/sql/__tests__/limit.test.ts +20 -20
  28. package/src/pgvector/sql/__tests__/query.test.ts +17 -17
  29. package/src/pgvector/sql/limit.ts +2 -2
  30. package/src/pgvector/sql/query.ts +2 -2
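Judging from the hunks below, the substantive change in this release is a single API rename: the query option `topK` becomes `limit` (the `dist/` and `src/` copies mirror each other, and the remaining files are build artifacts and version bumps). A minimal before/after migration sketch, assuming the index-handle API exercised by the tests; construction of `pgvec` is elided here, as it is in the hunks, and `"my_index"` is a placeholder:

```ts
const handle = pgvec.index("my_index"); // placeholder index id

// 0.1.24 and earlier:
const before = await handle.query({
  query: [{ embedding: [1, 0, 0, 0] }],
  topK: 10,
});

// 0.1.26:
const after = await handle.query({
  query: [{ embedding: [1, 0, 0, 0] }],
  limit: 10,
});
```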
@@ -85,7 +85,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
  const handle = pgvec.index(indexId);
  const hits = await handle.query({
  query: [{ embedding: [1, 0, 0, 0] }],
- topK: 10,
+ limit: 10,
  });
  expect(hits).toHaveLength(3);
  expect(hits[0].id).toBe("doc-1");
@@ -95,7 +95,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
  filter: { published: true },
- topK: 10,
+ limit: 10,
  });
  expect(hits).toHaveLength(2);
  for (const hit of hits) {
@@ -111,7 +111,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
  });
  const hits = await handle.query({
  filter: { id: "doc-1" },
- topK: 1,
+ limit: 1,
  });
  expect(hits[0].document?.views).toBe(500);
  expect(hits[0].document?.title).toBe("Updated First Document");
@@ -121,7 +121,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
  await handle.delete("doc-3");
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 10,
+ limit: 10,
  });
  expect(hits).toHaveLength(2);
  expect(hits.find((h) => h.id === "doc-3")).toBeUndefined();
@@ -199,7 +199,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
  const handle = pgvec.index("custom_binding");
  const hits = await handle.query({
  query: [{ embedding: [0.1, 0.2, 0.3, 0.4] }],
- topK: 10,
+ limit: 10,
  });
  expect(hits).toHaveLength(2);
  expect(hits[0].id).toBe("existing-1");
@@ -253,7 +253,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
  const handle = pgvec.index("custom_binding");
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 1,
+ limit: 1,
  });
  expect(hits).toHaveLength(1);
  });
@@ -290,11 +290,11 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
  // Query each
  const hitsA = await handleA.query({
  query: [{ embedding: [1, 0, 0, 0] }],
- topK: 10,
+ limit: 10,
  });
  const hitsB = await handleB.query({
  query: [{ embedding: [0, 1, 0, 0] }],
- topK: 10,
+ limit: 10,
  });
  expect(hitsA).toHaveLength(1);
  expect(hitsA[0].document?.name).toBe("Index A Doc");
@@ -304,7 +304,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
  await pgvec.deleteIndex("multi_index_a");
  const stillExists = await handleB.query({
  query: [{ embedding: [0, 1, 0, 0] }],
- topK: 10,
+ limit: 10,
  });
  expect(stillExists).toHaveLength(1);
  // Cleanup
@@ -384,7 +384,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
  await handle.upsert({ id: "high-1", embedding: vec });
  const hits = await handle.query({
  query: [{ embedding: vec }],
- topK: 1,
+ limit: 1,
  });
  expect(hits).toHaveLength(1);
  expect(hits[0].document?.embedding).toHaveLength(1536);
@@ -409,7 +409,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
  // Query on first vector field
  const hits = await handle.query({
  query: [{ title_embedding: [1, 0, 0, 0] }],
- topK: 1,
+ limit: 1,
  });
  expect(hits).toHaveLength(1);
  await pgvec.deleteIndex("multi_vec");
@@ -440,7 +440,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
  const handle2 = pgvec2.index("persist_test");
  const hits = await handle2.query({
  query: [{ embedding: [1, 0, 0, 0] }],
- topK: 1,
+ limit: 1,
  });
  expect(hits[0].document?.name).toBe("Persisted");
  await pgvec.deleteIndex("persist_test");
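Taken together, the lifecycle hunks above touch the whole handle surface. A condensed sketch of that surface under the new option name, using only calls that appear in the diff (again with `pgvec` construction elided; `"docs"` is a placeholder index id):

```ts
const handle = pgvec.index("docs");                            // bind an index handle
await handle.upsert({ id: "doc-1", embedding: [1, 0, 0, 0] }); // insert or update a document
const hits = await handle.query({
  query: [{ embedding: [1, 0, 0, 0] }],
  filter: { published: true }, // optional metadata filter
  limit: 10,                   // formerly topK
});
await handle.delete("doc-1");    // remove a single document
await pgvec.deleteIndex("docs"); // drop the index itself
```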
@@ -1,7 +1,7 @@
  /**
  * Query behavior integration tests for pgvector.
  *
- * Tests vector search, topK behavior, offset pagination, orderBy,
+ * Tests vector search, limit behavior, offset pagination, orderBy,
  * and result structure against real PostgreSQL.
  */
  export {};
@@ -1,7 +1,7 @@
  /**
  * Query behavior integration tests for pgvector.
  *
- * Tests vector search, topK behavior, offset pagination, orderBy,
+ * Tests vector search, limit behavior, offset pagination, orderBy,
  * and result structure against real PostgreSQL.
  */
  import { describe, it, expect, beforeAll, afterAll } from "vitest";
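As the doc comment says, these tests cover limit behavior, offset pagination, and orderBy. A short sketch of the two query shapes they exercise, with `q` standing in for any query embedding:

```ts
const q = [0.5, 0.5, 0.5, 0.5]; // placeholder query embedding

// limit/offset pagination (the pattern from the pagination tests):
const page1 = await handle.query({ query: [{ embedding: q }], limit: 2, offset: 0 });
const page2 = await handle.query({ query: [{ embedding: q }], limit: 2, offset: 2 });

// ordering by a document field; no vector query is required:
const ordered = await handle.query({
  orderBy: { field: "priority", direction: "asc" },
  limit: 10,
});
```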
@@ -107,7 +107,7 @@ describe.sequential("pgvector query integration tests", () => {
  // Query with basis vector 1 - should match vec-1 best
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 10,
+ limit: 10,
  });
  expect(hits.length).toBeGreaterThan(0);
  expect(hits[0].id).toBe("vec-1");
@@ -116,7 +116,7 @@ describe.sequential("pgvector query integration tests", () => {
  // Query with basis vector 2
  const hits = await handle.query({
  query: [{ embedding: [0.0, 1.0, 0.0, 0.0] }],
- topK: 10,
+ limit: 10,
  });
  expect(hits.length).toBeGreaterThan(0);
  expect(hits[0].id).toBe("vec-2");
@@ -129,7 +129,7 @@ describe.sequential("pgvector query integration tests", () => {
  // Query with mix of basis 1 and 2 - should prefer vec-5 which has [0.5, 0.5, 0, 0]
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.0, 0.0] }],
- topK: 10,
+ limit: 10,
  });
  expect(hits.length).toBeGreaterThan(0);
  expect(hits[0].id).toBe("vec-5");
@@ -137,7 +137,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("returns high similarity score for exact match", async () => {
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 1,
+ limit: 1,
  });
  // Cosine similarity of identical vectors should be very close to 1
  expect(hits[0].score).toBeGreaterThan(0.99);
@@ -145,7 +145,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("returns lower similarity for orthogonal vectors", async () => {
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 10,
+ limit: 10,
  });
  // Find vec-4 which is orthogonal to query
  const orthogonal = hits.find((h) => h.id === "vec-4");
@@ -157,7 +157,7 @@ describe.sequential("pgvector query integration tests", () => {
  // Already normalized
  const hits = await handle.query({
  query: [{ embedding: [0.707, 0.707, 0.0, 0.0] }],
- topK: 3,
+ limit: 3,
  });
  // Should still find vec-5 as best match
  expect(hits[0].id).toBe("vec-5");
@@ -166,51 +166,51 @@ describe.sequential("pgvector query integration tests", () => {
  // Not normalized (should still work with cosine similarity)
  const hits = await handle.query({
  query: [{ embedding: [2.0, 2.0, 0.0, 0.0] }],
- topK: 3,
+ limit: 3,
  });
  // Should still find vec-5 as best match
  expect(hits[0].id).toBe("vec-5");
  });
  });
  // ============================================================
- // TOPK BEHAVIOR
+ // LIMIT BEHAVIOR
  // ============================================================
- describe("topK behavior", () => {
- it("topK smaller than doc count returns exactly topK", async () => {
+ describe("limit behavior", () => {
+ it("limit smaller than doc count returns exactly limit", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 3,
+ limit: 3,
  });
  expect(hits.length).toBe(3);
  });
- it("topK larger than doc count returns all docs", async () => {
+ it("limit larger than doc count returns all docs", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 100,
+ limit: 100,
  });
  expect(hits.length).toBe(6);
  });
- it("topK of 1 returns single best match", async () => {
+ it("limit of 1 returns single best match", async () => {
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 1,
+ limit: 1,
  });
  expect(hits.length).toBe(1);
  expect(hits[0].id).toBe("vec-1");
  });
- it("topK with filter returns limited filtered results", async () => {
+ it("limit with filter returns limited filtered results", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 1,
+ limit: 1,
  filter: { category: "db" },
  });
  expect(hits.length).toBe(1);
  expect(hits[0].document?.category).toBe("db");
  });
- it("topK of 0 returns empty array", async () => {
+ it("limit of 0 returns empty array", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 0,
+ limit: 0,
  });
  expect(hits.length).toBe(0);
  });
@@ -223,21 +223,21 @@ describe.sequential("pgvector query integration tests", () => {
  // Get all results
  const allHits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 10,
+ limit: 10,
  });
  // Get results with offset
  const offsetHits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 10,
+ limit: 10,
  offset: 2,
  });
  expect(offsetHits.length).toBe(4); // 6 total - 2 skipped
  expect(offsetHits[0].id).toBe(allHits[2].id);
  });
- it("offset with topK limits correctly", async () => {
+ it("offset with limit limits correctly", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 2,
+ limit: 2,
  offset: 2,
  });
  expect(hits.length).toBe(2);
@@ -245,7 +245,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("offset beyond result count returns empty", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 10,
+ limit: 10,
  offset: 100,
  });
  expect(hits.length).toBe(0);
@@ -254,19 +254,19 @@ describe.sequential("pgvector query integration tests", () => {
  // Page 1
  const page1 = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 2,
+ limit: 2,
  offset: 0,
  });
  // Page 2
  const page2 = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 2,
+ limit: 2,
  offset: 2,
  });
  // Page 3
  const page3 = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 2,
+ limit: 2,
  offset: 4,
  });
  expect(page1.length).toBe(2);
@@ -281,12 +281,12 @@ describe.sequential("pgvector query integration tests", () => {
  // 2 docs in "ml" category
  const allMl = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 10,
+ limit: 10,
  filter: { category: "ml" },
  });
  const offsetMl = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 10,
+ limit: 10,
  offset: 1,
  filter: { category: "ml" },
  });
@@ -302,7 +302,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("orders by integer field ascending", async () => {
  const hits = await handle.query({
  orderBy: { field: "priority", direction: "asc" },
- topK: 10,
+ limit: 10,
  });
  expect(hits.length).toBe(6);
  expect(hits[0].document?.priority).toBe(1);
@@ -315,7 +315,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("orders by integer field descending", async () => {
  const hits = await handle.query({
  orderBy: { field: "priority", direction: "desc" },
- topK: 10,
+ limit: 10,
  });
  expect(hits[0].document?.priority).toBe(6);
  expect(hits[5].document?.priority).toBe(1);
@@ -324,7 +324,7 @@ describe.sequential("pgvector query integration tests", () => {
  // Note: We can still order by the "score" field even though it's excluded from the result
  const hits = await handle.query({
  orderBy: { field: "priority", direction: "desc" },
- topK: 10,
+ limit: 10,
  });
  // Verify descending order by priority
  for (let i = 1; i < hits.length; i++) {
@@ -334,7 +334,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("orders by string field", async () => {
  const hits = await handle.query({
  orderBy: { field: "title", direction: "asc" },
- topK: 10,
+ limit: 10,
  });
  // Verify alphabetical order
  for (let i = 1; i < hits.length; i++) {
@@ -347,16 +347,16 @@ describe.sequential("pgvector query integration tests", () => {
  const hits = await handle.query({
  filter: { category: "ml" },
  orderBy: { field: "priority", direction: "desc" },
- topK: 10,
+ limit: 10,
  });
  expect(hits.length).toBe(2);
  expect(hits[0].document?.priority).toBe(2);
  expect(hits[1].document?.priority).toBe(1);
  });
- it("orderBy with topK limits after ordering", async () => {
+ it("orderBy with limit limits after ordering", async () => {
  const hits = await handle.query({
  orderBy: { field: "priority", direction: "asc" },
- topK: 3,
+ limit: 3,
  });
  expect(hits.length).toBe(3);
  expect(hits[0].document?.priority).toBe(1);
@@ -366,7 +366,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("orderBy with offset", async () => {
  const hits = await handle.query({
  orderBy: { field: "priority", direction: "asc" },
- topK: 2,
+ limit: 2,
  offset: 2,
  });
  expect(hits.length).toBe(2);
@@ -381,7 +381,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("filter-only query returns all matching docs", async () => {
  const hits = await handle.query({
  filter: { category: "db" },
- topK: 10,
+ limit: 10,
  });
  expect(hits.length).toBe(2);
  for (const hit of hits) {
@@ -391,7 +391,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("empty query with orderBy returns ordered docs", async () => {
  const hits = await handle.query({
  orderBy: { field: "priority", direction: "asc" },
- topK: 10,
+ limit: 10,
  });
  expect(hits.length).toBe(6);
  expect(hits[0].document?.priority).toBe(1);
@@ -400,7 +400,7 @@ describe.sequential("pgvector query integration tests", () => {
  const hits = await handle.query({
  filter: { category: "search" },
  orderBy: { field: "priority", direction: "desc" },
- topK: 10,
+ limit: 10,
  });
  expect(hits.length).toBe(2);
  // Ordered by priority desc: vec-6 (priority 6), vec-5 (priority 5)
@@ -415,7 +415,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("results have required fields", async () => {
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 1,
+ limit: 1,
  });
  expect(hits.length).toBe(1);
  expect(hits[0]).toHaveProperty("id");
@@ -427,7 +427,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("score is a valid number", async () => {
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 5,
+ limit: 5,
  });
  for (const hit of hits) {
  expect(typeof hit.score).toBe("number");
@@ -437,7 +437,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("document fields are included by default", async () => {
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 1,
+ limit: 1,
  });
  expect(hits[0].document).toBeDefined();
  expect(hits[0].document).toHaveProperty("title");
@@ -452,7 +452,7 @@ describe.sequential("pgvector query integration tests", () => {
  it("index field matches query index", async () => {
  const hits = await handle.query({
  query: [{ embedding: [1.0, 0.0, 0.0, 0.0] }],
- topK: 5,
+ limit: 5,
  });
  for (const hit of hits) {
  expect(hit.index).toBe(testIndexId);
@@ -467,22 +467,22 @@ describe.sequential("pgvector query integration tests", () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
  filter: { category: "nonexistent" },
- topK: 10,
+ limit: 10,
  });
  expect(hits).toEqual([]);
  });
  it("offset beyond result count returns empty array", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 10,
+ limit: 10,
  offset: 1000,
  });
  expect(hits).toEqual([]);
  });
- it("topK 0 returns empty array", async () => {
+ it("limit 0 returns empty array", async () => {
  const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
- topK: 0,
+ limit: 0,
  });
  expect(hits).toEqual([]);
  });
@@ -508,7 +508,7 @@ describe.sequential("pgvector query integration tests", () => {
  ]);
  const hits = await eucHandle.query({
  query: [{ embedding: [1, 0, 0, 0] }],
- topK: 3,
+ limit: 3,
  });
  expect(hits[0].id).toBe("e1"); // Exact match
  expect(hits[1].id).toBe("e3"); // Closest
@@ -534,7 +534,7 @@ describe.sequential("pgvector query integration tests", () => {
  ]);
  const hits = await dotHandle.query({
  query: [{ embedding: [1, 1, 0, 0] }],
- topK: 3,
+ limit: 3,
  });
  // Dot product: d1=1, d2=1, d3=1
  // With equal dot products, order may vary, but d3 should be competitive
@@ -157,7 +157,7 @@ describe.sequential("PGSearchIndex", () => {
  });
  const results = await handle.query({
  query: [{ embedding: [0.1, 0.2, 0.3] }],
- topK: 1,
+ limit: 1,
  });
  expect(results).toHaveLength(1);
  expect(results[0].id).toBe("test-1");
@@ -4,7 +4,7 @@ describe("SQL_LIMIT", () => {
  describe("encode", () => {
  it("builds LIMIT clause", () => {
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 0,
  startIdx: 1,
  });
@@ -13,7 +13,7 @@ describe("SQL_LIMIT", () => {
  });
  it("respects startIdx for parameter numbering", () => {
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 0,
  startIdx: 5,
  });
@@ -22,7 +22,7 @@ describe("SQL_LIMIT", () => {
  });
  it("includes OFFSET when offset > 0", () => {
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 20,
  startIdx: 1,
  });
@@ -31,7 +31,7 @@ describe("SQL_LIMIT", () => {
  });
  it("skips OFFSET when offset is 0", () => {
  const result = SQL_LIMIT.encode({
- topK: 25,
+ limit: 25,
  offset: 0,
  startIdx: 3,
  });
@@ -41,7 +41,7 @@ describe("SQL_LIMIT", () => {
  it("handles pagination correctly", () => {
  // Page 1: offset 0
  const page1 = SQL_LIMIT.encode({
- topK: 20,
+ limit: 20,
  offset: 0,
  startIdx: 1,
  });
@@ -49,7 +49,7 @@ describe("SQL_LIMIT", () => {
  expect(page1.params).toEqual([20]);
  // Page 2: offset 20
  const page2 = SQL_LIMIT.encode({
- topK: 20,
+ limit: 20,
  offset: 20,
  startIdx: 1,
  });
@@ -57,7 +57,7 @@ describe("SQL_LIMIT", () => {
  expect(page2.params).toEqual([20, 20]);
  // Page 3: offset 40
  const page3 = SQL_LIMIT.encode({
- topK: 20,
+ limit: 20,
  offset: 40,
  startIdx: 1,
  });
@@ -68,7 +68,7 @@ describe("SQL_LIMIT", () => {
  // Simulating: SELECT uses $1, WHERE uses $2-$4
  // LIMIT should start at $5
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 50,
  startIdx: 5,
  });
@@ -76,9 +76,9 @@ describe("SQL_LIMIT", () => {
  expect(result.params).toEqual([10, 50]);
  });
  describe("edge values", () => {
- it("handles topK: 0", () => {
+ it("handles limit: 0", () => {
  const result = SQL_LIMIT.encode({
- topK: 0,
+ limit: 0,
  offset: 0,
  startIdx: 1,
  });
@@ -86,18 +86,18 @@ describe("SQL_LIMIT", () => {
  expect(result.sql).toBe("LIMIT $1");
  expect(result.params).toEqual([0]);
  });
- it("handles topK: 1", () => {
+ it("handles limit: 1", () => {
  const result = SQL_LIMIT.encode({
- topK: 1,
+ limit: 1,
  offset: 0,
  startIdx: 1,
  });
  expect(result.sql).toBe("LIMIT $1");
  expect(result.params).toEqual([1]);
  });
- it("handles very large topK", () => {
+ it("handles very large limit", () => {
  const result = SQL_LIMIT.encode({
- topK: 1000000,
+ limit: 1000000,
  offset: 0,
  startIdx: 1,
  });
@@ -106,7 +106,7 @@ describe("SQL_LIMIT", () => {
  });
  it("handles very large offset", () => {
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 999999,
  startIdx: 1,
  });
@@ -115,16 +115,16 @@ describe("SQL_LIMIT", () => {
  });
  it("handles very large startIdx", () => {
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 20,
  startIdx: 50,
  });
  expect(result.sql).toBe("LIMIT $50 OFFSET $51");
  expect(result.params).toEqual([10, 20]);
  });
- it("handles startIdx: 1 with both topK and offset", () => {
+ it("handles startIdx: 1 with both limit and offset", () => {
  const result = SQL_LIMIT.encode({
- topK: 25,
+ limit: 25,
  offset: 100,
  startIdx: 1,
  });
@@ -135,7 +135,7 @@ describe("SQL_LIMIT", () => {
  describe("offset boundary", () => {
  it("includes OFFSET when offset is exactly 1", () => {
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 1,
  startIdx: 1,
  });
@@ -144,7 +144,7 @@ describe("SQL_LIMIT", () => {
  });
  it("does not include OFFSET when offset is exactly 0", () => {
  const result = SQL_LIMIT.encode({
- topK: 10,
+ limit: 10,
  offset: 0,
  startIdx: 1,
  });
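The expectations in these tests pin down the encoder's contract: `SQL_LIMIT.encode` takes `{ limit, offset, startIdx }`, returns `{ sql, params }`, numbers placeholders from `startIdx`, and emits `OFFSET` only when `offset > 0`. A sketch reconstructed from those expectations alone (the real implementation lives in `package/src/pgvector/sql/limit.ts` and may differ in detail):

```ts
interface LimitArgs {
  limit: number;    // row cap; formerly topK
  offset: number;   // rows to skip; omitted from the SQL when 0
  startIdx: number; // index of the first positional parameter ($startIdx)
}

// Hypothetical reconstruction, not the package source.
function encodeLimit({ limit, offset, startIdx }: LimitArgs): { sql: string; params: number[] } {
  if (offset > 0) {
    // e.g. { limit: 10, offset: 20, startIdx: 50 } -> "LIMIT $50 OFFSET $51", [10, 20]
    return { sql: `LIMIT $${startIdx} OFFSET $${startIdx + 1}`, params: [limit, offset] };
  }
  // e.g. { limit: 25, offset: 0, startIdx: 3 } -> "LIMIT $3", [25]
  return { sql: `LIMIT $${startIdx}`, params: [limit] };
}
```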