@kernl-sdk/pg 0.1.24 → 0.1.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. package/.turbo/turbo-build.log +1 -1
  2. package/CHANGELOG.md +19 -0
  3. package/dist/__tests__/memory-integration.test.js +1 -1
  4. package/dist/pgvector/__tests__/handle.test.js +13 -13
  5. package/dist/pgvector/__tests__/integration/document.integration.test.js +21 -21
  6. package/dist/pgvector/__tests__/integration/edge.integration.test.js +32 -32
  7. package/dist/pgvector/__tests__/integration/filters.integration.test.js +5 -5
  8. package/dist/pgvector/__tests__/integration/lifecycle.integration.test.js +12 -12
  9. package/dist/pgvector/__tests__/integration/query.integration.test.d.ts +1 -1
  10. package/dist/pgvector/__tests__/integration/query.integration.test.js +51 -51
  11. package/dist/pgvector/__tests__/search.test.js +1 -1
  12. package/dist/pgvector/sql/__tests__/limit.test.js +20 -20
  13. package/dist/pgvector/sql/__tests__/query.test.js +17 -17
  14. package/dist/pgvector/sql/limit.js +2 -2
  15. package/dist/pgvector/sql/query.d.ts +1 -1
  16. package/dist/pgvector/sql/query.d.ts.map +1 -1
  17. package/dist/pgvector/sql/query.js +1 -1
  18. package/package.json +7 -7
  19. package/src/__tests__/memory-integration.test.ts +1 -1
  20. package/src/pgvector/__tests__/handle.test.ts +13 -13
  21. package/src/pgvector/__tests__/integration/document.integration.test.ts +21 -21
  22. package/src/pgvector/__tests__/integration/edge.integration.test.ts +32 -32
  23. package/src/pgvector/__tests__/integration/filters.integration.test.ts +5 -5
  24. package/src/pgvector/__tests__/integration/lifecycle.integration.test.ts +12 -12
  25. package/src/pgvector/__tests__/integration/query.integration.test.ts +51 -51
  26. package/src/pgvector/__tests__/search.test.ts +1 -1
  27. package/src/pgvector/sql/__tests__/limit.test.ts +20 -20
  28. package/src/pgvector/sql/__tests__/query.test.ts +17 -17
  29. package/src/pgvector/sql/limit.ts +2 -2
  30. package/src/pgvector/sql/query.ts +2 -2
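
Most of the hunks below are a mechanical rename of the query option topK to limit across the pgvector test suites and the SQL limit/query builders. As a rough before/after sketch of a call site, based only on the handle.query(...) calls visible in the updated tests (option and method names as they appear there, not the package's published type definitions):

// before (0.1.24) — hypothetical call site mirroring the tests
const hitsBefore = await search.index("docs").query({
  query: [{ embedding: [0.1, 0.2, 0.3] }],
  filter: { status: "active" },
  topK: 10,
});

// after (0.1.26) — same call with the renamed option
const hitsAfter = await search.index("docs").query({
  query: [{ embedding: [0.1, 0.2, 0.3] }],
  filter: { status: "active" },
  limit: 10,
});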
package/.turbo/turbo-build.log CHANGED
@@ -1,4 +1,4 @@
 
-> @kernl-sdk/pg@0.1.24 build /home/runner/work/kernl/kernl/packages/storage/pg
+> @kernl-sdk/pg@0.1.26 build /home/runner/work/kernl/kernl/packages/storage/pg
 
 > tsc && tsc-alias --resolve-full-paths
 
package/CHANGELOG.md CHANGED
@@ -1,5 +1,24 @@
 # @kernl/pg
 
+## 0.1.26
+
+### Patch Changes
+
+- Updated dependencies [f593374]
+  - kernl@0.10.0
+  - @kernl-sdk/storage@0.1.26
+  - @kernl-sdk/retrieval@0.1.6
+
+## 0.1.25
+
+### Patch Changes
+
+- Updated dependencies [25e46e7]
+  - kernl@0.9.1
+  - @kernl-sdk/shared@0.3.1
+  - @kernl-sdk/storage@0.1.25
+  - @kernl-sdk/retrieval@0.1.5
+
 ## 0.1.24
 
 ### Patch Changes
@@ -157,7 +157,7 @@ describe.sequential("Memory Integration with PGVector", { timeout: 30000 }, () =
     expect(results.length).toBe(1);
     expect(results[0].document?.id).toBe("m1");
   });
-  it("respects topK limit", async () => {
+  it("respects limit", async () => {
     await kernl.memories.create({
       id: "m1",
       scope: { namespace: "test" },
@@ -107,7 +107,7 @@ describe.sequential("PGIndexHandle", () => {
     const handle = search.index("docs");
     const results = await handle.query({
       query: [{ embedding: [0.1, 0.2, 0.3] }],
-      topK: 2,
+      limit: 2,
     });
     expect(results).toHaveLength(2);
     expect(results[0].id).toBe("doc1"); // closest match
@@ -120,7 +120,7 @@ describe.sequential("PGIndexHandle", () => {
     const results = await handle.query({
       query: [{ embedding: [0.1, 0.2, 0.3] }],
       filter: { status: "active" },
-      topK: 10,
+      limit: 10,
     });
     expect(results).toHaveLength(2);
     results.forEach((r) => {
@@ -133,7 +133,7 @@ describe.sequential("PGIndexHandle", () => {
     const results = await handle.query({
       query: [{ embedding: [0.1, 0.2, 0.3] }],
       filter: { views: { $gte: 100 } },
-      topK: 10,
+      limit: 10,
     });
     expect(results.length).toBeGreaterThanOrEqual(2);
     results.forEach((r) => {
@@ -148,7 +148,7 @@ describe.sequential("PGIndexHandle", () => {
       filter: {
         $or: [{ status: "draft" }, { status: "archived" }],
       },
-      topK: 10,
+      limit: 10,
     });
     expect(results).toHaveLength(2);
     results.forEach((r) => {
@@ -161,16 +161,16 @@ describe.sequential("PGIndexHandle", () => {
     const results = await handle.query({
       query: [{ embedding: [0.1, 0.2, 0.3] }],
       filter: { status: { $in: ["active", "draft"] } },
-      topK: 10,
+      limit: 10,
     });
     expect(results).toHaveLength(3);
   });
-  it("respects topK limit", async () => {
+  it("respects limit", async () => {
     await insertDocs();
     const handle = search.index("docs");
     const results = await handle.query({
       query: [{ embedding: [0.5, 0.5, 0.5] }],
-      topK: 2,
+      limit: 2,
     });
     expect(results).toHaveLength(2);
   });
@@ -180,13 +180,13 @@ describe.sequential("PGIndexHandle", () => {
     // Get first 2
     const page1 = await handle.query({
       query: [{ embedding: [0.5, 0.5, 0.5] }],
-      topK: 2,
+      limit: 2,
       offset: 0,
     });
     // Get next 2
     const page2 = await handle.query({
       query: [{ embedding: [0.5, 0.5, 0.5] }],
-      topK: 2,
+      limit: 2,
       offset: 2,
     });
     expect(page1).toHaveLength(2);
@@ -199,7 +199,7 @@ describe.sequential("PGIndexHandle", () => {
     const results = await handle.query({
       filter: { status: "active" },
       orderBy: { field: "views", direction: "desc" },
-      topK: 10,
+      limit: 10,
     });
     expect(results).toHaveLength(2);
     expect(results[0].document?.views).toBe(200);
@@ -210,7 +210,7 @@ describe.sequential("PGIndexHandle", () => {
     const handle = search.index("docs");
     const results = await handle.query({
       query: [{ embedding: [0.1, 0.2, 0.3] }],
-      topK: 1,
+      limit: 1,
     });
     expect(results[0].document).toHaveProperty("title");
     expect(results[0].document).toHaveProperty("content");
@@ -224,7 +224,7 @@ describe.sequential("PGIndexHandle", () => {
     const results = await handle.query({
       query: [{ embedding: [0.1, 0.2, 0.3] }],
       filter: { status: "nonexistent" },
-      topK: 10,
+      limit: 10,
     });
     expect(results).toEqual([]);
   });
@@ -258,7 +258,7 @@ describe.sequential("PGIndexHandle", () => {
     const handle = search.index("typed-docs");
     const results = await handle.query({
       query: [{ embedding: [0.1, 0.2, 0.3] }],
-      topK: 1,
+      limit: 1,
     });
     // TypeScript should allow these without errors
     const doc = results[0].document;
@@ -64,7 +64,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     // Verify document was inserted
     const hits = await handle.query({
       query: [{ embedding: [0.1, 0.2, 0.3, 0.4] }],
-      topK: 1,
+      limit: 1,
     });
     expect(hits).toHaveLength(1);
     expect(hits[0].id).toBe("doc-1");
@@ -126,7 +126,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     // Verify update
     const hits = await handle.query({
       filter: { id: "doc-1" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].document?.title).toBe("Updated Title");
     expect(hits[0].document?.views).toBe(100);
@@ -195,7 +195,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     // Verify count
     const hits = await handle.query({
       query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
-      topK: 1000,
+      limit: 1000,
     });
     expect(hits).toHaveLength(100);
   });
@@ -211,7 +211,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: "vec-test" },
-      topK: 1,
+      limit: 1,
     });
     // Vector values should be preserved (within floating point precision)
     const stored = hits[0].document?.embedding;
@@ -240,7 +240,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     });
     const hits = await nullHandle.query({
       filter: { id: "doc-null" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].document?.title).toBe("Has Title");
     expect(hits[0].document?.subtitle).toBeNull();
@@ -279,7 +279,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     expect(result.count).toBe(1);
     const hits = await handle.query({
       filter: { id: "patch-1" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].document?.title).toBe("Updated Title");
     expect(hits[0].document?.content).toBe("Original Content 1"); // unchanged
@@ -295,7 +295,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     expect(result.count).toBe(1);
     const hits = await handle.query({
       filter: { id: "patch-1" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].document?.title).toBe("New Title");
     expect(hits[0].document?.views).toBe(999);
@@ -310,7 +310,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: "patch-1" },
-      topK: 1,
+      limit: 1,
     });
     const stored = hits[0].document?.embedding;
     expect(stored?.[0]).toBeCloseTo(0.9, 5);
@@ -323,7 +323,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     expect(result.count).toBe(2);
     const hits = await handle.query({
       query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
-      topK: 10,
+      limit: 10,
     });
     const doc1 = hits.find((h) => h.id === "patch-1");
     const doc2 = hits.find((h) => h.id === "patch-2");
@@ -355,7 +355,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     });
     const hits = await nullHandle.query({
       filter: { id: "doc-1" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].document?.subtitle).toBeNull();
   });
@@ -378,7 +378,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: "patch-1" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].document?.title).toBe("Updated");
     expect(hits[0].document?.content).toBe("Original Content 1"); // unchanged
@@ -430,7 +430,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     // Verify deletion
     const hits = await handle.query({
       filter: { id: "del-1" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits).toHaveLength(0);
   });
@@ -440,7 +440,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     // Verify only del-3 remains
     const hits = await handle.query({
       query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
-      topK: 10,
+      limit: 10,
     });
     expect(hits).toHaveLength(1);
     expect(hits[0].id).toBe("del-3");
@@ -462,7 +462,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     await handle.delete(["del-1", "del-2", "del-3"]);
     const hits = await handle.query({
       query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
-      topK: 10,
+      limit: 10,
     });
     expect(hits).toHaveLength(0);
   });
@@ -482,7 +482,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     });
     const hits = await handle.query({
       filter: { views: 0 },
-      topK: 1,
+      limit: 1,
     });
     expect(hits).toHaveLength(1);
     expect(hits[0].document?.views).toBe(0);
@@ -498,7 +498,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     });
     const hits = await handle.query({
       filter: { views: -50 },
-      topK: 1,
+      limit: 1,
     });
     expect(hits).toHaveLength(1);
     expect(hits[0].document?.views).toBe(-50);
@@ -524,11 +524,11 @@ describe.sequential("pgvector document operations integration tests", () => {
     ]);
     const trueHits = await handle.query({
       filter: { published: true },
-      topK: 10,
+      limit: 10,
     });
     const falseHits = await handle.query({
       filter: { published: false },
-      topK: 10,
+      limit: 10,
     });
     expect(trueHits).toHaveLength(1);
     expect(trueHits[0].id).toBe("bool-true");
@@ -546,7 +546,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     });
     const hits = await handle.query({
       filter: { title: "" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits).toHaveLength(1);
     expect(hits[0].document?.title).toBe("");
@@ -562,7 +562,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: "unicode-test" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].document?.title).toBe("Hello 世界 🌍");
     expect(hits[0].document?.content).toBe("Привет мир");
@@ -579,7 +579,7 @@ describe.sequential("pgvector document operations integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: "long-string" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].document?.content).toBe(longString);
   });
@@ -58,7 +58,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     // Table should still exist and be queryable
     const hits = await handle.query({
       filter: { id: "injection-1" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits).toHaveLength(1);
     expect(hits[0].document?.title).toBe("'; DROP TABLE string_edge; --");
@@ -72,7 +72,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { title: "'; DROP TABLE string_edge; --" },
-      topK: 10,
+      limit: 10,
     });
     // Should return no results, not crash
     expect(hits).toHaveLength(0);
@@ -86,7 +86,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: "quotes" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].document?.title).toBe('He said "Hello"');
     expect(hits[0].document?.content).toBe("It's a test");
@@ -100,7 +100,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: "backslash" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].document?.title).toBe("Path\\to\\file");
   });
@@ -113,7 +113,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: "whitespace" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].document?.title).toBe("Line1\nLine2");
     expect(hits[0].document?.content).toBe("Col1\tCol2\tCol3");
@@ -145,7 +145,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: "unicode-edge-2" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits).toHaveLength(1);
   });
@@ -159,7 +159,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
    const hits = await handle.query({
       filter: { id: "long" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].document?.title).toHaveLength(100000);
   });
@@ -173,7 +173,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     // Should not interpret % and _ as wildcards
     const hits = await handle.query({
       filter: { title: { $contains: "100%" } },
-      topK: 10,
+      limit: 10,
     });
     expect(hits).toHaveLength(1);
   });
@@ -208,7 +208,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { int_val: 0 },
-      topK: 1,
+      limit: 1,
     });
     expect(hits).toHaveLength(1);
     expect(hits[0].document?.int_val).toBe(0);
@@ -222,7 +222,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { int_val: { $lt: 0 } },
-      topK: 1,
+      limit: 1,
     });
     expect(hits).toHaveLength(1);
     expect(hits[0].document?.int_val).toBe(-999);
@@ -237,7 +237,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { int_val: 2147483647 },
-      topK: 1,
+      limit: 1,
     });
     expect(hits).toHaveLength(1);
     expect(hits[0].document?.int_val).toBe(2147483647);
@@ -251,7 +251,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: "precise" },
-      topK: 1,
+      limit: 1,
     });
     // Double precision maintains about 15 significant digits
     expect(hits[0].document?.float_val).toBeCloseTo(0.123456789012345, 10);
@@ -267,7 +267,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: "extreme-float" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].document?.float_val).toBe(1e308);
   });
@@ -300,7 +300,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     // pgvector returns NaN for cosine of zero vectors
     const hits = await handle.query({
       filter: { id: "zero-vec" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits).toHaveLength(1);
   });
@@ -311,7 +311,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       query: [{ embedding: [-1, -0.5, 0.5, 1] }],
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].id).toBe("negative-vec");
     expect(hits[0].score).toBeGreaterThan(0.99);
@@ -323,7 +323,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: "small-vec" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits).toHaveLength(1);
   });
@@ -334,7 +334,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       query: [{ embedding: [1e10, 1e10, 1e10, 1e10] }],
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].id).toBe("large-vec");
   });
@@ -358,7 +358,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     await highHandle.upsert({ id: "high-1", embedding: vec });
     const hits = await highHandle.query({
       query: [{ embedding: vec }],
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].id).toBe("high-1");
     expect(hits[0].score).toBeGreaterThan(0.99);
@@ -394,7 +394,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { optional_field: null },
-      topK: 1,
+      limit: 1,
     });
     expect(hits).toHaveLength(1);
     expect(hits[0].document?.optional_field).toBeNull();
@@ -416,11 +416,11 @@ describe.sequential("pgvector edge cases integration tests", () => {
     ]);
     const existsTrue = await handle.query({
       filter: { optional_field: { $exists: true } },
-      topK: 10,
+      limit: 10,
     });
     const existsFalse = await handle.query({
       filter: { optional_field: { $exists: false } },
-      topK: 10,
+      limit: 10,
     });
     expect(existsTrue).toHaveLength(1);
     expect(existsTrue[0].id).toBe("has-value");
@@ -440,7 +440,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: "patch-null" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].document?.optional_field).toBeNull();
   });
@@ -474,7 +474,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: uuid },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].id).toBe(uuid);
   });
@@ -487,7 +487,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: longId },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].id).toBe(longId);
   });
@@ -500,7 +500,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: specialId },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].id).toBe(specialId);
   });
@@ -512,7 +512,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     });
     const hits = await handle.query({
       filter: { id: "12345" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits[0].id).toBe("12345");
   });
@@ -525,7 +525,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     const badHandle = pgvec.index("nonexistent_table");
     await expect(badHandle.query({
       query: [{ embedding: [0.1, 0.1, 0.1, 0.1] }],
-      topK: 10,
+      limit: 10,
     })).rejects.toThrow();
   });
   it("throws on non-existent column in filter", async () => {
@@ -541,7 +541,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     const handle = pgvec.index("error_test");
     await expect(handle.query({
       filter: { nonexistent_column: "value" },
-      topK: 10,
+      limit: 10,
     })).rejects.toThrow();
   });
   it("throws on deleteIndex for non-bound index", async () => {
@@ -598,7 +598,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     // Should have completed without errors
     const hits = await handle.query({
       filter: { id: "concurrent-1" },
-      topK: 1,
+      limit: 1,
     });
     expect(hits).toHaveLength(1);
     // Counter should be one of the values
@@ -613,11 +613,11 @@ describe.sequential("pgvector edge cases integration tests", () => {
     }));
     await Promise.all(upserts);
     // Increase ef_search to allow HNSW to explore more candidates
-    // Default is 40, which limits results to ~40 even with topK=100
+    // Default is 40, which limits results to ~40 even with limit=100
     await pool.query("SET hnsw.ef_search = 200");
     const hits = await handle.query({
       query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
-      topK: 100,
+      limit: 100,
     });
     expect(hits).toHaveLength(50);
   });
@@ -631,7 +631,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     // Query while upserting
     const queryPromise = handle.query({
       query: [{ embedding: [0.1, 0.1, 0.1, 0.1] }],
-      topK: 1000,
+      limit: 1000,
    });
     const [, hits] = await Promise.all([upsertPromise, queryPromise]);
     // Query should succeed (may see partial or full results)
@@ -153,7 +153,7 @@ describe.sequential("pgvector filter integration tests", () => {
   async function queryIds(filter) {
     const hits = await handle.query({
       query: [{ embedding: QUERY_VECTOR }],
-      topK: 100,
+      limit: 100,
       filter,
     });
     return hits.map((h) => h.id).sort();
@@ -520,7 +520,7 @@ describe.sequential("pgvector filter integration tests", () => {
   it("filtered results have correct field values", async () => {
     const hits = await handle.query({
       query: [{ embedding: QUERY_VECTOR }],
-      topK: 100,
+      limit: 100,
       filter: { status: "pending" },
     });
     expect(hits.length).toBe(3);
@@ -535,7 +535,7 @@ describe.sequential("pgvector filter integration tests", () => {
   it("complex filter returns expected documents with correct data", async () => {
     const hits = await handle.query({
       query: [{ embedding: QUERY_VECTOR }],
-      topK: 100,
+      limit: 100,
       filter: {
         $and: [{ num: { $gte: 20, $lte: 50 } }, { flag: true }],
       },
@@ -549,10 +549,10 @@ describe.sequential("pgvector filter integration tests", () => {
       expect(hit.document?.num).toBeLessThanOrEqual(50);
     }
   });
-  it("combining filter + topK limits correctly", async () => {
+  it("combining filter + limit limits correctly", async () => {
     const hits = await handle.query({
       query: [{ embedding: QUERY_VECTOR }],
-      topK: 2,
+      limit: 2,
       filter: { status: "active" },
     });
     expect(hits.length).toBe(2);
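
Taken together, the updated tests exercise the renamed option alongside the rest of the query surface. A rough summary of the options seen across these tests (an inferred sketch reconstructed from the test calls, not the package's exported type definitions):

// query options as exercised by the updated tests (inferred sketch)
interface QueryOptionsSketch {
  query?: { embedding: number[] }[];                      // vector(s) to search by
  filter?: Record<string, unknown>;                       // e.g. { status: "active" }, { views: { $gte: 100 } }, $and / $or / $in / $exists
  limit?: number;                                         // renamed from topK in this release
  offset?: number;                                        // combined with limit for pagination
  orderBy?: { field: string; direction: "asc" | "desc" }; // used with filter-only queries
}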