@kernl-sdk/pg 0.1.25 → 0.1.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +1 -1
- package/CHANGELOG.md +9 -0
- package/dist/__tests__/memory-integration.test.js +1 -1
- package/dist/pgvector/__tests__/handle.test.js +13 -13
- package/dist/pgvector/__tests__/integration/document.integration.test.js +21 -21
- package/dist/pgvector/__tests__/integration/edge.integration.test.js +32 -32
- package/dist/pgvector/__tests__/integration/filters.integration.test.js +5 -5
- package/dist/pgvector/__tests__/integration/lifecycle.integration.test.js +12 -12
- package/dist/pgvector/__tests__/integration/query.integration.test.d.ts +1 -1
- package/dist/pgvector/__tests__/integration/query.integration.test.js +51 -51
- package/dist/pgvector/__tests__/search.test.js +1 -1
- package/dist/pgvector/sql/__tests__/limit.test.js +20 -20
- package/dist/pgvector/sql/__tests__/query.test.js +17 -17
- package/dist/pgvector/sql/limit.js +2 -2
- package/dist/pgvector/sql/query.d.ts +1 -1
- package/dist/pgvector/sql/query.d.ts.map +1 -1
- package/dist/pgvector/sql/query.js +1 -1
- package/package.json +6 -6
- package/src/__tests__/memory-integration.test.ts +1 -1
- package/src/pgvector/__tests__/handle.test.ts +13 -13
- package/src/pgvector/__tests__/integration/document.integration.test.ts +21 -21
- package/src/pgvector/__tests__/integration/edge.integration.test.ts +32 -32
- package/src/pgvector/__tests__/integration/filters.integration.test.ts +5 -5
- package/src/pgvector/__tests__/integration/lifecycle.integration.test.ts +12 -12
- package/src/pgvector/__tests__/integration/query.integration.test.ts +51 -51
- package/src/pgvector/__tests__/search.test.ts +1 -1
- package/src/pgvector/sql/__tests__/limit.test.ts +20 -20
- package/src/pgvector/sql/__tests__/query.test.ts +17 -17
- package/src/pgvector/sql/limit.ts +2 -2
- package/src/pgvector/sql/query.ts +2 -2
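
Every hunk below follows the same pattern: each `handle.query(...)` call site now passes an explicit `limit` option alongside the existing `filter` / `query` arguments. A minimal sketch of the two call shapes the tests exercise, assuming a handle obtained via `pgvec.index(...)` as in the tests themselves; the index name and all surrounding setup (imports, pool, schema) are illustrative assumptions, not part of this diff:

```ts
// Sketch only: option names are taken from the hunks below; the index name
// "articles" and the setup around it are assumed for illustration.
const handle = pgvec.index("articles");

// Exact-match lookup capped to a single row.
const byId = await handle.query({
  filter: { id: "injection-1" },
  limit: 1,
});

// Vector similarity search with an explicit result cap.
const nearest = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
  limit: 10,
});
```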
@@ -74,7 +74,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     // Table should still exist and be queryable
     const hits = await handle.query({
       filter: { id: "injection-1" },
-
+      limit: 1,
     });

     expect(hits).toHaveLength(1);

@@ -91,7 +91,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { title: "'; DROP TABLE string_edge; --" },
-
+      limit: 10,
     });

     // Should return no results, not crash

@@ -108,7 +108,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { id: "quotes" },
-
+      limit: 1,
     });

     expect(hits[0].document?.title).toBe('He said "Hello"');

@@ -125,7 +125,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { id: "backslash" },
-
+      limit: 1,
     });

     expect(hits[0].document?.title).toBe("Path\\to\\file");

@@ -141,7 +141,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { id: "whitespace" },
-
+      limit: 1,
     });

     expect(hits[0].document?.title).toBe("Line1\nLine2");

@@ -180,7 +180,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { id: "unicode-edge-2" },
-
+      limit: 1,
     });

     expect(hits).toHaveLength(1);

@@ -198,7 +198,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { id: "long" },
-
+      limit: 1,
     });

     expect(hits[0].document?.title).toHaveLength(100000);

@@ -215,7 +215,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     // Should not interpret % and _ as wildcards
     const hits = await handle.query({
       filter: { title: { $contains: "100%" } },
-
+      limit: 10,
     });

     expect(hits).toHaveLength(1);

@@ -257,7 +257,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { int_val: 0 },
-
+      limit: 1,
     });

     expect(hits).toHaveLength(1);

@@ -274,7 +274,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { int_val: { $lt: 0 } },
-
+      limit: 1,
     });

     expect(hits).toHaveLength(1);

@@ -292,7 +292,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { int_val: 2147483647 },
-
+      limit: 1,
     });

     expect(hits).toHaveLength(1);

@@ -309,7 +309,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { id: "precise" },
-
+      limit: 1,
     });

     // Double precision maintains about 15 significant digits

@@ -328,7 +328,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { id: "extreme-float" },
-
+      limit: 1,
     });

     expect(hits[0].document?.float_val).toBe(1e308);

@@ -368,7 +368,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     // pgvector returns NaN for cosine of zero vectors
     const hits = await handle.query({
       filter: { id: "zero-vec" },
-
+      limit: 1,
     });

     expect(hits).toHaveLength(1);

@@ -382,7 +382,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       query: [{ embedding: [-1, -0.5, 0.5, 1] }],
-
+      limit: 1,
     });

     expect(hits[0].id).toBe("negative-vec");

@@ -397,7 +397,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { id: "small-vec" },
-
+      limit: 1,
     });

     expect(hits).toHaveLength(1);

@@ -411,7 +411,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       query: [{ embedding: [1e10, 1e10, 1e10, 1e10] }],
-
+      limit: 1,
     });

     expect(hits[0].id).toBe("large-vec");

@@ -443,7 +443,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await highHandle.query({
       query: [{ embedding: vec }],
-
+      limit: 1,
     });

     expect(hits[0].id).toBe("high-1");

@@ -486,7 +486,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { optional_field: null },
-
+      limit: 1,
     });

     expect(hits).toHaveLength(1);

@@ -511,12 +511,12 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const existsTrue = await handle.query({
       filter: { optional_field: { $exists: true } },
-
+      limit: 10,
     });

     const existsFalse = await handle.query({
       filter: { optional_field: { $exists: false } },
-
+      limit: 10,
     });

     expect(existsTrue).toHaveLength(1);

@@ -541,7 +541,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { id: "patch-null" },
-
+      limit: 1,
     });

     expect(hits[0].document?.optional_field).toBeNull();

@@ -583,7 +583,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { id: uuid },
-
+      limit: 1,
     });

     expect(hits[0].id).toBe(uuid);

@@ -600,7 +600,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { id: longId },
-
+      limit: 1,
     });

     expect(hits[0].id).toBe(longId);

@@ -617,7 +617,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { id: specialId },
-
+      limit: 1,
     });

     expect(hits[0].id).toBe(specialId);

@@ -632,7 +632,7 @@ describe.sequential("pgvector edge cases integration tests", () => {

     const hits = await handle.query({
       filter: { id: "12345" },
-
+      limit: 1,
     });

     expect(hits[0].id).toBe("12345");

@@ -650,7 +650,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     await expect(
       badHandle.query({
         query: [{ embedding: [0.1, 0.1, 0.1, 0.1] }],
-
+        limit: 10,
       }),
     ).rejects.toThrow();
   });

@@ -671,7 +671,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     await expect(
       handle.query({
         filter: { nonexistent_column: "value" } as any,
-
+        limit: 10,
       }),
     ).rejects.toThrow();
   });

@@ -748,7 +748,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     // Should have completed without errors
     const hits = await handle.query({
       filter: { id: "concurrent-1" },
-
+      limit: 1,
     });

     expect(hits).toHaveLength(1);
|
|
|
769
769
|
await Promise.all(upserts);
|
|
770
770
|
|
|
771
771
|
// Increase ef_search to allow HNSW to explore more candidates
|
|
772
|
-
// Default is 40, which limits results to ~40 even with
|
|
772
|
+
// Default is 40, which limits results to ~40 even with limit=100
|
|
773
773
|
await pool.query("SET hnsw.ef_search = 200");
|
|
774
774
|
|
|
775
775
|
const hits = await handle.query({
|
|
776
776
|
query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
|
|
777
|
-
|
|
777
|
+
limit: 100,
|
|
778
778
|
});
|
|
779
779
|
|
|
780
780
|
expect(hits).toHaveLength(50);
|
|
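
The hunk above is the one place where the result cap interacts with pgvector's HNSW index: `hnsw.ef_search` bounds how many candidates the index explores (its default is 40), so a larger `limit` is effectively truncated until the setting is raised. A sketch under the same assumptions as the earlier example, using the session-level `SET` the test itself issues:

```ts
// Raise the HNSW search breadth before asking for more than ~40 neighbours.
// (Scoping it with `SET LOCAL hnsw.ef_search = 200` inside a transaction is
// a common alternative, not shown in this diff.)
await pool.query("SET hnsw.ef_search = 200");

const hits = await handle.query({
  query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
  limit: 100, // now honoured up to the number of matching rows
});
```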
@@ -793,7 +793,7 @@ describe.sequential("pgvector edge cases integration tests", () => {
     // Query while upserting
     const queryPromise = handle.query({
       query: [{ embedding: [0.1, 0.1, 0.1, 0.1] }],
-
+      limit: 1000,
     });

     const [, hits] = await Promise.all([upsertPromise, queryPromise]);
@@ -183,7 +183,7 @@ describe.sequential("pgvector filter integration tests", () => {
   async function queryIds(filter: Filter): Promise<string[]> {
     const hits = await handle.query({
       query: [{ embedding: QUERY_VECTOR }],
-
+      limit: 100,
       filter,
     });
     return hits.map((h) => h.id).sort();

@@ -616,7 +616,7 @@ describe.sequential("pgvector filter integration tests", () => {
   it("filtered results have correct field values", async () => {
     const hits = await handle.query({
       query: [{ embedding: QUERY_VECTOR }],
-
+      limit: 100,
       filter: { status: "pending" },
     });

@@ -634,7 +634,7 @@ describe.sequential("pgvector filter integration tests", () => {
   it("complex filter returns expected documents with correct data", async () => {
     const hits = await handle.query({
       query: [{ embedding: QUERY_VECTOR }],
-
+      limit: 100,
       filter: {
         $and: [{ num: { $gte: 20, $lte: 50 } }, { flag: true }],
       },

@@ -651,10 +651,10 @@ describe.sequential("pgvector filter integration tests", () => {
     }
   });

-  it("combining filter +
+  it("combining filter + limit limits correctly", async () => {
     const hits = await handle.query({
       query: [{ embedding: QUERY_VECTOR }],
-
+      limit: 2,
       filter: { status: "active" },
     });

@@ -107,7 +107,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {

     const hits = await handle.query({
       query: [{ embedding: [1, 0, 0, 0] }],
-
+      limit: 10,
     });

     expect(hits).toHaveLength(3);

@@ -120,7 +120,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
     const hits = await handle.query({
       query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
       filter: { published: true },
-
+      limit: 10,
     });

     expect(hits).toHaveLength(2);

@@ -140,7 +140,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {

     const hits = await handle.query({
       filter: { id: "doc-1" },
-
+      limit: 1,
     });

     expect(hits[0].document?.views).toBe(500);

@@ -154,7 +154,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {

     const hits = await handle.query({
       query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
-
+      limit: 10,
     });

     expect(hits).toHaveLength(2);

@@ -253,7 +253,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {

     const hits = await handle.query({
       query: [{ embedding: [0.1, 0.2, 0.3, 0.4] }],
-
+      limit: 10,
     });

     expect(hits).toHaveLength(2);

@@ -322,7 +322,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
     const handle = pgvec.index("custom_binding");
     const hits = await handle.query({
       query: [{ embedding: [0.5, 0.5, 0.5, 0.5] }],
-
+      limit: 1,
     });

     expect(hits).toHaveLength(1);

@@ -366,12 +366,12 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
     // Query each
     const hitsA = await handleA.query({
       query: [{ embedding: [1, 0, 0, 0] }],
-
+      limit: 10,
     });

     const hitsB = await handleB.query({
       query: [{ embedding: [0, 1, 0, 0] }],
-
+      limit: 10,
     });

     expect(hitsA).toHaveLength(1);

@@ -385,7 +385,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {

     const stillExists = await handleB.query({
       query: [{ embedding: [0, 1, 0, 0] }],
-
+      limit: 10,
     });
     expect(stillExists).toHaveLength(1);

@@ -489,7 +489,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {

     const hits = await handle.query({
       query: [{ embedding: vec }],
-
+      limit: 1,
     });

     expect(hits).toHaveLength(1);

@@ -520,7 +520,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
     // Query on first vector field
     const hits = await handle.query({
       query: [{ title_embedding: [1, 0, 0, 0] }],
-
+      limit: 1,
     });

     expect(hits).toHaveLength(1);

@@ -559,7 +559,7 @@ describe.sequential("pgvector index lifecycle integration tests", () => {
     const handle2 = pgvec2.index("persist_test");
     const hits = await handle2.query({
       query: [{ embedding: [1, 0, 0, 0] }],
-
+      limit: 1,
     });

     expect(hits[0].document?.name).toBe("Persisted");