@kernl-sdk/pg 0.1.10 → 0.1.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +4 -5
- package/.turbo/turbo-check-types.log +36 -0
- package/CHANGELOG.md +41 -0
- package/README.md +124 -0
- package/dist/__tests__/integration.test.js +81 -1
- package/dist/__tests__/memory-integration.test.d.ts +2 -0
- package/dist/__tests__/memory-integration.test.d.ts.map +1 -0
- package/dist/__tests__/memory-integration.test.js +287 -0
- package/dist/__tests__/memory.test.d.ts +2 -0
- package/dist/__tests__/memory.test.d.ts.map +1 -0
- package/dist/__tests__/memory.test.js +357 -0
- package/dist/index.d.ts +5 -3
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +5 -3
- package/dist/memory/sql.d.ts +30 -0
- package/dist/memory/sql.d.ts.map +1 -0
- package/dist/memory/sql.js +100 -0
- package/dist/memory/store.d.ts +41 -0
- package/dist/memory/store.d.ts.map +1 -0
- package/dist/memory/store.js +114 -0
- package/dist/migrations.d.ts +1 -1
- package/dist/migrations.d.ts.map +1 -1
- package/dist/migrations.js +9 -3
- package/dist/pgvector/__tests__/handle.test.d.ts +2 -0
- package/dist/pgvector/__tests__/handle.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/handle.test.js +277 -0
- package/dist/pgvector/__tests__/hit.test.d.ts +2 -0
- package/dist/pgvector/__tests__/hit.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/hit.test.js +134 -0
- package/dist/pgvector/__tests__/integration/document.integration.test.d.ts +7 -0
- package/dist/pgvector/__tests__/integration/document.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/document.integration.test.js +587 -0
- package/dist/pgvector/__tests__/integration/edge.integration.test.d.ts +8 -0
- package/dist/pgvector/__tests__/integration/edge.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/edge.integration.test.js +663 -0
- package/dist/pgvector/__tests__/integration/filters.integration.test.d.ts +8 -0
- package/dist/pgvector/__tests__/integration/filters.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/filters.integration.test.js +609 -0
- package/dist/pgvector/__tests__/integration/lifecycle.integration.test.d.ts +8 -0
- package/dist/pgvector/__tests__/integration/lifecycle.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/lifecycle.integration.test.js +449 -0
- package/dist/pgvector/__tests__/integration/query.integration.test.d.ts +8 -0
- package/dist/pgvector/__tests__/integration/query.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/query.integration.test.js +544 -0
- package/dist/pgvector/__tests__/search.test.d.ts +2 -0
- package/dist/pgvector/__tests__/search.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/search.test.js +279 -0
- package/dist/pgvector/handle.d.ts +60 -0
- package/dist/pgvector/handle.d.ts.map +1 -0
- package/dist/pgvector/handle.js +213 -0
- package/dist/pgvector/hit.d.ts +10 -0
- package/dist/pgvector/hit.d.ts.map +1 -0
- package/dist/pgvector/hit.js +44 -0
- package/dist/pgvector/index.d.ts +7 -0
- package/dist/pgvector/index.d.ts.map +1 -0
- package/dist/pgvector/index.js +5 -0
- package/dist/pgvector/search.d.ts +60 -0
- package/dist/pgvector/search.d.ts.map +1 -0
- package/dist/pgvector/search.js +227 -0
- package/dist/pgvector/sql/__tests__/limit.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/limit.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/limit.test.js +161 -0
- package/dist/pgvector/sql/__tests__/order.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/order.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/order.test.js +218 -0
- package/dist/pgvector/sql/__tests__/query.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/query.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/query.test.js +392 -0
- package/dist/pgvector/sql/__tests__/select.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/select.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/select.test.js +293 -0
- package/dist/pgvector/sql/__tests__/where.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/where.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/where.test.js +488 -0
- package/dist/pgvector/sql/index.d.ts +7 -0
- package/dist/pgvector/sql/index.d.ts.map +1 -0
- package/dist/pgvector/sql/index.js +6 -0
- package/dist/pgvector/sql/limit.d.ts +8 -0
- package/dist/pgvector/sql/limit.d.ts.map +1 -0
- package/dist/pgvector/sql/limit.js +20 -0
- package/dist/pgvector/sql/order.d.ts +9 -0
- package/dist/pgvector/sql/order.d.ts.map +1 -0
- package/dist/pgvector/sql/order.js +47 -0
- package/dist/pgvector/sql/query.d.ts +46 -0
- package/dist/pgvector/sql/query.d.ts.map +1 -0
- package/dist/pgvector/sql/query.js +54 -0
- package/dist/pgvector/sql/schema.d.ts +16 -0
- package/dist/pgvector/sql/schema.d.ts.map +1 -0
- package/dist/pgvector/sql/schema.js +47 -0
- package/dist/pgvector/sql/select.d.ts +11 -0
- package/dist/pgvector/sql/select.d.ts.map +1 -0
- package/dist/pgvector/sql/select.js +87 -0
- package/dist/pgvector/sql/where.d.ts +8 -0
- package/dist/pgvector/sql/where.d.ts.map +1 -0
- package/dist/pgvector/sql/where.js +137 -0
- package/dist/pgvector/types.d.ts +20 -0
- package/dist/pgvector/types.d.ts.map +1 -0
- package/dist/pgvector/types.js +1 -0
- package/dist/pgvector/utils.d.ts +18 -0
- package/dist/pgvector/utils.d.ts.map +1 -0
- package/dist/pgvector/utils.js +22 -0
- package/dist/postgres.d.ts +19 -26
- package/dist/postgres.d.ts.map +1 -1
- package/dist/postgres.js +15 -27
- package/dist/storage.d.ts +62 -0
- package/dist/storage.d.ts.map +1 -1
- package/dist/storage.js +55 -10
- package/dist/thread/sql.d.ts +38 -0
- package/dist/thread/sql.d.ts.map +1 -0
- package/dist/thread/sql.js +112 -0
- package/dist/thread/store.d.ts +7 -3
- package/dist/thread/store.d.ts.map +1 -1
- package/dist/thread/store.js +46 -105
- package/package.json +8 -5
- package/src/__tests__/integration.test.ts +114 -15
- package/src/__tests__/memory-integration.test.ts +355 -0
- package/src/__tests__/memory.test.ts +428 -0
- package/src/index.ts +19 -3
- package/src/memory/sql.ts +141 -0
- package/src/memory/store.ts +166 -0
- package/src/migrations.ts +13 -3
- package/src/pgvector/README.md +50 -0
- package/src/pgvector/__tests__/handle.test.ts +335 -0
- package/src/pgvector/__tests__/hit.test.ts +165 -0
- package/src/pgvector/__tests__/integration/document.integration.test.ts +717 -0
- package/src/pgvector/__tests__/integration/edge.integration.test.ts +835 -0
- package/src/pgvector/__tests__/integration/filters.integration.test.ts +721 -0
- package/src/pgvector/__tests__/integration/lifecycle.integration.test.ts +570 -0
- package/src/pgvector/__tests__/integration/query.integration.test.ts +667 -0
- package/src/pgvector/__tests__/search.test.ts +366 -0
- package/src/pgvector/handle.ts +285 -0
- package/src/pgvector/hit.ts +56 -0
- package/src/pgvector/index.ts +7 -0
- package/src/pgvector/search.ts +330 -0
- package/src/pgvector/sql/__tests__/limit.test.ts +180 -0
- package/src/pgvector/sql/__tests__/order.test.ts +248 -0
- package/src/pgvector/sql/__tests__/query.test.ts +548 -0
- package/src/pgvector/sql/__tests__/select.test.ts +367 -0
- package/src/pgvector/sql/__tests__/where.test.ts +554 -0
- package/src/pgvector/sql/index.ts +14 -0
- package/src/pgvector/sql/limit.ts +29 -0
- package/src/pgvector/sql/order.ts +55 -0
- package/src/pgvector/sql/query.ts +112 -0
- package/src/pgvector/sql/schema.ts +61 -0
- package/src/pgvector/sql/select.ts +100 -0
- package/src/pgvector/sql/where.ts +152 -0
- package/src/pgvector/types.ts +21 -0
- package/src/pgvector/utils.ts +24 -0
- package/src/postgres.ts +31 -33
- package/src/storage.ts +102 -11
- package/src/thread/sql.ts +159 -0
- package/src/thread/store.ts +58 -127
- package/tsconfig.tsbuildinfo +1 -0
package/.turbo/turbo-build.log
CHANGED
@@ -1,5 +1,4 @@
-
-
->
-
-
+
+> @kernl-sdk/pg@0.1.11 build /Users/andjones/Documents/projects/kernl/packages/storage/pg
+> tsc && tsc-alias --resolve-full-paths
+
package/.turbo/turbo-check-types.log
ADDED
@@ -0,0 +1,36 @@
+
+> @kernl-sdk/pg@0.1.11 check-types /Users/andjones/Documents/projects/kernl/packages/storage/pg
+> tsc --noEmit
+
+src/search/__tests__/hit.test.ts(75,55): error TS2345: Argument of type '{ schema: string; table: string; fields: { title: { column: string; type: "string"; }; content: { column: string; type: "string"; }; embedding: { column: string; type: "vector"; dimensions: number; }; }; }' is not assignable to parameter of type 'PGIndexConfig'.
+  Property 'pkey' is missing in type '{ schema: string; table: string; fields: { title: { column: string; type: "string"; }; content: { column: string; type: "string"; }; embedding: { column: string; type: "vector"; dimensions: number; }; }; }' but required in type 'PGIndexConfig'.
+src/search/__tests__/hit.test.ts(93,55): error TS2345: Argument of type '{ schema: string; table: string; fields: { title: { column: string; type: "string"; }; content: { column: string; type: "string"; }; embedding: { column: string; type: "vector"; dimensions: number; }; }; }' is not assignable to parameter of type 'PGIndexConfig'.
+  Property 'pkey' is missing in type '{ schema: string; table: string; fields: { title: { column: string; type: "string"; }; content: { column: string; type: "string"; }; embedding: { column: string; type: "vector"; dimensions: number; }; }; }' but required in type 'PGIndexConfig'.
+src/search/__tests__/hit.test.ts(110,55): error TS2345: Argument of type '{ schema: string; table: string; fields: { title: { column: string; type: "string"; }; content: { column: string; type: "string"; }; embedding: { column: string; type: "vector"; dimensions: number; }; }; }' is not assignable to parameter of type 'PGIndexConfig'.
+  Property 'pkey' is missing in type '{ schema: string; table: string; fields: { title: { column: string; type: "string"; }; content: { column: string; type: "string"; }; embedding: { column: string; type: "vector"; dimensions: number; }; }; }' but required in type 'PGIndexConfig'.
+src/search/__tests__/hit.test.ts(124,55): error TS2345: Argument of type '{ schema: string; table: string; fields: { title: { column: string; type: "string"; }; content: { column: string; type: "string"; }; embedding: { column: string; type: "vector"; dimensions: number; }; }; }' is not assignable to parameter of type 'PGIndexConfig'.
+  Property 'pkey' is missing in type '{ schema: string; table: string; fields: { title: { column: string; type: "string"; }; content: { column: string; type: "string"; }; embedding: { column: string; type: "vector"; dimensions: number; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/order.test.ts(58,11): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { embedding: { column: string; type: "vector"; dimensions: number; similarity: "cosine"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/order.test.ts(77,11): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { embedding: { column: string; type: "vector"; dimensions: number; similarity: "euclidean"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/order.test.ts(96,11): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { embedding: { column: string; type: "vector"; dimensions: number; similarity: "dot_product"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/order.test.ts(134,11): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { other_field: { column: string; type: "vector"; dimensions: number; similarity: "euclidean"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/query.test.ts(110,21): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { embedding: { column: string; type: "vector"; dimensions: number; similarity: "cosine"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/query.test.ts(196,23): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { embedding: { column: string; type: "vector"; dimensions: number; similarity: "cosine"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/query.test.ts(258,27): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { embedding: { column: string; type: "vector"; dimensions: number; similarity: "euclidean"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/query.test.ts(324,25): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { embedding: { column: string; type: "vector"; dimensions: number; similarity: "cosine"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/query.test.ts(462,21): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { embedding: { column: string; type: "vector"; dimensions: number; similarity: "euclidean"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/query.test.ts(492,21): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { embedding: { column: string; type: "vector"; dimensions: number; similarity: "dot_product"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/select.test.ts(43,9): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { embedding: { column: string; type: "vector"; dimensions: number; similarity: "cosine"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/select.test.ts(67,9): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { embedding: { column: string; type: "vector"; dimensions: number; similarity: "euclidean"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/select.test.ts(91,9): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { embedding: { column: string; type: "vector"; dimensions: number; similarity: "dot_product"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/select.test.ts(155,11): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { other_field: { column: string; type: "vector"; dimensions: number; similarity: "cosine"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/select.test.ts(300,9): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { title: { column: string; type: "string"; }; content: { column: string; type: "string"; }; embedding: { column: string; type: "vector"; dimensions: number; similarity: "cosine"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/select.test.ts(312,9): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { title: { column: string; type: "string"; }; content: { column: string; type: "string"; }; embedding: { column: string; type: "vector"; dimensions: number; similarity: "cosine"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/select.test.ts(325,9): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { title: { column: string; type: "string"; }; content: { column: string; type: "string"; }; embedding: { column: string; type: "vector"; dimensions: number; similarity: "cosine"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/select.test.ts(336,9): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { title: { column: string; type: "string"; }; content: { column: string; type: "string"; }; embedding: { column: string; type: "vector"; dimensions: number; similarity: "cosine"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/select.test.ts(347,9): error TS2741: Property 'pkey' is missing in type '{ schema: string; table: string; fields: { title: { column: string; type: "string"; }; content: { column: string; type: "string"; }; embedding: { column: string; type: "vector"; dimensions: number; similarity: "cosine"; }; }; }' but required in type 'PGIndexConfig'.
+src/search/sql/__tests__/where.test.ts(357,28): error TS2322: Type 'string' is not assignable to type 'Filter'.
+  Type 'string' is not assignable to type '{ [field: string]: Filter | ScalarValue | FieldOps | Filter[] | undefined; }'.
+src/search/sql/__tests__/where.test.ts(357,33): error TS2322: Type 'string' is not assignable to type 'Filter'.
+  Type 'string' is not assignable to type '{ [field: string]: Filter | ScalarValue | FieldOps | Filter[] | undefined; }'.
+ELIFECYCLE Command failed with exit code 2.
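Every error in this log reports the same shape problem: the index-config literals in the tests predate the now-required `pkey` property of `PGIndexConfig`. A minimal sketch of a literal that satisfies the shape implied by these messages; the property names are taken from the compiler output above and from the README's `bindIndex` example later in this diff, not from the package's exported typings, so treat it as illustrative only:

```typescript
// Illustrative only: shape inferred from the TS2345/TS2741 messages above.
const docsIndexConfig = {
  schema: "public",
  table: "docs",
  pkey: "id", // the property the compiler reports as missing
  fields: {
    title: { column: "title", type: "string" },
    content: { column: "content", type: "string" },
    embedding: { column: "embedding", type: "vector", dimensions: 1536, similarity: "cosine" },
  },
}; // intended to satisfy PGIndexConfig once the tests are updated
```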
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,46 @@
 # @kernl/pg
 
+## 0.1.12
+
+### Patch Changes
+
+- a7d6138: Add agent.memories API and memory integration with vector backends
+
+  **@kernl-sdk/retrieval**
+  - Add `planQuery()` for adapting queries based on backend capabilities
+  - Add `SearchCapabilities` interface to describe backend features
+  - Gracefully degrade hybrid queries when not supported
+
+  **@kernl-sdk/pg**
+  - Add `capabilities()` method to PGVectorSearchIndex
+  - Fix hit decoding to include id in document
+
+  **@kernl-sdk/turbopuffer**
+  - Add `capabilities()` method describing supported search modes
+  - Add bigint type mapping for timestamps
+  - Fix hit decoding to include id in document
+  - Add memory integration tests
+
+  **kernl**
+  - Add `agent.memories.create()` with simplified syntax (auto-generated IDs, flattened scope)
+  - Add `agent.memories.search()` scoped to agent
+  - Add backend-aware codecs for Turbopuffer field mapping (tvec → vector)
+  - Default `include: true` for Turbopuffer queries to return all attributes
+
+- Updated dependencies [a7d6138]
+  - @kernl-sdk/retrieval@0.1.0
+  - kernl@0.6.3
+  - @kernl-sdk/storage@0.1.12
+
+## 0.1.11
+
+### Patch Changes
+
+- c5a5fcf: Storage now auto-initializes on first operation - no need to call init() manually
+- Updated dependencies [c5a5fcf]
+  - kernl@0.6.2
+  - @kernl-sdk/storage@0.1.11
+
 ## 0.1.10
 
 ### Patch Changes
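The 0.1.12 entry describes the memories API end to end. A minimal sketch of the flow it covers, using the `kernl.memories` calls exercised by the memory integration test included later in this diff (the option shapes are copied from that test; agent registration and error handling are omitted, and the missing explicit `init()` relies on the 0.1.11 auto-initialization note above):

```typescript
import { Pool } from "pg";
import { Kernl } from "kernl";
import "@kernl-sdk/ai/openai"; // register the OpenAI embedding provider, as in the test
import { postgres, pgvector } from "@kernl-sdk/pg";

const pool = new Pool({ connectionString: process.env.DATABASE_URL });

const kernl = new Kernl({
  storage: { db: postgres({ pool }), vector: pgvector({ pool }) },
  memory: { embeddingModel: "openai/text-embedding-3-small", dimensions: 1536 },
});

// Create a memory; it is embedded and indexed in the pgvector-backed search index.
await kernl.memories.create({
  id: "m1",
  scope: { namespace: "demo" },
  kind: "semantic",
  collection: "facts",
  content: { text: "The user loves TypeScript programming" },
});

// Vector-similarity search, optionally filtered by scope.
const hits = await kernl.memories.search({ query: "programming languages", limit: 5 });
console.log(hits.map((h) => h.document?.id));
```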
package/README.md
ADDED
@@ -0,0 +1,124 @@
+# @kernl-sdk/pg
+
+PostgreSQL storage adapter for kernl.
+
+## Prerequisites
+
+Vector search requires the [pgvector](https://github.com/pgvector/pgvector) extension. This must be installed by a superuser before enabling `vector: true`:
+
+```sql
+CREATE EXTENSION IF NOT EXISTS vector;
+```
+
+The storage adapter will automatically create the embedding column and index when `vector` is configured.
+
+## Installation
+
+```bash
+pnpm i @kernl-sdk/pg
+```
+
+## Usage
+
+```typescript
+import { postgres } from "@kernl-sdk/pg";
+
+// :a: connection string
+const storage = postgres({ connstr: process.env.DATABASE_URL });
+
+// :b: individual credentials
+const storage = postgres({
+  host: "localhost",
+  port: 5432,
+  database: "mydb",
+  user: "user",
+  password: "password",
+});
+
+// :c: existing pool
+import { Pool } from "pg";
+const pool = new Pool({ connectionString: process.env.DATABASE_URL });
+const storage = postgres({ pool });
+```
+
+## Configuration
+
+Connection options (one of):
+
+| Option | Type | Description |
+|--------|------|-------------|
+| `connstr` | `string` | PostgreSQL connection string |
+| `pool` | `Pool` | Existing pg Pool instance |
+| `host`, `port`, `database`, `user`, `password` | `string`/`number` | Individual connection credentials |
+
+Additional options:
+
+| Option | Type | Default | Description |
+|--------|------|---------|-------------|
+| `vector` | `boolean \| PGVectorConfig` | `undefined` | Enable pgvector support |
+
+### Vector Configuration
+
+When `vector: true`, defaults are applied:
+
+| Option | Type | Default | Description |
+|--------|------|---------|-------------|
+| `dimensions` | `number` | `1536` | Vector dimensions (matches OpenAI text-embedding-3-small) |
+| `similarity` | `"cosine" \| "euclidean" \| "dot_product"` | `"cosine"` | Distance metric |
+
+```ts
+// Use defaults (1536 dimensions, cosine similarity)
+const storage = postgres({ pool, vector: true });
+
+// Custom configuration
+const storage = postgres({ pool, vector: { dimensions: 768, similarity: "dot_product" } });
+```
+
+## pgvector
+
+kernl follows simple conventions so most indexes “just work” without extra configuration:
+
+```ts
+const pgvec = pgvector({ pool });
+const docs = pgvec.index<Doc>("docs"); // "public.docs"
+await docs.upsert({ id: "doc-1", title: "Hello", embedding: [/* ... */] });
+await docs.query({ title: "Hello" });
+```
+
+### Index id = table name
+
+By default, `index(name)` refers to the "public" schema, and the name is used as the table name. So:
+
+- `search.index("docs")` refers to the table `public.docs`.
+- `search.index("analytics.events")` refers to the table `analytics.events`.
+
+
+### Field conventions
+
+- field names map directly to column names,
+- `title` → `"title"`,
+- `content` → `"content"`,
+- `embedding` → `"embedding"`, etc.
+- any field you pass a `number[]` for is used as a pgvector `vector` column with the same name.
+
+
+### Primary key column
+
+- kernl assumes PK column is `id` by default,
+- Upserts use `INSERT ... ON CONFLICT ("id") DO UPDATE ...`.
+- If your table uses a different key name, you must explicitly bind the index:
+
+```ts
+const pgvec = pgvector({ pool });
+
+pgvec.bindIndex("docs", {
+  schema: "public",
+  table: "articles", // ← table name differs from the bound index name (atypical)
+  pkey: "article_id", // ← primary key is not "id"
+  fields: {
+    embedding: { column: "embed_vec", type: "vector", dimensions: 1536, similarity: "cosine" },
+    title: { column: "article_title", type: "string" },
+    // ...
+  },
+});
+```
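For the `pgvec.index<Doc>("docs")` convention in this README, a sketch of what the backing table and document type could look like under the default conventions (an `id` primary key, columns named after fields, a pgvector column for the embedding). The `Doc` interface and table DDL below are illustrative assumptions, not shipped by the package:

```typescript
import { Pool } from "pg";
import { pgvector } from "@kernl-sdk/pg";

// Hypothetical document type matching the "docs" table from the README example.
interface Doc {
  id: string;
  title: string;
  content: string;
  embedding: number[]; // stored in a pgvector `vector` column of the same name
}

const pool = new Pool({ connectionString: process.env.DATABASE_URL });

// A table that follows the default conventions the README describes.
await pool.query(`
  CREATE TABLE IF NOT EXISTS public.docs (
    id        text PRIMARY KEY,
    title     text,
    content   text,
    embedding vector(1536)
  )
`);

const docs = pgvector({ pool }).index<Doc>("docs"); // resolves to "public"."docs"
await docs.upsert({
  id: "doc-1",
  title: "Hello",
  content: "Hello world",
  embedding: new Array(1536).fill(0), // placeholder embedding
});
```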
package/dist/__tests__/integration.test.js
CHANGED
@@ -4,6 +4,86 @@ import { PGStorage } from "../storage.js";
 import { Agent, Context, } from "kernl";
 import { Thread } from "kernl/internal";
 const TEST_DB_URL = process.env.KERNL_PG_TEST_URL;
+describe.sequential("PGStorage auto-initialization", () => {
+    if (!TEST_DB_URL) {
+        it.skip("requires KERNL_PG_TEST_URL to be set", () => { });
+        return;
+    }
+    /**
+     * Verifies that ALL store methods auto-initialize without explicit init() call.
+     *
+     * This is critical for DX - users should not need to remember to call init().
+     * Each method must internally call ensureInit() before any DB operation.
+     *
+     * Methods covered: get, list, insert, update, delete, history, append
+     */
+    it("auto-initializes on first store operation (no explicit init required)", async () => {
+        const pool = new Pool({ connectionString: TEST_DB_URL });
+        const storage = new PGStorage({ pool });
+        // Clean slate - drop schema to prove init runs automatically
+        await pool.query('DROP SCHEMA IF EXISTS "kernl" CASCADE');
+        // Bind minimal registries
+        const model = {
+            spec: "1.0",
+            provider: "test",
+            modelId: "auto-init-model",
+        };
+        const agent = new Agent({
+            id: "auto-init-agent",
+            name: "Auto Init Agent",
+            instructions: () => "test",
+            model,
+        });
+        const agents = new Map([["auto-init-agent", agent]]);
+        const models = new Map([
+            ["test/auto-init-model", model],
+        ]);
+        storage.bind({ agents, models });
+        const store = storage.threads;
+        const tid = "auto-init-thread";
+        // 1) list() - should auto-init
+        const threads = await store.list();
+        expect(threads).toEqual([]);
+        // 2) get() - should work (returns null for non-existent)
+        const got = await store.get(tid);
+        expect(got).toBeNull();
+        // 3) insert() - should work
+        const inserted = await store.insert({
+            id: tid,
+            namespace: "kernl",
+            agentId: "auto-init-agent",
+            model: "test/auto-init-model",
+        });
+        expect(inserted.tid).toBe(tid);
+        // 4) update() - should work
+        await store.update(tid, { tick: 1 });
+        const tickResult = await pool.query(`SELECT tick FROM "kernl"."threads" WHERE id = $1`, [tid]);
+        expect(tickResult.rows[0]?.tick).toBe(1);
+        // 5) history() - should work (empty)
+        const hist = await store.history(tid);
+        expect(hist).toEqual([]);
+        // 6) append() - should work
+        await store.append([
+            {
+                id: "evt-1",
+                tid,
+                seq: 0,
+                kind: "message",
+                timestamp: new Date(),
+                data: { role: "user", text: "test" },
+                metadata: null,
+            },
+        ]);
+        // 7) delete() - should work
+        await store.delete(tid);
+        const afterDelete = await store.get(tid);
+        expect(afterDelete).toBeNull();
+        // Verify schema was created
+        const schemaResult = await pool.query(`SELECT schema_name FROM information_schema.schemata WHERE schema_name = 'kernl'`);
+        expect(schemaResult.rows).toHaveLength(1);
+        await storage.close();
+    });
+});
 describe.sequential("PGStorage integration", () => {
     if (!TEST_DB_URL) {
         it.skip("requires KERNL_PG_TEST_URL to be set", () => {
@@ -39,7 +119,7 @@ describe.sequential("PGStorage integration", () => {
         // ---- verify migrations recorded ----
         const migrationsResult = await pool.query(`SELECT id FROM "kernl".migrations ORDER BY applied_at ASC`);
         const appliedMigrationIds = migrationsResult.rows.map((row) => row.id);
-        expect(appliedMigrationIds).toEqual(["
+        expect(appliedMigrationIds).toEqual(["001_threads", "002_memories"]);
         // ---- verify indexes created by table definitions ----
         const indexesResult = await pool.query(`
             SELECT indexname, tablename
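The second hunk updates the migration assertion for the new `002_memories` migration. A quick way to check the same thing against a live database, reusing the query string from the test verbatim (the connection variable is an assumption):

```typescript
import { Pool } from "pg";

const pool = new Pool({ connectionString: process.env.KERNL_PG_TEST_URL });

// After storage has initialized itself, both migrations should be recorded.
const { rows } = await pool.query(`SELECT id FROM "kernl".migrations ORDER BY applied_at ASC`);
console.log(rows.map((r) => r.id)); // expected: ["001_threads", "002_memories"]

await pool.end();
```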
package/dist/__tests__/memory-integration.test.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"memory-integration.test.d.ts","sourceRoot":"","sources":["../../src/__tests__/memory-integration.test.ts"],"names":[],"mappings":"AAIA,OAAO,sBAAsB,CAAC"}
package/dist/__tests__/memory-integration.test.js
ADDED
@@ -0,0 +1,287 @@
+import { describe, it, expect, beforeAll, afterAll, beforeEach } from "vitest";
+import { Pool } from "pg";
+import { Kernl, Agent } from "kernl";
+import "@kernl-sdk/ai/openai"; // Register OpenAI embedding provider
+import { postgres, pgvector } from "../index.js";
+const TEST_DB_URL = process.env.KERNL_PG_TEST_URL;
+describe.sequential("Memory Integration with PGVector", { timeout: 30000 }, () => {
+    if (!TEST_DB_URL) {
+        it.skip("requires KERNL_PG_TEST_URL environment variable", () => { });
+        return;
+    }
+    if (!process.env.OPENAI_API_KEY) {
+        it.skip("requires OPENAI_API_KEY environment variable", () => { });
+        return;
+    }
+    let pool;
+    let kernl;
+    beforeAll(async () => {
+        pool = new Pool({ connectionString: TEST_DB_URL });
+        // Clean slate
+        await pool.query('DROP SCHEMA IF EXISTS "kernl" CASCADE');
+        // Create Kernl with PG + pgvector
+        kernl = new Kernl({
+            storage: {
+                db: postgres({ pool }),
+                vector: pgvector({ pool }),
+            },
+            memory: {
+                embeddingModel: "openai/text-embedding-3-small",
+                dimensions: 1536,
+            },
+        });
+        // Register a dummy agent for test scope
+        const model = {
+            spec: "1.0",
+            provider: "test",
+            modelId: "test-model",
+        };
+        const agent = new Agent({
+            id: "test-agent",
+            name: "Test Agent",
+            instructions: () => "test instructions",
+            model,
+        });
+        kernl.register(agent);
+        // Initialize storage (creates "kernl" schema and tables)
+        await kernl.storage.init();
+    });
+    afterAll(async () => {
+        if (kernl) {
+            await kernl.storage.close();
+        }
+    });
+    beforeEach(async () => {
+        // Clean memories between tests
+        await pool.query('DELETE FROM "kernl"."memories"');
+        // Vector index table may not exist yet (created lazily on first memory operation)
+        try {
+            await pool.query('DELETE FROM "kernl"."memories_sindex"');
+        }
+        catch (err) {
+            if (!err.message?.includes("does not exist")) {
+                throw err;
+            }
+        }
+    });
+    it("creates memory and indexes it in pgvector on first operation", async () => {
+        const memory = await kernl.memories.create({
+            id: "m1",
+            scope: { namespace: "test", agentId: "test-agent" },
+            kind: "semantic",
+            collection: "facts",
+            content: { text: "The user loves TypeScript programming" },
+        });
+        expect(memory.id).toBe("m1");
+        expect(memory.content.text).toBe("The user loves TypeScript programming");
+        // Verify memory exists in DB
+        const dbResult = await pool.query('SELECT * FROM "kernl"."memories" WHERE id = $1', ["m1"]);
+        expect(dbResult.rows).toHaveLength(1);
+        // Verify memory was indexed in pgvector
+        const vectorResult = await pool.query('SELECT * FROM "kernl"."memories_sindex" WHERE id = $1', ["m1"]);
+        expect(vectorResult.rows).toHaveLength(1);
+        expect(vectorResult.rows[0].text).toBe("The user loves TypeScript programming");
+        expect(vectorResult.rows[0].tvec).toBeTruthy(); // vector embedding exists
+    });
+    it("searches memories using vector similarity", async () => {
+        // Create several memories
+        await kernl.memories.create({
+            id: "m1",
+            scope: { namespace: "test" },
+            kind: "semantic",
+            collection: "facts",
+            content: { text: "The user loves TypeScript programming" },
+        });
+        await kernl.memories.create({
+            id: "m2",
+            scope: { namespace: "test" },
+            kind: "semantic",
+            collection: "facts",
+            content: { text: "The user enjoys cooking Italian food" },
+        });
+        await kernl.memories.create({
+            id: "m3",
+            scope: { namespace: "test" },
+            kind: "semantic",
+            collection: "facts",
+            content: { text: "TypeScript has excellent type safety" },
+        });
+        // Search for TypeScript-related memories
+        const results = await kernl.memories.search({
+            query: "programming languages",
+            limit: 10,
+        });
+        expect(results.length).toBeGreaterThan(0);
+        // Should find TypeScript-related memories with higher scores
+        const ids = results.map((r) => r.document?.id);
+        expect(ids).toContain("m1"); // Direct match
+        expect(ids).toContain("m3"); // Related to TypeScript
+    });
+    it("returns no results when filters exclude all matches", async () => {
+        await kernl.memories.create({
+            id: "m1",
+            scope: { namespace: "ns1", agentId: "test-agent" },
+            kind: "semantic",
+            collection: "facts",
+            content: { text: "User likes hiking" },
+        });
+        // Filter for a different namespace that has no memories
+        const results = await kernl.memories.search({
+            query: "hiking",
+            filter: { scope: { namespace: "ns2" } },
+            limit: 10,
+        });
+        expect(results.length).toBe(0);
+    });
+    it("filters search results by scope", async () => {
+        await kernl.memories.create({
+            id: "m1",
+            scope: { namespace: "user1", agentId: "test-agent" },
+            kind: "semantic",
+            collection: "facts",
+            content: { text: "User 1 likes cats" },
+        });
+        await kernl.memories.create({
+            id: "m2",
+            scope: { namespace: "user2", agentId: "test-agent" },
+            kind: "semantic",
+            collection: "facts",
+            content: { text: "User 2 likes cats" },
+        });
+        // Search only in user1 namespace
+        const results = await kernl.memories.search({
+            query: "cats",
+            filter: { scope: { namespace: "user1" } },
+            limit: 10,
+        });
+        expect(results.length).toBe(1);
+        expect(results[0].document?.id).toBe("m1");
+    });
+    it("respects topK limit", async () => {
+        await kernl.memories.create({
+            id: "m1",
+            scope: { namespace: "test" },
+            kind: "semantic",
+            collection: "facts",
+            content: { text: "The user likes TypeScript" },
+        });
+        await kernl.memories.create({
+            id: "m2",
+            scope: { namespace: "test" },
+            kind: "semantic",
+            collection: "facts",
+            content: { text: "The user likes JavaScript" },
+        });
+        await kernl.memories.create({
+            id: "m3",
+            scope: { namespace: "test" },
+            kind: "semantic",
+            collection: "facts",
+            content: { text: "The user likes Rust" },
+        });
+        const results = await kernl.memories.search({
+            query: "programming languages",
+            limit: 1,
+        });
+        expect(results.length).toBe(1);
+    });
+    it("handles index creation idempotently across Kernl instances", async () => {
+        // Create memory with first Kernl instance
+        await kernl.memories.create({
+            id: "m1",
+            scope: { namespace: "test" },
+            kind: "semantic",
+            collection: "facts",
+            content: { text: "First instance memory" },
+        });
+        // Close first instance (also closes the pool)
+        await kernl.storage.close();
+        // Create new pool and Kernl instance - reassign both so afterAll and beforeEach work
+        pool = new Pool({ connectionString: TEST_DB_URL });
+        kernl = new Kernl({
+            storage: {
+                db: postgres({ pool }),
+                vector: pgvector({ pool }),
+            },
+            memory: {
+                embeddingModel: "openai/text-embedding-3-small",
+                dimensions: 1536,
+            },
+        });
+        // Should be able to search without errors (index already exists)
+        const results = await kernl.memories.search({
+            query: "memory",
+            limit: 10,
+        });
+        expect(results.length).toBeGreaterThan(0);
+        expect(results[0].document?.id).toBe("m1");
+    });
+    it("updates memory content and re-indexes", async () => {
+        await kernl.memories.create({
+            id: "m1",
+            scope: { namespace: "test" },
+            kind: "semantic",
+            collection: "facts",
+            content: { text: "Original content about dogs" },
+        });
+        // Update content
+        await kernl.memories.update({
+            id: "m1",
+            content: { text: "Updated content about cats" },
+        });
+        // Search should find updated content
+        const results = await kernl.memories.search({
+            query: "cats",
+            limit: 10,
+        });
+        expect(results.length).toBeGreaterThan(0);
+        const match = results.find((r) => r.document?.id === "m1");
+        expect(match).toBeDefined();
+        expect(match?.document?.text).toBe("Updated content about cats");
+    });
+    it("patches memory metadata without re-indexing", async () => {
+        await kernl.memories.create({
+            id: "m1",
+            scope: { namespace: "test" },
+            kind: "semantic",
+            collection: "facts",
+            content: { text: "Cats are great pets" },
+            metadata: { version: 1 },
+        });
+        // Update only metadata (should patch, not full re-index)
+        await kernl.memories.update({
+            id: "m1",
+            metadata: { version: 2, updated: true },
+        });
+        // Verify metadata updated in vector index
+        const vectorResult = await pool.query('SELECT metadata FROM "kernl"."memories_sindex" WHERE id = $1', ["m1"]);
+        expect(vectorResult.rows[0].metadata).toEqual({
+            version: 2,
+            updated: true,
+        });
+    });
+    it("creates memories with multimodal content", async () => {
+        await kernl.memories.create({
+            id: "m1",
+            scope: { namespace: "test" },
+            kind: "semantic",
+            collection: "media",
+            content: {
+                text: "A beautiful sunset",
+                image: {
+                    data: "base64encodedimage",
+                    mime: "image/png",
+                    alt: "Sunset over the ocean",
+                },
+            },
+        });
+        // Should be searchable by text
+        const results = await kernl.memories.search({
+            query: "sunset",
+            limit: 10,
+        });
+        expect(results.length).toBeGreaterThan(0);
+        const match = results.find((r) => r.document?.id === "m1");
+        expect(match).toBeDefined();
+    });
+});
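These tests read the lazily created `"kernl"."memories_sindex"` table (with its `text` and `tvec` columns) directly. The same table can be inspected by hand; a sketch, assuming the default cosine similarity and using pgvector's standard `<=>` cosine-distance operator, neither of which is a kernl API:

```typescript
import { Pool } from "pg";

const pool = new Pool({ connectionString: process.env.KERNL_PG_TEST_URL });

// Placeholder query vector; in practice this would come from the embedding model.
const queryVector = JSON.stringify(new Array(1536).fill(0));

const { rows } = await pool.query(
  `SELECT id, text, tvec <=> $1::vector AS distance
     FROM "kernl"."memories_sindex"
    ORDER BY distance ASC
    LIMIT 5`,
  [queryVector],
);
console.log(rows);

await pool.end();
```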
package/dist/__tests__/memory.test.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"memory.test.d.ts","sourceRoot":"","sources":["../../src/__tests__/memory.test.ts"],"names":[],"mappings":""}