@kernl-sdk/pg 0.1.10 → 0.1.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (153)
  1. package/.turbo/turbo-build.log +4 -5
  2. package/.turbo/turbo-check-types.log +36 -0
  3. package/CHANGELOG.md +41 -0
  4. package/README.md +124 -0
  5. package/dist/__tests__/integration.test.js +81 -1
  6. package/dist/__tests__/memory-integration.test.d.ts +2 -0
  7. package/dist/__tests__/memory-integration.test.d.ts.map +1 -0
  8. package/dist/__tests__/memory-integration.test.js +287 -0
  9. package/dist/__tests__/memory.test.d.ts +2 -0
  10. package/dist/__tests__/memory.test.d.ts.map +1 -0
  11. package/dist/__tests__/memory.test.js +357 -0
  12. package/dist/index.d.ts +5 -3
  13. package/dist/index.d.ts.map +1 -1
  14. package/dist/index.js +5 -3
  15. package/dist/memory/sql.d.ts +30 -0
  16. package/dist/memory/sql.d.ts.map +1 -0
  17. package/dist/memory/sql.js +100 -0
  18. package/dist/memory/store.d.ts +41 -0
  19. package/dist/memory/store.d.ts.map +1 -0
  20. package/dist/memory/store.js +114 -0
  21. package/dist/migrations.d.ts +1 -1
  22. package/dist/migrations.d.ts.map +1 -1
  23. package/dist/migrations.js +9 -3
  24. package/dist/pgvector/__tests__/handle.test.d.ts +2 -0
  25. package/dist/pgvector/__tests__/handle.test.d.ts.map +1 -0
  26. package/dist/pgvector/__tests__/handle.test.js +277 -0
  27. package/dist/pgvector/__tests__/hit.test.d.ts +2 -0
  28. package/dist/pgvector/__tests__/hit.test.d.ts.map +1 -0
  29. package/dist/pgvector/__tests__/hit.test.js +134 -0
  30. package/dist/pgvector/__tests__/integration/document.integration.test.d.ts +7 -0
  31. package/dist/pgvector/__tests__/integration/document.integration.test.d.ts.map +1 -0
  32. package/dist/pgvector/__tests__/integration/document.integration.test.js +587 -0
  33. package/dist/pgvector/__tests__/integration/edge.integration.test.d.ts +8 -0
  34. package/dist/pgvector/__tests__/integration/edge.integration.test.d.ts.map +1 -0
  35. package/dist/pgvector/__tests__/integration/edge.integration.test.js +663 -0
  36. package/dist/pgvector/__tests__/integration/filters.integration.test.d.ts +8 -0
  37. package/dist/pgvector/__tests__/integration/filters.integration.test.d.ts.map +1 -0
  38. package/dist/pgvector/__tests__/integration/filters.integration.test.js +609 -0
  39. package/dist/pgvector/__tests__/integration/lifecycle.integration.test.d.ts +8 -0
  40. package/dist/pgvector/__tests__/integration/lifecycle.integration.test.d.ts.map +1 -0
  41. package/dist/pgvector/__tests__/integration/lifecycle.integration.test.js +449 -0
  42. package/dist/pgvector/__tests__/integration/query.integration.test.d.ts +8 -0
  43. package/dist/pgvector/__tests__/integration/query.integration.test.d.ts.map +1 -0
  44. package/dist/pgvector/__tests__/integration/query.integration.test.js +544 -0
  45. package/dist/pgvector/__tests__/search.test.d.ts +2 -0
  46. package/dist/pgvector/__tests__/search.test.d.ts.map +1 -0
  47. package/dist/pgvector/__tests__/search.test.js +279 -0
  48. package/dist/pgvector/handle.d.ts +60 -0
  49. package/dist/pgvector/handle.d.ts.map +1 -0
  50. package/dist/pgvector/handle.js +213 -0
  51. package/dist/pgvector/hit.d.ts +10 -0
  52. package/dist/pgvector/hit.d.ts.map +1 -0
  53. package/dist/pgvector/hit.js +44 -0
  54. package/dist/pgvector/index.d.ts +7 -0
  55. package/dist/pgvector/index.d.ts.map +1 -0
  56. package/dist/pgvector/index.js +5 -0
  57. package/dist/pgvector/search.d.ts +60 -0
  58. package/dist/pgvector/search.d.ts.map +1 -0
  59. package/dist/pgvector/search.js +227 -0
  60. package/dist/pgvector/sql/__tests__/limit.test.d.ts +2 -0
  61. package/dist/pgvector/sql/__tests__/limit.test.d.ts.map +1 -0
  62. package/dist/pgvector/sql/__tests__/limit.test.js +161 -0
  63. package/dist/pgvector/sql/__tests__/order.test.d.ts +2 -0
  64. package/dist/pgvector/sql/__tests__/order.test.d.ts.map +1 -0
  65. package/dist/pgvector/sql/__tests__/order.test.js +218 -0
  66. package/dist/pgvector/sql/__tests__/query.test.d.ts +2 -0
  67. package/dist/pgvector/sql/__tests__/query.test.d.ts.map +1 -0
  68. package/dist/pgvector/sql/__tests__/query.test.js +392 -0
  69. package/dist/pgvector/sql/__tests__/select.test.d.ts +2 -0
  70. package/dist/pgvector/sql/__tests__/select.test.d.ts.map +1 -0
  71. package/dist/pgvector/sql/__tests__/select.test.js +293 -0
  72. package/dist/pgvector/sql/__tests__/where.test.d.ts +2 -0
  73. package/dist/pgvector/sql/__tests__/where.test.d.ts.map +1 -0
  74. package/dist/pgvector/sql/__tests__/where.test.js +488 -0
  75. package/dist/pgvector/sql/index.d.ts +7 -0
  76. package/dist/pgvector/sql/index.d.ts.map +1 -0
  77. package/dist/pgvector/sql/index.js +6 -0
  78. package/dist/pgvector/sql/limit.d.ts +8 -0
  79. package/dist/pgvector/sql/limit.d.ts.map +1 -0
  80. package/dist/pgvector/sql/limit.js +20 -0
  81. package/dist/pgvector/sql/order.d.ts +9 -0
  82. package/dist/pgvector/sql/order.d.ts.map +1 -0
  83. package/dist/pgvector/sql/order.js +47 -0
  84. package/dist/pgvector/sql/query.d.ts +46 -0
  85. package/dist/pgvector/sql/query.d.ts.map +1 -0
  86. package/dist/pgvector/sql/query.js +54 -0
  87. package/dist/pgvector/sql/schema.d.ts +16 -0
  88. package/dist/pgvector/sql/schema.d.ts.map +1 -0
  89. package/dist/pgvector/sql/schema.js +47 -0
  90. package/dist/pgvector/sql/select.d.ts +11 -0
  91. package/dist/pgvector/sql/select.d.ts.map +1 -0
  92. package/dist/pgvector/sql/select.js +87 -0
  93. package/dist/pgvector/sql/where.d.ts +8 -0
  94. package/dist/pgvector/sql/where.d.ts.map +1 -0
  95. package/dist/pgvector/sql/where.js +137 -0
  96. package/dist/pgvector/types.d.ts +20 -0
  97. package/dist/pgvector/types.d.ts.map +1 -0
  98. package/dist/pgvector/types.js +1 -0
  99. package/dist/pgvector/utils.d.ts +18 -0
  100. package/dist/pgvector/utils.d.ts.map +1 -0
  101. package/dist/pgvector/utils.js +22 -0
  102. package/dist/postgres.d.ts +19 -26
  103. package/dist/postgres.d.ts.map +1 -1
  104. package/dist/postgres.js +15 -27
  105. package/dist/storage.d.ts +62 -0
  106. package/dist/storage.d.ts.map +1 -1
  107. package/dist/storage.js +55 -10
  108. package/dist/thread/sql.d.ts +38 -0
  109. package/dist/thread/sql.d.ts.map +1 -0
  110. package/dist/thread/sql.js +112 -0
  111. package/dist/thread/store.d.ts +7 -3
  112. package/dist/thread/store.d.ts.map +1 -1
  113. package/dist/thread/store.js +46 -105
  114. package/package.json +8 -5
  115. package/src/__tests__/integration.test.ts +114 -15
  116. package/src/__tests__/memory-integration.test.ts +355 -0
  117. package/src/__tests__/memory.test.ts +428 -0
  118. package/src/index.ts +19 -3
  119. package/src/memory/sql.ts +141 -0
  120. package/src/memory/store.ts +166 -0
  121. package/src/migrations.ts +13 -3
  122. package/src/pgvector/README.md +50 -0
  123. package/src/pgvector/__tests__/handle.test.ts +335 -0
  124. package/src/pgvector/__tests__/hit.test.ts +165 -0
  125. package/src/pgvector/__tests__/integration/document.integration.test.ts +717 -0
  126. package/src/pgvector/__tests__/integration/edge.integration.test.ts +835 -0
  127. package/src/pgvector/__tests__/integration/filters.integration.test.ts +721 -0
  128. package/src/pgvector/__tests__/integration/lifecycle.integration.test.ts +570 -0
  129. package/src/pgvector/__tests__/integration/query.integration.test.ts +667 -0
  130. package/src/pgvector/__tests__/search.test.ts +366 -0
  131. package/src/pgvector/handle.ts +285 -0
  132. package/src/pgvector/hit.ts +56 -0
  133. package/src/pgvector/index.ts +7 -0
  134. package/src/pgvector/search.ts +330 -0
  135. package/src/pgvector/sql/__tests__/limit.test.ts +180 -0
  136. package/src/pgvector/sql/__tests__/order.test.ts +248 -0
  137. package/src/pgvector/sql/__tests__/query.test.ts +548 -0
  138. package/src/pgvector/sql/__tests__/select.test.ts +367 -0
  139. package/src/pgvector/sql/__tests__/where.test.ts +554 -0
  140. package/src/pgvector/sql/index.ts +14 -0
  141. package/src/pgvector/sql/limit.ts +29 -0
  142. package/src/pgvector/sql/order.ts +55 -0
  143. package/src/pgvector/sql/query.ts +112 -0
  144. package/src/pgvector/sql/schema.ts +61 -0
  145. package/src/pgvector/sql/select.ts +100 -0
  146. package/src/pgvector/sql/where.ts +152 -0
  147. package/src/pgvector/types.ts +21 -0
  148. package/src/pgvector/utils.ts +24 -0
  149. package/src/postgres.ts +31 -33
  150. package/src/storage.ts +102 -11
  151. package/src/thread/sql.ts +159 -0
  152. package/src/thread/store.ts +58 -127
  153. package/tsconfig.tsbuildinfo +1 -0
package/src/memory/store.ts ADDED
@@ -0,0 +1,166 @@
+ /**
+  * PostgreSQL Memory store implementation.
+  */
+
+ import type { Pool, PoolClient } from "pg";
+
+ import type {
+   MemoryStore,
+   MemoryRecord,
+   NewMemory,
+   MemoryRecordUpdate,
+   MemoryListOptions,
+ } from "kernl";
+ import {
+   KERNL_SCHEMA_NAME,
+   MemoryRecordCodec,
+   NewMemoryCodec,
+   type MemoryDBRecord,
+ } from "@kernl-sdk/storage";
+
+ import { SQL_WHERE, ORDER, SQL_UPDATE } from "./sql";
+
+ /**
+  * PostgreSQL memory store implementation.
+  *
+  * All async methods call `ensureInit()` before database operations
+  * to ensure schema/tables exist.
+  */
+ export class PGMemoryStore implements MemoryStore {
+   private db: Pool | PoolClient;
+   private ensureInit: () => Promise<void>;
+
+   constructor(db: Pool | PoolClient, ensureInit: () => Promise<void>) {
+     this.db = db;
+     this.ensureInit = ensureInit;
+   }
+
+   /**
+    * Get a memory by ID.
+    */
+   async get(id: string): Promise<MemoryRecord | null> {
+     await this.ensureInit();
+
+     const result = await this.db.query<MemoryDBRecord>(
+       `SELECT * FROM ${KERNL_SCHEMA_NAME}.memories WHERE id = $1`,
+       [id],
+     );
+
+     if (result.rows.length === 0) {
+       return null;
+     }
+
+     return MemoryRecordCodec.decode(result.rows[0]);
+   }
+
+   /**
+    * List memories matching optional filter criteria.
+    */
+   async list(options?: MemoryListOptions): Promise<MemoryRecord[]> {
+     await this.ensureInit();
+
+     const { sql: where, params } = SQL_WHERE.encode({
+       filter: options?.filter,
+       startIdx: 1,
+     });
+
+     let idx = params.length + 1;
+     let query = `SELECT * FROM ${KERNL_SCHEMA_NAME}.memories`;
+
+     // build where + order by
+     if (where) query += ` WHERE ${where}`;
+     query += ` ORDER BY ${ORDER.encode({ order: options?.order })}`;
+
+     // add limit + offset
+     if (options?.limit) {
+       query += ` LIMIT $${idx++}`;
+       params.push(options.limit);
+     }
+     if (options?.offset) {
+       query += ` OFFSET $${idx++}`;
+       params.push(options.offset);
+     }
+
+     const result = await this.db.query<MemoryDBRecord>(query, params);
+     return result.rows.map((row) => MemoryRecordCodec.decode(row));
+   }
+
+   /**
+    * Create a new memory record.
+    */
+   async create(memory: NewMemory): Promise<MemoryRecord> {
+     await this.ensureInit();
+
+     const row = NewMemoryCodec.encode(memory);
+
+     const result = await this.db.query<MemoryDBRecord>(
+       `INSERT INTO ${KERNL_SCHEMA_NAME}.memories
+        (id, namespace, entity_id, agent_id, kind, collection, content, wmem, smem_expires_at, timestamp, created_at, updated_at, metadata)
+        VALUES ($1, $2, $3, $4, $5, $6, $7::jsonb, $8, $9, $10, $11, $12, $13::jsonb)
+        RETURNING *`,
+       [
+         row.id,
+         row.namespace,
+         row.entity_id,
+         row.agent_id,
+         row.kind,
+         row.collection,
+         JSON.stringify(row.content), // stringified for the $7::jsonb cast
+         row.wmem,
+         row.smem_expires_at,
+         row.timestamp,
+         row.created_at,
+         row.updated_at,
+         row.metadata ? JSON.stringify(row.metadata) : null,
+       ],
+     );
+
+     return MemoryRecordCodec.decode(result.rows[0]);
+   }
+
+   /**
+    * Update a memory record.
+    */
+   async update(id: string, patch: MemoryRecordUpdate): Promise<MemoryRecord> {
+     await this.ensureInit();
+
+     const { sql: updates, params } = SQL_UPDATE.encode({ patch, startIdx: 1 });
+     const idx = params.length + 1;
+     params.push(id);
+
+     // TODO: do we always want to return the whole record?
+     const result = await this.db.query<MemoryDBRecord>(
+       `UPDATE ${KERNL_SCHEMA_NAME}.memories SET ${updates} WHERE id = $${idx} RETURNING *`,
+       params,
+     );
+
+     if (result.rows.length === 0) {
+       throw new Error(`memory not found: ${id}`);
+     }
+
+     return MemoryRecordCodec.decode(result.rows[0]);
+   }
+
+   /**
+    * Delete a memory by ID.
+    */
+   async delete(id: string): Promise<void> {
+     await this.ensureInit();
+     await this.db.query(
+       `DELETE FROM ${KERNL_SCHEMA_NAME}.memories WHERE id = $1`,
+       [id],
+     );
+   }
+
+   /**
+    * Delete multiple memories by ID.
+    */
+   async mdelete(ids: string[]): Promise<void> {
+     if (ids.length === 0) return;
+     await this.ensureInit();
+     await this.db.query(
+       `DELETE FROM ${KERNL_SCHEMA_NAME}.memories WHERE id = ANY($1)`,
+       [ids],
+     );
+   }
+ }
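
For orientation, `PGMemoryStore` takes any `pg` `Pool` or `PoolClient` plus an idempotent `ensureInit` callback that guarantees the kernl schema and tables exist. A minimal wiring sketch, assuming (for illustration only) that the package entry point exports `PGMemoryStore`; the memoized `applyMigrations` stands in for the real migration runner:

```ts
import { Pool } from "pg";
import { PGMemoryStore } from "@kernl-sdk/pg"; // assumed export path

const pool = new Pool({ connectionString: process.env.DATABASE_URL });

// ensureInit runs before every store operation, so memoize the one-time
// setup; applyMigrations() is a stand-in for the MIGRATIONS runner below.
let ready: Promise<void> | undefined;
async function applyMigrations(): Promise<void> {
  // e.g. apply the MIGRATIONS list from migrations.ts (next hunk)
}
const store = new PGMemoryStore(pool, () => (ready ??= applyMigrations()));

const recent = await store.list({ limit: 10 }); // filter/order/offset optional
```
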
package/src/migrations.ts CHANGED
@@ -4,7 +4,11 @@
 
  import type { PoolClient } from "pg";
  import type { Table, Column } from "@kernl-sdk/storage";
- import { TABLE_THREADS, TABLE_THREAD_EVENTS, SCHEMA_NAME } from "@kernl-sdk/storage";
+ import {
+   TABLE_THREADS,
+   TABLE_THREAD_EVENTS,
+   TABLE_MEMORIES,
+ } from "@kernl-sdk/storage";
 
  /**
   * Migration context with helpers.
@@ -22,14 +26,20 @@ export interface Migration {
  /**
   * List of all migrations in order.
   */
- export const migrations: Migration[] = [
+ export const MIGRATIONS: Migration[] = [
    {
-     id: "0001_initial",
+     id: "001_threads",
      async up(ctx) {
        await ctx.createTable(TABLE_THREADS);
        await ctx.createTable(TABLE_THREAD_EVENTS);
      },
    },
+   {
+     id: "002_memories",
+     async up(ctx) {
+       await ctx.createTable(TABLE_MEMORIES);
+     },
+   },
  ];
 
  /**
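
The renamed `MIGRATIONS` export is an ordered list: each entry's `up(ctx)` runs once against a context exposing helpers like `createTable`. A minimal sketch of applying such a list in order; the package's actual runner and its applied-migrations bookkeeping are not shown in this hunk, and the stand-in types below only mirror the shapes visible above:

```ts
// Stand-in types mirroring the shapes visible in the hunk above.
interface MigrationContext {
  createTable(table: unknown): Promise<void>;
}
interface Migration {
  id: string;
  up(ctx: MigrationContext): Promise<void>;
}

// Apply migrations in array order, skipping ids already recorded.
async function run(
  migrations: Migration[],
  ctx: MigrationContext,
  applied: Set<string>,
): Promise<void> {
  for (const m of migrations) {
    if (applied.has(m.id)) continue; // already applied on a previous run
    await m.up(ctx); // e.g. createTable(TABLE_MEMORIES)
    applied.add(m.id); // record so re-runs stay idempotent
  }
}
```
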
package/src/pgvector/README.md ADDED
@@ -0,0 +1,50 @@
+ # kernl :: pgvector
+
+ ## Index conventions
+
+ kernl follows simple conventions so most indexes “just work” without extra configuration:
+
+ ```ts
+ const pgvec = pgvector({ pool });
+ const docs = pgvec.index<Doc>("docs"); // "public.docs"
+ await docs.upsert({ id: "doc-1", title: "Hello", embedding: [/* ... */] });
+ await docs.query({ title: "Hello" });
+ ```
+
+ ### Index id = table name
+
+ By default, `index(name)` resolves against the `public` schema, and the name is used as the table name. So:
+
+ - `pgvec.index("docs")` refers to the table `public.docs`.
+ - `pgvec.index("analytics.events")` refers to the table `analytics.events`.
+
+ ### Field conventions
+
+ - Field names map directly to column names:
+   - `title` → `"title"`,
+   - `content` → `"content"`,
+   - `embedding` → `"embedding"`, etc.
+ - Any field you pass a `number[]` for is treated as a pgvector `vector` column with the same name.
+
+ ### Primary key column
+
+ - kernl assumes the primary key column is `id` by default.
+ - Upserts use `INSERT ... ON CONFLICT ("id") DO UPDATE ...`.
+ - If your table uses a different key name, you must explicitly bind the index:
+
+ ```ts
+ const pgvec = pgvector({ pool });
+
+ pgvec.bindIndex("docs", {
+   schema: "public",
+   table: "articles", // ← table name differs from the bound index name (atypical)
+   pkey: "article_id", // ← primary key is not "id"
+   fields: {
+     embedding: { column: "embed_vec", type: "vector", dimensions: 1536, similarity: "cosine" },
+     title: { column: "article_title", type: "string" },
+     // ...
+   },
+ });
+ ```
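
Putting the README's conventions together, a zero-configuration index needs no `bindIndex` call at all. A hedged end-to-end sketch: the `pgvector` import path and the `Doc` shape are illustrative assumptions, while the column mapping follows the field-convention rules above:

```ts
import { Pool } from "pg";
import { pgvector } from "@kernl-sdk/pg"; // assumed export path

interface Doc {
  title: string;
  content: string;
  embedding: number[]; // number[] ⇒ pgvector "vector" column "embedding"
}

const pool = new Pool({ connectionString: process.env.DATABASE_URL });
const pgvec = pgvector({ pool });

// "docs" resolves to public.docs with PK column "id"; every field maps
// to the identically named column, so no explicit binding is required.
const docs = pgvec.index<Doc>("docs");
await docs.upsert({
  id: "doc-42",
  title: "Conventions",
  content: "Zero-config mapping",
  embedding: [0.1, 0.2, 0.3],
});
```
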
package/src/pgvector/__tests__/handle.test.ts ADDED
@@ -0,0 +1,335 @@
+ import { describe, it, expect, beforeAll, afterAll, beforeEach } from "vitest";
+ import { Pool } from "pg";
+ import { PGSearchIndex } from "../search";
+
+ const TEST_DB_URL = process.env.KERNL_PG_TEST_URL;
+ const SCHEMA = "kernl_search_test";
+
+ describe.sequential("PGIndexHandle", () => {
+   if (!TEST_DB_URL) {
+     it.skip("requires KERNL_PG_TEST_URL environment variable", () => {});
+     return;
+   }
+
+   let pool: Pool;
+   let search: PGSearchIndex;
+   let initialized = false;
+
+   const ensureInit = async () => {
+     if (initialized) return;
+     await pool.query(`CREATE EXTENSION IF NOT EXISTS vector`);
+     await pool.query(`CREATE SCHEMA IF NOT EXISTS "${SCHEMA}"`);
+     await pool.query(`
+       CREATE TABLE IF NOT EXISTS "${SCHEMA}"."documents" (
+         id TEXT PRIMARY KEY,
+         title TEXT,
+         content TEXT,
+         status TEXT,
+         views INTEGER DEFAULT 0,
+         embedding vector(3)
+       )
+     `);
+     await pool.query(`
+       CREATE INDEX IF NOT EXISTS documents_embedding_idx
+       ON "${SCHEMA}"."documents"
+       USING hnsw (embedding vector_cosine_ops)
+     `);
+     initialized = true;
+   };
+
+   beforeAll(async () => {
+     pool = new Pool({ connectionString: TEST_DB_URL });
+     search = new PGSearchIndex({ pool, ensureInit });
+
+     // Clean slate
+     await pool.query(`DROP SCHEMA IF EXISTS "${SCHEMA}" CASCADE`);
+   });
+
+   afterAll(async () => {
+     await pool.query(`DROP SCHEMA IF EXISTS "${SCHEMA}" CASCADE`);
+     await pool.end();
+   });
+
+   beforeEach(async () => {
+     // Clear table between tests
+     if (initialized) {
+       await pool.query(`DELETE FROM "${SCHEMA}"."documents"`);
+     }
+   });
+
+   describe("bindIndex", () => {
+     it("registers binding for later use", async () => {
+       await search.bindIndex("docs", {
+         schema: SCHEMA,
+         table: "documents",
+         pkey: "id",
+         fields: {
+           title: { column: "title", type: "string" },
+           content: { column: "content", type: "string" },
+           embedding: {
+             column: "embedding",
+             type: "vector",
+             dimensions: 3,
+             similarity: "cosine",
+           },
+         },
+       });
+
+       // Can get handle without error
+       const handle = search.index("docs");
+       expect(handle.id).toBe("docs");
+     });
+   });
+
+   describe("index().query()", () => {
+     beforeAll(async () => {
+       await search.bindIndex("docs", {
+         schema: SCHEMA,
+         table: "documents",
+         pkey: "id",
+         fields: {
+           title: { column: "title", type: "string" },
+           content: { column: "content", type: "string" },
+           status: { column: "status", type: "string" },
+           views: { column: "views", type: "int" },
+           embedding: {
+             column: "embedding",
+             type: "vector",
+             dimensions: 3,
+             similarity: "cosine",
+           },
+         },
+       });
+     });
+
+     async function insertDocs() {
+       await ensureInit();
+       await pool.query(`
+         INSERT INTO "${SCHEMA}"."documents" (id, title, content, status, views, embedding)
+         VALUES
+           ('doc1', 'Hello World', 'First document', 'active', 100, '[0.1, 0.2, 0.3]'),
+           ('doc2', 'Goodbye World', 'Second document', 'active', 200, '[0.4, 0.5, 0.6]'),
+           ('doc3', 'Hello Again', 'Third document', 'draft', 50, '[0.15, 0.25, 0.35]'),
+           ('doc4', 'Final Doc', 'Fourth document', 'archived', 500, '[0.9, 0.8, 0.7]')
+       `);
+     }
+
+     it("queries with vector search", async () => {
+       await insertDocs();
+
+       const handle = search.index("docs");
+       const results = await handle.query({
+         query: [{ embedding: [0.1, 0.2, 0.3] }],
+         topK: 2,
+       });
+
+       expect(results).toHaveLength(2);
+       expect(results[0].id).toBe("doc1"); // closest match
+       expect(results[0].score).toBeGreaterThan(0.9); // high similarity
+       expect(results[0].index).toBe("docs");
+     });
+
+     it("queries with filter", async () => {
+       await insertDocs();
+
+       const handle = search.index("docs");
+       const results = await handle.query({
+         query: [{ embedding: [0.1, 0.2, 0.3] }],
+         filter: { status: "active" },
+         topK: 10,
+       });
+
+       expect(results).toHaveLength(2);
+       results.forEach((r) => {
+         expect(r.document?.status).toBe("active");
+       });
+     });
+
+     it("queries with comparison filter", async () => {
+       await insertDocs();
+
+       const handle = search.index("docs");
+       const results = await handle.query({
+         query: [{ embedding: [0.1, 0.2, 0.3] }],
+         filter: { views: { $gte: 100 } },
+         topK: 10,
+       });
+
+       expect(results.length).toBeGreaterThanOrEqual(2);
+       results.forEach((r) => {
+         expect(r.document?.views).toBeGreaterThanOrEqual(100);
+       });
+     });
+
+     it("queries with $or filter", async () => {
+       await insertDocs();
+
+       const handle = search.index("docs");
+       const results = await handle.query({
+         query: [{ embedding: [0.1, 0.2, 0.3] }],
+         filter: {
+           $or: [{ status: "draft" }, { status: "archived" }],
+         },
+         topK: 10,
+       });
+
+       expect(results).toHaveLength(2);
+       results.forEach((r) => {
+         expect(["draft", "archived"]).toContain(r.document?.status);
+       });
+     });
+
+     it("queries with $in filter", async () => {
+       await insertDocs();
+
+       const handle = search.index("docs");
+       const results = await handle.query({
+         query: [{ embedding: [0.1, 0.2, 0.3] }],
+         filter: { status: { $in: ["active", "draft"] } },
+         topK: 10,
+       });
+
+       expect(results).toHaveLength(3);
+     });
+
+     it("respects topK limit", async () => {
+       await insertDocs();
+
+       const handle = search.index("docs");
+       const results = await handle.query({
+         query: [{ embedding: [0.5, 0.5, 0.5] }],
+         topK: 2,
+       });
+
+       expect(results).toHaveLength(2);
+     });
+
+     it("respects offset for pagination", async () => {
+       await insertDocs();
+
+       const handle = search.index("docs");
+
+       // Get first 2
+       const page1 = await handle.query({
+         query: [{ embedding: [0.5, 0.5, 0.5] }],
+         topK: 2,
+         offset: 0,
+       });
+
+       // Get next 2
+       const page2 = await handle.query({
+         query: [{ embedding: [0.5, 0.5, 0.5] }],
+         topK: 2,
+         offset: 2,
+       });
+
+       expect(page1).toHaveLength(2);
+       expect(page2).toHaveLength(2);
+       expect(page1[0].id).not.toBe(page2[0].id);
+     });
+
+     it("queries with orderBy (non-vector)", async () => {
+       await insertDocs();
+
+       const handle = search.index("docs");
+       const results = await handle.query({
+         filter: { status: "active" },
+         orderBy: { field: "views", direction: "desc" },
+         topK: 10,
+       });
+
+       expect(results).toHaveLength(2);
+       expect(results[0].document?.views).toBe(200);
+       expect(results[1].document?.views).toBe(100);
+     });
+
+     it("returns documents with mapped field names", async () => {
+       await insertDocs();
+
+       const handle = search.index("docs");
+       const results = await handle.query({
+         query: [{ embedding: [0.1, 0.2, 0.3] }],
+         topK: 1,
+       });
+
+       expect(results[0].document).toHaveProperty("title");
+       expect(results[0].document).toHaveProperty("content");
+       expect(results[0].document).toHaveProperty("status");
+       expect(results[0].document).toHaveProperty("views");
+       expect(results[0].document).toHaveProperty("embedding");
+     });
+
+     it("returns empty array when no matches", async () => {
+       await insertDocs();
+
+       const handle = search.index("docs");
+       const results = await handle.query({
+         query: [{ embedding: [0.1, 0.2, 0.3] }],
+         filter: { status: "nonexistent" },
+         topK: 10,
+       });
+
+       expect(results).toEqual([]);
+     });
+   });
+
+   describe("index() generic type", () => {
+     interface DocFields {
+       title: string;
+       content: string;
+       status: string;
+       views: number;
+     }
+
+     beforeAll(async () => {
+       await search.bindIndex("typed-docs", {
+         schema: SCHEMA,
+         table: "documents",
+         pkey: "id",
+         fields: {
+           title: { column: "title", type: "string" },
+           content: { column: "content", type: "string" },
+           status: { column: "status", type: "string" },
+           views: { column: "views", type: "int" },
+           embedding: {
+             column: "embedding",
+             type: "vector",
+             dimensions: 3,
+             similarity: "cosine",
+           },
+         },
+       });
+     });
+
+     it("provides typed document access", async () => {
+       await ensureInit();
+       await pool.query(`
+         INSERT INTO "${SCHEMA}"."documents" (id, title, content, status, views, embedding)
+         VALUES ('typed1', 'Typed Doc', 'Typed content', 'active', 42, '[0.1, 0.2, 0.3]')
+       `);
+
+       const handle = search.index<DocFields>("typed-docs");
+       const results = await handle.query({
+         query: [{ embedding: [0.1, 0.2, 0.3] }],
+         topK: 1,
+       });
+
+       // TypeScript should allow these without errors
+       const doc = results[0].document;
+       expect(doc?.title).toBe("Typed Doc");
+       expect(doc?.views).toBe(42);
+     });
+   });
+
+   describe("error handling", () => {
+     it("throws when table does not exist", async () => {
+       const handle = search.index("unbound-index");
+
+       // Convention-based indexing tries to use "public.unbound-index"
+       // which doesn't exist, so PostgreSQL throws an error
+       await expect(
+         handle.query({ query: [{ embedding: [0.1, 0.2, 0.3] }] }),
+       ).rejects.toThrow('relation "public.unbound-index" does not exist');
+     });
+   });
+ });
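
The cosine scores these tests assert (`score > 0.9` for a near-identical vector) follow the standard pgvector conversion from the `<=>` cosine-distance operator to a similarity, `score = 1 - distance`. The package's real query builder lives in `src/pgvector/sql/`; the snippet below is only an illustrative sketch of the SQL shape such a query typically compiles to, run directly against the test table:

```ts
import { Pool } from "pg";

const pool = new Pool({ connectionString: process.env.KERNL_PG_TEST_URL });

// Illustrative only: roughly the shape handle.query({ query: [{ embedding }],
// topK: 2 }) compiles to; the hnsw vector_cosine_ops index accelerates the
// ORDER BY over the <=> cosine-distance operator.
const { rows } = await pool.query(
  `SELECT id, title, content, status, views,
          1 - (embedding <=> $1) AS score   -- cosine similarity
   FROM "kernl_search_test"."documents"
   ORDER BY embedding <=> $1                -- ascending distance = best first
   LIMIT $2`,
  ["[0.1,0.2,0.3]", 2],
);
// rows[0].score ≈ 1 for an identical vector, matching the > 0.9 assertion.
```
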