@kernl-sdk/pg 0.1.11 → 0.1.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +1 -1
- package/.turbo/turbo-check-types.log +36 -0
- package/CHANGELOG.md +32 -0
- package/README.md +124 -0
- package/dist/__tests__/integration.test.js +2 -2
- package/dist/__tests__/memory-integration.test.d.ts +2 -0
- package/dist/__tests__/memory-integration.test.d.ts.map +1 -0
- package/dist/__tests__/memory-integration.test.js +287 -0
- package/dist/__tests__/memory.test.d.ts +2 -0
- package/dist/__tests__/memory.test.d.ts.map +1 -0
- package/dist/__tests__/memory.test.js +357 -0
- package/dist/index.d.ts +5 -3
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +5 -3
- package/dist/memory/sql.d.ts +30 -0
- package/dist/memory/sql.d.ts.map +1 -0
- package/dist/memory/sql.js +100 -0
- package/dist/memory/store.d.ts +41 -0
- package/dist/memory/store.d.ts.map +1 -0
- package/dist/memory/store.js +114 -0
- package/dist/migrations.d.ts +1 -1
- package/dist/migrations.d.ts.map +1 -1
- package/dist/migrations.js +9 -3
- package/dist/pgvector/__tests__/handle.test.d.ts +2 -0
- package/dist/pgvector/__tests__/handle.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/handle.test.js +277 -0
- package/dist/pgvector/__tests__/hit.test.d.ts +2 -0
- package/dist/pgvector/__tests__/hit.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/hit.test.js +134 -0
- package/dist/pgvector/__tests__/integration/document.integration.test.d.ts +7 -0
- package/dist/pgvector/__tests__/integration/document.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/document.integration.test.js +587 -0
- package/dist/pgvector/__tests__/integration/edge.integration.test.d.ts +8 -0
- package/dist/pgvector/__tests__/integration/edge.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/edge.integration.test.js +663 -0
- package/dist/pgvector/__tests__/integration/filters.integration.test.d.ts +8 -0
- package/dist/pgvector/__tests__/integration/filters.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/filters.integration.test.js +609 -0
- package/dist/pgvector/__tests__/integration/lifecycle.integration.test.d.ts +8 -0
- package/dist/pgvector/__tests__/integration/lifecycle.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/lifecycle.integration.test.js +449 -0
- package/dist/pgvector/__tests__/integration/query.integration.test.d.ts +8 -0
- package/dist/pgvector/__tests__/integration/query.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/query.integration.test.js +544 -0
- package/dist/pgvector/__tests__/search.test.d.ts +2 -0
- package/dist/pgvector/__tests__/search.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/search.test.js +279 -0
- package/dist/pgvector/handle.d.ts +60 -0
- package/dist/pgvector/handle.d.ts.map +1 -0
- package/dist/pgvector/handle.js +213 -0
- package/dist/pgvector/hit.d.ts +10 -0
- package/dist/pgvector/hit.d.ts.map +1 -0
- package/dist/pgvector/hit.js +44 -0
- package/dist/pgvector/index.d.ts +7 -0
- package/dist/pgvector/index.d.ts.map +1 -0
- package/dist/pgvector/index.js +5 -0
- package/dist/pgvector/search.d.ts +60 -0
- package/dist/pgvector/search.d.ts.map +1 -0
- package/dist/pgvector/search.js +227 -0
- package/dist/pgvector/sql/__tests__/limit.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/limit.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/limit.test.js +161 -0
- package/dist/pgvector/sql/__tests__/order.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/order.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/order.test.js +218 -0
- package/dist/pgvector/sql/__tests__/query.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/query.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/query.test.js +392 -0
- package/dist/pgvector/sql/__tests__/select.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/select.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/select.test.js +293 -0
- package/dist/pgvector/sql/__tests__/where.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/where.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/where.test.js +488 -0
- package/dist/pgvector/sql/index.d.ts +7 -0
- package/dist/pgvector/sql/index.d.ts.map +1 -0
- package/dist/pgvector/sql/index.js +6 -0
- package/dist/pgvector/sql/limit.d.ts +8 -0
- package/dist/pgvector/sql/limit.d.ts.map +1 -0
- package/dist/pgvector/sql/limit.js +20 -0
- package/dist/pgvector/sql/order.d.ts +9 -0
- package/dist/pgvector/sql/order.d.ts.map +1 -0
- package/dist/pgvector/sql/order.js +47 -0
- package/dist/pgvector/sql/query.d.ts +46 -0
- package/dist/pgvector/sql/query.d.ts.map +1 -0
- package/dist/pgvector/sql/query.js +54 -0
- package/dist/pgvector/sql/schema.d.ts +16 -0
- package/dist/pgvector/sql/schema.d.ts.map +1 -0
- package/dist/pgvector/sql/schema.js +47 -0
- package/dist/pgvector/sql/select.d.ts +11 -0
- package/dist/pgvector/sql/select.d.ts.map +1 -0
- package/dist/pgvector/sql/select.js +87 -0
- package/dist/pgvector/sql/where.d.ts +8 -0
- package/dist/pgvector/sql/where.d.ts.map +1 -0
- package/dist/pgvector/sql/where.js +137 -0
- package/dist/pgvector/types.d.ts +20 -0
- package/dist/pgvector/types.d.ts.map +1 -0
- package/dist/pgvector/types.js +1 -0
- package/dist/pgvector/utils.d.ts +18 -0
- package/dist/pgvector/utils.d.ts.map +1 -0
- package/dist/pgvector/utils.js +22 -0
- package/dist/postgres.d.ts +19 -26
- package/dist/postgres.d.ts.map +1 -1
- package/dist/postgres.js +15 -27
- package/dist/storage.d.ts +48 -0
- package/dist/storage.d.ts.map +1 -1
- package/dist/storage.js +32 -9
- package/dist/thread/sql.d.ts +38 -0
- package/dist/thread/sql.d.ts.map +1 -0
- package/dist/thread/sql.js +112 -0
- package/dist/thread/store.d.ts +2 -2
- package/dist/thread/store.d.ts.map +1 -1
- package/dist/thread/store.js +32 -102
- package/package.json +7 -4
- package/src/__tests__/integration.test.ts +15 -17
- package/src/__tests__/memory-integration.test.ts +355 -0
- package/src/__tests__/memory.test.ts +428 -0
- package/src/index.ts +19 -3
- package/src/memory/sql.ts +141 -0
- package/src/memory/store.ts +166 -0
- package/src/migrations.ts +13 -3
- package/src/pgvector/README.md +50 -0
- package/src/pgvector/__tests__/handle.test.ts +335 -0
- package/src/pgvector/__tests__/hit.test.ts +165 -0
- package/src/pgvector/__tests__/integration/document.integration.test.ts +717 -0
- package/src/pgvector/__tests__/integration/edge.integration.test.ts +835 -0
- package/src/pgvector/__tests__/integration/filters.integration.test.ts +721 -0
- package/src/pgvector/__tests__/integration/lifecycle.integration.test.ts +570 -0
- package/src/pgvector/__tests__/integration/query.integration.test.ts +667 -0
- package/src/pgvector/__tests__/search.test.ts +366 -0
- package/src/pgvector/handle.ts +285 -0
- package/src/pgvector/hit.ts +56 -0
- package/src/pgvector/index.ts +7 -0
- package/src/pgvector/search.ts +330 -0
- package/src/pgvector/sql/__tests__/limit.test.ts +180 -0
- package/src/pgvector/sql/__tests__/order.test.ts +248 -0
- package/src/pgvector/sql/__tests__/query.test.ts +548 -0
- package/src/pgvector/sql/__tests__/select.test.ts +367 -0
- package/src/pgvector/sql/__tests__/where.test.ts +554 -0
- package/src/pgvector/sql/index.ts +14 -0
- package/src/pgvector/sql/limit.ts +29 -0
- package/src/pgvector/sql/order.ts +55 -0
- package/src/pgvector/sql/query.ts +112 -0
- package/src/pgvector/sql/schema.ts +61 -0
- package/src/pgvector/sql/select.ts +100 -0
- package/src/pgvector/sql/where.ts +152 -0
- package/src/pgvector/types.ts +21 -0
- package/src/pgvector/utils.ts +24 -0
- package/src/postgres.ts +31 -33
- package/src/storage.ts +77 -9
- package/src/thread/sql.ts +159 -0
- package/src/thread/store.ts +40 -127
- package/tsconfig.tsbuildinfo +1 -0
|
@@ -0,0 +1,330 @@
|
|
|
1
|
+
import type { Pool } from "pg";
|
|
2
|
+
import { CursorPage, type CursorPageResponse } from "@kernl-sdk/shared";
|
|
3
|
+
import { KERNL_SCHEMA_NAME } from "@kernl-sdk/storage";
|
|
4
|
+
import type {
|
|
5
|
+
SearchIndex,
|
|
6
|
+
IndexHandle,
|
|
7
|
+
NewIndexParams,
|
|
8
|
+
ListIndexesParams,
|
|
9
|
+
IndexSummary,
|
|
10
|
+
IndexStats,
|
|
11
|
+
UnknownDocument,
|
|
12
|
+
SearchCapabilities,
|
|
13
|
+
} from "@kernl-sdk/retrieval";
|
|
14
|
+
|
|
15
|
+
import { PGIndexHandle } from "./handle";
|
|
16
|
+
import { FIELD_TYPE, SIMILARITY } from "./sql";
|
|
17
|
+
import type { PGIndexConfig, PGFieldBinding } from "./types";
|
|
18
|
+
|
|
19
|
+
// Name of the bookkeeping table (inside the kernl schema) that records which
// Postgres tables are bound as pgvector search indexes and with what config.
const META_TABLE = "search_indexes";

/**
 * Constructor options for the pgvector-backed search index.
 */
export interface PGSearchIndexConfig {
  // Connection pool used for all metadata and index-table queries.
  pool: Pool;
  // Optional hook awaited once before first use (e.g. run migrations or
  // CREATE EXTENSION vector). Defaults to a no-op when omitted.
  ensureInit?: () => Promise<void>;
}
|
|
25
|
+
|
|
26
|
+
/**
|
|
27
|
+
* pgvector-backed SearchIndex implementation.
|
|
28
|
+
*/
|
|
29
|
+
export class PGSearchIndex implements SearchIndex<PGIndexConfig> {
|
|
30
|
+
readonly id = "pgvector";
|
|
31
|
+
|
|
32
|
+
private pool: Pool;
|
|
33
|
+
private userInit: () => Promise<void>;
|
|
34
|
+
private configs = new Map<string, PGIndexConfig>();
|
|
35
|
+
private ready = false;
|
|
36
|
+
|
|
37
|
+
constructor(config: PGSearchIndexConfig) {
|
|
38
|
+
this.pool = config.pool;
|
|
39
|
+
this.userInit = config.ensureInit ?? (() => Promise.resolve());
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
/**
|
|
43
|
+
* Create a new index table.
|
|
44
|
+
*
|
|
45
|
+
* @param params.id - Table name
|
|
46
|
+
* @param params.schema - Field definitions (one field must have pk: true)
|
|
47
|
+
* @param params.providerOptions.schema - Postgres schema (default: "public")
|
|
48
|
+
*/
|
|
49
|
+
async createIndex(params: NewIndexParams): Promise<void> {
|
|
50
|
+
await this.ensureInit();
|
|
51
|
+
|
|
52
|
+
const schemaName = (params.providerOptions?.schema as string) ?? "public";
|
|
53
|
+
|
|
54
|
+
// find primary key field
|
|
55
|
+
const pkEntry = Object.entries(params.schema).find(([, f]) => f.pk);
|
|
56
|
+
if (!pkEntry) {
|
|
57
|
+
throw new Error("schema must have a field with pk: true");
|
|
58
|
+
}
|
|
59
|
+
const pkey = pkEntry[0];
|
|
60
|
+
|
|
61
|
+
const columns: string[] = [];
|
|
62
|
+
const vectorFields: Array<{
|
|
63
|
+
name: string;
|
|
64
|
+
dimensions: number;
|
|
65
|
+
similarity?: string;
|
|
66
|
+
}> = [];
|
|
67
|
+
|
|
68
|
+
for (const [name, field] of Object.entries(params.schema)) {
|
|
69
|
+
const colDef = `"${name}" ${FIELD_TYPE.encode(field)}${field.pk ? " PRIMARY KEY" : ""}`;
|
|
70
|
+
columns.push(colDef);
|
|
71
|
+
|
|
72
|
+
if (field.type === "vector") {
|
|
73
|
+
vectorFields.push({
|
|
74
|
+
name,
|
|
75
|
+
dimensions: field.dimensions,
|
|
76
|
+
similarity: field.similarity,
|
|
77
|
+
});
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
// create table
|
|
82
|
+
await this.pool.query(`
|
|
83
|
+
CREATE TABLE "${schemaName}"."${params.id}" (
|
|
84
|
+
${columns.join(",\n ")}
|
|
85
|
+
)
|
|
86
|
+
`);
|
|
87
|
+
|
|
88
|
+
// create HNSW indexes for vector fields
|
|
89
|
+
for (const vf of vectorFields) {
|
|
90
|
+
await this.pool.query(`
|
|
91
|
+
CREATE INDEX "${params.id}_${vf.name}_idx"
|
|
92
|
+
ON "${schemaName}"."${params.id}"
|
|
93
|
+
USING hnsw ("${vf.name}" ${SIMILARITY.encode(vf.similarity as "cosine" | "euclidean" | "dot_product" | undefined)})
|
|
94
|
+
`);
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
// auto-bind the created table
|
|
98
|
+
const fields: Record<string, PGFieldBinding> = {};
|
|
99
|
+
for (const [name, field] of Object.entries(params.schema)) {
|
|
100
|
+
fields[name] = {
|
|
101
|
+
column: name,
|
|
102
|
+
type: field.type,
|
|
103
|
+
...(field.type === "vector" && {
|
|
104
|
+
dimensions: field.dimensions,
|
|
105
|
+
similarity: field.similarity,
|
|
106
|
+
}),
|
|
107
|
+
};
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
const config: PGIndexConfig = {
|
|
111
|
+
schema: schemaName,
|
|
112
|
+
table: params.id,
|
|
113
|
+
pkey,
|
|
114
|
+
fields,
|
|
115
|
+
};
|
|
116
|
+
|
|
117
|
+
// persist to metadata table
|
|
118
|
+
await this.pool.query(
|
|
119
|
+
`INSERT INTO "${KERNL_SCHEMA_NAME}"."${META_TABLE}" (id, backend, config, created_at)
|
|
120
|
+
VALUES ($1, $2, $3, $4)`,
|
|
121
|
+
[params.id, this.id, JSON.stringify(config), Date.now()],
|
|
122
|
+
);
|
|
123
|
+
|
|
124
|
+
this.configs.set(params.id, config);
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
/**
|
|
128
|
+
* List all indexes.
|
|
129
|
+
*/
|
|
130
|
+
async listIndexes(
|
|
131
|
+
params?: ListIndexesParams,
|
|
132
|
+
): Promise<CursorPage<IndexSummary>> {
|
|
133
|
+
await this.ensureInit();
|
|
134
|
+
|
|
135
|
+
const loader = async (
|
|
136
|
+
p: ListIndexesParams,
|
|
137
|
+
): Promise<CursorPageResponse<IndexSummary>> => {
|
|
138
|
+
const limit = p.limit ?? 100;
|
|
139
|
+
|
|
140
|
+
let sql = `
|
|
141
|
+
SELECT id FROM "${KERNL_SCHEMA_NAME}"."${META_TABLE}"
|
|
142
|
+
WHERE backend = $1
|
|
143
|
+
`;
|
|
144
|
+
const sqlParams: unknown[] = [this.id];
|
|
145
|
+
let idx = 2;
|
|
146
|
+
|
|
147
|
+
if (p.prefix) {
|
|
148
|
+
sql += ` AND id LIKE $${idx}`;
|
|
149
|
+
sqlParams.push(`${p.prefix}%`);
|
|
150
|
+
idx++;
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
if (p.cursor) {
|
|
154
|
+
sql += ` AND id > $${idx}`;
|
|
155
|
+
sqlParams.push(p.cursor);
|
|
156
|
+
idx++;
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
sql += ` ORDER BY id ASC LIMIT $${idx}`;
|
|
160
|
+
sqlParams.push(limit + 1);
|
|
161
|
+
|
|
162
|
+
const result = await this.pool.query<{ id: string }>(sql, sqlParams);
|
|
163
|
+
|
|
164
|
+
const hasMore = result.rows.length > limit;
|
|
165
|
+
const rows = hasMore ? result.rows.slice(0, -1) : result.rows;
|
|
166
|
+
|
|
167
|
+
const data: IndexSummary[] = rows.map((row) => ({
|
|
168
|
+
id: row.id,
|
|
169
|
+
status: "ready" as const,
|
|
170
|
+
}));
|
|
171
|
+
|
|
172
|
+
return {
|
|
173
|
+
data,
|
|
174
|
+
next: hasMore ? (rows[rows.length - 1]?.id ?? null) : null,
|
|
175
|
+
last: !hasMore,
|
|
176
|
+
};
|
|
177
|
+
};
|
|
178
|
+
|
|
179
|
+
const response = await loader(params ?? {});
|
|
180
|
+
|
|
181
|
+
return new CursorPage({
|
|
182
|
+
params: params ?? {},
|
|
183
|
+
response,
|
|
184
|
+
loader,
|
|
185
|
+
});
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
/**
|
|
189
|
+
* Get index statistics.
|
|
190
|
+
*/
|
|
191
|
+
async describeIndex(id: string): Promise<IndexStats> {
|
|
192
|
+
await this.ensureInit();
|
|
193
|
+
|
|
194
|
+
const cfg = this.configs.get(id);
|
|
195
|
+
if (!cfg) {
|
|
196
|
+
throw new Error(`Index "${id}" not bound`);
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
// get row count
|
|
200
|
+
const countRes = await this.pool.query<{ count: string }>(
|
|
201
|
+
`SELECT COUNT(*) as count FROM "${cfg.schema}"."${cfg.table}"`,
|
|
202
|
+
);
|
|
203
|
+
const count = parseInt(countRes.rows[0]?.count ?? "0", 10);
|
|
204
|
+
|
|
205
|
+
// get table size in bytes
|
|
206
|
+
const sizeRes = await this.pool.query<{ size: string }>(
|
|
207
|
+
`SELECT pg_total_relation_size('"${cfg.schema}"."${cfg.table}"') as size`,
|
|
208
|
+
);
|
|
209
|
+
const sizeb = parseInt(sizeRes.rows[0]?.size ?? "0", 10);
|
|
210
|
+
|
|
211
|
+
// find vector field for dimensions/similarity
|
|
212
|
+
const vectorField = Object.values(cfg.fields).find(
|
|
213
|
+
(f) => f.type === "vector",
|
|
214
|
+
);
|
|
215
|
+
|
|
216
|
+
return {
|
|
217
|
+
id,
|
|
218
|
+
count,
|
|
219
|
+
sizeb,
|
|
220
|
+
dimensions: vectorField?.dimensions,
|
|
221
|
+
similarity: vectorField?.similarity,
|
|
222
|
+
status: "ready",
|
|
223
|
+
};
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
/**
|
|
227
|
+
* Delete an index and all its documents.
|
|
228
|
+
*/
|
|
229
|
+
async deleteIndex(id: string): Promise<void> {
|
|
230
|
+
await this.ensureInit();
|
|
231
|
+
|
|
232
|
+
const cfg = this.configs.get(id);
|
|
233
|
+
if (!cfg) {
|
|
234
|
+
throw new Error(`Index "${id}" not bound`);
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
await this.pool.query(
|
|
238
|
+
`DROP TABLE IF EXISTS "${cfg.schema}"."${cfg.table}"`,
|
|
239
|
+
);
|
|
240
|
+
await this.pool.query(
|
|
241
|
+
`DELETE FROM "${KERNL_SCHEMA_NAME}"."${META_TABLE}" WHERE id = $1`,
|
|
242
|
+
[id],
|
|
243
|
+
);
|
|
244
|
+
this.configs.delete(id);
|
|
245
|
+
}
|
|
246
|
+
|
|
247
|
+
/**
|
|
248
|
+
* No-op for pgvector.
|
|
249
|
+
*/
|
|
250
|
+
async warm(_id: string): Promise<void> {}
|
|
251
|
+
|
|
252
|
+
/**
|
|
253
|
+
* pgvector capabilities.
|
|
254
|
+
*/
|
|
255
|
+
capabilities(): SearchCapabilities {
|
|
256
|
+
return {
|
|
257
|
+
modes: new Set(["vector"]),
|
|
258
|
+
multiSignal: false,
|
|
259
|
+
multiVector: false,
|
|
260
|
+
filters: true,
|
|
261
|
+
orderBy: true,
|
|
262
|
+
};
|
|
263
|
+
}
|
|
264
|
+
|
|
265
|
+
/**
|
|
266
|
+
* Get a handle for operating on a specific index.
|
|
267
|
+
*/
|
|
268
|
+
index<TDocument = UnknownDocument>(id: string): IndexHandle<TDocument> {
|
|
269
|
+
const cfg = this.configs.get(id);
|
|
270
|
+
return new PGIndexHandle<TDocument>(
|
|
271
|
+
this.pool,
|
|
272
|
+
() => this.ensureInit(),
|
|
273
|
+
id,
|
|
274
|
+
cfg,
|
|
275
|
+
);
|
|
276
|
+
}
|
|
277
|
+
|
|
278
|
+
/**
|
|
279
|
+
* Bind an existing Postgres table as an index.
|
|
280
|
+
*/
|
|
281
|
+
async bindIndex(id: string, config: PGIndexConfig): Promise<void> {
|
|
282
|
+
await this.ensureInit();
|
|
283
|
+
|
|
284
|
+
// upsert to metadata table
|
|
285
|
+
await this.pool.query(
|
|
286
|
+
`INSERT INTO "${KERNL_SCHEMA_NAME}"."${META_TABLE}" (id, backend, config, created_at)
|
|
287
|
+
VALUES ($1, $2, $3, $4)
|
|
288
|
+
ON CONFLICT (id) DO UPDATE SET config = $3`,
|
|
289
|
+
[id, this.id, JSON.stringify(config), Date.now()],
|
|
290
|
+
);
|
|
291
|
+
|
|
292
|
+
this.configs.set(id, config);
|
|
293
|
+
}
|
|
294
|
+
|
|
295
|
+
/* --- internal utils --- */
|
|
296
|
+
|
|
297
|
+
/**
|
|
298
|
+
* Ensure metadata table exists and load configs.
|
|
299
|
+
*/
|
|
300
|
+
private async ensureInit(): Promise<void> {
|
|
301
|
+
if (this.ready) return;
|
|
302
|
+
|
|
303
|
+
await this.userInit();
|
|
304
|
+
|
|
305
|
+
// create metadata table
|
|
306
|
+
await this.pool.query(`
|
|
307
|
+
CREATE TABLE IF NOT EXISTS "${KERNL_SCHEMA_NAME}"."${META_TABLE}" (
|
|
308
|
+
id TEXT PRIMARY KEY,
|
|
309
|
+
backend TEXT NOT NULL,
|
|
310
|
+
config JSONB NOT NULL,
|
|
311
|
+
created_at BIGINT NOT NULL
|
|
312
|
+
)
|
|
313
|
+
`);
|
|
314
|
+
|
|
315
|
+
// load existing configs for this backend
|
|
316
|
+
const result = await this.pool.query<{
|
|
317
|
+
id: string;
|
|
318
|
+
config: PGIndexConfig;
|
|
319
|
+
}>(
|
|
320
|
+
`SELECT id, config FROM "${KERNL_SCHEMA_NAME}"."${META_TABLE}" WHERE backend = $1`,
|
|
321
|
+
[this.id],
|
|
322
|
+
);
|
|
323
|
+
|
|
324
|
+
for (const row of result.rows) {
|
|
325
|
+
this.configs.set(row.id, row.config);
|
|
326
|
+
}
|
|
327
|
+
|
|
328
|
+
this.ready = true;
|
|
329
|
+
}
|
|
330
|
+
}
|
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
import { describe, it, expect } from "vitest";
|
|
2
|
+
import { SQL_LIMIT } from "../limit";
|
|
3
|
+
|
|
4
|
+
describe("SQL_LIMIT", () => {
|
|
5
|
+
describe("encode", () => {
|
|
6
|
+
it("builds LIMIT clause", () => {
|
|
7
|
+
const result = SQL_LIMIT.encode({
|
|
8
|
+
topK: 10,
|
|
9
|
+
offset: 0,
|
|
10
|
+
startIdx: 1,
|
|
11
|
+
});
|
|
12
|
+
expect(result.sql).toBe("LIMIT $1");
|
|
13
|
+
expect(result.params).toEqual([10]);
|
|
14
|
+
});
|
|
15
|
+
|
|
16
|
+
it("respects startIdx for parameter numbering", () => {
|
|
17
|
+
const result = SQL_LIMIT.encode({
|
|
18
|
+
topK: 10,
|
|
19
|
+
offset: 0,
|
|
20
|
+
startIdx: 5,
|
|
21
|
+
});
|
|
22
|
+
expect(result.sql).toBe("LIMIT $5");
|
|
23
|
+
expect(result.params).toEqual([10]);
|
|
24
|
+
});
|
|
25
|
+
|
|
26
|
+
it("includes OFFSET when offset > 0", () => {
|
|
27
|
+
const result = SQL_LIMIT.encode({
|
|
28
|
+
topK: 10,
|
|
29
|
+
offset: 20,
|
|
30
|
+
startIdx: 1,
|
|
31
|
+
});
|
|
32
|
+
expect(result.sql).toBe("LIMIT $1 OFFSET $2");
|
|
33
|
+
expect(result.params).toEqual([10, 20]);
|
|
34
|
+
});
|
|
35
|
+
|
|
36
|
+
it("skips OFFSET when offset is 0", () => {
|
|
37
|
+
const result = SQL_LIMIT.encode({
|
|
38
|
+
topK: 25,
|
|
39
|
+
offset: 0,
|
|
40
|
+
startIdx: 3,
|
|
41
|
+
});
|
|
42
|
+
expect(result.sql).toBe("LIMIT $3");
|
|
43
|
+
expect(result.params).toEqual([25]);
|
|
44
|
+
});
|
|
45
|
+
|
|
46
|
+
it("handles pagination correctly", () => {
|
|
47
|
+
// Page 1: offset 0
|
|
48
|
+
const page1 = SQL_LIMIT.encode({
|
|
49
|
+
topK: 20,
|
|
50
|
+
offset: 0,
|
|
51
|
+
startIdx: 1,
|
|
52
|
+
});
|
|
53
|
+
expect(page1.sql).toBe("LIMIT $1");
|
|
54
|
+
expect(page1.params).toEqual([20]);
|
|
55
|
+
|
|
56
|
+
// Page 2: offset 20
|
|
57
|
+
const page2 = SQL_LIMIT.encode({
|
|
58
|
+
topK: 20,
|
|
59
|
+
offset: 20,
|
|
60
|
+
startIdx: 1,
|
|
61
|
+
});
|
|
62
|
+
expect(page2.sql).toBe("LIMIT $1 OFFSET $2");
|
|
63
|
+
expect(page2.params).toEqual([20, 20]);
|
|
64
|
+
|
|
65
|
+
// Page 3: offset 40
|
|
66
|
+
const page3 = SQL_LIMIT.encode({
|
|
67
|
+
topK: 20,
|
|
68
|
+
offset: 40,
|
|
69
|
+
startIdx: 1,
|
|
70
|
+
});
|
|
71
|
+
expect(page3.sql).toBe("LIMIT $1 OFFSET $2");
|
|
72
|
+
expect(page3.params).toEqual([20, 40]);
|
|
73
|
+
});
|
|
74
|
+
|
|
75
|
+
it("correctly increments param index after SELECT and WHERE", () => {
|
|
76
|
+
// Simulating: SELECT uses $1, WHERE uses $2-$4
|
|
77
|
+
// LIMIT should start at $5
|
|
78
|
+
const result = SQL_LIMIT.encode({
|
|
79
|
+
topK: 10,
|
|
80
|
+
offset: 50,
|
|
81
|
+
startIdx: 5,
|
|
82
|
+
});
|
|
83
|
+
expect(result.sql).toBe("LIMIT $5 OFFSET $6");
|
|
84
|
+
expect(result.params).toEqual([10, 50]);
|
|
85
|
+
});
|
|
86
|
+
|
|
87
|
+
describe("edge values", () => {
|
|
88
|
+
it("handles topK: 0", () => {
|
|
89
|
+
const result = SQL_LIMIT.encode({
|
|
90
|
+
topK: 0,
|
|
91
|
+
offset: 0,
|
|
92
|
+
startIdx: 1,
|
|
93
|
+
});
|
|
94
|
+
// LIMIT 0 is valid SQL - returns no rows
|
|
95
|
+
expect(result.sql).toBe("LIMIT $1");
|
|
96
|
+
expect(result.params).toEqual([0]);
|
|
97
|
+
});
|
|
98
|
+
|
|
99
|
+
it("handles topK: 1", () => {
|
|
100
|
+
const result = SQL_LIMIT.encode({
|
|
101
|
+
topK: 1,
|
|
102
|
+
offset: 0,
|
|
103
|
+
startIdx: 1,
|
|
104
|
+
});
|
|
105
|
+
expect(result.sql).toBe("LIMIT $1");
|
|
106
|
+
expect(result.params).toEqual([1]);
|
|
107
|
+
});
|
|
108
|
+
|
|
109
|
+
it("handles very large topK", () => {
|
|
110
|
+
const result = SQL_LIMIT.encode({
|
|
111
|
+
topK: 1000000,
|
|
112
|
+
offset: 0,
|
|
113
|
+
startIdx: 1,
|
|
114
|
+
});
|
|
115
|
+
expect(result.sql).toBe("LIMIT $1");
|
|
116
|
+
expect(result.params).toEqual([1000000]);
|
|
117
|
+
});
|
|
118
|
+
|
|
119
|
+
it("handles very large offset", () => {
|
|
120
|
+
const result = SQL_LIMIT.encode({
|
|
121
|
+
topK: 10,
|
|
122
|
+
offset: 999999,
|
|
123
|
+
startIdx: 1,
|
|
124
|
+
});
|
|
125
|
+
expect(result.sql).toBe("LIMIT $1 OFFSET $2");
|
|
126
|
+
expect(result.params).toEqual([10, 999999]);
|
|
127
|
+
});
|
|
128
|
+
|
|
129
|
+
it("handles very large startIdx", () => {
|
|
130
|
+
const result = SQL_LIMIT.encode({
|
|
131
|
+
topK: 10,
|
|
132
|
+
offset: 20,
|
|
133
|
+
startIdx: 50,
|
|
134
|
+
});
|
|
135
|
+
expect(result.sql).toBe("LIMIT $50 OFFSET $51");
|
|
136
|
+
expect(result.params).toEqual([10, 20]);
|
|
137
|
+
});
|
|
138
|
+
|
|
139
|
+
it("handles startIdx: 1 with both topK and offset", () => {
|
|
140
|
+
const result = SQL_LIMIT.encode({
|
|
141
|
+
topK: 25,
|
|
142
|
+
offset: 100,
|
|
143
|
+
startIdx: 1,
|
|
144
|
+
});
|
|
145
|
+
expect(result.sql).toBe("LIMIT $1 OFFSET $2");
|
|
146
|
+
expect(result.params).toEqual([25, 100]);
|
|
147
|
+
});
|
|
148
|
+
});
|
|
149
|
+
|
|
150
|
+
describe("offset boundary", () => {
|
|
151
|
+
it("includes OFFSET when offset is exactly 1", () => {
|
|
152
|
+
const result = SQL_LIMIT.encode({
|
|
153
|
+
topK: 10,
|
|
154
|
+
offset: 1,
|
|
155
|
+
startIdx: 1,
|
|
156
|
+
});
|
|
157
|
+
expect(result.sql).toBe("LIMIT $1 OFFSET $2");
|
|
158
|
+
expect(result.params).toEqual([10, 1]);
|
|
159
|
+
});
|
|
160
|
+
|
|
161
|
+
it("does not include OFFSET when offset is exactly 0", () => {
|
|
162
|
+
const result = SQL_LIMIT.encode({
|
|
163
|
+
topK: 10,
|
|
164
|
+
offset: 0,
|
|
165
|
+
startIdx: 1,
|
|
166
|
+
});
|
|
167
|
+
expect(result.sql).toBe("LIMIT $1");
|
|
168
|
+
expect(result.params).toEqual([10]);
|
|
169
|
+
});
|
|
170
|
+
});
|
|
171
|
+
});
|
|
172
|
+
|
|
173
|
+
describe("decode", () => {
|
|
174
|
+
it("throws not implemented", () => {
|
|
175
|
+
expect(() => SQL_LIMIT.decode({} as any)).toThrow(
|
|
176
|
+
"SQL_LIMIT.decode not implemented",
|
|
177
|
+
);
|
|
178
|
+
});
|
|
179
|
+
});
|
|
180
|
+
});
|