@kernl-sdk/libsql 0.1.36 → 0.1.39
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +5 -4
- package/CHANGELOG.md +20 -0
- package/README.md +225 -0
- package/dist/__tests__/constraints.test.d.ts +2 -0
- package/dist/__tests__/constraints.test.d.ts.map +1 -0
- package/dist/__tests__/constraints.test.js +97 -0
- package/dist/__tests__/helpers.d.ts +36 -0
- package/dist/__tests__/helpers.d.ts.map +1 -0
- package/dist/__tests__/helpers.js +80 -0
- package/dist/__tests__/memory.create-get.test.d.ts +2 -0
- package/dist/__tests__/memory.create-get.test.d.ts.map +1 -0
- package/dist/__tests__/memory.create-get.test.js +8 -0
- package/dist/__tests__/memory.delete.test.d.ts +2 -0
- package/dist/__tests__/memory.delete.test.d.ts.map +1 -0
- package/dist/__tests__/memory.delete.test.js +6 -0
- package/dist/__tests__/memory.list.test.d.ts +2 -0
- package/dist/__tests__/memory.list.test.d.ts.map +1 -0
- package/dist/__tests__/memory.list.test.js +8 -0
- package/dist/__tests__/memory.update.test.d.ts +2 -0
- package/dist/__tests__/memory.update.test.d.ts.map +1 -0
- package/dist/__tests__/memory.update.test.js +8 -0
- package/dist/__tests__/migrations.test.d.ts +2 -0
- package/dist/__tests__/migrations.test.d.ts.map +1 -0
- package/dist/__tests__/migrations.test.js +68 -0
- package/dist/__tests__/row-codecs.test.d.ts +2 -0
- package/dist/__tests__/row-codecs.test.d.ts.map +1 -0
- package/dist/__tests__/row-codecs.test.js +175 -0
- package/dist/__tests__/sql-utils.test.d.ts +2 -0
- package/dist/__tests__/sql-utils.test.d.ts.map +1 -0
- package/dist/__tests__/sql-utils.test.js +45 -0
- package/dist/__tests__/storage.init.test.d.ts +2 -0
- package/dist/__tests__/storage.init.test.d.ts.map +1 -0
- package/dist/__tests__/storage.init.test.js +63 -0
- package/dist/__tests__/thread.lifecycle.test.d.ts +2 -0
- package/dist/__tests__/thread.lifecycle.test.d.ts.map +1 -0
- package/dist/__tests__/thread.lifecycle.test.js +172 -0
- package/dist/__tests__/transaction.test.d.ts +2 -0
- package/dist/__tests__/transaction.test.d.ts.map +1 -0
- package/dist/__tests__/transaction.test.js +16 -0
- package/dist/__tests__/utils.test.d.ts +2 -0
- package/dist/__tests__/utils.test.d.ts.map +1 -0
- package/dist/__tests__/utils.test.js +31 -0
- package/dist/client.d.ts +46 -0
- package/dist/client.d.ts.map +1 -0
- package/dist/client.js +46 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +5 -1
- package/dist/memory/__tests__/create-get.test.d.ts +2 -0
- package/dist/memory/__tests__/create-get.test.d.ts.map +1 -0
- package/dist/memory/__tests__/create-get.test.js +126 -0
- package/dist/memory/__tests__/delete.test.d.ts +2 -0
- package/dist/memory/__tests__/delete.test.d.ts.map +1 -0
- package/dist/memory/__tests__/delete.test.js +96 -0
- package/dist/memory/__tests__/list.test.d.ts +2 -0
- package/dist/memory/__tests__/list.test.d.ts.map +1 -0
- package/dist/memory/__tests__/list.test.js +168 -0
- package/dist/memory/__tests__/sql.test.d.ts +2 -0
- package/dist/memory/__tests__/sql.test.d.ts.map +1 -0
- package/dist/memory/__tests__/sql.test.js +159 -0
- package/dist/memory/__tests__/update.test.d.ts +2 -0
- package/dist/memory/__tests__/update.test.d.ts.map +1 -0
- package/dist/memory/__tests__/update.test.js +113 -0
- package/dist/memory/row.d.ts +11 -0
- package/dist/memory/row.d.ts.map +1 -0
- package/dist/memory/row.js +29 -0
- package/dist/memory/sql.d.ts +34 -0
- package/dist/memory/sql.d.ts.map +1 -0
- package/dist/memory/sql.js +109 -0
- package/dist/memory/store.d.ts +41 -0
- package/dist/memory/store.d.ts.map +1 -0
- package/dist/memory/store.js +132 -0
- package/dist/migrations.d.ts +32 -0
- package/dist/migrations.d.ts.map +1 -0
- package/dist/migrations.js +157 -0
- package/dist/sql.d.ts +28 -0
- package/dist/sql.d.ts.map +1 -0
- package/dist/sql.js +22 -0
- package/dist/storage.d.ts +75 -0
- package/dist/storage.d.ts.map +1 -0
- package/dist/storage.js +123 -0
- package/dist/thread/__tests__/append.test.d.ts +2 -0
- package/dist/thread/__tests__/append.test.d.ts.map +1 -0
- package/dist/thread/__tests__/append.test.js +141 -0
- package/dist/thread/__tests__/history.test.d.ts +2 -0
- package/dist/thread/__tests__/history.test.d.ts.map +1 -0
- package/dist/thread/__tests__/history.test.js +146 -0
- package/dist/thread/__tests__/sql.test.d.ts +2 -0
- package/dist/thread/__tests__/sql.test.d.ts.map +1 -0
- package/dist/thread/__tests__/sql.test.js +129 -0
- package/dist/thread/__tests__/store.test.d.ts +2 -0
- package/dist/thread/__tests__/store.test.d.ts.map +1 -0
- package/dist/thread/__tests__/store.test.js +170 -0
- package/dist/thread/row.d.ts +19 -0
- package/dist/thread/row.d.ts.map +1 -0
- package/dist/thread/row.js +65 -0
- package/dist/thread/sql.d.ts +33 -0
- package/dist/thread/sql.d.ts.map +1 -0
- package/dist/thread/sql.js +112 -0
- package/dist/thread/store.d.ts +67 -0
- package/dist/thread/store.d.ts.map +1 -0
- package/dist/thread/store.js +282 -0
- package/dist/utils.d.ts +10 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/utils.js +21 -0
- package/package.json +15 -11
- package/src/__tests__/constraints.test.ts +123 -0
- package/src/__tests__/helpers.ts +98 -0
- package/src/__tests__/migrations.test.ts +114 -0
- package/src/__tests__/row-codecs.test.ts +201 -0
- package/src/__tests__/sql-utils.test.ts +52 -0
- package/src/__tests__/storage.init.test.ts +92 -0
- package/src/__tests__/thread.lifecycle.test.ts +234 -0
- package/src/__tests__/transaction.test.ts +25 -0
- package/src/__tests__/utils.test.ts +38 -0
- package/src/client.ts +71 -0
- package/src/index.ts +10 -0
- package/src/memory/__tests__/create-get.test.ts +161 -0
- package/src/memory/__tests__/delete.test.ts +124 -0
- package/src/memory/__tests__/list.test.ts +198 -0
- package/src/memory/__tests__/sql.test.ts +186 -0
- package/src/memory/__tests__/update.test.ts +148 -0
- package/src/memory/row.ts +36 -0
- package/src/memory/sql.ts +142 -0
- package/src/memory/store.ts +173 -0
- package/src/migrations.ts +206 -0
- package/src/sql.ts +35 -0
- package/src/storage.ts +170 -0
- package/src/thread/__tests__/append.test.ts +201 -0
- package/src/thread/__tests__/history.test.ts +198 -0
- package/src/thread/__tests__/sql.test.ts +154 -0
- package/src/thread/__tests__/store.test.ts +219 -0
- package/src/thread/row.ts +77 -0
- package/src/thread/sql.ts +153 -0
- package/src/thread/store.ts +381 -0
- package/src/utils.ts +20 -0
- package/LICENSE +0 -201
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
/**
 * Tests for LibSQLMemoryStore.update().
 *
 * Covers: patching individual fields (content, wmem, smem, metadata),
 * multi-field patches, updated_at bumping, persistence across a re-read,
 * preservation of untouched fields, and the not-found error path.
 */
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { create_client, create_storage, testid } from "../../__tests__/helpers.js";
describe("LibSQLMemoryStore update", () => {
    let client;
    let storage;
    let memId;
    // Fresh client/storage plus one seeded memory row per test.
    beforeEach(async () => {
        client = create_client();
        storage = create_storage(client);
        await storage.memories.list(); // init
        memId = testid("mem");
        await storage.memories.create({
            id: memId,
            scope: { namespace: "default", entityId: "user-1" },
            kind: "semantic",
            collection: "facts",
            content: { text: "Original content" },
            wmem: false,
            metadata: { version: 1 },
        });
    });
    afterEach(() => {
        client.close();
    });
    it("updates content and bumps updated_at", async () => {
        const before = await storage.memories.get(memId);
        const originalUpdatedAt = before.updatedAt;
        // Small delay to ensure timestamp difference
        await new Promise((r) => setTimeout(r, 10));
        const updated = await storage.memories.update(memId, {
            id: memId,
            content: { text: "Updated content" },
        });
        expect(updated.content).toEqual({ text: "Updated content" });
        expect(updated.updatedAt).toBeGreaterThan(originalUpdatedAt);
        // Verify persisted
        const found = await storage.memories.get(memId);
        expect(found?.content).toEqual({ text: "Updated content" });
    });
    it("updates wmem flag", async () => {
        const updated = await storage.memories.update(memId, {
            id: memId,
            wmem: true,
        });
        expect(updated.wmem).toBe(true);
        // Re-read to confirm the flag was persisted, not just echoed back.
        const found = await storage.memories.get(memId);
        expect(found?.wmem).toBe(true);
    });
    it("updates smem expiration", async () => {
        const expiresAt = Date.now() + 7200000;
        const updated = await storage.memories.update(memId, {
            id: memId,
            smem: { expiresAt },
        });
        expect(updated.smem.expiresAt).toBe(expiresAt);
        const found = await storage.memories.get(memId);
        expect(found?.smem.expiresAt).toBe(expiresAt);
    });
    it("clears smem expiration", async () => {
        // First set an expiration
        await storage.memories.update(memId, {
            id: memId,
            smem: { expiresAt: Date.now() + 3600000 },
        });
        // Then clear it
        const updated = await storage.memories.update(memId, {
            id: memId,
            smem: { expiresAt: null },
        });
        expect(updated.smem.expiresAt).toBeNull();
    });
    it("updates metadata", async () => {
        const newMetadata = { version: 2, source: "api", edited: true };
        const updated = await storage.memories.update(memId, {
            id: memId,
            metadata: newMetadata,
        });
        expect(updated.metadata).toEqual(newMetadata);
        const found = await storage.memories.get(memId);
        expect(found?.metadata).toEqual(newMetadata);
    });
    it("updates multiple fields at once", async () => {
        const updated = await storage.memories.update(memId, {
            id: memId,
            content: { text: "New content" },
            wmem: true,
            metadata: { version: 3 },
        });
        expect(updated.content).toEqual({ text: "New content" });
        expect(updated.wmem).toBe(true);
        expect(updated.metadata).toEqual({ version: 3 });
    });
    it("throws when memory does not exist", async () => {
        await expect(storage.memories.update("nonexistent", {
            id: "nonexistent",
            content: { text: "test" },
        })).rejects.toThrow(/not found/i);
    });
    it("preserves unchanged fields", async () => {
        const before = await storage.memories.get(memId);
        await storage.memories.update(memId, {
            id: memId,
            wmem: true,
        });
        const after = await storage.memories.get(memId);
        // Content should be unchanged
        expect(after?.content).toEqual(before?.content);
        expect(after?.kind).toBe(before?.kind);
        expect(after?.collection).toBe(before?.collection);
        // wmem should be updated
        expect(after?.wmem).toBe(true);
    });
});
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* LibSQL row codecs for memory data.
|
|
3
|
+
*/
|
|
4
|
+
import type { Row } from "@libsql/client";
|
|
5
|
+
import type { Codec } from "@kernl-sdk/shared/lib";
|
|
6
|
+
import type { MemoryDBRecord } from "@kernl-sdk/storage";
|
|
7
|
+
/**
|
|
8
|
+
* Codec for converting LibSQL rows to MemoryDBRecord.
|
|
9
|
+
*/
|
|
10
|
+
export declare const RowToMemoryRecord: Codec<Row, MemoryDBRecord>;
|
|
11
|
+
//# sourceMappingURL=row.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"row.d.ts","sourceRoot":"","sources":["../../src/memory/row.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,GAAG,EAAE,MAAM,gBAAgB,CAAC;AAC1C,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,uBAAuB,CAAC;AACnD,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,oBAAoB,CAAC;AAIzD;;GAEG;AACH,eAAO,MAAM,iBAAiB,EAAE,KAAK,CAAC,GAAG,EAAE,cAAc,CAsBxD,CAAC"}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* LibSQL row codecs for memory data.
|
|
3
|
+
*/
|
|
4
|
+
import { parsejson } from "../utils.js";
|
|
5
|
+
/**
|
|
6
|
+
* Codec for converting LibSQL rows to MemoryDBRecord.
|
|
7
|
+
*/
|
|
8
|
+
export const RowToMemoryRecord = {
    /**
     * Map a raw LibSQL row onto the snake_case MemoryDBRecord shape.
     * JSON columns are parsed from TEXT; `wmem` is widened from SQLite's
     * 0/1 storage to a real boolean; all other columns pass through as-is.
     */
    encode(row) {
        const record = {
            id: row.id,
            namespace: row.namespace,
            entity_id: row.entity_id,
            agent_id: row.agent_id,
            kind: row.kind,
            collection: row.collection,
            content: parsejson(row.content) ?? {},
            wmem: Boolean(row.wmem), // SQLite stores booleans as 0/1
            smem_expires_at: row.smem_expires_at,
            timestamp: row.timestamp,
            created_at: row.created_at,
            updated_at: row.updated_at,
            metadata: parsejson(row.metadata),
        };
        return record;
    },
    /** One-way codec: rows are never built from records here. */
    decode() {
        throw new Error("RowToMemoryRecord.decode not implemented");
    },
};
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Memory SQL conversion codecs for LibSQL.
|
|
3
|
+
*
|
|
4
|
+
* Uses ? placeholders instead of PostgreSQL's $1, $2, etc.
|
|
5
|
+
*/
|
|
6
|
+
import type { Codec } from "@kernl-sdk/shared/lib";
|
|
7
|
+
import type { MemoryFilter, MemoryRecordUpdate } from "kernl";
|
|
8
|
+
import { type SQLClause } from "../sql.js";
|
|
9
|
+
export interface WhereInput {
|
|
10
|
+
filter?: MemoryFilter;
|
|
11
|
+
}
|
|
12
|
+
/**
|
|
13
|
+
* Encode MemoryFilter to SQL WHERE clause with ? placeholders.
|
|
14
|
+
*/
|
|
15
|
+
export declare const SQL_WHERE: Codec<WhereInput, SQLClause>;
|
|
16
|
+
type OrderDirection = "asc" | "desc";
|
|
17
|
+
export interface OrderInput {
|
|
18
|
+
order?: OrderDirection;
|
|
19
|
+
defaultColumn?: string;
|
|
20
|
+
defaultDirection?: OrderDirection;
|
|
21
|
+
}
|
|
22
|
+
/**
|
|
23
|
+
* Encode order options to SQL ORDER BY clause.
|
|
24
|
+
*/
|
|
25
|
+
export declare const ORDER: Codec<OrderInput, string>;
|
|
26
|
+
export interface PatchInput {
|
|
27
|
+
patch: MemoryRecordUpdate;
|
|
28
|
+
}
|
|
29
|
+
/**
|
|
30
|
+
* Encode MemoryRecordUpdate to SQL SET clause with ? placeholders.
|
|
31
|
+
*/
|
|
32
|
+
export declare const SQL_UPDATE: Codec<PatchInput, SQLClause>;
|
|
33
|
+
export {};
|
|
34
|
+
//# sourceMappingURL=sql.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sql.d.ts","sourceRoot":"","sources":["../../src/memory/sql.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,uBAAuB,CAAC;AACnD,OAAO,KAAK,EAAE,YAAY,EAAE,kBAAkB,EAAE,MAAM,OAAO,CAAC;AAE9D,OAAO,EAAE,KAAK,SAAS,EAAe,MAAM,QAAQ,CAAC;AAErD,MAAM,WAAW,UAAU;IACzB,MAAM,CAAC,EAAE,YAAY,CAAC;CACvB;AAED;;GAEG;AACH,eAAO,MAAM,SAAS,EAAE,KAAK,CAAC,UAAU,EAAE,SAAS,CAyDlD,CAAC;AAEF,KAAK,cAAc,GAAG,KAAK,GAAG,MAAM,CAAC;AAErC,MAAM,WAAW,UAAU;IACzB,KAAK,CAAC,EAAE,cAAc,CAAC;IACvB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,gBAAgB,CAAC,EAAE,cAAc,CAAC;CACnC;AAED;;GAEG;AACH,eAAO,MAAM,KAAK,EAAE,KAAK,CAAC,UAAU,EAAE,MAAM,CAS3C,CAAC;AAEF,MAAM,WAAW,UAAU;IACzB,KAAK,EAAE,kBAAkB,CAAC;CAC3B;AAED;;GAEG;AACH,eAAO,MAAM,UAAU,EAAE,KAAK,CAAC,UAAU,EAAE,SAAS,CAmCnD,CAAC"}
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Memory SQL conversion codecs for LibSQL.
|
|
3
|
+
*
|
|
4
|
+
* Uses ? placeholders instead of PostgreSQL's $1, $2, etc.
|
|
5
|
+
*/
|
|
6
|
+
import { expandarray } from "../sql.js";
|
|
7
|
+
/**
|
|
8
|
+
* Encode MemoryFilter to SQL WHERE clause with ? placeholders.
|
|
9
|
+
*/
|
|
10
|
+
export const SQL_WHERE = {
    /**
     * Build a WHERE-clause body (no leading "WHERE") plus its bound params
     * from an optional MemoryFilter, using ? placeholders. Returns empty
     * sql when the filter is absent or matches everything.
     */
    encode({ filter }) {
        if (!filter) {
            return { sql: "", params: [] };
        }
        const conditions = [];
        const params = [];
        const push = (clause, value) => {
            conditions.push(clause);
            params.push(value);
        };
        // Scope fields map 1:1 from camelCase filter keys to snake_case columns.
        const scope = filter.scope ?? {};
        if (scope.namespace !== undefined) {
            push(`namespace = ?`, scope.namespace);
        }
        if (scope.entityId !== undefined) {
            push(`entity_id = ?`, scope.entityId);
        }
        if (scope.agentId !== undefined) {
            push(`agent_id = ?`, scope.agentId);
        }
        if (filter.collections && filter.collections.length > 0) {
            const { placeholders, params: collectionParams } = expandarray(filter.collections);
            conditions.push(`collection IN (${placeholders})`);
            params.push(...collectionParams);
        }
        if (filter.wmem !== undefined) {
            push(`wmem = ?`, filter.wmem ? 1 : 0); // SQLite uses 0/1 for boolean
        }
        // smem=true selects rows whose short-term expiry is set and in the
        // future; smem=false selects the complement (unset or already past).
        if (filter.smem === true) {
            push(`(smem_expires_at IS NOT NULL AND smem_expires_at > ?)`, Date.now());
        }
        else if (filter.smem === false) {
            push(`(smem_expires_at IS NULL OR smem_expires_at <= ?)`, Date.now());
        }
        if (filter.after !== undefined) {
            push(`timestamp > ?`, filter.after);
        }
        if (filter.before !== undefined) {
            push(`timestamp < ?`, filter.before);
        }
        return {
            sql: conditions.length > 0 ? conditions.join(" AND ") : "",
            params,
        };
    },
    /** One-way codec: SQL is never parsed back into a filter. */
    decode() {
        throw new Error("SQL_WHERE.decode not implemented");
    },
};
|
|
63
|
+
/**
|
|
64
|
+
* Encode order options to SQL ORDER BY clause.
|
|
65
|
+
*/
|
|
66
|
+
export const ORDER = {
    /**
     * Render an ORDER BY body (column plus direction). The column is always
     * `defaultColumn`; `order` only overrides the direction, falling back to
     * `defaultDirection` when unset.
     */
    encode({ order, defaultColumn = "timestamp", defaultDirection = "desc" }) {
        const direction = (order ?? defaultDirection).toUpperCase();
        return [defaultColumn, direction].join(" ");
    },
    /** One-way codec: ORDER BY text is never parsed back. */
    decode() {
        throw new Error("ORDER.decode not implemented");
    },
};
|
|
75
|
+
/**
|
|
76
|
+
* Encode MemoryRecordUpdate to SQL SET clause with ? placeholders.
|
|
77
|
+
*/
|
|
78
|
+
export const SQL_UPDATE = {
    /**
     * Build a SET-clause body plus bound params from a MemoryRecordUpdate,
     * using ? placeholders. Only fields present on the patch are written;
     * `updated_at` is always refreshed to the current epoch millis, so the
     * clause is never empty.
     */
    encode({ patch }) {
        const sets = [];
        const params = [];
        const set = (column, value) => {
            sets.push(`${column} = ?`);
            params.push(value);
        };
        if (patch.content !== undefined) {
            set(`content`, JSON.stringify(patch.content));
        }
        if (patch.wmem !== undefined) {
            set(`wmem`, patch.wmem ? 1 : 0); // SQLite uses 0/1 for boolean
        }
        if (patch.smem !== undefined) {
            set(`smem_expires_at`, patch.smem.expiresAt);
        }
        if (patch.metadata !== undefined) {
            set(`metadata`, patch.metadata ? JSON.stringify(patch.metadata) : null);
        }
        // always update updated_at
        set(`updated_at`, Date.now());
        return {
            sql: sets.join(", "),
            params,
        };
    },
    /** One-way codec: SET clauses are never parsed back into a patch. */
    decode() {
        throw new Error("SQL_UPDATE.decode not implemented");
    },
};
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* LibSQL Memory store implementation.
|
|
3
|
+
*/
|
|
4
|
+
import type { Client } from "@libsql/client";
|
|
5
|
+
import type { MemoryStore, MemoryRecord, NewMemory, MemoryRecordUpdate, MemoryListOptions } from "kernl";
|
|
6
|
+
/**
|
|
7
|
+
* LibSQL memory store implementation.
|
|
8
|
+
*
|
|
9
|
+
* All async methods call `ensureInit()` before database operations
|
|
10
|
+
* to ensure schema/tables exist.
|
|
11
|
+
*/
|
|
12
|
+
export declare class LibSQLMemoryStore implements MemoryStore {
|
|
13
|
+
private db;
|
|
14
|
+
private ensureInit;
|
|
15
|
+
constructor(db: Client, ensureInit: () => Promise<void>);
|
|
16
|
+
/**
|
|
17
|
+
* Get a memory by ID.
|
|
18
|
+
*/
|
|
19
|
+
get(id: string): Promise<MemoryRecord | null>;
|
|
20
|
+
/**
|
|
21
|
+
* List memories matching optional filter criteria.
|
|
22
|
+
*/
|
|
23
|
+
list(options?: MemoryListOptions): Promise<MemoryRecord[]>;
|
|
24
|
+
/**
|
|
25
|
+
* Create a new memory record.
|
|
26
|
+
*/
|
|
27
|
+
create(memory: NewMemory): Promise<MemoryRecord>;
|
|
28
|
+
/**
|
|
29
|
+
* Update a memory record.
|
|
30
|
+
*/
|
|
31
|
+
update(id: string, patch: MemoryRecordUpdate): Promise<MemoryRecord>;
|
|
32
|
+
/**
|
|
33
|
+
* Delete a memory by ID.
|
|
34
|
+
*/
|
|
35
|
+
delete(id: string): Promise<void>;
|
|
36
|
+
/**
|
|
37
|
+
* Delete multiple memories by ID.
|
|
38
|
+
*/
|
|
39
|
+
mdelete(ids: string[]): Promise<void>;
|
|
40
|
+
}
|
|
41
|
+
//# sourceMappingURL=store.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"store.d.ts","sourceRoot":"","sources":["../../src/memory/store.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,MAAM,EAAW,MAAM,gBAAgB,CAAC;AAEtD,OAAO,KAAK,EACV,WAAW,EACX,YAAY,EACZ,SAAS,EACT,kBAAkB,EAClB,iBAAiB,EAClB,MAAM,OAAO,CAAC;AAcf;;;;;GAKG;AACH,qBAAa,iBAAkB,YAAW,WAAW;IACnD,OAAO,CAAC,EAAE,CAAS;IACnB,OAAO,CAAC,UAAU,CAAsB;gBAE5B,EAAE,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC;IAKvD;;OAEG;IACG,GAAG,CAAC,EAAE,EAAE,MAAM,GAAG,OAAO,CAAC,YAAY,GAAG,IAAI,CAAC;IAenD;;OAEG;IACG,IAAI,CAAC,OAAO,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,YAAY,EAAE,CAAC;IAgChE;;OAEG;IACG,MAAM,CAAC,MAAM,EAAE,SAAS,GAAG,OAAO,CAAC,YAAY,CAAC;IA8BtD;;OAEG;IACG,MAAM,CAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,kBAAkB,GAAG,OAAO,CAAC,YAAY,CAAC;IAkB1E;;OAEG;IACG,MAAM,CAAC,EAAE,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAQvC;;OAEG;IACG,OAAO,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;CAU5C"}
|
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* LibSQL Memory store implementation.
|
|
3
|
+
*/
|
|
4
|
+
import { KERNL_SCHEMA_NAME, MemoryRecordCodec, NewMemoryCodec, } from "@kernl-sdk/storage";
|
|
5
|
+
import { SQL_WHERE, ORDER, SQL_UPDATE } from "./sql.js";
|
|
6
|
+
import { RowToMemoryRecord } from "./row.js";
|
|
7
|
+
import { expandarray } from "../sql.js";
|
|
8
|
+
// SQLite doesn't support schemas, so we use table name prefix
|
|
9
|
+
const MEMORIES_TABLE = `${KERNL_SCHEMA_NAME}_memories`;
|
|
10
|
+
/**
|
|
11
|
+
* LibSQL memory store implementation.
|
|
12
|
+
*
|
|
13
|
+
* All async methods call `ensureInit()` before database operations
|
|
14
|
+
* to ensure schema/tables exist.
|
|
15
|
+
*/
|
|
16
|
+
export class LibSQLMemoryStore {
    db;
    ensureInit;
    /**
     * @param db LibSQL client used for all queries.
     * @param ensureInit Callback awaited before every operation so that
     *   schema/tables exist (lazy initialization).
     */
    constructor(db, ensureInit) {
        this.db = db;
        this.ensureInit = ensureInit;
    }
    /**
     * Get a memory by ID.
     * @returns the decoded record, or null when no row matches.
     */
    async get(id) {
        await this.ensureInit();
        const result = await this.db.execute({
            sql: `SELECT * FROM ${MEMORIES_TABLE} WHERE id = ?`,
            args: [id],
        });
        if (result.rows.length === 0) {
            return null;
        }
        return MemoryRecordCodec.decode(RowToMemoryRecord.encode(result.rows[0]));
    }
    /**
     * List memories matching optional filter criteria, with ordering and
     * limit/offset pagination.
     */
    async list(options) {
        await this.ensureInit();
        const { sql: where, params } = SQL_WHERE.encode({
            filter: options?.filter,
        });
        let query = `SELECT * FROM ${MEMORIES_TABLE}`;
        // build where + order by
        if (where)
            query += ` WHERE ${where}`;
        query += ` ORDER BY ${ORDER.encode({ order: options?.order })}`;
        const args = [...params];
        // add limit + offset
        // SQLite requires LIMIT when using OFFSET, so use -1 for unlimited.
        // BUG FIX: these checks use `!= null` instead of truthiness so that an
        // explicit `limit: 0` produces `LIMIT 0` (zero rows) rather than being
        // silently dropped and returning every row.
        if (options?.limit != null || options?.offset != null) {
            query += ` LIMIT ?`;
            args.push(options?.limit ?? -1);
        }
        if (options?.offset != null) {
            query += ` OFFSET ?`;
            args.push(options.offset);
        }
        const result = await this.db.execute({ sql: query, args });
        return result.rows.map((row) => MemoryRecordCodec.decode(RowToMemoryRecord.encode(row)));
    }
    /**
     * Create a new memory record.
     * @returns the record as persisted (read back via RETURNING).
     */
    async create(memory) {
        await this.ensureInit();
        const row = NewMemoryCodec.encode(memory);
        const result = await this.db.execute({
            sql: `INSERT INTO ${MEMORIES_TABLE}
        (id, namespace, entity_id, agent_id, kind, collection, content, wmem, smem_expires_at, timestamp, created_at, updated_at, metadata)
       VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
       RETURNING *`,
            args: [
                row.id,
                row.namespace,
                row.entity_id,
                row.agent_id,
                row.kind,
                row.collection,
                JSON.stringify(row.content),
                row.wmem ? 1 : 0, // SQLite uses 0/1 for boolean
                row.smem_expires_at,
                row.timestamp,
                row.created_at,
                row.updated_at,
                row.metadata ? JSON.stringify(row.metadata) : null,
            ],
        });
        return MemoryRecordCodec.decode(RowToMemoryRecord.encode(result.rows[0]));
    }
    /**
     * Update a memory record.
     * @throws Error when no row with the given id exists.
     */
    async update(id, patch) {
        await this.ensureInit();
        const { sql: updates, params } = SQL_UPDATE.encode({ patch });
        const args = [...params, id];
        const result = await this.db.execute({
            sql: `UPDATE ${MEMORIES_TABLE} SET ${updates} WHERE id = ? RETURNING *`,
            args,
        });
        if (result.rows.length === 0) {
            throw new Error(`memory not found: ${id}`);
        }
        return MemoryRecordCodec.decode(RowToMemoryRecord.encode(result.rows[0]));
    }
    /**
     * Delete a memory by ID. Deleting a missing id is a no-op.
     */
    async delete(id) {
        await this.ensureInit();
        await this.db.execute({
            sql: `DELETE FROM ${MEMORIES_TABLE} WHERE id = ?`,
            args: [id],
        });
    }
    /**
     * Delete multiple memories by ID in a single statement.
     * An empty id list is a no-op (and skips initialization).
     */
    async mdelete(ids) {
        if (ids.length === 0)
            return;
        await this.ensureInit();
        const { placeholders, params } = expandarray(ids);
        await this.db.execute({
            sql: `DELETE FROM ${MEMORIES_TABLE} WHERE id IN (${placeholders})`,
            args: params,
        });
    }
}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Database migrations for LibSQL.
|
|
3
|
+
*/
|
|
4
|
+
import type { Client, Transaction } from "@libsql/client";
|
|
5
|
+
import type { Table, Column } from "@kernl-sdk/storage";
|
|
6
|
+
/**
|
|
7
|
+
* Migration context with helpers.
|
|
8
|
+
*/
|
|
9
|
+
export interface MigrationContext {
|
|
10
|
+
client: Client | Transaction;
|
|
11
|
+
createTable: (table: Table<string, Record<string, Column>>) => Promise<void>;
|
|
12
|
+
}
|
|
13
|
+
export interface Migration {
|
|
14
|
+
id: string;
|
|
15
|
+
up: (ctx: MigrationContext) => Promise<void>;
|
|
16
|
+
}
|
|
17
|
+
/**
|
|
18
|
+
* List of all migrations in order.
|
|
19
|
+
*/
|
|
20
|
+
export declare const MIGRATIONS: Migration[];
|
|
21
|
+
/**
|
|
22
|
+
* Minimum schema version required by this version of @kernl/libsql.
|
|
23
|
+
*/
|
|
24
|
+
export declare const REQUIRED_SCHEMA_VERSION = "0001_initial";
|
|
25
|
+
/**
|
|
26
|
+
* Create a table from its definition.
|
|
27
|
+
*
|
|
28
|
+
* This function handles the conversion from the generic table definition
|
|
29
|
+
* to SQLite-compatible DDL.
|
|
30
|
+
*/
|
|
31
|
+
export declare function createTable(client: Client | Transaction, table: Table<string, Record<string, Column>>): Promise<void>;
|
|
32
|
+
//# sourceMappingURL=migrations.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"migrations.d.ts","sourceRoot":"","sources":["../src/migrations.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC1D,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAUxD;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B,MAAM,EAAE,MAAM,GAAG,WAAW,CAAC;IAC7B,WAAW,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;CAC9E;AAED,MAAM,WAAW,SAAS;IACxB,EAAE,EAAE,MAAM,CAAC;IACX,EAAE,EAAE,CAAC,GAAG,EAAE,gBAAgB,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;CAC9C;AAED;;GAEG;AACH,eAAO,MAAM,UAAU,EAAE,SAAS,EAcjC,CAAC;AAEF;;GAEG;AACH,eAAO,MAAM,uBAAuB,iBAAiB,CAAC;AAwCtD;;;;;GAKG;AACH,wBAAsB,WAAW,CAC/B,MAAM,EAAE,MAAM,GAAG,WAAW,EAC5B,KAAK,EAAE,KAAK,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,GAC3C,OAAO,CAAC,IAAI,CAAC,CA0Gf"}
|
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Database migrations for LibSQL.
|
|
3
|
+
*/
|
|
4
|
+
import { KERNL_SCHEMA_NAME, TABLE_THREADS, TABLE_THREAD_EVENTS, TABLE_MEMORIES, } from "@kernl-sdk/storage";
|
|
5
|
+
import { SQL_IDENTIFIER_REGEX } from "./sql.js";
|
|
6
|
+
/**
|
|
7
|
+
* List of all migrations in order.
|
|
8
|
+
*/
|
|
9
|
+
export const MIGRATIONS = [
    {
        // Thread storage: the threads table plus its event log.
        id: "001_threads",
        async up(ctx) {
            await ctx.createTable(TABLE_THREADS);
            await ctx.createTable(TABLE_THREAD_EVENTS);
        },
    },
    {
        // Long-term memory records.
        id: "002_memories",
        async up(ctx) {
            await ctx.createTable(TABLE_MEMORIES);
        },
    },
];
/**
 * Minimum schema version required by this version of @kernl/libsql.
 */
// NOTE(review): "0001_initial" does not match any id in MIGRATIONS above
// ("001_threads", "002_memories") — confirm how this value is compared by
// the migration runner.
export const REQUIRED_SCHEMA_VERSION = "0001_initial";
|
|
28
|
+
/**
|
|
29
|
+
* Map PostgreSQL types to SQLite types.
|
|
30
|
+
*/
|
|
31
|
+
/**
 * Translate a column type from the shared (PostgreSQL-flavoured) table
 * definitions into its SQLite storage class. Unrecognized types pass
 * through uppercased.
 */
function mapColumnType(type) {
    const aliases = {
        jsonb: "TEXT", // JSON is stored as serialized TEXT
        bigint: "INTEGER", // SQLite INTEGER holds 64-bit values
        boolean: "INTEGER", // booleans are stored as 0/1
    };
    return aliases[type.toLowerCase()] ?? type.toUpperCase();
}
|
|
43
|
+
/**
|
|
44
|
+
* Encode a value for use in a DEFAULT clause.
|
|
45
|
+
*/
|
|
46
|
+
/**
 * Encode a literal value for use in a DDL DEFAULT clause.
 *
 * Defaults are inlined into the CREATE TABLE statement (DDL cannot use
 * bound parameters), so string-ish literals must have embedded single
 * quotes doubled per SQL quoting rules.
 *
 * @param col Column definition; only `col.type` is consulted.
 * @param value Default value; null/undefined encode as NULL.
 * @returns A SQL literal string safe to splice into DDL.
 */
function encodeDefault(col, value) {
    if (value === null || value === undefined)
        return "NULL";
    // Double embedded single quotes to produce a valid SQL string literal.
    const quote = (s) => `'${s.replace(/'/g, "''")}'`;
    const type = col.type.toLowerCase();
    switch (type) {
        case "text":
            return quote(String(value));
        case "integer":
        case "bigint":
            return String(value);
        case "boolean":
            return value ? "1" : "0";
        case "jsonb":
            // BUG FIX: previously the JSON was inlined unescaped, so any value
            // containing a single quote (e.g. {"a":"it's"}) produced malformed
            // DDL. Escape it exactly like a text literal.
            return quote(JSON.stringify(value));
        default:
            return String(value);
    }
}
|
|
64
|
+
/**
|
|
65
|
+
* Create a table from its definition.
|
|
66
|
+
*
|
|
67
|
+
* This function handles the conversion from the generic table definition
|
|
68
|
+
* to SQLite-compatible DDL.
|
|
69
|
+
*/
|
|
70
|
+
/**
 * Create a table (and its secondary indexes) from its generic definition,
 * converting to SQLite-compatible DDL. Uses CREATE TABLE/INDEX IF NOT
 * EXISTS, so re-running on an existing schema is a no-op.
 *
 * @param client LibSQL Client or Transaction to execute DDL on.
 * @param table Table definition (name, columns, optional constraints).
 * @throws Error when the table name fails the identifier check, or when a
 *   composite ("fkey") table-level constraint is requested.
 */
export async function createTable(client, table) {
    // Guard: the name is interpolated directly into DDL, so reject anything
    // that is not a plain SQL identifier.
    if (!SQL_IDENTIFIER_REGEX.test(table.name)) {
        throw new Error(`Invalid table name: ${table.name}`);
    }
    const columns = [];
    const tableConstraints = [];
    const indexes = [];
    // build column definitions
    for (const name in table.columns) {
        const col = table.columns[name];
        const sqlType = mapColumnType(col.type);
        const constraints = [];
        if (col._pk)
            constraints.push("PRIMARY KEY");
        if (col._unique)
            constraints.push("UNIQUE");
        // PRIMARY KEY already implies NOT NULL, so skip it for pk columns.
        if (!col._nullable && !col._pk)
            constraints.push("NOT NULL");
        if (col._default !== undefined) {
            constraints.push(`DEFAULT ${encodeDefault(col, col._default)}`);
        }
        // foreign key reference (column-level, single-column only)
        if (col._fk) {
            // Referenced table gets the same schema-name prefix (SQLite has
            // no schemas, so the prefix emulates them).
            let ref = `REFERENCES "${KERNL_SCHEMA_NAME}_${col._fk.table}" ("${col._fk.column}")`;
            if (col._onDelete) {
                ref += ` ON DELETE ${col._onDelete}`;
            }
            constraints.push(ref);
        }
        columns.push(`"${name}" ${sqlType} ${constraints.join(" ")}`.trim());
    }
    // table-level constraints
    if (table.constraints) {
        for (const constraint of table.constraints) {
            switch (constraint.kind) {
                case "unique": {
                    const name = constraint.name ??
                        `${table.name}_${constraint.columns.join("_")}_unique`;
                    const cols = constraint.columns.map((c) => `"${c}"`).join(", ");
                    tableConstraints.push(`CONSTRAINT "${name}" UNIQUE (${cols})`);
                    break;
                }
                case "pkey": {
                    const name = constraint.name ?? `${table.name}_pkey`;
                    const cols = constraint.columns.map((c) => `"${c}"`).join(", ");
                    tableConstraints.push(`CONSTRAINT "${name}" PRIMARY KEY (${cols})`);
                    break;
                }
                case "fkey": {
                    throw new Error("Composite foreign keys not yet supported. Use column-level .references() for single-column FKs.");
                }
                case "check": {
                    // NOTE(review): two unnamed check constraints on one table
                    // would both default to "<table>_check" and collide — confirm
                    // callers always name multiple checks.
                    const name = constraint.name ?? `${table.name}_check`;
                    tableConstraints.push(`CONSTRAINT "${name}" CHECK (${constraint.expression})`);
                    break;
                }
                case "index": {
                    // collect indexes to create after table
                    indexes.push({
                        name: `idx_${table.name}_${constraint.columns.join("_")}`,
                        columns: constraint.columns,
                        unique: constraint.unique,
                    });
                    break;
                }
            }
        }
    }
    const allConstraints = [...columns, ...tableConstraints];
    // Use schema prefix in table name (SQLite doesn't have schemas)
    const fullTableName = `${KERNL_SCHEMA_NAME}_${table.name}`;
    const sql = `
    CREATE TABLE IF NOT EXISTS "${fullTableName}" (
      ${allConstraints.join(",\n  ")}
    )
  `.trim();
    await client.execute(sql);
    // create indexes
    for (const index of indexes) {
        const uniqueKeyword = index.unique ? "UNIQUE " : "";
        const cols = index.columns.map((c) => `"${c}"`).join(", ");
        const indexSql = `
      CREATE ${uniqueKeyword}INDEX IF NOT EXISTS "${index.name}"
        ON "${fullTableName}" (${cols})
    `.trim();
        await client.execute(indexSql);
    }
}
|