zen-code 4.7.3 → 4.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/app.js +271 -0
- package/dist/chunk-0fhpz98a.js +2 -0
- package/dist/chunk-15smn69q.js +31 -0
- package/dist/chunk-3ep2tag7.js +2 -0
- package/dist/chunk-3jek9sxm.js +2 -0
- package/dist/chunk-3n2b4eb4.js +2 -0
- package/dist/chunk-3znrwnd8.js +1 -0
- package/dist/chunk-4b8yaknt.js +1 -0
- package/dist/chunk-5fqt5yg1.js +81 -0
- package/dist/chunk-5nzs0q25.js +1 -0
- package/dist/chunk-5wn1nfqy.js +6 -0
- package/dist/chunk-6ty22kkt.js +4 -0
- package/dist/chunk-70a0e941.js +1 -0
- package/dist/chunk-7dp8kq2f.js +2 -0
- package/dist/chunk-8nhayyat.js +1 -0
- package/dist/chunk-9k57afdm.js +2 -0
- package/dist/chunk-9kc9cea3.js +7 -0
- package/dist/chunk-bgpcx3bm.js +2 -0
- package/dist/chunk-bjywwcgn.js +26 -0
- package/dist/chunk-c9q11tk2.js +88 -0
- package/dist/chunk-cga0m5sy.js +1 -0
- package/dist/chunk-cze71w02.js +2 -0
- package/dist/chunk-czz2fs2w.js +7 -0
- package/dist/chunk-dh3jz71n.js +1 -0
- package/dist/chunk-e0sqjq2h.js +1 -0
- package/dist/chunk-efhkdw2z.js +1 -0
- package/dist/chunk-ew7249h9.js +1 -0
- package/dist/chunk-h5nnz4dy.js +1 -0
- package/dist/chunk-hww94vjn.js +1 -0
- package/dist/chunk-j1n8jp1w.js +253 -0
- package/dist/chunk-j204fejq.js +1 -0
- package/dist/chunk-jkbtx9va.js +2 -0
- package/dist/chunk-k67epfhc.js +1 -0
- package/dist/chunk-kbs4px1b.js +6 -0
- package/dist/chunk-keqk70wm.js +2 -0
- package/dist/chunk-kxk71nn0.js +1 -0
- package/dist/chunk-mg8zpgaz.js +1 -0
- package/dist/chunk-n1d8xx5k.js +2 -0
- package/dist/chunk-ndtwq2zx.js +2 -0
- package/dist/chunk-pdkbrds7.js +1 -0
- package/dist/chunk-q8arjxg1.js +23 -0
- package/dist/chunk-rdc9pxf6.js +1 -0
- package/dist/chunk-rrkzfahh.js +91 -0
- package/dist/chunk-s3fyfrka.js +2 -0
- package/dist/chunk-sft6ep0c.js +310 -0
- package/dist/chunk-vc384abg.js +3 -0
- package/dist/chunk-vjj00e3e.js +6 -0
- package/dist/chunk-vkng13p1.js +424 -0
- package/dist/chunk-wbn1hbxf.js +1 -0
- package/dist/chunk-wwn7gw8g.js +274 -0
- package/dist/chunk-yaa4eesd.js +2 -0
- package/dist/chunk-yvhq458c.js +25 -0
- package/dist/cli.js +2 -0
- package/dist/nonInteractive.js +4 -0
- package/dist/zen-keyboard.js +3 -0
- package/package.json +88 -87
- package/LICENSE +0 -201
- package/dist/FileSystemPluginStore-ChortK7z.mjs +0 -64
- package/dist/FileSystemSkillStore-Dl5briBv.mjs +0 -108
- package/dist/MultiLineTextInput-CL8nm9nv.mjs +0 -24492
- package/dist/_commonjsHelpers-DQNKXVTB.mjs +0 -33
- package/dist/app-CK3nD8Gj.mjs +0 -22428
- package/dist/checkpoint-C5AFBYE--CczNxXBB.mjs +0 -355
- package/dist/checkpoint-DxiUsHMy-B5iaccT2.mjs +0 -394
- package/dist/cli.mjs +0 -57
- package/dist/devtools-CzaVuYnh.mjs +0 -2667
- package/dist/graphBuilder-DJoMXf6J.mjs +0 -28909
- package/dist/index-BPG5Np-8.mjs +0 -144
- package/dist/index-Dxfhvdx8.mjs +0 -23
- package/dist/index-F6_SKYUd.mjs +0 -463
- package/dist/load-iYJqYrjn.mjs +0 -119
- package/dist/memories-CaHDnBK1.mjs +0 -115
- package/dist/metadata-XVTVXthy.mjs +0 -6
- package/dist/nonInteractive.mjs +0 -55
- package/dist/pg-adapter-BFtir1GE-BbUXBpaX.mjs +0 -60
- package/dist/queue-DySatFkr-CG2GdO4P.mjs +0 -106
- package/dist/remote-threads-CrG03ZS7-C9duTCnB.mjs +0 -157
- package/dist/shallow-G4eKoZ7M.mjs +0 -331
- package/dist/shallow-checkpoint-BEhTdp7z-DfHxnfoU.mjs +0 -383
- package/dist/sqlite-adapter-oBA95xba-BrBWorSV.mjs +0 -82
- package/dist/subTasks-DNjIN4eU.mjs +0 -18
- package/dist/zen-code.mjs +0 -3
- package/dist/zen-keyboard.mjs +0 -13
|
@@ -1,355 +0,0 @@
|
|
|
1
|
-
import { Kysely as m, sql as d } from "kysely";
|
|
2
|
-
import { B as b, T as l, c as E, m as g } from "./app-CK3nD8Gj.mjs";
|
|
3
|
-
import "./graphBuilder-DJoMXf6J.mjs";
|
|
4
|
-
/**
 * Retry policy for transient SQLite failures.
 * An error is considered retryable when its message looks like a busy/locked
 * database (or a malformed disk image, which WAL recovery may clear).
 */
const u = {
  maxRetries: 3,
  baseDelayMs: 100,
  isRetryableError: (w) => {
    const e = w?.message?.toLowerCase() || "";
    return (
      e.includes("sqlite_busy") ||
      e.includes("database is locked") ||
      e.includes("database disk image is malformed") ||
      e === "sqlite_busy" ||
      e === "database is locked"
    );
  }
};

/**
 * Run `w`, retrying with exponential backoff when the failure is a transient
 * SQLite lock (per `u.isRetryableError`). `e` is an optional label included
 * in the retry warning. Non-retryable errors propagate immediately; once all
 * attempts are exhausted the last error is rethrown.
 */
async function T(w, e) {
  let lastError = null;
  for (let attempt = 0; attempt < u.maxRetries; attempt++) {
    try {
      return await w();
    } catch (err) {
      lastError = err;
      if (!u.isRetryableError(err)) {
        throw err;
      }
      // Back off before every attempt except the final one.
      if (attempt < u.maxRetries - 1) {
        const delayMs = u.baseDelayMs * Math.pow(2, attempt);
        console.warn(
          `SQLite lock detected${e ? ` (${e})` : ""}, retrying in ${delayMs}ms (attempt ${attempt + 1}/${u.maxRetries})`
        );
        await new Promise((resolve) => setTimeout(resolve, delayMs));
      }
    }
  }
  throw lastError;
}
|
|
29
|
-
// Metadata keys that list() is allowed to filter on via json_extract.
const A = ["source", "step", "parents"];
// Alias used at the filter site; kept separate so the whitelist reads clearly.
const N = A;
|
|
31
|
-
/**
 * Kysely-backed SQLite checkpoint saver (exported below as `SqliteSaver`).
 *
 * Persists checkpoints and their pending writes in two tables
 * (`checkpoints`, `writes`), applying WAL-friendly PRAGMAs on first use and
 * retrying transient lock errors through T(). Extends the base saver `b`
 * imported from the app chunk; `l` is the channel name used for pending
 * sends, `E` copies a checkpoint, `g` picks the max channel version.
 */
class f extends b {
  // Kysely database handle.
  db;
  // True once PRAGMAs have run and the schema exists.
  isSetup;

  constructor(e, i) {
    super(i);
    this.db = new m({ dialect: e });
    this.isSetup = false;
  }

  /**
   * Build a saver from a connection string, choosing a dialect per runtime:
   * Bun uses kysely-bun-worker, Node uses node-sqlite3-wasm + kysely-wasm.
   * The returned instance is already set up.
   */
  static async fromConnStringAsync(e) {
    let saver;
    if (globalThis.Bun) {
      console.log(`LG | Using BunWorkerDialect ${e}`);
      const { BunWorkerDialect: n } = await import("kysely-bun-worker");
      saver = new f(new n({ url: e }));
    } else {
      console.log("LG | Using NodeWasmDialect");
      const { default: n } = await import("node-sqlite3-wasm");
      const { NodeWasmDialect: a } = await import("kysely-wasm");
      console.log(e);
      const dialect = new a({
        database: new n.Database(e)
      });
      saver = new f(dialect);
    }
    await saver.setup();
    return saver;
  }

  /** Idempotently apply connection PRAGMAs and create both tables. */
  async setup() {
    if (this.isSetup) {
      return;
    }
    await d`PRAGMA busy_timeout = 5000`.execute(this.db);
    await d`PRAGMA journal_mode = WAL`.execute(this.db);
    await d`PRAGMA synchronous = NORMAL`.execute(this.db);
    await d`PRAGMA wal_autocheckpoint = 1000`.execute(this.db);
    await d`
      CREATE TABLE IF NOT EXISTS checkpoints (
        thread_id TEXT NOT NULL,
        checkpoint_ns TEXT NOT NULL DEFAULT '',
        checkpoint_id TEXT NOT NULL,
        parent_checkpoint_id TEXT,
        type TEXT,
        checkpoint BLOB,
        metadata BLOB,
        PRIMARY KEY (thread_id, checkpoint_ns, checkpoint_id)
      )`.execute(this.db);
    await d`
      CREATE TABLE IF NOT EXISTS writes (
        thread_id TEXT NOT NULL,
        checkpoint_ns TEXT NOT NULL DEFAULT '',
        checkpoint_id TEXT NOT NULL,
        task_id TEXT NOT NULL,
        idx INTEGER NOT NULL,
        channel TEXT NOT NULL,
        type TEXT,
        value BLOB,
        PRIMARY KEY (thread_id, checkpoint_ns, checkpoint_id, task_id, idx)
      )`.execute(this.db);
    this.isSetup = true;
  }

  /**
   * Fetch one checkpoint tuple for the config's thread/namespace.
   * Without an explicit checkpoint_id the newest checkpoint is returned.
   * Returns undefined when no row matches.
   */
  async getTuple(e) {
    await this.setup();
    const {
      thread_id: threadId,
      checkpoint_ns: ns = "",
      checkpoint_id: checkpointId
    } = e.configurable ?? {};
    let query = this.db
      .selectFrom("checkpoints")
      .select([
        "thread_id",
        "checkpoint_ns",
        "checkpoint_id",
        "parent_checkpoint_id",
        "type",
        "checkpoint",
        "metadata",
        // All writes attached to this checkpoint, packed into one JSON array.
        d`(
      SELECT json_group_array(
        json_object(
          'task_id', pw.task_id,
          'channel', pw.channel,
          'type', pw.type,
          'value', CAST(pw.value AS TEXT)
        )
      )
      FROM writes as pw
      WHERE pw.thread_id = checkpoints.thread_id
        AND pw.checkpoint_ns = checkpoints.checkpoint_ns
        AND pw.checkpoint_id = checkpoints.checkpoint_id
    )`.as("pending_writes"),
        // Sends recorded against the parent checkpoint (used by the v<4 migration).
        d`(
      SELECT json_group_array(
        json_object(
          'type', ps.type,
          'value', CAST(ps.value AS TEXT)
        )
      )
      FROM writes as ps
      WHERE ps.thread_id = checkpoints.thread_id
        AND ps.checkpoint_ns = checkpoints.checkpoint_ns
        AND ps.checkpoint_id = checkpoints.parent_checkpoint_id
        AND ps.channel = ${l}
      ORDER BY ps.idx
    )`.as("pending_sends")
      ])
      .where("thread_id", "=", threadId)
      .where("checkpoint_ns", "=", ns);
    query = checkpointId
      ? query.where("checkpoint_id", "=", checkpointId)
      : query.orderBy("checkpoint_id", "desc").limit(1);
    const row = await query.executeTakeFirst();
    if (!row) {
      return;
    }
    // When the caller did not pin a checkpoint_id, report the one we found.
    let config = e;
    if (!checkpointId) {
      config = {
        configurable: {
          thread_id: row.thread_id,
          checkpoint_ns: ns,
          checkpoint_id: row.checkpoint_id
        }
      };
    }
    if (config.configurable?.thread_id === void 0 || config.configurable?.checkpoint_id === void 0) {
      throw new Error("Missing thread_id or checkpoint_id");
    }
    const pendingWrites = await Promise.all(
      JSON.parse(row.pending_writes).map(async (w) => [
        w.task_id,
        w.channel,
        await this.serde.loadsTyped(w.type ?? "json", w.value ?? "")
      ])
    );
    const checkpoint = await this.serde.loadsTyped(
      row.type ?? "json",
      new TextDecoder().decode(row.checkpoint)
    );
    // Older (v<4) checkpoints store sends on the parent; fold them in.
    if (checkpoint.v < 4 && row.parent_checkpoint_id != null) {
      await this.migratePendingSends(checkpoint, row.thread_id, row.parent_checkpoint_id);
    }
    return {
      checkpoint,
      config,
      metadata: await this.serde.loadsTyped(
        row.type ?? "json",
        new TextDecoder().decode(row.metadata)
      ),
      parentConfig: row.parent_checkpoint_id
        ? {
            configurable: {
              thread_id: row.thread_id,
              checkpoint_ns: ns,
              checkpoint_id: row.parent_checkpoint_id
            }
          }
        : void 0,
      pendingWrites
    };
  }

  /**
   * Yield checkpoint tuples newest-first, optionally constrained by thread,
   * namespace, a `before` cursor, a metadata filter (whitelisted keys in N),
   * and a result limit.
   */
  async *list(e, i) {
    const { limit: n, before: a, filter: c } = i ?? {};
    await this.setup();
    const threadId = e.configurable?.thread_id;
    const ns = e.configurable?.checkpoint_ns;
    let query = this.db
      .selectFrom("checkpoints")
      .select([
        "thread_id",
        "checkpoint_ns",
        "checkpoint_id",
        "parent_checkpoint_id",
        "type",
        "checkpoint",
        "metadata",
        // Writes attached to each checkpoint, packed into one JSON array.
        d`(
      SELECT json_group_array(
        json_object(
          'task_id', pw.task_id,
          'channel', pw.channel,
          'type', pw.type,
          'value', CAST(pw.value AS TEXT)
        )
      )
      FROM writes as pw
      WHERE pw.thread_id = checkpoints.thread_id
        AND pw.checkpoint_ns = checkpoints.checkpoint_ns
        AND pw.checkpoint_id = checkpoints.checkpoint_id
    )`.as("pending_writes"),
        // Sends recorded against the parent checkpoint (v<4 migration input).
        d`(
      SELECT json_group_array(
        json_object(
          'type', ps.type,
          'value', CAST(ps.value AS TEXT)
        )
      )
      FROM writes as ps
      WHERE ps.thread_id = checkpoints.thread_id
        AND ps.checkpoint_ns = checkpoints.checkpoint_ns
        AND ps.checkpoint_id = checkpoints.parent_checkpoint_id
        AND ps.channel = ${l}
      ORDER BY ps.idx
    )`.as("pending_sends")
      ]);
    if (threadId) {
      query = query.where("thread_id", "=", threadId);
    }
    if (ns != null) {
      query = query.where("checkpoint_ns", "=", ns);
    }
    if (a?.configurable?.checkpoint_id !== void 0) {
      query = query.where("checkpoint_id", "<", a.configurable.checkpoint_id);
    }
    // Keep only defined filter entries whose key is whitelisted in N.
    const sanitizedFilter = Object.fromEntries(
      Object.entries(c ?? {}).filter(
        ([key, value]) => value !== void 0 && N.includes(key)
      )
    );
    for (const [key, value] of Object.entries(sanitizedFilter)) {
      query = query.where(
        d`json_extract(CAST(metadata AS TEXT), ${d.lit("$." + key)})`,
        "=",
        d.lit(JSON.stringify(value))
      );
    }
    query = query.orderBy("checkpoint_id", "desc");
    if (n) {
      query = query.limit(parseInt(n, 10));
    }
    const rows = await query.execute();
    for (const row of rows) {
      const pendingWrites = await Promise.all(
        JSON.parse(row.pending_writes).map(async (w) => [
          w.task_id,
          w.channel,
          await this.serde.loadsTyped(w.type ?? "json", w.value ?? "")
        ])
      );
      const checkpoint = await this.serde.loadsTyped(
        row.type ?? "json",
        new TextDecoder().decode(row.checkpoint)
      );
      if (checkpoint.v < 4 && row.parent_checkpoint_id != null) {
        await this.migratePendingSends(checkpoint, row.thread_id, row.parent_checkpoint_id);
      }
      yield {
        config: {
          configurable: {
            thread_id: row.thread_id,
            checkpoint_ns: row.checkpoint_ns,
            checkpoint_id: row.checkpoint_id
          }
        },
        checkpoint,
        metadata: await this.serde.loadsTyped(
          row.type ?? "json",
          new TextDecoder().decode(row.metadata)
        ),
        parentConfig: row.parent_checkpoint_id
          ? {
              configurable: {
                thread_id: row.thread_id,
                checkpoint_ns: row.checkpoint_ns,
                checkpoint_id: row.parent_checkpoint_id
              }
            }
          : void 0,
        pendingWrites
      };
    }
  }

  /**
   * Upsert checkpoint `i` with metadata `n` under `e.configurable`, retrying
   * transient locks. Returns the config addressing the stored checkpoint.
   */
  async put(e, i, n) {
    await this.setup();
    if (!e.configurable) {
      throw new Error("Empty configuration supplied.");
    }
    const threadId = e.configurable?.thread_id;
    const ns = e.configurable?.checkpoint_ns ?? "";
    const parentId = e.configurable?.checkpoint_id;
    if (!threadId) {
      throw new Error('Missing "thread_id" field in passed "config.configurable".');
    }
    const copied = E(i);
    const [[checkpointType, checkpointBytes], [metadataType, metadataBytes]] = await Promise.all([
      this.serde.dumpsTyped(copied),
      this.serde.dumpsTyped(n)
    ]);
    if (checkpointType !== metadataType) {
      // NOTE: message typo ("serialized") preserved from the original.
      throw new Error("Failed to serialized checkpoint and metadata to the same type.");
    }
    await T(
      async () => {
        await this.db
          .insertInto("checkpoints")
          .values({
            thread_id: threadId,
            checkpoint_ns: ns,
            checkpoint_id: i.id,
            parent_checkpoint_id: parentId ?? null,
            type: checkpointType,
            checkpoint: new Uint8Array(Buffer.from(checkpointBytes)),
            metadata: new Uint8Array(Buffer.from(metadataBytes))
          })
          .onConflict((oc) =>
            oc.columns(["thread_id", "checkpoint_ns", "checkpoint_id"]).doUpdateSet({
              parent_checkpoint_id: parentId ?? null,
              type: checkpointType,
              checkpoint: new Uint8Array(Buffer.from(checkpointBytes)),
              metadata: new Uint8Array(Buffer.from(metadataBytes))
            })
          )
          .execute();
      },
      `put(${threadId}/${i.id})`
    );
    return {
      configurable: {
        thread_id: threadId,
        checkpoint_ns: ns,
        checkpoint_id: i.id
      }
    };
  }

  /**
   * Store task writes `i` for task `n` against the checkpoint addressed by
   * `e.configurable`, replacing any previous writes for that task atomically.
   */
  async putWrites(e, i, n) {
    await this.setup();
    if (!e.configurable) {
      throw new Error("Empty configuration supplied.");
    }
    if (!e.configurable?.thread_id) {
      throw new Error("Missing thread_id field in config.configurable.");
    }
    if (!e.configurable?.checkpoint_id) {
      throw new Error("Missing checkpoint_id field in config.configurable.");
    }
    const rows = await Promise.all(
      i.map(async (write, index) => {
        const [type, serialized] = await this.serde.dumpsTyped(write[1]);
        return {
          thread_id: e.configurable.thread_id,
          checkpoint_ns: e.configurable.checkpoint_ns ?? "",
          checkpoint_id: e.configurable.checkpoint_id,
          task_id: n,
          idx: index,
          channel: write[0],
          type,
          value: new Uint8Array(Buffer.from(serialized))
        };
      })
    );
    if (rows.length === 0) {
      return;
    }
    const threadId = e.configurable.thread_id;
    const checkpointId = e.configurable.checkpoint_id;
    await T(
      async () => {
        // Delete-then-insert inside one transaction so the task's writes are
        // replaced as a unit.
        await this.db.transaction().execute(async (trx) => {
          await trx
            .deleteFrom("writes")
            .where("thread_id", "=", threadId)
            .where("checkpoint_ns", "=", rows[0].checkpoint_ns)
            .where("checkpoint_id", "=", checkpointId)
            .where("task_id", "=", n)
            .execute();
          for (const row of rows) {
            await trx.insertInto("writes").values(row).execute();
          }
        });
      },
      `putWrites(${threadId}/${checkpointId}/${n})`
    );
  }

  /** Remove all checkpoints and writes belonging to thread `e`. */
  async deleteThread(e) {
    await T(
      async () => {
        await this.db.transaction().execute(async (trx) => {
          await trx.deleteFrom("checkpoints").where("thread_id", "=", e).execute();
          await trx.deleteFrom("writes").where("thread_id", "=", e).execute();
        });
      },
      `deleteThread(${e})`
    );
  }

  /**
   * Backfill pending sends for a pre-v4 checkpoint `e` from the parent
   * checkpoint `n` of thread `i`, mutating `e` in place: sends land in
   * channel_values[l] and the channel version is bumped to the max known
   * version (or a fresh first version when none exist).
   */
  async migratePendingSends(e, i, n) {
    const row = await this.db
      .selectFrom("writes as ps")
      .select([
        "ps.checkpoint_id",
        d`json_group_array(
        json_object(
          'type', ps.type,
          'value', CAST(ps.value AS TEXT)
        )
      )`.as("pending_sends")
      ])
      .where("ps.thread_id", "=", i)
      .where("ps.checkpoint_id", "=", n)
      .where("ps.channel", "=", l)
      .orderBy("ps.idx")
      .executeTakeFirst();
    if (!row) {
      return;
    }
    const checkpoint = e;
    checkpoint.channel_values ??= {};
    checkpoint.channel_values[l] = await Promise.all(
      JSON.parse(row.pending_sends).map(
        ({ type, value }) => this.serde.loadsTyped(type, value)
      )
    );
    checkpoint.channel_versions[l] =
      Object.keys(e.channel_versions).length > 0
        ? g(...Object.values(e.channel_versions))
        : this.getNextVersion(void 0);
  }
}
|
|
353
|
-
export {
|
|
354
|
-
f as SqliteSaver
|
|
355
|
-
};
|