zen-code 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +3 -0
- package/dist/_commonjsHelpers-DQNKXVTB.mjs +33 -0
- package/dist/app-CV_FJyjI.mjs +100971 -0
- package/dist/assets/worker-I4QjJYba.js +1 -0
- package/dist/checkpoint-1sAx_j1E-WswZeQFl.mjs +318 -0
- package/dist/checkpoint-DxiUsHMy-BWKnWBL7.mjs +393 -0
- package/dist/chunk-YWE62C55-DID9N9eS.mjs +193 -0
- package/dist/devtools-m8Lnvjy_.mjs +2658 -0
- package/dist/index-BUGHAUbY.mjs +117 -0
- package/dist/index-Butw72lT.mjs +462 -0
- package/dist/index-C5j-48Ft.mjs +75 -0
- package/dist/index-DS5HVciX.mjs +216 -0
- package/dist/kysely-Bchvsze0.mjs +4316 -0
- package/dist/migrator-BatO36Tk.mjs +592 -0
- package/dist/pg-adapter-BFtir1GE-BU2H39HC.mjs +62 -0
- package/dist/postgres-dialect-DaHvQ_AZ.mjs +211 -0
- package/dist/queue-BSCnCent-Dtq-TPST.mjs +81 -0
- package/dist/shallow-BKrykESK.mjs +330 -0
- package/dist/sql-CJsUpKEQ.mjs +8106 -0
- package/dist/sqlite-adapter-5PeLHaxe-CUccULPN.mjs +64 -0
- package/dist/zen-code.mjs +1 -0
- package/package.json +49 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Minified in-worker SQLite driver. Runs inside a Worker and speaks a small
// postMessage protocol whose opcodes are the string constants v="0" (init),
// b="1" (query), d="2" (close), h="3" (stream query), E="4" (stream done):
//   - "0": open the database via the injected factory, keep the connection in `t`.
//   - "1": run a prepared query; reply with { rows } or { numAffectedRows, insertId, rows: [] }.
//   - "2": close the connection.
//   - "3": iterate rows one-by-one, posting each row back, then post op "4".
// `u` widens sqlite change counts / rowids to BigInt (undefined passes through).
// `f(db, useQuery)` adapts a bun:sqlite-style Database (query/prepare, all/run,
// iterate, close) into the { db, query, close, iterator } connection shape;
// `w` is the async row generator and throws if `iterate` is missing
// (requires Bun >= 1.1.31 per its own error message).
// Errors from any op are caught and shipped back in the reply tuple's third slot.
// NOTE(review): `g` is declared as a plain `{}` yet `y = (e, r) => new g(e, r)`
// invokes it with `new` — as written this would throw "g is not a constructor"
// on init. This looks like a bundler-mangled Database import; confirm against
// the unminified source before relying on this worker.
(function(){"use strict";function u(e){return e==null?void 0:BigInt(e)}function f(e,r){const n=r?"query":"prepare",t=(s,a)=>e[n](s,a);return{db:e,query:(s,a,o)=>{const c=t(a,o);if(c.columnNames.length>0)return{rows:c.all()};{const{changes:i,lastInsertRowid:l}=c.run();return{numAffectedRows:u(i),insertId:u(l),rows:[]}}},close:()=>e.close(),iterator:(s,a,o)=>w(t(a),o)}}async function*w(e,r){if(!("iterate"in e))throw new Error("Streaming not supported, please upgrade to Bun@1.1.31 or later");for(const n of e.iterate(...r||[]))yield n}var g={},v="0",b="1",d="2",h="3",E="4";function k(e,r,n){let t;return async([s,a,o,c])=>{const i=[s,null,null];try{switch(s){case v:{t=await e(a);break}case b:{i[1]=await t.query(a,o,c);break}case d:{await t.close();break}case h:{if(!t.iterator)throw new Error("streamQuery() is not supported.");const l=t.iterator(a,o,c);for await(const q of l)r([s,q,null]);i[0]=E;break}default:}}catch(l){i[2]=l}r(i)}}function p(e,r){const n=k(e,t=>globalThis.postMessage(t),r);globalThis.onmessage=({data:t})=>n(t)}var y=(e,r)=>new g(e,r);function m(e,r){p(async({cache:n,fileName:t,opt:s})=>{const a=await e(t,s);return f(a,n)},r)}m(y)})();
|
|
@@ -0,0 +1,318 @@
|
|
|
1
|
+
import { K as T } from "./kysely-Bchvsze0.mjs";
|
|
2
|
+
import { F as r } from "./sql-CJsUpKEQ.mjs";
|
|
3
|
+
import { B as f, T as l, c as y, m } from "./app-CV_FJyjI.mjs";
|
|
4
|
+
/**
 * Metadata keys that `SqliteSaver.list()` accepts in its `filter` option;
 * entries whose key is not in this list (or whose value is undefined) are
 * dropped before being turned into `json_extract` WHERE clauses.
 */
const E = ["source", "step", "parents"];
const g = E;
|
|
6
|
+
/**
 * SQLite-backed checkpoint saver (exported below as `SqliteSaver`).
 *
 * Persists checkpoints and their pending writes in two tables,
 * `checkpoints` and `writes`, through a Kysely instance (`T` is the
 * Kysely class from the bundled kysely chunk; `r` is the bundled `sql`
 * template tag). Extends `f`, a base saver class from the app bundle,
 * which supplies `this.serde` (typed dumps/loads serializer) and
 * `this.getNextVersion` — NOTE(review): their exact semantics are not
 * visible in this chunk; confirm against the app bundle.
 * `l` is a channel-name constant used for pending sends
 * (NOTE(review): presumably the "tasks" channel — confirm), and
 * `m` is applied as `m(...versions)` to pick a version from existing
 * channel versions (NOTE(review): presumably a max-like reducer — confirm).
 */
class w extends f {
  // Kysely database handle wrapping whichever dialect was supplied.
  db;
  // Becomes true after setup() has run the PRAGMA + CREATE TABLE statements.
  isSetup;
  /**
   * @param e Kysely dialect to connect through.
   * @param n Options forwarded unchanged to the base saver constructor.
   */
  constructor(e, n) {
    super(n), this.db = new T({
      dialect: e
    }), this.isSetup = !1;
  }
  /**
   * Factory: builds a saver for the given database path/URL `e`,
   * choosing the dialect by runtime — BunWorkerDialect under Bun,
   * otherwise node-sqlite3-wasm + NodeWasmDialect — then runs setup().
   * Dialect chunks are loaded lazily via dynamic import so only the
   * active runtime's dependency is pulled in.
   * @returns the fully set-up saver instance.
   */
  static async fromConnStringAsync(e) {
    let n;
    if (globalThis.Bun) {
      console.log("LG | Using BunWorkerDialect " + e);
      const { BunWorkerDialect: o } = await import("./index-BUGHAUbY.mjs");
      n = new w(new o({ url: e }));
    } else {
      console.log("LG | Using NodeWasmDialect");
      const { default: o } = await import("node-sqlite3-wasm"), { NodeWasmDialect: s } = await import("./index-C5j-48Ft.mjs");
      console.log(e);
      const a = new s({
        database: new o.Database(e)
      });
      n = new w(a);
    }
    return await n.setup(), n;
  }
  /**
   * Idempotent schema setup: switches the database to WAL journal mode
   * and creates the `checkpoints` and `writes` tables if absent.
   * Guarded by `isSetup` so repeated calls are no-ops.
   * Note: not concurrency-guarded — two overlapping first calls could
   * both run the DDL, but CREATE TABLE IF NOT EXISTS makes that benign.
   */
  async setup() {
    this.isSetup || (await r`PRAGMA journal_mode = WAL`.execute(this.db), await r`
      CREATE TABLE IF NOT EXISTS checkpoints (
        thread_id TEXT NOT NULL,
        checkpoint_ns TEXT NOT NULL DEFAULT '',
        checkpoint_id TEXT NOT NULL,
        parent_checkpoint_id TEXT,
        type TEXT,
        checkpoint BLOB,
        metadata BLOB,
        PRIMARY KEY (thread_id, checkpoint_ns, checkpoint_id)
      )`.execute(this.db), await r`
      CREATE TABLE IF NOT EXISTS writes (
        thread_id TEXT NOT NULL,
        checkpoint_ns TEXT NOT NULL DEFAULT '',
        checkpoint_id TEXT NOT NULL,
        task_id TEXT NOT NULL,
        idx INTEGER NOT NULL,
        channel TEXT NOT NULL,
        type TEXT,
        value BLOB,
        PRIMARY KEY (thread_id, checkpoint_ns, checkpoint_id, task_id, idx)
      )`.execute(this.db), this.isSetup = !0);
  }
  /**
   * Fetches a single checkpoint tuple for config `e`.
   * When `configurable.checkpoint_id` is given, fetches exactly that row;
   * otherwise the latest (highest checkpoint_id, string-descending) for the
   * thread/namespace. Pending writes for the row and the parent's pending
   * sends (channel = `l`) are aggregated in-SQL as JSON arrays via
   * correlated subqueries, so a single query returns everything.
   * @returns undefined when no matching checkpoint exists; otherwise
   *   { checkpoint, config, metadata, parentConfig, pendingWrites }.
   * @throws Error when the resolved config lacks thread_id/checkpoint_id.
   */
  async getTuple(e) {
    await this.setup();
    const { thread_id: n, checkpoint_ns: o = "", checkpoint_id: s } = e.configurable ?? {};
    let a = this.db.selectFrom("checkpoints").select([
      "thread_id",
      "checkpoint_ns",
      "checkpoint_id",
      "parent_checkpoint_id",
      "type",
      "checkpoint",
      "metadata",
      // Writes attached to this checkpoint, serialized to a JSON array.
      // BLOB values are CAST to TEXT so json_object can embed them.
      r`(
        SELECT json_group_array(
          json_object(
            'task_id', pw.task_id,
            'channel', pw.channel,
            'type', pw.type,
            'value', CAST(pw.value AS TEXT)
          )
        )
        FROM writes as pw
        WHERE pw.thread_id = checkpoints.thread_id
          AND pw.checkpoint_ns = checkpoints.checkpoint_ns
          AND pw.checkpoint_id = checkpoints.checkpoint_id
      )`.as("pending_writes"),
      // Pending sends recorded against the PARENT checkpoint on channel `l`.
      r`(
        SELECT json_group_array(
          json_object(
            'type', ps.type,
            'value', CAST(ps.value AS TEXT)
          )
        )
        FROM writes as ps
        WHERE ps.thread_id = checkpoints.thread_id
          AND ps.checkpoint_ns = checkpoints.checkpoint_ns
          AND ps.checkpoint_id = checkpoints.parent_checkpoint_id
          AND ps.channel = ${l}
        ORDER BY ps.idx
      )`.as("pending_sends")
    ]).where("thread_id", "=", n).where("checkpoint_ns", "=", o);
    s ? a = a.where("checkpoint_id", "=", s) : a = a.orderBy("checkpoint_id", "desc").limit(1);
    const t = await a.executeTakeFirst();
    if (!t) return;
    // When no checkpoint_id was requested, rebuild the config to point at
    // the row actually found so callers get a fully-resolved config back.
    let d = e;
    if (s || (d = {
      configurable: {
        thread_id: t.thread_id,
        checkpoint_ns: o,
        checkpoint_id: t.checkpoint_id
      }
    }), d.configurable?.thread_id === void 0 || d.configurable?.checkpoint_id === void 0)
      throw new Error("Missing thread_id or checkpoint_id");
    // Deserialize each pending write into [task_id, channel, value] triples;
    // missing type defaults to "json", missing value to "".
    const c = await Promise.all(
      JSON.parse(t.pending_writes).map(async (h) => [
        h.task_id,
        h.channel,
        await this.serde.loadsTyped(h.type ?? "json", h.value ?? "")
      ])
    ), p = await this.serde.loadsTyped(
      t.type ?? "json",
      new TextDecoder().decode(t.checkpoint)
    );
    // Checkpoints serialized before version 4 keep pending sends on the
    // parent; migrate them into the loaded checkpoint in-place.
    return p.v < 4 && t.parent_checkpoint_id != null && await this.migratePendingSends(p, t.thread_id, t.parent_checkpoint_id), {
      checkpoint: p,
      config: d,
      metadata: await this.serde.loadsTyped(
        t.type ?? "json",
        new TextDecoder().decode(t.metadata)
      ),
      parentConfig: t.parent_checkpoint_id ? {
        configurable: {
          thread_id: t.thread_id,
          checkpoint_ns: o,
          checkpoint_id: t.parent_checkpoint_id
        }
      } : void 0,
      pendingWrites: c
    };
  }
  /**
   * Async generator over checkpoint tuples matching config `e`, newest
   * first (checkpoint_id descending). Options `n`:
   *   - limit: max rows (coerced via parseInt),
   *   - before: only checkpoints strictly older than before.configurable.checkpoint_id,
   *   - filter: metadata equality filters, restricted to the keys in `E`
   *     ("source", "step", "parents") and applied via json_extract on the
   *     metadata BLOB cast to TEXT.
   * Uses the same correlated pending_writes / pending_sends subqueries as
   * getTuple(), and the same pre-v4 pending-sends migration per row.
   */
  async *list(e, n) {
    const { limit: o, before: s, filter: a } = n ?? {};
    await this.setup();
    const t = e.configurable?.thread_id, d = e.configurable?.checkpoint_ns;
    let c = this.db.selectFrom("checkpoints").select([
      "thread_id",
      "checkpoint_ns",
      "checkpoint_id",
      "parent_checkpoint_id",
      "type",
      "checkpoint",
      "metadata",
      // Same JSON aggregation as getTuple(): this checkpoint's writes…
      r`(
        SELECT json_group_array(
          json_object(
            'task_id', pw.task_id,
            'channel', pw.channel,
            'type', pw.type,
            'value', CAST(pw.value AS TEXT)
          )
        )
        FROM writes as pw
        WHERE pw.thread_id = checkpoints.thread_id
          AND pw.checkpoint_ns = checkpoints.checkpoint_ns
          AND pw.checkpoint_id = checkpoints.checkpoint_id
      )`.as("pending_writes"),
      // …and the parent checkpoint's pending sends on channel `l`.
      r`(
        SELECT json_group_array(
          json_object(
            'type', ps.type,
            'value', CAST(ps.value AS TEXT)
          )
        )
        FROM writes as ps
        WHERE ps.thread_id = checkpoints.thread_id
          AND ps.checkpoint_ns = checkpoints.checkpoint_ns
          AND ps.checkpoint_id = checkpoints.parent_checkpoint_id
          AND ps.channel = ${l}
        ORDER BY ps.idx
      )`.as("pending_sends")
    ]);
    // Optional narrowing: thread, namespace, and "before" cursor.
    // Note checkpoint_id comparison is lexicographic (TEXT column).
    t && (c = c.where("thread_id", "=", t)), d != null && (c = c.where("checkpoint_ns", "=", d)), s?.configurable?.checkpoint_id !== void 0 && (c = c.where("checkpoint_id", "<", s.configurable.checkpoint_id));
    // Keep only sanctioned, defined filter keys (see `E`).
    const p = Object.fromEntries(
      Object.entries(a ?? {}).filter(
        ([i, _]) => _ !== void 0 && E.includes(i)
      )
    );
    for (const [i, _] of Object.entries(p))
      c = c.where(
        // r.lit inlines the JSON path / comparison value as SQL literals.
        r`json_extract(CAST(metadata AS TEXT), ${r.lit("$." + i)})`,
        "=",
        r.lit(JSON.stringify(_))
      );
    c = c.orderBy("checkpoint_id", "desc"), o && (c = c.limit(parseInt(o, 10)));
    const h = await c.execute();
    for (const i of h) {
      const _ = await Promise.all(
        JSON.parse(i.pending_writes).map(async (k) => [
          k.task_id,
          k.channel,
          await this.serde.loadsTyped(k.type ?? "json", k.value ?? "")
        ])
      ), u = await this.serde.loadsTyped(
        i.type ?? "json",
        new TextDecoder().decode(i.checkpoint)
      );
      u.v < 4 && i.parent_checkpoint_id != null && await this.migratePendingSends(u, i.thread_id, i.parent_checkpoint_id), yield {
        config: {
          configurable: {
            thread_id: i.thread_id,
            checkpoint_ns: i.checkpoint_ns,
            checkpoint_id: i.checkpoint_id
          }
        },
        checkpoint: u,
        metadata: await this.serde.loadsTyped(
          i.type ?? "json",
          new TextDecoder().decode(i.metadata)
        ),
        parentConfig: i.parent_checkpoint_id ? {
          configurable: {
            thread_id: i.thread_id,
            checkpoint_ns: i.checkpoint_ns,
            checkpoint_id: i.parent_checkpoint_id
          }
        } : void 0,
        pendingWrites: _
      };
    }
  }
  /**
   * Stores checkpoint `n` with metadata `o` under config `e` (upsert on
   * the (thread_id, checkpoint_ns, checkpoint_id) primary key). The
   * incoming config's checkpoint_id becomes the new row's
   * parent_checkpoint_id. The checkpoint is first passed through `y`
   * (NOTE(review): presumably a copy/normalize step from the app bundle
   * — confirm). Both payloads are serialized with serde.dumpsTyped and
   * stored as BLOBs; their serialization types must agree.
   * @returns a config pointing at the newly stored checkpoint.
   * @throws Error on missing configurable/thread_id or mismatched
   *   serialization types. (NOTE(review): "Failed to serialized…" typo is
   *   in the original runtime string and is preserved here.)
   */
  async put(e, n, o) {
    if (await this.setup(), !e.configurable)
      throw new Error("Empty configuration supplied.");
    const s = e.configurable?.thread_id, a = e.configurable?.checkpoint_ns ?? "", t = e.configurable?.checkpoint_id;
    if (!s)
      throw new Error('Missing "thread_id" field in passed "config.configurable".');
    const d = y(n), [[c, p], [h, i]] = await Promise.all([
      this.serde.dumpsTyped(d),
      this.serde.dumpsTyped(o)
    ]);
    if (c !== h)
      throw new Error("Failed to serialized checkpoint and metadata to the same type.");
    return await this.db.insertInto("checkpoints").values({
      thread_id: s,
      checkpoint_ns: a,
      checkpoint_id: n.id,
      parent_checkpoint_id: t ?? null,
      type: c,
      checkpoint: new Uint8Array(Buffer.from(p)),
      metadata: new Uint8Array(Buffer.from(i))
    }).onConflict(
      (_) => _.columns(["thread_id", "checkpoint_ns", "checkpoint_id"]).doUpdateSet({
        parent_checkpoint_id: t ?? null,
        type: c,
        checkpoint: new Uint8Array(Buffer.from(p)),
        metadata: new Uint8Array(Buffer.from(i))
      })
    ).execute(), {
      configurable: {
        thread_id: s,
        checkpoint_ns: a,
        checkpoint_id: n.id
      }
    };
  }
  /**
   * Stores the [channel, value] pairs `n` as rows of `writes` for task
   * `o` under the checkpoint identified by config `e`. Each pair's value
   * is serialized with serde.dumpsTyped and its array index becomes
   * `idx`. All rows are upserted (on the 5-column primary key) inside a
   * single transaction; nothing is written when `n` is empty.
   * @throws Error when configurable, thread_id, or checkpoint_id is missing.
   */
  async putWrites(e, n, o) {
    if (await this.setup(), !e.configurable)
      throw new Error("Empty configuration supplied.");
    if (!e.configurable?.thread_id)
      throw new Error("Missing thread_id field in config.configurable.");
    if (!e.configurable?.checkpoint_id)
      throw new Error("Missing checkpoint_id field in config.configurable.");
    const s = await Promise.all(
      n.map(async (a, t) => {
        const [d, c] = await this.serde.dumpsTyped(a[1]);
        return {
          thread_id: e.configurable.thread_id,
          checkpoint_ns: e.configurable.checkpoint_ns ?? "",
          checkpoint_id: e.configurable.checkpoint_id,
          task_id: o,
          idx: t,
          channel: a[0],
          type: d,
          value: new Uint8Array(Buffer.from(c))
        };
      })
    );
    s.length > 0 && await this.db.transaction().execute(async (a) => {
      for (const t of s)
        await a.insertInto("writes").values(t).onConflict(
          (d) => d.columns(["thread_id", "checkpoint_ns", "checkpoint_id", "task_id", "idx"]).doUpdateSet({
            channel: t.channel,
            type: t.type,
            value: t.value
          })
        ).execute();
    });
  }
  /**
   * Removes every checkpoint and write belonging to thread `e`, in one
   * transaction so the two tables stay consistent.
   */
  async deleteThread(e) {
    await this.db.transaction().execute(async (n) => {
      await n.deleteFrom("checkpoints").where("thread_id", "=", e).execute(), await n.deleteFrom("writes").where("thread_id", "=", e).execute();
    });
  }
  /**
   * Back-compat shim for pre-v4 checkpoints (callers gate on `e.v < 4`):
   * loads the pending sends written against parent checkpoint `o` of
   * thread `n` (channel `l`) and mutates checkpoint `e` IN PLACE —
   * deserialized sends go into channel_values[l], and channel_versions[l]
   * is set from the existing versions via `m(...)` (or getNextVersion
   * when there are none).
   * Silently returns when no matching writes row exists.
   */
  async migratePendingSends(e, n, o) {
    const s = await this.db.selectFrom("writes as ps").select([
      "ps.checkpoint_id",
      r`json_group_array(
        json_object(
          'type', ps.type,
          'value', CAST(ps.value AS TEXT)
        )
      )`.as("pending_sends")
    ]).where("ps.thread_id", "=", n).where("ps.checkpoint_id", "=", o).where("ps.channel", "=", l).orderBy("ps.idx").executeTakeFirst();
    if (!s) return;
    const a = e;
    a.channel_values ??= {}, a.channel_values[l] = await Promise.all(
      JSON.parse(s.pending_sends).map(
        ({ type: t, value: d }) => this.serde.loadsTyped(t, d)
      )
    ), a.channel_versions[l] = Object.keys(e.channel_versions).length > 0 ? m(...Object.values(e.channel_versions)) : this.getNextVersion(void 0);
  }
}
export {
  w as SqliteSaver
};
|