zen-code 4.6.1 → 4.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,106 @@
1
+ import { a as h, C as d } from "./app-CcnStH9S.mjs";
2
+ import { createClient as m } from "redis";
3
/**
 * Redis-backed message queue exposed as `RedisStreamQueue`.
 *
 * Each logical queue is stored twice:
 *  - a Redis Stream (`stream:<id>`) consumed via XREAD for live delivery
 *    (cluster-friendly), and
 *  - a Redis List (`queue:<id>`) used by `getAll()` for bulk history reads.
 */
class o extends h {
  redis;
  streamKey;
  listKey;
  isConnected = !1;
  cancelSignal;
  // Last Stream entry ID consumed by `onDataReceive`.
  lastStreamId = "0";
  // Polling interval in milliseconds.
  pollInterval = 100;

  /**
   * @param queueId - logical queue identifier; used in both Redis keys.
   * @param shouldCompress - whether `getAll()` decodes entries via `decodeData`.
   * @param ttlSeconds - TTL applied to both keys after every push.
   */
  constructor(queueId, shouldCompress = !0, ttlSeconds = 300) {
    // NOTE(review): the second super() argument is the literal `true`, not
    // `shouldCompress` — confirm this asymmetry is intentional.
    super(queueId, !0, ttlSeconds);
    this.id = queueId;
    this.compressMessages = shouldCompress;
    this.ttl = ttlSeconds;
    this.streamKey = `stream:${this.id}`;
    this.listKey = `queue:${this.id}`;
    this.redis = m({
      url: process.env.REDIS_URL
    });
    this.cancelSignal = new AbortController();
    // NOTE(review): connect() is intentionally not awaited here (constructors
    // cannot await); commands issued before the connection is up rely on the
    // client's internal queueing.
    if (!this.redis.isOpen) {
      this.redis.connect();
    }
    this.isConnected = !0;
  }

  /**
   * Push one message to both the Stream and the List, refresh both TTLs,
   * and emit a local `dataChange` event with the base64 payload.
   */
  async push(message) {
    const encoded = await this.encodeData(message);
    const base64Payload = Buffer.from(encoded).toString("base64");
    const rawPayload = Buffer.from(encoded);
    await this.redis.xAdd(this.streamKey, "*", { data: base64Payload });
    await this.redis.expire(this.streamKey, this.ttl);
    await this.redis.rPush(this.listKey, rawPayload);
    await this.redis.expire(this.listKey, this.ttl);
    this.emit("dataChange", base64Payload);
  }

  /**
   * Async generator draining the Stream with blocking XREAD polls.
   * Terminates on abort or after yielding a terminal control event
   * (`__stream_end__` / `__stream_error__` / `__stream_cancel__`).
   */
  async *onDataReceive() {
    let finished = !1;
    if (this.cancelSignal.signal.aborted) {
      return;
    }
    const handleAbort = () => {
      finished = !0;
    };
    this.cancelSignal.signal.addEventListener("abort", handleAbort);
    try {
      while (!finished && !this.cancelSignal.signal.aborted) {
        const replies = await this.redis.xRead([{ key: this.streamKey, id: this.lastStreamId }], {
          BLOCK: this.pollInterval,
          COUNT: 10
        });
        if (replies && replies.length > 0) {
          for (const stream of replies) {
            for (const entry of stream.messages) {
              this.lastStreamId = entry.id;
              const base64Payload = entry.message.data;
              const rawPayload = Buffer.from(base64Payload, "base64");
              const decoded = await this.decodeData(rawPayload);
              const isTerminal =
                decoded.event === "__stream_end__" ||
                decoded.event === "__stream_error__" ||
                decoded.event === "__stream_cancel__";
              if (isTerminal) {
                // Short grace period before shutting the loop down.
                await new Promise((resolve) => setTimeout(resolve, 300));
                finished = !0;
                if (decoded.event === "__stream_cancel__") {
                  await this.cancel();
                }
              }
              yield decoded;
              if (finished) {
                break;
              }
            }
            if (finished) {
              break;
            }
          }
        }
        if (!finished && !this.cancelSignal.signal.aborted) {
          await new Promise((resolve) => setTimeout(resolve, this.pollInterval));
        }
      }
    } finally {
      this.cancelSignal.signal.removeEventListener("abort", handleAbort);
    }
  }

  /**
   * Read the whole history from the List. When compression is on, each row is
   * decoded via `decodeData`; otherwise rows are parsed as plain JSON.
   */
  async getAll() {
    const rows = await this.redis.lRange(this.listKey, 0, -1);
    if (!rows || rows.length === 0) {
      return [];
    }
    if (this.compressMessages) {
      return await Promise.all(
        rows.map((row) => {
          // lRange may hand back strings; re-wrap as latin1 bytes before decoding.
          const buffer = typeof row == "string" ? Buffer.from(row, "binary") : row;
          return this.decodeData(buffer);
        })
      );
    }
    return rows.map((row) => JSON.parse(row));
  }

  /**
   * Drop both backing keys. Fire-and-forget: deletions are not awaited.
   */
  clear() {
    if (this.isConnected) {
      this.redis.del(this.streamKey);
      this.redis.del(this.listKey);
    }
  }

  /**
   * Abort the local consumer loop and push a cancel marker (`new d()`) so
   * remote consumers also stop.
   */
  async cancel() {
    this.cancelSignal.abort("user cancel this run");
    await this.push(new d());
  }

  /**
   * Clone this queue's List and Stream contents into a new queue.
   * @param targetId - id of the destination queue.
   * @param ttlOverride - optional TTL for the copy; defaults to this.ttl.
   * @returns the newly created queue instance.
   */
  async copyToQueue(targetId, ttlOverride) {
    const target = new o(targetId, this.compressMessages, ttlOverride ?? this.ttl);
    await this.redis.copy(this.listKey, target.listKey);
    await this.redis.expire(target.listKey, ttlOverride ?? this.ttl);
    const entries = await this.redis.xRange(this.streamKey, "-", "+");
    if (entries && entries.length > 0) {
      for (const entry of entries) {
        const fields = {};
        for (const [field, value] of Object.entries(entry.message)) {
          fields[field] = String(value);
        }
        await this.redis.xAdd(target.streamKey, "*", fields);
      }
      await this.redis.expire(target.streamKey, ttlOverride ?? this.ttl);
    }
    return target;
  }
}
export {
  o as RedisStreamQueue
};
@@ -0,0 +1,157 @@
1
// String enum of error codes the remote threads API can report.
var h = /* @__PURE__ */ ((codes) => {
  codes.NETWORK_ERROR = "NETWORK_ERROR";
  codes.CONNECTION_TIMEOUT = "CONNECTION_TIMEOUT";
  codes.INTERNAL_ERROR = "INTERNAL_ERROR";
  codes.THREAD_NOT_FOUND = "THREAD_NOT_FOUND";
  codes.THREAD_BUSY = "THREAD_BUSY";
  codes.RUN_NOT_FOUND = "RUN_NOT_FOUND";
  codes.GRAPH_NOT_FOUND = "GRAPH_NOT_FOUND";
  codes.INVALID_REQUEST = "INVALID_REQUEST";
  return codes;
})(h || {});
2
/**
 * Error raised when a remote API call fails.
 * Carries the server-reported error `code` and the HTTP `statusCode`
 * alongside the human-readable message.
 */
class d extends Error {
  constructor(code, message, statusCode) {
    super(message);
    this.code = code;
    this.statusCode = statusCode;
    this.name = "RemoteApiError";
  }
}
7
/**
 * Shared HTTP helper for the remote API.
 * @param endpoint - absolute URL of the API route.
 * @param method - HTTP verb ("GET" | "POST" | "PUT" | "DELETE").
 * @param options - optional `{ query, body }`; query values are stringified,
 *   body is JSON-encoded.
 * @returns the parsed JSON envelope (expected to carry `success`/`data`).
 * @throws {d} with the server error code/message on a failed response,
 *   or with `NETWORK_ERROR` when the request itself fails.
 */
async function c(endpoint, method, options) {
  try {
    let requestUrl = endpoint;
    if (options?.query) {
      const search = new URLSearchParams();
      for (const [key, value] of Object.entries(options.query)) {
        search.append(key, String(value));
      }
      requestUrl += `?${search.toString()}`;
    }
    const response = await fetch(requestUrl, {
      method,
      headers: {
        "Content-Type": "application/json"
      },
      body: options?.body ? JSON.stringify(options.body) : void 0
    });
    const envelope = await response.json();
    if (!response.ok || !envelope.success) {
      throw new d(
        envelope.error?.code || h.INTERNAL_ERROR,
        envelope.error?.message || "Unknown error",
        response.status
      );
    }
    return envelope;
  } catch (cause) {
    if (cause instanceof d) {
      throw cause;
    }
    // Anything else (fetch failure, bad JSON) is surfaced as a network error.
    throw new d(
      h.NETWORK_ERROR,
      `Network error: ${cause instanceof Error ? cause.message : "Unknown error"}`
    );
  }
}
37
/** GET helper: delegates to the shared request wrapper with query params. */
async function o(endpoint, query) {
  return await c(endpoint, "GET", { query });
}
40
/** POST helper: delegates to the shared request wrapper with body and query. */
async function r(endpoint, body, query) {
  return await c(endpoint, "POST", { body, query });
}
43
/** PUT helper: delegates to the shared request wrapper with body and query. */
async function l(endpoint, body, query) {
  return await c(endpoint, "PUT", { body, query });
}
46
/** DELETE helper: delegates to the shared request wrapper with query params. */
async function O(endpoint, query) {
  return await c(endpoint, "DELETE", { query });
}
49
/**
 * HTTP client for the remote Kysely-backed threads service,
 * exported as `RemoteKyselyThreadsManager`. Thin wrappers over the
 * module-level GET/POST/PUT/DELETE helpers; most methods unwrap `.data`
 * from the response envelope.
 */
class p {
  /**
   * @param baseUrl - server root; a single trailing slash is stripped.
   * @param client - optional fetch-like implementation; defaults to global fetch.
   */
  constructor(baseUrl, client) {
    this.serverUrl = baseUrl.replace(/\/$/, "");
    this.httpClient = client || fetch;
  }

  /** Initialize the database on the server side. */
  async setup() {
    await r(`${this.serverUrl}/setup`);
  }

  /** Create a thread. */
  async create(payload) {
    const response = await r(`${this.serverUrl}/threads`, payload);
    return response.data;
  }

  /** Search threads; only explicitly provided filters are forwarded. */
  async search(options) {
    const query = {};
    if (options?.ids !== void 0 && options.ids.length > 0) query.ids = JSON.stringify(options.ids);
    if (options?.metadata !== void 0) query.metadata = JSON.stringify(options.metadata);
    if (options?.limit !== void 0) query.limit = options.limit;
    if (options?.offset !== void 0) query.offset = options.offset;
    if (options?.status !== void 0) query.status = options.status;
    if (options?.sortBy !== void 0) query.sortBy = options.sortBy;
    if (options?.sortOrder !== void 0) query.sortOrder = options.sortOrder;
    if (options?.values !== void 0) query.values = JSON.stringify(options.values);
    if (options?.select !== void 0) query.select = JSON.stringify(options.select);
    if (options?.withoutDetails !== void 0) query.withoutDetails = options.withoutDetails;
    const response = await o(`${this.serverUrl}/threads`, query);
    return response.data;
  }

  /** Fetch one thread by id. */
  async get(threadId) {
    const response = await o(`${this.serverUrl}/threads/${threadId}`);
    return response.data;
  }

  /** Replace a thread. */
  async set(threadId, payload) {
    await l(`${this.serverUrl}/threads/${threadId}`, payload);
  }

  /** Delete a thread. */
  async delete(threadId) {
    await O(`${this.serverUrl}/threads/${threadId}`);
  }

  /** Update a thread's state. */
  async updateState(threadId, payload) {
    const response = await r(
      `${this.serverUrl}/threads/${threadId}/state`,
      payload
    );
    return response.data;
  }

  /** Create a run on a thread for the given assistant. */
  async createRun(threadId, assistantId, payload) {
    const response = await r(`${this.serverUrl}/threads/${threadId}/runs`, payload || {}, {
      assistantId
    });
    return response.data;
  }

  /** List runs on a thread with optional paging/status filters. */
  async listRuns(threadId, options) {
    const query = {};
    if (options?.limit !== void 0) query.limit = options.limit;
    if (options?.offset !== void 0) query.offset = options.offset;
    if (options?.status !== void 0) query.status = options.status;
    const response = await o(`${this.serverUrl}/threads/${threadId}/runs`, query);
    return response.data;
  }

  /** Update a run by id. */
  async updateRun(runId, payload) {
    await l(`${this.serverUrl}/runs/${runId}`, payload);
  }

  // New methods for Threads API

  /** Count threads matching the given filters. */
  async count(options) {
    const query = {};
    if (options?.ids !== void 0 && options.ids.length > 0) query.ids = JSON.stringify(options.ids);
    if (options?.metadata !== void 0) query.metadata = JSON.stringify(options.metadata);
    if (options?.status !== void 0) query.status = options.status;
    if (options?.values !== void 0) query.values = JSON.stringify(options.values);
    const response = await o(`${this.serverUrl}/threads/count`, query);
    return response.data;
  }

  /** Patch a thread's metadata. */
  async patch(threadId, payload) {
    const response = await r(`${this.serverUrl}/threads/${threadId}`, payload);
    return response.data;
  }

  /** Fetch a thread's state (POST with subgraphs/checkpointId options). */
  async getState(threadId, options) {
    const payload = {};
    if (options?.subgraphs !== void 0) payload.subgraphs = options.subgraphs;
    if (options?.checkpointId !== void 0) payload.checkpointId = options.checkpointId;
    const response = await r(`${this.serverUrl}/threads/${threadId}/state`, payload);
    return response.data;
  }

  /** Fetch a thread's state history. */
  async getStateHistory(threadId, options) {
    const payload = {};
    if (options?.limit !== void 0) payload.limit = options.limit;
    if (options?.before !== void 0) payload.before = options.before;
    const response = await r(`${this.serverUrl}/threads/${threadId}/history`, payload);
    return response.data;
  }

  /** Copy a thread server-side. */
  async copy(threadId) {
    const response = await r(`${this.serverUrl}/threads/${threadId}/copy`);
    return response.data;
  }
}
export {
  p as RemoteKyselyThreadsManager
};
@@ -0,0 +1,331 @@
1
+ import { B as $, u as g } from "./app-CcnStH9S.mjs";
2
+ import "./graphBuilder-skVEY6RJ.mjs";
3
+ import { createClient as T } from "redis";
4
/**
 * Canonical JSON stringification with recursively sorted object keys,
 * used for order-insensitive deep-equality comparison of metadata.
 *
 * Note: array elements are each stringified individually and then the
 * resulting array of JSON strings is stringified again (double encoding).
 * Callers only ever compare two outputs of this function, so the encoding
 * stays internally consistent.
 */
function k(value) {
  if (value === null || typeof value != "object") {
    return JSON.stringify(value);
  }
  if (Array.isArray(value)) {
    return JSON.stringify(value.map((element) => k(element)));
  }
  // Pre-sort the top level; the replacer below handles every nested object
  // (including the root again, harmlessly).
  const sortedRoot = {};
  for (const key of Object.keys(value).sort()) {
    sortedRoot[key] = value[key];
  }
  const sortObjects = (_key, current) => {
    if (current !== null && typeof current == "object" && !Array.isArray(current)) {
      const sorted = {};
      for (const key of Object.keys(current).sort()) {
        sorted[key] = current[key];
      }
      return sorted;
    }
    return current;
  };
  return JSON.stringify(sortedRoot, sortObjects);
}
18
// RediSearch index definitions used by the checkpoint saver:
// one index over checkpoint JSON documents and one over pending-write
// JSON documents. TAG fields are exact-match filters; NUMERIC fields
// support range queries and sorting.
const S = [
  {
    index: "checkpoints",
    prefix: "checkpoint:",
    schema: {
      "$.thread_id": { type: "TAG", AS: "thread_id" },
      "$.checkpoint_ns": { type: "TAG", AS: "checkpoint_ns" },
      "$.checkpoint_id": { type: "TAG", AS: "checkpoint_id" },
      "$.parent_checkpoint_id": { type: "TAG", AS: "parent_checkpoint_id" },
      "$.checkpoint_ts": { type: "NUMERIC", AS: "checkpoint_ts" },
      "$.has_writes": { type: "TAG", AS: "has_writes" },
      "$.source": { type: "TAG", AS: "source" },
      "$.step": { type: "NUMERIC", AS: "step" }
    }
  },
  {
    index: "checkpoint_writes",
    prefix: "checkpoint_write:",
    schema: {
      "$.thread_id": { type: "TAG", AS: "thread_id" },
      "$.checkpoint_ns": { type: "TAG", AS: "checkpoint_ns" },
      "$.checkpoint_id": { type: "TAG", AS: "checkpoint_id" },
      "$.task_id": { type: "TAG", AS: "task_id" },
      "$.idx": { type: "NUMERIC", AS: "idx" },
      "$.channel": { type: "TAG", AS: "channel" },
      "$.type": { type: "TAG", AS: "type" }
    }
  }
];
89
/**
 * Shallow checkpoint saver backed by RedisJSON + RediSearch, exported as
 * `ShallowRedisSaver`. "Shallow" means only the most recent checkpoint per
 * (thread_id, checkpoint_ns) is retained, under the key
 * `checkpoint:<thread>:<ns>:shallow`. Pending writes live under
 * `checkpoint_write:*` keys ordered by a `write_keys_zset:*` sorted set.
 */
var C = class w extends $ {
  client;
  ttlConfig;

  /**
   * @param redisClient - a connected node-redis client.
   * @param ttlConfig - optional `{ defaultTTL (minutes), refreshOnRead }`.
   */
  constructor(redisClient, ttlConfig) {
    super();
    this.client = redisClient;
    this.ttlConfig = ttlConfig;
  }

  /** Async factory: connect a fresh client, build indexes, return the saver. */
  static async fromUrl(url, ttlConfig) {
    const redisClient = T({ url });
    await redisClient.connect();
    const saver = new w(redisClient, ttlConfig);
    await saver.ensureIndexes();
    return saver;
  }

  /** Convenience: return only the checkpoint from `getTuple`. */
  async get(config) {
    const tuple = await this.getTuple(config);
    return tuple?.checkpoint;
  }

  /**
   * Store a checkpoint under the shallow key, replacing (and cleaning up
   * after) whatever checkpoint was previously there.
   * @returns a config pointing at the stored checkpoint.
   */
  async put(config, checkpoint, metadata, newVersions) {
    await this.ensureIndexes();
    const threadId = config.configurable?.thread_id;
    const checkpointNs = config.configurable?.checkpoint_ns ?? "";
    const parentCheckpointId = config.configurable?.checkpoint_id;
    if (!threadId) throw new Error("thread_id is required");
    const checkpointId = checkpoint.id || g(0);
    const shallowKey = `checkpoint:${threadId}:${checkpointNs}:shallow`;
    // Best-effort read of the checkpoint currently under the shallow key so
    // its pending writes can be purged when it gets replaced.
    let previousDoc = null;
    let previousId = null;
    try {
      previousDoc = await this.client.json.get(shallowKey);
      if (previousDoc && typeof previousDoc == "object") {
        previousId = previousDoc.checkpoint_id;
      }
    } catch {
      // Missing or unreadable document: treated as "no previous checkpoint".
    }
    if (previousId && previousId !== checkpointId) {
      await this.cleanupOldCheckpoint(threadId, checkpointNs, previousId);
    }
    const storedCheckpoint = {
      ...checkpoint,
      channel_values: checkpoint.channel_values || {},
      channel_blobs: void 0
    };
    const doc = {
      thread_id: threadId,
      checkpoint_ns: checkpointNs,
      checkpoint_id: checkpointId,
      parent_checkpoint_id: parentCheckpointId || null,
      checkpoint: storedCheckpoint,
      metadata: this.sanitizeMetadata(metadata),
      checkpoint_ts: Date.now(),
      has_writes: "false"
    };
    this.addSearchableMetadataFields(doc, metadata);
    await this.client.json.set(shallowKey, "$", doc);
    if (this.ttlConfig?.defaultTTL) {
      await this.applyTTL(shallowKey);
    }
    return { configurable: {
      thread_id: threadId,
      checkpoint_ns: checkpointNs,
      checkpoint_id: checkpointId
    } };
  }

  /**
   * Load the checkpoint tuple for a config, or `undefined` when absent or
   * when a requested checkpoint_id does not match the stored one.
   */
  async getTuple(config) {
    const threadId = config.configurable?.thread_id;
    const checkpointNs = config.configurable?.checkpoint_ns ?? "";
    const requestedId = config.configurable?.checkpoint_id;
    if (!threadId) return;
    const shallowKey = `checkpoint:${threadId}:${checkpointNs}:shallow`;
    const doc = await this.client.json.get(shallowKey);
    if (!doc || (requestedId && doc.checkpoint_id !== requestedId)) return;
    if (this.ttlConfig?.refreshOnRead && this.ttlConfig?.defaultTTL) {
      await this.applyTTL(shallowKey);
    }
    const checkpoint = await this.serde.loadsTyped("json", JSON.stringify(doc.checkpoint));
    let pendingWrites;
    if (doc.has_writes === "true") {
      pendingWrites = await this.loadPendingWrites(doc.thread_id, doc.checkpoint_ns, doc.checkpoint_id);
    }
    return await this.createCheckpointTuple(doc, checkpoint, pendingWrites);
  }

  /**
   * Yield checkpoint tuples. With a thread_id in the config, yields at most
   * that thread's single shallow checkpoint; otherwise searches the
   * "checkpoints" index (falling back to a key scan when the index is absent).
   */
  async *list(config, options) {
    await this.ensureIndexes();
    if (config?.configurable?.thread_id) {
      const tuple = await this.getTuple(config);
      if (tuple) {
        if (options?.filter) {
          if (this.checkMetadataFilterMatch(tuple.metadata, options.filter)) yield tuple;
        } else {
          yield tuple;
        }
      }
      return;
    }
    // Build a RediSearch query from string/number filters only; everything
    // else is re-checked per-document via checkMetadataFilterMatch.
    const clauses = [];
    if (options?.filter) {
      for (const [field, expected] of Object.entries(options.filter)) {
        if (expected === void 0 || expected === null) continue;
        if (typeof expected == "string") {
          clauses.push(`(@${field}:{${expected}})`);
        } else if (typeof expected == "number") {
          clauses.push(`(@${field}:[${expected} ${expected}])`);
        }
      }
    }
    if (clauses.length === 0) clauses.push("*");
    const queryString = clauses.join(" ");
    const limit = options?.limit ?? 10;
    try {
      // Over-fetch (2x) to survive per-document filtering and deduping.
      const result = await this.client.ft.search("checkpoints", queryString, {
        LIMIT: {
          from: 0,
          size: limit * 2
        },
        SORTBY: {
          BY: "checkpoint_ts",
          DIRECTION: "DESC"
        }
      });
      const seen = /* @__PURE__ */ new Set();
      let yielded = 0;
      for (const hit of result.documents) {
        if (yielded >= limit) break;
        const doc = hit.value;
        const dedupeKey = `${doc.thread_id}:${doc.checkpoint_ns}`;
        if (seen.has(dedupeKey)) continue;
        seen.add(dedupeKey);
        if (options?.filter && !this.checkMetadataFilterMatch(doc.metadata, options.filter)) continue;
        const checkpoint = await this.serde.loadsTyped("json", JSON.stringify(doc.checkpoint));
        yield await this.createCheckpointTuple(doc, checkpoint);
        yielded++;
      }
    } catch (err) {
      if (!err.message?.includes("no such index")) throw err;
      // Index missing: fall back to scanning shallow keys directly.
      const keys = await this.client.keys("checkpoint:*:*:shallow");
      if (keys.length === 0) return;
      keys.sort().reverse();
      const seen = /* @__PURE__ */ new Set();
      let yielded = 0;
      const fallbackLimit = options?.limit ?? 10;
      for (const key of keys) {
        if (yielded >= fallbackLimit) break;
        const doc = await this.client.json.get(key);
        if (!doc) continue;
        const dedupeKey = `${doc.thread_id}:${doc.checkpoint_ns}`;
        if (seen.has(dedupeKey)) continue;
        seen.add(dedupeKey);
        if (options?.filter && !this.checkMetadataFilterMatch(doc.metadata, options.filter)) continue;
        const checkpoint = await this.serde.loadsTyped("json", JSON.stringify(doc.checkpoint));
        yield await this.createCheckpointTuple(doc, checkpoint);
        yielded++;
      }
    }
  }

  /**
   * Persist a task's pending writes: one JSON doc per write plus a sorted set
   * preserving write order, then flag the shallow checkpoint `has_writes`.
   */
  async putWrites(config, writes, taskId) {
    await this.ensureIndexes();
    const threadId = config.configurable?.thread_id;
    const checkpointNs = config.configurable?.checkpoint_ns ?? "";
    const checkpointId = config.configurable?.checkpoint_id;
    if (!threadId || !checkpointId) throw new Error("thread_id and checkpoint_id are required");
    // Replace any writes previously stored for this task.
    const stalePattern = `checkpoint_write:${threadId}:${checkpointNs}:${checkpointId}:${taskId}:*`;
    const staleKeys = await this.client.keys(stalePattern);
    if (staleKeys.length > 0) await this.client.del(staleKeys);
    const writeKeys = [];
    for (let position = 0; position < writes.length; position++) {
      const [channel, value] = writes[position];
      const writeKey = `checkpoint_write:${threadId}:${checkpointNs}:${checkpointId}:${taskId}:${position}`;
      writeKeys.push(writeKey);
      const writeDoc = {
        thread_id: threadId,
        checkpoint_ns: checkpointNs,
        checkpoint_id: checkpointId,
        task_id: taskId,
        idx: position,
        channel,
        type: typeof value == "object" ? "json" : "string",
        value
      };
      await this.client.json.set(writeKey, "$", writeDoc);
    }
    if (writeKeys.length > 0) {
      const orderKey = `write_keys_zset:${threadId}:${checkpointNs}:${checkpointId}`;
      const scoreByKey = {};
      writeKeys.forEach((key, position) => {
        scoreByKey[key] = position;
      });
      await this.client.zAdd(orderKey, Object.entries(scoreByKey).map(([value, score]) => ({
        score,
        value
      })));
      if (this.ttlConfig?.defaultTTL) {
        await this.applyTTL(...writeKeys, orderKey);
      }
    }
    const shallowKey = `checkpoint:${threadId}:${checkpointNs}:shallow`;
    if (await this.client.exists(shallowKey)) {
      const doc = await this.client.json.get(shallowKey);
      if (doc) {
        doc.has_writes = "true";
        await this.client.json.set(shallowKey, "$", doc);
      }
    }
  }

  /** Delete every key (checkpoints, writes, order zsets) for a thread. */
  async deleteThread(threadId) {
    const checkpointKeys = await this.client.keys(`checkpoint:${threadId}:*:shallow`);
    if (checkpointKeys.length > 0) await this.client.del(checkpointKeys);
    const writeKeys = await this.client.keys(`checkpoint_write:${threadId}:*`);
    if (writeKeys.length > 0) await this.client.del(writeKeys);
    const orderKeys = await this.client.keys(`write_keys_zset:${threadId}:*`);
    if (orderKeys.length > 0) await this.client.del(orderKeys);
  }

  /** Close the underlying Redis connection gracefully. */
  async end() {
    await this.client.quit();
  }

  /** Copy searchable metadata fields onto the stored checkpoint document. */
  addSearchableMetadataFields(doc, metadata) {
    if (!metadata) return;
    if ("source" in metadata) doc.source = metadata.source;
    if ("step" in metadata) doc.step = metadata.step;
    if ("writes" in metadata) {
      doc.writes = typeof metadata.writes == "object" ? JSON.stringify(metadata.writes) : metadata.writes;
    }
    if ("score" in metadata) doc.score = metadata.score;
  }

  /** Assemble a checkpoint tuple (config, checkpoint, metadata, parent, writes). */
  async createCheckpointTuple(doc, checkpoint, pendingWrites) {
    const metadata = await this.serde.loadsTyped("json", JSON.stringify(doc.metadata));
    return {
      config: { configurable: {
        thread_id: doc.thread_id,
        checkpoint_ns: doc.checkpoint_ns,
        checkpoint_id: doc.checkpoint_id
      } },
      checkpoint,
      metadata,
      parentConfig: doc.parent_checkpoint_id ? { configurable: {
        thread_id: doc.thread_id,
        checkpoint_ns: doc.checkpoint_ns,
        checkpoint_id: doc.parent_checkpoint_id
      } } : void 0,
      pendingWrites
    };
  }

  /** Apply the configured default TTL (minutes → seconds) to each key. */
  async applyTTL(...keys) {
    if (!this.ttlConfig?.defaultTTL) return;
    const ttlSeconds = Math.floor(this.ttlConfig.defaultTTL * 60);
    const outcomes = await Promise.allSettled(keys.map((key) => this.client.expire(key, ttlSeconds)));
    for (let idx = 0; idx < outcomes.length; idx++) {
      if (outcomes[idx].status === "rejected") {
        console.warn(`Failed to set TTL for key ${keys[idx]}:`, outcomes[idx].reason);
      }
    }
  }

  /** Load a checkpoint's pending writes in insertion order (via the zset). */
  async loadPendingWrites(threadId, checkpointNs, checkpointId) {
    const orderKey = `write_keys_zset:${threadId}:${checkpointNs}:${checkpointId}`;
    const orderedKeys = await this.client.zRange(orderKey, 0, -1);
    if (orderedKeys.length === 0) return;
    const writes = [];
    for (const key of orderedKeys) {
      const doc = await this.client.json.get(key);
      if (doc) {
        const value = await this.serde.loadsTyped("json", JSON.stringify(doc.value));
        writes.push([
          doc.task_id,
          doc.channel,
          value
        ]);
      }
    }
    return writes;
  }

  /**
   * Check metadata against a filter: `null` requires the key to exist with a
   * null value, plain objects compare by canonical JSON (`k`), everything
   * else by strict equality.
   */
  checkMetadataFilterMatch(metadata, filter) {
    for (const [field, expected] of Object.entries(filter)) {
      const actual = metadata?.[field];
      if (expected === null) {
        if (!(field in (metadata || {})) || actual !== null) return !1;
      } else if (typeof expected == "object" && !Array.isArray(expected)) {
        if (typeof actual != "object" || actual === null || k(expected) !== k(actual)) return !1;
      } else if (actual !== expected) {
        return !1;
      }
    }
    return !0;
  }

  /** Remove write docs, the order zset, and blob keys of a replaced checkpoint. */
  async cleanupOldCheckpoint(threadId, checkpointNs, checkpointId) {
    const writeKeys = await this.client.keys(`checkpoint_write:${threadId}:${checkpointNs}:${checkpointId}:*`);
    if (writeKeys.length > 0) await this.client.del(writeKeys);
    await this.client.del(`write_keys_zset:${threadId}:${checkpointNs}:${checkpointId}`);
    const blobKeys = await this.client.keys(`checkpoint_blob:${threadId}:${checkpointNs}:${checkpointId}:*`);
    if (blobKeys.length > 0) await this.client.del(blobKeys);
  }

  /** Strip NUL bytes from metadata keys and string values (RediSearch safety). */
  sanitizeMetadata(metadata) {
    if (!metadata) return {};
    const cleaned = {};
    for (const [key, value] of Object.entries(metadata)) {
      const safeKey = key.replace(/\x00/g, "");
      cleaned[safeKey] = typeof value == "string" ? value.replace(/\x00/g, "") : value;
    }
    return cleaned;
  }

  /** Create each RediSearch index, ignoring "already exists" errors. */
  async ensureIndexes() {
    for (const spec of S) {
      try {
        await this.client.ft.create(spec.index, spec.schema, {
          ON: "JSON",
          PREFIX: spec.prefix
        });
      } catch (err) {
        if (!err.message?.includes("Index already exists")) {
          console.error(`Failed to create index ${spec.index}:`, err.message);
        }
      }
    }
  }
};
export {
  C as ShallowRedisSaver
};