zen-code 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,211 @@
+ import { aS as p, ah as h, F as r, bb as l, bi as d, bc as y, bj as w, c } from "./sql-CJsUpKEQ.mjs";
+ import { D as g, b as _, c as f, a as b, C as o } from "./migrator-BatO36Tk.mjs";
+ function i(n, e) {
+ return p.createWithChildren([
+ p.createWithSql(`${n} `),
+ h.create(e)
+ // ensures savepointName gets sanitized
+ ]);
+ }
+ const T = /"/g;
+ class E extends g {
+ sanitizeIdentifier(e) {
+ return e.replace(T, '""');
+ }
+ }
+ class C {
+ #e;
+ constructor(e) {
+ this.#e = e;
+ }
+ async getSchemas() {
+ return (await this.#e.selectFrom("pg_catalog.pg_namespace").select("nspname").$castTo().execute()).map((a) => ({ name: a.nspname }));
+ }
+ async getTables(e = { withInternalKyselyTables: !1 }) {
+ let a = this.#e.selectFrom("pg_catalog.pg_attribute as a").innerJoin("pg_catalog.pg_class as c", "a.attrelid", "c.oid").innerJoin("pg_catalog.pg_namespace as ns", "c.relnamespace", "ns.oid").innerJoin("pg_catalog.pg_type as typ", "a.atttypid", "typ.oid").innerJoin("pg_catalog.pg_namespace as dtns", "typ.typnamespace", "dtns.oid").select([
+ "a.attname as column",
+ "a.attnotnull as not_null",
+ "a.atthasdef as has_default",
+ "c.relname as table",
+ "c.relkind as table_type",
+ "ns.nspname as schema",
+ "typ.typname as type",
+ "dtns.nspname as type_schema",
+ r`col_description(a.attrelid, a.attnum)`.as("column_description"),
+ r`pg_get_serial_sequence(quote_ident(ns.nspname) || '.' || quote_ident(c.relname), a.attname)`.as("auto_incrementing")
+ ]).where("c.relkind", "in", [
+ "r",
+ "v",
+ "p"
+ ]).where("ns.nspname", "!~", "^pg_").where("ns.nspname", "!=", "information_schema").where("ns.nspname", "!=", "crdb_internal").where(r`has_schema_privilege(ns.nspname, 'USAGE')`).where("a.attnum", ">=", 0).where("a.attisdropped", "!=", !0).orderBy("ns.nspname").orderBy("c.relname").orderBy("a.attnum").$castTo();
+ e.withInternalKyselyTables || (a = a.where("c.relname", "!=", _).where("c.relname", "!=", f));
+ const t = await a.execute();
+ return this.#a(t);
+ }
+ async getMetadata(e) {
+ return {
+ tables: await this.getTables(e)
+ };
+ }
+ #a(e) {
+ return e.reduce((a, t) => {
+ let s = a.find((u) => u.name === t.table && u.schema === t.schema);
+ return s || (s = l({
+ name: t.table,
+ isView: t.table_type === "v",
+ schema: t.schema,
+ columns: []
+ }), a.push(s)), s.columns.push(l({
+ name: t.column,
+ dataType: t.type,
+ dataTypeSchema: t.type_schema,
+ isNullable: !t.not_null,
+ isAutoIncrementing: t.auto_incrementing !== null,
+ hasDefaultValue: t.has_default,
+ comment: t.column_description ?? void 0
+ })), a;
+ }, []);
+ }
+ }
+ const I = BigInt("3853314791062309107");
+ class k extends b {
+ get supportsTransactionalDdl() {
+ return !0;
+ }
+ get supportsReturning() {
+ return !0;
+ }
+ async acquireMigrationLock(e, a) {
+ await r`select pg_advisory_xact_lock(${r.lit(I)})`.execute(e);
+ }
+ async releaseMigrationLock(e, a) {
+ }
+ }
+ function v(n, e) {
+ if (x(n) && e.stack) {
+ const a = e.stack.split(`
+ `).slice(1).join(`
+ `);
+ return n.stack += `
+ ${a}`, n;
+ }
+ return n;
+ }
+ function x(n) {
+ return d(n) && y(n.stack);
+ }
+ const m = /* @__PURE__ */ Symbol();
+ class A {
+ #e;
+ #a = /* @__PURE__ */ new WeakMap();
+ #t;
+ constructor(e) {
+ this.#e = l({ ...e });
+ }
+ async init() {
+ this.#t = w(this.#e.pool) ? await this.#e.pool() : this.#e.pool;
+ }
+ async acquireConnection() {
+ const e = await this.#t.connect();
+ let a = this.#a.get(e);
+ return a || (a = new D(e, {
+ cursor: this.#e.cursor ?? null
+ }), this.#a.set(e, a), this.#e.onCreateConnection && await this.#e.onCreateConnection(a)), this.#e.onReserveConnection && await this.#e.onReserveConnection(a), a;
+ }
+ async beginTransaction(e, a) {
+ if (a.isolationLevel || a.accessMode) {
+ let t = "start transaction";
+ a.isolationLevel && (t += ` isolation level ${a.isolationLevel}`), a.accessMode && (t += ` ${a.accessMode}`), await e.executeQuery(o.raw(t));
+ } else
+ await e.executeQuery(o.raw("begin"));
+ }
+ async commitTransaction(e) {
+ await e.executeQuery(o.raw("commit"));
+ }
+ async rollbackTransaction(e) {
+ await e.executeQuery(o.raw("rollback"));
+ }
+ async savepoint(e, a, t) {
+ await e.executeQuery(t(i("savepoint", a), c()));
+ }
+ async rollbackToSavepoint(e, a, t) {
+ await e.executeQuery(t(i("rollback to", a), c()));
+ }
+ async releaseSavepoint(e, a, t) {
+ await e.executeQuery(t(i("release", a), c()));
+ }
+ async releaseConnection(e) {
+ e[m]();
+ }
+ async destroy() {
+ if (this.#t) {
+ const e = this.#t;
+ this.#t = void 0, await e.end();
+ }
+ }
+ }
+ class D {
+ #e;
+ #a;
+ constructor(e, a) {
+ this.#e = e, this.#a = a;
+ }
+ async executeQuery(e) {
+ try {
+ const { command: a, rowCount: t, rows: s } = await this.#e.query(e.sql, [...e.parameters]);
+ return {
+ numAffectedRows: a === "INSERT" || a === "UPDATE" || a === "DELETE" || a === "MERGE" ? BigInt(t) : void 0,
+ rows: s ?? []
+ };
+ } catch (a) {
+ throw v(a, new Error());
+ }
+ }
+ async *streamQuery(e, a) {
+ if (!this.#a.cursor)
+ throw new Error("'cursor' is not present in your postgres dialect config. It's required to make streaming work in postgres.");
+ if (!Number.isInteger(a) || a <= 0)
+ throw new Error("chunkSize must be a positive integer");
+ const t = this.#e.query(new this.#a.cursor(e.sql, e.parameters.slice()));
+ try {
+ for (; ; ) {
+ const s = await t.read(a);
+ if (s.length === 0)
+ break;
+ yield {
+ rows: s
+ };
+ }
+ } finally {
+ await t.close();
+ }
+ }
+ [m]() {
+ this.#e.release();
+ }
+ }
+ class L {
+ #e;
+ constructor(e) {
+ this.#e = e;
+ }
+ createDriver() {
+ return new A(this.#e);
+ }
+ createQueryCompiler() {
+ return new E();
+ }
+ createAdapter() {
+ return new k();
+ }
+ createIntrospector(e) {
+ return new C(e);
+ }
+ }
+ export {
+ E as P,
+ C as a,
+ k as b,
+ A as c,
+ L as d
+ };
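
This chunk assembles a Kysely-style PostgreSQL dialect: E is a query compiler whose sanitizeIdentifier doubles embedded quotes, C introspects schemas and tables from pg_catalog, k is an adapter that serializes migrations behind pg_advisory_xact_lock, A is a pool-backed driver with transaction and savepoint support, and L wires them together as the dialect (exported as d). A minimal usage sketch, assuming the exports behave like Kysely's stock PostgresDialect pieces; the readable aliases and the import path are illustrative, not from the package:

import { Kysely } from "kysely";
import pg from "pg";
import { d as PostgresDialect } from "./postgres-chunk.mjs"; // hypothetical chunk path

// The driver config accepts a pg Pool (or an async factory returning one) and an
// optional `cursor` constructor (e.g. pg-cursor), which streamQuery requires.
const db = new Kysely({
  dialect: new PostgresDialect({
    pool: new pg.Pool({ connectionString: process.env.DATABASE_URL }),
  }),
});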
@@ -0,0 +1,81 @@
+ import { a as u, C as h } from "./app-CV_FJyjI.mjs";
+ import { createClient as n } from "redis";
+ class r extends u {
+ constructor(s, e = !0, i = 300) {
+ super(s, !0, i), this.id = s, this.compressMessages = e, this.ttl = i, this.queueKey = `queue:${this.id}`, this.channelKey = `channel:${this.id}`, this.redis = r.redis, this.subscriberRedis = r.subscriberRedis, this.cancelSignal = new AbortController(), this.redis.isOpen || this.redis.connect(), this.subscriberRedis.isOpen || this.subscriberRedis.connect(), this.isConnected = !0;
+ }
+ static redis = n({ url: process.env.REDIS_URL });
+ static subscriberRedis = n({ url: process.env.REDIS_URL });
+ static isQueueExist(s) {
+ return this.redis.exists(`queue:${s}`).then((e) => e > 0);
+ }
+ redis;
+ subscriberRedis;
+ queueKey;
+ channelKey;
+ isConnected = !1;
+ cancelSignal;
+ /**
+ * Push a message onto the Redis queue
+ */
+ async push(s) {
+ const e = await this.encodeData(s), i = Buffer.from(e);
+ await this.redis.rPush(this.queueKey, i), await this.redis.expire(this.queueKey, this.ttl), await this.redis.publish(this.channelKey, i), this.emit("dataChange", e);
+ }
+ /**
+ * Async generator: consume queue data via for await...of
+ */
+ async *onDataReceive() {
+ let s = [], e = null, i = !1;
+ const c = async (t) => {
+ const a = await this.decodeData(t);
+ s.push(a), (a.event === "__stream_end__" || a.event === "__stream_error__" || a.event === "__stream_cancel__") && (setTimeout(() => {
+ i = !0, e && (e(), e = null);
+ }, 300), a.event === "__stream_cancel__" && this.cancel()), e && (e(), e = null);
+ };
+ await this.subscriberRedis.subscribe(this.channelKey, (t) => {
+ c(t);
+ });
+ try {
+ for (; !i; )
+ if (s.length > 0) {
+ for (const t of s)
+ yield t;
+ s = [];
+ } else
+ await new Promise((t) => {
+ e = t;
+ });
+ } finally {
+ await this.subscriberRedis.unsubscribe(this.channelKey);
+ }
+ }
+ /**
+ * Get all data currently in the queue
+ */
+ async getAll() {
+ const s = await this.redis.lRange(this.queueKey, 0, -1);
+ return !s || s.length === 0 ? [] : this.compressMessages ? await Promise.all(
+ s.map((e) => this.decodeData(e))
+ ) : s.map((e) => JSON.parse(e));
+ }
+ /**
+ * Clear the queue
+ */
+ clear() {
+ this.isConnected && this.redis.del(this.queueKey);
+ }
+ /**
+ * Cancel the operation
+ */
+ cancel() {
+ this.push(new h()), this.cancelSignal.abort("user cancel this run");
+ }
+ async copyToQueue(s, e) {
+ const i = new r(s, this.compressMessages, e ?? this.ttl);
+ return await this.redis.copy(this.queueKey, i.queueKey), await this.redis.expire(i.queueKey, e ?? this.ttl), i;
+ }
+ }
+ export {
+ r as RedisStreamQueue
+ };
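
RedisStreamQueue writes every pushed message twice: RPUSH onto a TTL-bounded list (so getAll can replay history) and PUBLISH on a per-queue channel (so live consumers receive it), and onDataReceive ends the iteration shortly after a __stream_end__, __stream_error__, or __stream_cancel__ event arrives. A consumption sketch, assuming REDIS_URL is set and that the base class from app-CV_FJyjI.mjs (not shown in this diff) provides encodeData/decodeData and EventEmitter semantics:

import { RedisStreamQueue } from "zen-code"; // hypothetical import path

const queue = new RedisStreamQueue("run-42"); // compression on, 300 s TTL by default

// Consumer: only messages published after subscription are seen; the loop
// returns ~300 ms after a terminal event is observed.
for await (const msg of queue.onDataReceive()) {
  console.log(msg);
}

// Producer (in another process): push payloads, then a terminal event.
// await queue.push({ event: "message", data: "hello" });
// await queue.push({ event: "__stream_end__" });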
@@ -0,0 +1,330 @@
+ import { B as $, u as g } from "./app-CV_FJyjI.mjs";
+ import { createClient as T } from "redis";
+ function k(u) {
+ if (u === null || typeof u != "object") return JSON.stringify(u);
+ if (Array.isArray(u)) return JSON.stringify(u.map((i) => k(i)));
+ const t = {}, e = Object.keys(u).sort();
+ for (const i of e) t[i] = u[i];
+ return JSON.stringify(t, (i, n) => {
+ if (n !== null && typeof n == "object" && !Array.isArray(n)) {
+ const s = {}, c = Object.keys(n).sort();
+ for (const r of c) s[r] = n[r];
+ return s;
+ }
+ return n;
+ });
+ }
+ const S = [{
+ index: "checkpoints",
+ prefix: "checkpoint:",
+ schema: {
+ "$.thread_id": {
+ type: "TAG",
+ AS: "thread_id"
+ },
+ "$.checkpoint_ns": {
+ type: "TAG",
+ AS: "checkpoint_ns"
+ },
+ "$.checkpoint_id": {
+ type: "TAG",
+ AS: "checkpoint_id"
+ },
+ "$.parent_checkpoint_id": {
+ type: "TAG",
+ AS: "parent_checkpoint_id"
+ },
+ "$.checkpoint_ts": {
+ type: "NUMERIC",
+ AS: "checkpoint_ts"
+ },
+ "$.has_writes": {
+ type: "TAG",
+ AS: "has_writes"
+ },
+ "$.source": {
+ type: "TAG",
+ AS: "source"
+ },
+ "$.step": {
+ type: "NUMERIC",
+ AS: "step"
+ }
+ }
+ }, {
+ index: "checkpoint_writes",
+ prefix: "checkpoint_write:",
+ schema: {
+ "$.thread_id": {
+ type: "TAG",
+ AS: "thread_id"
+ },
+ "$.checkpoint_ns": {
+ type: "TAG",
+ AS: "checkpoint_ns"
+ },
+ "$.checkpoint_id": {
+ type: "TAG",
+ AS: "checkpoint_id"
+ },
+ "$.task_id": {
+ type: "TAG",
+ AS: "task_id"
+ },
+ "$.idx": {
+ type: "NUMERIC",
+ AS: "idx"
+ },
+ "$.channel": {
+ type: "TAG",
+ AS: "channel"
+ },
+ "$.type": {
+ type: "TAG",
+ AS: "type"
+ }
+ }
+ }];
+ var x = class w extends $ {
+ client;
+ ttlConfig;
+ constructor(t, e) {
+ super(), this.client = t, this.ttlConfig = e;
+ }
+ static async fromUrl(t, e) {
+ const i = T({ url: t });
+ await i.connect();
+ const n = new w(i, e);
+ return await n.ensureIndexes(), n;
+ }
+ async get(t) {
+ return (await this.getTuple(t))?.checkpoint;
+ }
+ async put(t, e, i, n) {
+ await this.ensureIndexes();
+ const s = t.configurable?.thread_id, c = t.configurable?.checkpoint_ns ?? "", r = t.configurable?.checkpoint_id;
+ if (!s) throw new Error("thread_id is required");
+ const a = e.id || g(0), l = `checkpoint:${s}:${c}:shallow`;
+ let h = null, p = null;
+ try {
+ h = await this.client.json.get(l), h && typeof h == "object" && (p = h.checkpoint_id);
+ } catch {
+ }
+ p && p !== a && await this.cleanupOldCheckpoint(s, c, p);
+ const o = {
+ ...e,
+ channel_values: e.channel_values || {},
+ channel_blobs: void 0
+ }, d = {
+ thread_id: s,
+ checkpoint_ns: c,
+ checkpoint_id: a,
+ parent_checkpoint_id: r || null,
+ checkpoint: o,
+ metadata: this.sanitizeMetadata(i),
+ checkpoint_ts: Date.now(),
+ has_writes: "false"
+ };
+ return this.addSearchableMetadataFields(d, i), await this.client.json.set(l, "$", d), this.ttlConfig?.defaultTTL && await this.applyTTL(l), { configurable: {
+ thread_id: s,
+ checkpoint_ns: c,
+ checkpoint_id: a
+ } };
+ }
+ async getTuple(t) {
+ const e = t.configurable?.thread_id, i = t.configurable?.checkpoint_ns ?? "", n = t.configurable?.checkpoint_id;
+ if (!e) return;
+ const s = `checkpoint:${e}:${i}:shallow`, c = await this.client.json.get(s);
+ if (!c || n && c.checkpoint_id !== n) return;
+ this.ttlConfig?.refreshOnRead && this.ttlConfig?.defaultTTL && await this.applyTTL(s);
+ const r = await this.serde.loadsTyped("json", JSON.stringify(c.checkpoint));
+ let a;
+ return c.has_writes === "true" && (a = await this.loadPendingWrites(c.thread_id, c.checkpoint_ns, c.checkpoint_id)), await this.createCheckpointTuple(c, r, a);
+ }
+ async *list(t, e) {
+ if (await this.ensureIndexes(), t?.configurable?.thread_id) {
+ const i = await this.getTuple(t);
+ i && (e?.filter ? this.checkMetadataFilterMatch(i.metadata, e.filter) && (yield i) : yield i);
+ } else {
+ const i = [];
+ if (e?.filter)
+ for (const [c, r] of Object.entries(e.filter)) r === void 0 || r === null || (typeof r == "string" ? i.push(`(@${c}:{${r}})`) : typeof r == "number" && i.push(`(@${c}:[${r} ${r}])`));
+ i.length === 0 && i.push("*");
+ const n = i.join(" "), s = e?.limit ?? 10;
+ try {
+ const c = await this.client.ft.search("checkpoints", n, {
+ LIMIT: {
+ from: 0,
+ size: s * 2
+ },
+ SORTBY: {
+ BY: "checkpoint_ts",
+ DIRECTION: "DESC"
+ }
+ }), r = /* @__PURE__ */ new Set();
+ let a = 0;
+ for (const l of c.documents) {
+ if (a >= s) break;
+ const h = l.value, p = `${h.thread_id}:${h.checkpoint_ns}`;
+ if (r.has(p) || (r.add(p), e?.filter && !this.checkMetadataFilterMatch(h.metadata, e.filter)))
+ continue;
+ const o = await this.serde.loadsTyped("json", JSON.stringify(h.checkpoint));
+ yield await this.createCheckpointTuple(h, o), a++;
+ }
+ } catch (c) {
+ if (c.message?.includes("no such index")) {
+ const a = await this.client.keys("checkpoint:*:*:shallow");
+ if (a.length === 0) return;
+ a.sort().reverse();
+ const l = /* @__PURE__ */ new Set();
+ let h = 0;
+ const p = e?.limit ?? 10;
+ for (const o of a) {
+ if (h >= p) break;
+ const d = await this.client.json.get(o);
+ if (!d) continue;
+ const f = `${d.thread_id}:${d.checkpoint_ns}`;
+ if (l.has(f) || (l.add(f), e?.filter && !this.checkMetadataFilterMatch(d.metadata, e.filter)))
+ continue;
+ const y = await this.serde.loadsTyped("json", JSON.stringify(d.checkpoint));
+ yield await this.createCheckpointTuple(d, y), h++;
+ }
+ return;
+ }
+ throw c;
+ }
+ }
+ }
+ async putWrites(t, e, i) {
+ await this.ensureIndexes();
+ const n = t.configurable?.thread_id, s = t.configurable?.checkpoint_ns ?? "", c = t.configurable?.checkpoint_id;
+ if (!n || !c) throw new Error("thread_id and checkpoint_id are required");
+ const r = `checkpoint_write:${n}:${s}:${c}:${i}:*`, a = await this.client.keys(r);
+ a.length > 0 && await this.client.del(a);
+ const l = [];
+ for (let o = 0; o < e.length; o++) {
+ const [d, f] = e[o], y = `checkpoint_write:${n}:${s}:${c}:${i}:${o}`;
+ l.push(y);
+ const _ = {
+ thread_id: n,
+ checkpoint_ns: s,
+ checkpoint_id: c,
+ task_id: i,
+ idx: o,
+ channel: d,
+ type: typeof f == "object" ? "json" : "string",
+ value: f
+ };
+ await this.client.json.set(y, "$", _);
+ }
+ if (l.length > 0) {
+ const o = `write_keys_zset:${n}:${s}:${c}`, d = {};
+ l.forEach((f, y) => {
+ d[f] = y;
+ }), await this.client.zAdd(o, Object.entries(d).map(([f, y]) => ({
+ score: y,
+ value: f
+ }))), this.ttlConfig?.defaultTTL && await this.applyTTL(...l, o);
+ }
+ const h = `checkpoint:${n}:${s}:shallow`;
+ if (await this.client.exists(h)) {
+ const o = await this.client.json.get(h);
+ o && (o.has_writes = "true", await this.client.json.set(h, "$", o));
+ }
+ }
+ async deleteThread(t) {
+ const e = `checkpoint:${t}:*:shallow`, i = await this.client.keys(e);
+ i.length > 0 && await this.client.del(i);
+ const n = `checkpoint_write:${t}:*`, s = await this.client.keys(n);
+ s.length > 0 && await this.client.del(s);
+ const c = `write_keys_zset:${t}:*`, r = await this.client.keys(c);
+ r.length > 0 && await this.client.del(r);
+ }
+ async end() {
+ await this.client.quit();
+ }
+ addSearchableMetadataFields(t, e) {
+ e && ("source" in e && (t.source = e.source), "step" in e && (t.step = e.step), "writes" in e && (t.writes = typeof e.writes == "object" ? JSON.stringify(e.writes) : e.writes), "score" in e && (t.score = e.score));
+ }
+ async createCheckpointTuple(t, e, i) {
+ const n = await this.serde.loadsTyped("json", JSON.stringify(t.metadata));
+ return {
+ config: { configurable: {
+ thread_id: t.thread_id,
+ checkpoint_ns: t.checkpoint_ns,
+ checkpoint_id: t.checkpoint_id
+ } },
+ checkpoint: e,
+ metadata: n,
+ parentConfig: t.parent_checkpoint_id ? { configurable: {
+ thread_id: t.thread_id,
+ checkpoint_ns: t.checkpoint_ns,
+ checkpoint_id: t.parent_checkpoint_id
+ } } : void 0,
+ pendingWrites: i
+ };
+ }
+ async applyTTL(...t) {
+ if (!this.ttlConfig?.defaultTTL) return;
+ const e = Math.floor(this.ttlConfig.defaultTTL * 60), i = await Promise.allSettled(t.map((n) => this.client.expire(n, e)));
+ for (let n = 0; n < i.length; n++) i[n].status === "rejected" && console.warn(`Failed to set TTL for key ${t[n]}:`, i[n].reason);
+ }
+ async loadPendingWrites(t, e, i) {
+ const n = `write_keys_zset:${t}:${e}:${i}`, s = await this.client.zRange(n, 0, -1);
+ if (s.length === 0) return;
+ const c = [];
+ for (const r of s) {
+ const a = await this.client.json.get(r);
+ if (a) {
+ const l = await this.serde.loadsTyped("json", JSON.stringify(a.value));
+ c.push([
+ a.task_id,
+ a.channel,
+ l
+ ]);
+ }
+ }
+ return c;
+ }
+ checkMetadataFilterMatch(t, e) {
+ for (const [i, n] of Object.entries(e)) {
+ const s = t?.[i];
+ if (n === null) {
+ if (!(i in (t || {})) || s !== null) return !1;
+ } else if (typeof n == "object" && !Array.isArray(n)) {
+ if (typeof s != "object" || s === null || k(n) !== k(s)) return !1;
+ } else if (s !== n) return !1;
+ }
+ return !0;
+ }
+ async cleanupOldCheckpoint(t, e, i) {
+ const n = `checkpoint_write:${t}:${e}:${i}:*`, s = await this.client.keys(n);
+ s.length > 0 && await this.client.del(s);
+ const c = `write_keys_zset:${t}:${e}:${i}`;
+ await this.client.del(c);
+ const r = `checkpoint_blob:${t}:${e}:${i}:*`, a = await this.client.keys(r);
+ a.length > 0 && await this.client.del(a);
+ }
+ sanitizeMetadata(t) {
+ if (!t) return {};
+ const e = {};
+ for (const [i, n] of Object.entries(t)) {
+ const s = i.replace(/\x00/g, "");
+ e[s] = typeof n == "string" ? n.replace(/\x00/g, "") : n;
+ }
+ return e;
+ }
+ async ensureIndexes() {
+ for (const t of S) try {
+ await this.client.ft.create(t.index, t.schema, {
+ ON: "JSON",
+ PREFIX: t.prefix
+ });
+ } catch (e) {
+ e.message?.includes("Index already exists") || console.error(`Failed to create index ${t.index}:`, e.message);
+ }
+ }
+ };
+ export {
+ x as ShallowRedisSaver
+ };
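
ShallowRedisSaver stores at most one checkpoint per thread and namespace under checkpoint:{thread_id}:{ns}:shallow, deleting the superseded checkpoint's writes on each put, and serves list() from a RediSearch index with a KEYS-scan fallback when the index is missing. A setup sketch; the TTL options match the code above (defaultTTL is in minutes, since applyTTL multiplies by 60 before EXPIRE), while the import path and the LangGraph-style wiring are assumptions:

import { ShallowRedisSaver } from "zen-code"; // hypothetical import path

const checkpointer = await ShallowRedisSaver.fromUrl(process.env.REDIS_URL, {
  defaultTTL: 60,      // expire checkpoint keys after one hour
  refreshOnRead: true, // re-arm the TTL whenever a tuple is read
});

// const app = workflow.compile({ checkpointer }); // LangGraph-style usage (assumed)

await checkpointer.end(); // quit() the underlying redis client when done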