@rotorsoft/act-pg 0.3.0 → 0.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/.tsbuildinfo +1 -1
- package/dist/@types/PostgresStore.d.ts +13 -3
- package/dist/@types/PostgresStore.d.ts.map +1 -1
- package/dist/@types/index.d.ts +0 -1
- package/dist/@types/index.d.ts.map +1 -1
- package/dist/index.cjs +119 -106
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +109 -95
- package/dist/index.js.map +1 -1
- package/package.json +2 -2
- package/dist/@types/config.d.ts +0 -24
- package/dist/@types/config.d.ts.map +0 -1
- package/dist/@types/seed.d.ts +0 -2
- package/dist/@types/seed.d.ts.map +0 -1
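
The headline change in dist/index.js below is the removal of the env-driven config module and the seed module: PostgresStore now receives a single partial configuration object (host, port, database, user, password, schema, table, leaseMillis) that is merged over built-in defaults and handed to pg.Pool, instead of reading PG_* environment variables through the removed `config` export and taking the table name and lease duration as positional constructor arguments. A minimal TypeScript sketch of the before/after construction, assuming the published type declarations mirror the bundled output shown below; the names used are illustrative, not taken from the package docs:

  import { PostgresStore } from "@rotorsoft/act-pg";

  // 0.3.0: connection settings came from PG_* env vars via the now-removed
  // `config` export; the constructor took table and leaseMillis positionally:
  // const store = new PostgresStore("my_events", 30_000);

  // 0.4.1: one partial config object, merged over the defaults visible in the
  // diff (localhost:5432, postgres/postgres/postgres, schema "public",
  // table "events", leaseMillis 30_000) and passed to pg.Pool:
  const store = new PostgresStore({
    host: "localhost",
    port: 5432,
    schema: "public",
    table: "events",
    leaseMillis: 30_000
  });
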
package/dist/index.js
CHANGED
@@ -1,80 +1,7 @@
-// src/config.ts
-import { extend, config as target } from "@rotorsoft/act";
-import { z } from "zod/v4";
-var { PG_HOST, PG_USER, PG_PASSWORD, PG_DATABASE, PG_PORT } = process.env;
-var config = extend(
-  {
-    pg: {
-      host: PG_HOST || "localhost",
-      user: PG_USER || "postgres",
-      password: PG_PASSWORD || "postgres",
-      database: PG_DATABASE || "postgres",
-      port: Number.parseInt(PG_PORT || "5431")
-    }
-  },
-  z.object({
-    pg: z.object({
-      host: z.string().min(1),
-      user: z.string().min(1),
-      password: z.string().min(1),
-      database: z.string().min(1),
-      port: z.number().int().min(1e3).max(65535)
-    })
-  }),
-  target()
-);
-
 // src/PostgresStore.ts
 import { ConcurrencyError, SNAP_EVENT, logger } from "@rotorsoft/act";
 import pg from "pg";
 
-// src/seed.ts
-var seed_store = (table) => `
--- events
-CREATE TABLE IF NOT EXISTS public."${table}"
-(
-  id serial PRIMARY KEY,
-  name varchar(100) COLLATE pg_catalog."default" NOT NULL,
-  data jsonb,
-  stream varchar(100) COLLATE pg_catalog."default" NOT NULL,
-  version int NOT NULL,
-  created timestamptz NOT NULL DEFAULT now(),
-  meta jsonb
-) TABLESPACE pg_default;
-
-CREATE UNIQUE INDEX IF NOT EXISTS "${table}_stream_ix"
-  ON public."${table}" USING btree (stream COLLATE pg_catalog."default" ASC, version ASC)
-  TABLESPACE pg_default;
-
-CREATE INDEX IF NOT EXISTS "${table}_name_ix"
-  ON public."${table}" USING btree (name COLLATE pg_catalog."default" ASC)
-  TABLESPACE pg_default;
-
-CREATE INDEX IF NOT EXISTS "${table}_created_id_ix"
-  ON public."${table}" USING btree (created ASC, id ASC)
-  TABLESPACE pg_default;
-
-CREATE INDEX IF NOT EXISTS "${table}_correlation_ix"
-  ON public."${table}" USING btree ((meta ->> 'correlation'::text) COLLATE pg_catalog."default" ASC NULLS LAST)
-  TABLESPACE pg_default;
-
--- streams
-CREATE TABLE IF NOT EXISTS public."${table}_streams"
-(
-  stream varchar(100) COLLATE pg_catalog."default" PRIMARY KEY,
-  at int not null default(-1),
-  retry smallint not null default(0),
-  blocked boolean not null default(false),
-  leased_at int,
-  leased_by uuid,
-  leased_until timestamptz
-) TABLESPACE pg_default;
-
--- supports order by { blocked, at } when fetching
-CREATE INDEX IF NOT EXISTS "${table}_streams_fetch_ix"
-  ON public."${table}_streams" USING btree (blocked, at) TABLESPACE pg_default;
-`;
-
 // src/utils.ts
 var ISO_8601 = /^(\d{4})-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(\.\d+)?(Z|[+-][0-2][0-9]:[0-5][0-9])?$/;
 var dateReviver = (key, value) => {
@@ -90,22 +17,105 @@ types.setTypeParser(
   types.builtins.JSONB,
   (val) => JSON.parse(val, dateReviver)
 );
+var DEFAULT_CONFIG = {
+  host: "localhost",
+  port: 5432,
+  database: "postgres",
+  user: "postgres",
+  password: "postgres",
+  schema: "public",
+  table: "events",
+  leaseMillis: 3e4
+};
 var PostgresStore = class {
-  constructor(table, leaseMillis = 3e4) {
-    this.table = table;
-    this.leaseMillis = leaseMillis;
+  _pool;
+  config;
+  constructor(config = {}) {
+    this.config = { ...DEFAULT_CONFIG, ...config };
+    this._pool = new Pool(this.config);
   }
-  _pool = new Pool(config.pg);
   async dispose() {
     await this._pool.end();
   }
   async seed() {
-    const seed = seed_store(this.table);
-    await this._pool.query(seed);
+    const client = await this._pool.connect();
+    try {
+      await client.query("BEGIN");
+      await client.query(
+        `CREATE SCHEMA IF NOT EXISTS "${this.config.schema}";`
+      );
+      await client.query(
+        `CREATE TABLE IF NOT EXISTS "${this.config.schema}"."${this.config.table}" (
+          id serial PRIMARY KEY,
+          name varchar(100) COLLATE pg_catalog."default" NOT NULL,
+          data jsonb,
+          stream varchar(100) COLLATE pg_catalog."default" NOT NULL,
+          version int NOT NULL,
+          created timestamptz NOT NULL DEFAULT now(),
+          meta jsonb
+        ) TABLESPACE pg_default;`
+      );
+      await client.query(
+        `CREATE UNIQUE INDEX IF NOT EXISTS "${this.config.table}_stream_ix"
+        ON "${this.config.schema}"."${this.config.table}" (stream COLLATE pg_catalog."default", version);`
+      );
+      await client.query(
+        `CREATE INDEX IF NOT EXISTS "${this.config.table}_name_ix"
+        ON "${this.config.schema}"."${this.config.table}" (name COLLATE pg_catalog."default");`
+      );
+      await client.query(
+        `CREATE INDEX IF NOT EXISTS "${this.config.table}_created_id_ix"
+        ON "${this.config.schema}"."${this.config.table}" (created, id);`
+      );
+      await client.query(
+        `CREATE INDEX IF NOT EXISTS "${this.config.table}_correlation_ix"
+        ON "${this.config.schema}"."${this.config.table}" ((meta ->> 'correlation') COLLATE pg_catalog."default");`
+      );
+      await client.query(
+        `CREATE TABLE IF NOT EXISTS "${this.config.schema}"."${this.config.table}_streams" (
+          stream varchar(100) COLLATE pg_catalog."default" PRIMARY KEY,
+          at int NOT NULL DEFAULT -1,
+          retry smallint NOT NULL DEFAULT 0,
+          blocked boolean NOT NULL DEFAULT false,
+          leased_at int,
+          leased_by uuid,
+          leased_until timestamptz
+        ) TABLESPACE pg_default;`
+      );
+      await client.query(
+        `CREATE INDEX IF NOT EXISTS "${this.config.table}_streams_fetch_ix"
+        ON "${this.config.schema}"."${this.config.table}_streams" (blocked, at);`
+      );
+      await client.query("COMMIT");
+      logger.info(
+        `Seeded schema "${this.config.schema}" with table "${this.config.table}"`
+      );
+    } catch (error) {
+      await client.query("ROLLBACK");
+      logger.error("Failed to seed store:", error);
+      throw error;
+    } finally {
+      client.release();
+    }
   }
   async drop() {
-    await this._pool.query(`DROP TABLE IF EXISTS "${this.table}"`);
-    await this._pool.query(`DROP TABLE IF EXISTS "${this.table}_streams"`);
+    await this._pool.query(
+      `
+      DO $$
+      BEGIN
+        IF EXISTS (SELECT 1 FROM information_schema.schemata
+          WHERE schema_name = '${this.config.schema}'
+        ) THEN
+          EXECUTE 'DROP TABLE IF EXISTS "${this.config.schema}"."${this.config.table}"';
+          EXECUTE 'DROP TABLE IF EXISTS "${this.config.schema}"."${this.config.table}_streams"';
+          IF '${this.config.schema}' <> 'public' THEN
+            EXECUTE 'DROP SCHEMA "${this.config.schema}" CASCADE';
+          END IF;
+        END IF;
+      END
+      $$;
+      `
+    );
   }
   async query(callback, query, withSnaps = false) {
     const {
@@ -119,12 +129,12 @@ var PostgresStore = class {
       backward,
       correlation
     } = query || {};
-    let sql = `SELECT * FROM "${this.table}" WHERE`;
+    let sql = `SELECT * FROM "${this.config.schema}"."${this.config.table}" WHERE`;
     const values = [];
     if (withSnaps)
       sql = sql.concat(
         ` id>=COALESCE((SELECT id
-          FROM "${this.table}"
+          FROM "${this.config.schema}"."${this.config.table}"
           WHERE stream='${stream}' AND name='${SNAP_EVENT}'
           ORDER BY id DESC LIMIT 1), 0)
           AND stream='${stream}'`
@@ -174,7 +184,9 @@ var PostgresStore = class {
     try {
       await client.query("BEGIN");
       const last = await client.query(
-        `SELECT version FROM "${this.table}" WHERE stream=$1 ORDER BY version DESC LIMIT 1`,
+        `SELECT version
+        FROM "${this.config.schema}"."${this.config.table}"
+        WHERE stream=$1 ORDER BY version DESC LIMIT 1`,
         [stream]
       );
       version = last.rowCount ? last.rows[0].version : -1;
@@ -188,7 +200,7 @@ var PostgresStore = class {
       msgs.map(async ({ name, data }) => {
         version++;
         const sql = `
-          INSERT INTO "${this.table}"(name, data, stream, version, meta)
+          INSERT INTO "${this.config.schema}"."${this.config.table}"(name, data, stream, version, meta)
           VALUES($1, $2, $3, $4, $5) RETURNING *`;
         const vals = [name, data, stream, version, meta];
         const { rows } = await client.query(sql, vals);
@@ -197,7 +209,7 @@ var PostgresStore = class {
       );
       await client.query(
         `
-        NOTIFY "${this.table}", '${JSON.stringify({
+        NOTIFY "${this.config.table}", '${JSON.stringify({
           operation: "INSERT",
           id: committed[0].name,
           position: committed[0].id
@@ -225,7 +237,7 @@ var PostgresStore = class {
     const { rows } = await this._pool.query(
       `
       SELECT stream, at
-      FROM "${this.table}_streams"
+      FROM "${this.config.schema}"."${this.config.table}_streams"
      WHERE blocked=false
      ORDER BY at ASC
      LIMIT $1::integer
@@ -234,7 +246,10 @@ var PostgresStore = class {
    );
    const after = rows.length ? rows.reduce((min, r) => Math.min(min, r.at), Number.MAX_SAFE_INTEGER) : -1;
    const events = [];
-    await this.query((e) => events.push(e), { after, limit });
+    await this.query((e) => e.name !== SNAP_EVENT && events.push(e), {
+      after,
+      limit
+    });
    return { streams: rows.map(({ stream }) => stream), events };
  }
  async lease(leases) {
@@ -245,7 +260,7 @@ var PostgresStore = class {
       await client.query("BEGIN");
       await client.query(
         `
-        INSERT INTO "${this.table}_streams" (stream)
+        INSERT INTO "${this.config.schema}"."${this.config.table}_streams" (stream)
        SELECT UNNEST($1::text[])
        ON CONFLICT (stream) DO NOTHING
        `,
@@ -254,11 +269,11 @@ var PostgresStore = class {
       const { rows } = await client.query(
         `
        WITH free AS (
-          SELECT * FROM "${this.table}_streams"
+          SELECT * FROM "${this.config.schema}"."${this.config.table}_streams"
          WHERE stream = ANY($1::text[]) AND (leased_by IS NULL OR leased_until <= NOW())
          FOR UPDATE
        )
-        UPDATE "${this.table}_streams" U
+        UPDATE "${this.config.schema}"."${this.config.table}_streams" U
        SET
          leased_by = $2::uuid,
          leased_at = $3::integer,
@@ -267,7 +282,7 @@ var PostgresStore = class {
        WHERE U.stream = free.stream
        RETURNING U.stream, U.leased_at, U.retry
        `,
-        [streams, by, at, this.leaseMillis]
+        [streams, by, at, this.config.leaseMillis]
      );
      await client.query("COMMIT");
      return rows.map(({ stream, leased_at, retry }) => ({
@@ -291,7 +306,7 @@ var PostgresStore = class {
       await client.query("BEGIN");
       for (const { stream, by, at, retry, block } of leases) {
         await client.query(
-          `UPDATE "${this.table}_streams"
+          `UPDATE "${this.config.schema}"."${this.config.table}_streams"
          SET
            at = $3::integer,
            retry = $4::integer,
@@ -315,7 +330,6 @@ var PostgresStore = class {
   }
 };
 export {
-  PostgresStore,
-  config
+  PostgresStore
 };
 //# sourceMappingURL=index.js.map
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-[single-line source map for the previous bundle; "sources": ../src/config.ts, ../src/PostgresStore.ts, ../src/seed.ts, ../src/utils.ts]
+[regenerated single-line source map; "sources": ../src/PostgresStore.ts, ../src/utils.ts]
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@rotorsoft/act-pg",
   "type": "module",
-  "version": "0.3.0",
+  "version": "0.4.1",
   "description": "act pg adapters",
   "author": "rotorsoft",
   "license": "MIT",
@@ -33,7 +33,7 @@
   "dependencies": {
     "pg": "^8.16.2",
     "zod": "^3.25.67",
-    "@rotorsoft/act": "0.
+    "@rotorsoft/act": "0.5.1"
   },
   "devDependencies": {
     "@types/pg": "^8.15.4"
package/dist/@types/config.d.ts
DELETED
@@ -1,24 +0,0 @@
-export declare const config: Readonly<{
-    pg: {
-        host: string;
-        user: string;
-        password: string;
-        database: string;
-        port: number;
-    };
-} & {
-    name: string;
-    version: string;
-    description: string;
-    author: string | {
-        name: string;
-        email?: string | undefined;
-    };
-    license: string;
-    dependencies: Record<string, string>;
-    env: "development" | "test" | "staging" | "production";
-    logLevel: "error" | "fatal" | "warn" | "info" | "debug" | "trace";
-    logSingleLine: boolean;
-    sleepMs: number;
-}>;
-//# sourceMappingURL=config.d.ts.map

package/dist/@types/config.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../src/config.ts"],"names":[],"mappings":"AAKA,eAAO,MAAM,MAAM;;;;;;;;;;;;;;;;;;;;;;EAoBlB,CAAC"}
package/dist/@types/seed.d.ts
DELETED

package/dist/@types/seed.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"seed.d.ts","sourceRoot":"","sources":["../../src/seed.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,UAAU,GAAI,OAAO,MAAM,KAAG,MA4C1C,CAAC"}