@rotorsoft/act-pg 0.2.0 → 0.3.0
This diff shows the changes between publicly released versions of this package as they appear in their respective public registries, and is provided for informational purposes only.
- package/dist/.tsbuildinfo +1 -1
- package/dist/@types/PostgresStore.d.ts.map +1 -0
- package/dist/@types/config.d.ts.map +1 -0
- package/dist/@types/index.d.ts +4 -0
- package/dist/@types/index.d.ts.map +1 -0
- package/dist/@types/seed.d.ts.map +1 -0
- package/dist/@types/utils.d.ts.map +1 -0
- package/dist/index.cjs +359 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.js +320 -3
- package/dist/index.js.map +1 -1
- package/package.json +19 -7
- package/dist/PostgresStore.d.ts.map +0 -1
- package/dist/PostgresStore.js +0 -208
- package/dist/PostgresStore.js.map +0 -1
- package/dist/config.d.ts.map +0 -1
- package/dist/config.js +0 -21
- package/dist/config.js.map +0 -1
- package/dist/index.d.ts +0 -4
- package/dist/index.d.ts.map +0 -1
- package/dist/seed.d.ts.map +0 -1
- package/dist/seed.js +0 -46
- package/dist/seed.js.map +0 -1
- package/dist/utils.d.ts.map +0 -1
- package/dist/utils.js +0 -19
- package/dist/utils.js.map +0 -1
- /package/dist/{PostgresStore.d.ts → @types/PostgresStore.d.ts} +0 -0
- /package/dist/{config.d.ts → @types/config.d.ts} +0 -0
- /package/dist/{seed.d.ts → @types/seed.d.ts} +0 -0
- /package/dist/{utils.d.ts → @types/utils.d.ts} +0 -0
package/dist/index.cjs
ADDED
@@ -0,0 +1,359 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target2, all) => {
+  for (var name in all)
+    __defProp(target2, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target2) => (target2 = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target2, "default", { value: mod, enumerable: true }) : target2,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// src/index.ts
+var index_exports = {};
+__export(index_exports, {
+  PostgresStore: () => PostgresStore,
+  config: () => config
+});
+module.exports = __toCommonJS(index_exports);
+
+// src/config.ts
+var import_act = require("@rotorsoft/act");
+var import_v4 = require("zod/v4");
+var { PG_HOST, PG_USER, PG_PASSWORD, PG_DATABASE, PG_PORT } = process.env;
+var config = (0, import_act.extend)(
+  {
+    pg: {
+      host: PG_HOST || "localhost",
+      user: PG_USER || "postgres",
+      password: PG_PASSWORD || "postgres",
+      database: PG_DATABASE || "postgres",
+      port: Number.parseInt(PG_PORT || "5431")
+    }
+  },
+  import_v4.z.object({
+    pg: import_v4.z.object({
+      host: import_v4.z.string().min(1),
+      user: import_v4.z.string().min(1),
+      password: import_v4.z.string().min(1),
+      database: import_v4.z.string().min(1),
+      port: import_v4.z.number().int().min(1e3).max(65535)
+    })
+  }),
+  (0, import_act.config)()
+);
+
+// src/PostgresStore.ts
+var import_act2 = require("@rotorsoft/act");
+var import_pg = __toESM(require("pg"), 1);
+
+// src/seed.ts
+var seed_store = (table) => `
+-- events
+CREATE TABLE IF NOT EXISTS public."${table}"
+(
+  id serial PRIMARY KEY,
+  name varchar(100) COLLATE pg_catalog."default" NOT NULL,
+  data jsonb,
+  stream varchar(100) COLLATE pg_catalog."default" NOT NULL,
+  version int NOT NULL,
+  created timestamptz NOT NULL DEFAULT now(),
+  meta jsonb
+) TABLESPACE pg_default;
+
+CREATE UNIQUE INDEX IF NOT EXISTS "${table}_stream_ix"
+  ON public."${table}" USING btree (stream COLLATE pg_catalog."default" ASC, version ASC)
+  TABLESPACE pg_default;
+
+CREATE INDEX IF NOT EXISTS "${table}_name_ix"
+  ON public."${table}" USING btree (name COLLATE pg_catalog."default" ASC)
+  TABLESPACE pg_default;
+
+CREATE INDEX IF NOT EXISTS "${table}_created_id_ix"
+  ON public."${table}" USING btree (created ASC, id ASC)
+  TABLESPACE pg_default;
+
+CREATE INDEX IF NOT EXISTS "${table}_correlation_ix"
+  ON public."${table}" USING btree ((meta ->> 'correlation'::text) COLLATE pg_catalog."default" ASC NULLS LAST)
+  TABLESPACE pg_default;
+
+-- streams
+CREATE TABLE IF NOT EXISTS public."${table}_streams"
+(
+  stream varchar(100) COLLATE pg_catalog."default" PRIMARY KEY,
+  at int not null default(-1),
+  retry smallint not null default(0),
+  blocked boolean not null default(false),
+  leased_at int,
+  leased_by uuid,
+  leased_until timestamptz
+) TABLESPACE pg_default;
+
+-- supports order by { blocked, at } when fetching
+CREATE INDEX IF NOT EXISTS "${table}_streams_fetch_ix"
+  ON public."${table}_streams" USING btree (blocked, at) TABLESPACE pg_default;
+`;
+
+// src/utils.ts
+var ISO_8601 = /^(\d{4})-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(\.\d+)?(Z|[+-][0-2][0-9]:[0-5][0-9])?$/;
+var dateReviver = (key, value) => {
+  if (typeof value === "string" && ISO_8601.test(value)) {
+    return new Date(value);
+  }
+  return value;
+};
+
+// src/PostgresStore.ts
+var { Pool, types } = import_pg.default;
+types.setTypeParser(
+  types.builtins.JSONB,
+  (val) => JSON.parse(val, dateReviver)
+);
+var PostgresStore = class {
+  constructor(table, leaseMillis = 3e4) {
+    this.table = table;
+    this.leaseMillis = leaseMillis;
+  }
+  _pool = new Pool(config.pg);
+  async dispose() {
+    await this._pool.end();
+  }
+  async seed() {
+    const seed = seed_store(this.table);
+    await this._pool.query(seed);
+  }
+  async drop() {
+    await this._pool.query(`DROP TABLE IF EXISTS "${this.table}"`);
+    await this._pool.query(`DROP TABLE IF EXISTS "${this.table}_streams"`);
+  }
+  async query(callback, query, withSnaps = false) {
+    const {
+      stream,
+      names,
+      before,
+      after,
+      limit,
+      created_before,
+      created_after,
+      backward,
+      correlation
+    } = query || {};
+    let sql = `SELECT * FROM "${this.table}" WHERE`;
+    const values = [];
+    if (withSnaps)
+      sql = sql.concat(
+        ` id>=COALESCE((SELECT id
+          FROM "${this.table}"
+          WHERE stream='${stream}' AND name='${import_act2.SNAP_EVENT}'
+          ORDER BY id DESC LIMIT 1), 0)
+        AND stream='${stream}'`
+      );
+    else if (query) {
+      if (typeof after !== "undefined") {
+        values.push(after);
+        sql = sql.concat(" id>$1");
+      } else sql = sql.concat(" id>-1");
+      if (stream) {
+        values.push(stream);
+        sql = sql.concat(` AND stream=$${values.length}`);
+      }
+      if (names && names.length) {
+        values.push(names);
+        sql = sql.concat(` AND name = ANY($${values.length})`);
+      }
+      if (before) {
+        values.push(before);
+        sql = sql.concat(` AND id<$${values.length}`);
+      }
+      if (created_after) {
+        values.push(created_after.toISOString());
+        sql = sql.concat(` AND created>$${values.length}`);
+      }
+      if (created_before) {
+        values.push(created_before.toISOString());
+        sql = sql.concat(` AND created<$${values.length}`);
+      }
+      if (correlation) {
+        values.push(correlation);
+        sql = sql.concat(` AND meta->>'correlation'=$${values.length}`);
+      }
+    }
+    sql = sql.concat(` ORDER BY id ${backward ? "DESC" : "ASC"}`);
+    if (limit) {
+      values.push(limit);
+      sql = sql.concat(` LIMIT $${values.length}`);
+    }
+    const result = await this._pool.query(sql, values);
+    for (const row of result.rows) callback(row);
+    return result.rowCount ?? 0;
+  }
+  async commit(stream, msgs, meta, expectedVersion) {
+    const client = await this._pool.connect();
+    let version = -1;
+    try {
+      await client.query("BEGIN");
+      const last = await client.query(
+        `SELECT version FROM "${this.table}" WHERE stream=$1 ORDER BY version DESC LIMIT 1`,
+        [stream]
+      );
+      version = last.rowCount ? last.rows[0].version : -1;
+      if (expectedVersion && version !== expectedVersion)
+        throw new import_act2.ConcurrencyError(
+          version,
+          msgs,
+          expectedVersion
+        );
+      const committed = await Promise.all(
+        msgs.map(async ({ name, data }) => {
+          version++;
+          const sql = `
+            INSERT INTO "${this.table}"(name, data, stream, version, meta)
+            VALUES($1, $2, $3, $4, $5) RETURNING *`;
+          const vals = [name, data, stream, version, meta];
+          const { rows } = await client.query(sql, vals);
+          return rows.at(0);
+        })
+      );
+      await client.query(
+        `
+        NOTIFY "${this.table}", '${JSON.stringify({
+          operation: "INSERT",
+          id: committed[0].name,
+          position: committed[0].id
+        })}';
+        COMMIT;
+        `
+      ).catch((error) => {
+        import_act2.logger.error(error);
+        throw new import_act2.ConcurrencyError(
+          version,
+          msgs,
+          expectedVersion || -1
+        );
+      });
+      return committed;
+    } catch (error) {
+      await client.query("ROLLBACK").catch(() => {
+      });
+      throw error;
+    } finally {
+      client.release();
+    }
+  }
+  async fetch(limit) {
+    const { rows } = await this._pool.query(
+      `
+      SELECT stream, at
+      FROM "${this.table}_streams"
+      WHERE blocked=false
+      ORDER BY at ASC
+      LIMIT $1::integer
+      `,
+      [limit]
+    );
+    const after = rows.length ? rows.reduce((min, r) => Math.min(min, r.at), Number.MAX_SAFE_INTEGER) : -1;
+    const events = [];
+    await this.query((e) => events.push(e), { after, limit });
+    return { streams: rows.map(({ stream }) => stream), events };
+  }
+  async lease(leases) {
+    const { by, at } = leases.at(0);
+    const streams = leases.map(({ stream }) => stream);
+    const client = await this._pool.connect();
+    try {
+      await client.query("BEGIN");
+      await client.query(
+        `
+        INSERT INTO "${this.table}_streams" (stream)
+        SELECT UNNEST($1::text[])
+        ON CONFLICT (stream) DO NOTHING
+        `,
+        [streams]
+      );
+      const { rows } = await client.query(
+        `
+        WITH free AS (
+          SELECT * FROM "${this.table}_streams"
+          WHERE stream = ANY($1::text[]) AND (leased_by IS NULL OR leased_until <= NOW())
+          FOR UPDATE
+        )
+        UPDATE "${this.table}_streams" U
+        SET
+          leased_by = $2::uuid,
+          leased_at = $3::integer,
+          leased_until = NOW() + ($4::integer || ' milliseconds')::interval
+        FROM free
+        WHERE U.stream = free.stream
+        RETURNING U.stream, U.leased_at, U.retry
+        `,
+        [streams, by, at, this.leaseMillis]
+      );
+      await client.query("COMMIT");
+      return rows.map(({ stream, leased_at, retry }) => ({
+        stream,
+        by,
+        at: leased_at,
+        retry,
+        block: false
+      }));
+    } catch (error) {
+      await client.query("ROLLBACK").catch(() => {
+      });
+      throw error;
+    } finally {
+      client.release();
+    }
+  }
+  async ack(leases) {
+    const client = await this._pool.connect();
+    try {
+      await client.query("BEGIN");
+      for (const { stream, by, at, retry, block } of leases) {
+        await client.query(
+          `UPDATE "${this.table}_streams"
+          SET
+            at = $3::integer,
+            retry = $4::integer,
+            blocked = $5::boolean,
+            leased_by = NULL,
+            leased_at = NULL,
+            leased_until = NULL
+          WHERE
+            stream = $1::text
+            AND leased_by = $2::uuid`,
+          [stream, by, at, retry, block]
+        );
+      }
+      await client.query("COMMIT");
+    } catch {
+      await client.query("ROLLBACK").catch(() => {
+      });
+    } finally {
+      client.release();
+    }
+  }
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  PostgresStore,
+  config
+});
+//# sourceMappingURL=index.cjs.map
package/dist/index.cjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/index.ts","../src/config.ts","../src/PostgresStore.ts","../src/seed.ts","../src/utils.ts"],"sourcesContent":["/** @module act-pg */\nexport * from \"./config.js\";\nexport * from \"./PostgresStore.js\";\n","import { extend, config as target } from \"@rotorsoft/act\";\nimport { z } from \"zod/v4\";\n\nconst { PG_HOST, PG_USER, PG_PASSWORD, PG_DATABASE, PG_PORT } = process.env;\n\nexport const config = extend(\n {\n pg: {\n host: PG_HOST || \"localhost\",\n user: PG_USER || \"postgres\",\n password: PG_PASSWORD || \"postgres\",\n database: PG_DATABASE || \"postgres\",\n port: Number.parseInt(PG_PORT || \"5431\"),\n },\n },\n z.object({\n pg: z.object({\n host: z.string().min(1),\n user: z.string().min(1),\n password: z.string().min(1),\n database: z.string().min(1),\n port: z.number().int().min(1000).max(65535),\n }),\n }),\n target()\n);\n","import type {\n Committed,\n EventMeta,\n Lease,\n Message,\n Query,\n Schemas,\n Store,\n} from \"@rotorsoft/act\";\nimport { ConcurrencyError, SNAP_EVENT, logger } from \"@rotorsoft/act\";\nimport pg from \"pg\";\nimport { config } from \"./config.js\";\nimport { seed_store } from \"./seed.js\";\nimport { dateReviver } from \"./utils.js\";\n\nconst { Pool, types } = pg;\ntypes.setTypeParser(types.builtins.JSONB, (val) =>\n JSON.parse(val, dateReviver)\n);\n\nexport class PostgresStore implements Store {\n private _pool = new Pool(config.pg);\n\n constructor(\n readonly table: string,\n readonly leaseMillis = 30_000\n ) {}\n async dispose() {\n await this._pool.end();\n }\n\n async seed() {\n const seed = seed_store(this.table);\n await this._pool.query(seed);\n }\n\n async drop() {\n await this._pool.query(`DROP TABLE IF EXISTS \"${this.table}\"`);\n await this._pool.query(`DROP TABLE IF EXISTS \"${this.table}_streams\"`);\n }\n\n async query<E extends Schemas>(\n callback: (event: Committed<E, keyof E>) => void,\n query?: Query,\n withSnaps = false\n ) {\n const {\n stream,\n names,\n before,\n after,\n limit,\n created_before,\n created_after,\n backward,\n correlation,\n } = query || {};\n\n let sql = `SELECT * FROM \"${this.table}\" WHERE`;\n const values: any[] = [];\n\n if (withSnaps)\n sql = sql.concat(\n ` id>=COALESCE((SELECT id\n FROM \"${this.table}\"\n WHERE stream='${stream}' AND name='${SNAP_EVENT}'\n ORDER BY id DESC LIMIT 1), 0)\n AND stream='${stream}'`\n );\n else if (query) {\n if (typeof after !== \"undefined\") {\n values.push(after);\n sql = sql.concat(\" id>$1\");\n } else sql = sql.concat(\" id>-1\");\n if (stream) {\n values.push(stream);\n sql = sql.concat(` AND stream=$${values.length}`);\n }\n if (names && names.length) {\n values.push(names);\n sql = sql.concat(` AND name = ANY($${values.length})`);\n }\n if (before) {\n values.push(before);\n sql = sql.concat(` AND id<$${values.length}`);\n }\n if (created_after) {\n values.push(created_after.toISOString());\n sql = sql.concat(` AND created>$${values.length}`);\n }\n if (created_before) {\n values.push(created_before.toISOString());\n sql = sql.concat(` AND created<$${values.length}`);\n }\n if (correlation) {\n values.push(correlation);\n sql = sql.concat(` AND meta->>'correlation'=$${values.length}`);\n }\n }\n sql = sql.concat(` ORDER BY id ${backward ? \"DESC\" : \"ASC\"}`);\n if (limit) {\n values.push(limit);\n sql = sql.concat(` LIMIT $${values.length}`);\n }\n\n const result = await this._pool.query<Committed<E, keyof E>>(sql, values);\n for (const row of result.rows) callback(row);\n\n return result.rowCount ?? 
0;\n }\n\n async commit<E extends Schemas>(\n stream: string,\n msgs: Message<E, keyof E>[],\n meta: EventMeta,\n expectedVersion?: number\n ) {\n const client = await this._pool.connect();\n let version = -1;\n try {\n await client.query(\"BEGIN\");\n\n const last = await client.query<Committed<E, keyof E>>(\n `SELECT version FROM \"${this.table}\" WHERE stream=$1 ORDER BY version DESC LIMIT 1`,\n [stream]\n );\n version = last.rowCount ? last.rows[0].version : -1;\n if (expectedVersion && version !== expectedVersion)\n throw new ConcurrencyError(\n version,\n msgs as unknown as Message<Schemas, string>[],\n expectedVersion\n );\n\n const committed = await Promise.all(\n msgs.map(async ({ name, data }) => {\n version++;\n const sql = `\n INSERT INTO \"${this.table}\"(name, data, stream, version, meta) \n VALUES($1, $2, $3, $4, $5) RETURNING *`;\n const vals = [name, data, stream, version, meta];\n const { rows } = await client.query<Committed<E, keyof E>>(sql, vals);\n return rows.at(0)!;\n })\n );\n\n await client\n .query(\n `\n NOTIFY \"${this.table}\", '${JSON.stringify({\n operation: \"INSERT\",\n id: committed[0].name,\n position: committed[0].id,\n })}';\n COMMIT;\n `\n )\n .catch((error) => {\n logger.error(error);\n throw new ConcurrencyError(\n version,\n msgs as unknown as Message<Schemas, string>[],\n expectedVersion || -1\n );\n });\n return committed;\n } catch (error) {\n await client.query(\"ROLLBACK\").catch(() => {});\n throw error;\n } finally {\n client.release();\n }\n }\n\n async fetch<E extends Schemas>(limit: number) {\n const { rows } = await this._pool.query<{ stream: string; at: number }>(\n `\n SELECT stream, at\n FROM \"${this.table}_streams\"\n WHERE blocked=false\n ORDER BY at ASC\n LIMIT $1::integer\n `,\n [limit]\n );\n\n const after = rows.length\n ? 
rows.reduce((min, r) => Math.min(min, r.at), Number.MAX_SAFE_INTEGER)\n : -1;\n\n const events: Committed<E, keyof E>[] = [];\n await this.query<E>((e) => events.push(e), { after, limit });\n return { streams: rows.map(({ stream }) => stream), events };\n }\n\n async lease(leases: Lease[]) {\n const { by, at } = leases.at(0)!;\n const streams = leases.map(({ stream }) => stream);\n const client = await this._pool.connect();\n\n try {\n await client.query(\"BEGIN\");\n // insert new streams\n await client.query(\n `\n INSERT INTO \"${this.table}_streams\" (stream)\n SELECT UNNEST($1::text[])\n ON CONFLICT (stream) DO NOTHING\n `,\n [streams]\n );\n // set leases\n const { rows } = await client.query<{\n stream: string;\n leased_at: number;\n retry: number;\n }>(\n `\n WITH free AS (\n SELECT * FROM \"${this.table}_streams\" \n WHERE stream = ANY($1::text[]) AND (leased_by IS NULL OR leased_until <= NOW())\n FOR UPDATE\n )\n UPDATE \"${this.table}_streams\" U\n SET\n leased_by = $2::uuid,\n leased_at = $3::integer,\n leased_until = NOW() + ($4::integer || ' milliseconds')::interval\n FROM free\n WHERE U.stream = free.stream\n RETURNING U.stream, U.leased_at, U.retry\n `,\n [streams, by, at, this.leaseMillis]\n );\n await client.query(\"COMMIT\");\n\n return rows.map(({ stream, leased_at, retry }) => ({\n stream,\n by,\n at: leased_at,\n retry,\n block: false,\n }));\n } catch (error) {\n await client.query(\"ROLLBACK\").catch(() => {});\n throw error;\n } finally {\n client.release();\n }\n }\n\n async ack(leases: Lease[]) {\n const client = await this._pool.connect();\n\n try {\n await client.query(\"BEGIN\");\n for (const { stream, by, at, retry, block } of leases) {\n await client.query(\n `UPDATE \"${this.table}_streams\"\n SET\n at = $3::integer,\n retry = $4::integer,\n blocked = $5::boolean,\n leased_by = NULL,\n leased_at = NULL,\n leased_until = NULL\n WHERE\n stream = $1::text\n AND leased_by = $2::uuid`,\n [stream, by, at, retry, block]\n );\n }\n await client.query(\"COMMIT\");\n } catch {\n // leased_until fallback\n await client.query(\"ROLLBACK\").catch(() => {});\n } finally {\n client.release();\n }\n }\n}\n","export const seed_store = (table: string): string => `\n-- events\nCREATE TABLE IF NOT EXISTS public.\"${table}\"\n(\n\tid serial PRIMARY KEY,\n name varchar(100) COLLATE pg_catalog.\"default\" NOT NULL,\n data jsonb,\n stream varchar(100) COLLATE pg_catalog.\"default\" NOT NULL,\n version int NOT NULL,\n created timestamptz NOT NULL DEFAULT now(),\n meta jsonb\n) TABLESPACE pg_default;\n\nCREATE UNIQUE INDEX IF NOT EXISTS \"${table}_stream_ix\"\n ON public.\"${table}\" USING btree (stream COLLATE pg_catalog.\"default\" ASC, version ASC)\n TABLESPACE pg_default;\n\t\nCREATE INDEX IF NOT EXISTS \"${table}_name_ix\"\n ON public.\"${table}\" USING btree (name COLLATE pg_catalog.\"default\" ASC)\n TABLESPACE pg_default;\n \nCREATE INDEX IF NOT EXISTS \"${table}_created_id_ix\"\n ON public.\"${table}\" USING btree (created ASC, id ASC)\n TABLESPACE pg_default;\n\nCREATE INDEX IF NOT EXISTS \"${table}_correlation_ix\"\n ON public.\"${table}\" USING btree ((meta ->> 'correlation'::text) COLLATE pg_catalog.\"default\" ASC NULLS LAST)\n TABLESPACE pg_default;\n\n-- streams\nCREATE TABLE IF NOT EXISTS public.\"${table}_streams\"\n(\n stream varchar(100) COLLATE pg_catalog.\"default\" PRIMARY KEY,\n at int not null default(-1),\n retry smallint not null default(0),\n blocked boolean not null default(false),\n leased_at int,\n leased_by uuid,\n leased_until timestamptz\n) 
TABLESPACE pg_default;\n\n-- supports order by { blocked, at } when fetching\nCREATE INDEX IF NOT EXISTS \"${table}_streams_fetch_ix\"\n ON public.\"${table}_streams\" USING btree (blocked, at) TABLESPACE pg_default;\n`;\n","/**\n * Date reviver when parsing JSON strings with the following formats:\n * - YYYY-MM-DDTHH:MM:SS.sssZ\n * - YYYY-MM-DDTHH:MM:SS.sss+HH:MM\n * - YYYY-MM-DDTHH:MM:SS.sss-HH:MM\n */\nconst ISO_8601 =\n /^(\\d{4})-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(\\.\\d+)?(Z|[+-][0-2][0-9]:[0-5][0-9])?$/;\nexport const dateReviver = (key: string, value: string): string | Date => {\n if (typeof value === \"string\" && ISO_8601.test(value)) {\n return new Date(value);\n }\n return value;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,iBAAyC;AACzC,gBAAkB;AAElB,IAAM,EAAE,SAAS,SAAS,aAAa,aAAa,QAAQ,IAAI,QAAQ;AAEjE,IAAM,aAAS;AAAA,EACpB;AAAA,IACE,IAAI;AAAA,MACF,MAAM,WAAW;AAAA,MACjB,MAAM,WAAW;AAAA,MACjB,UAAU,eAAe;AAAA,MACzB,UAAU,eAAe;AAAA,MACzB,MAAM,OAAO,SAAS,WAAW,MAAM;AAAA,IACzC;AAAA,EACF;AAAA,EACA,YAAE,OAAO;AAAA,IACP,IAAI,YAAE,OAAO;AAAA,MACX,MAAM,YAAE,OAAO,EAAE,IAAI,CAAC;AAAA,MACtB,MAAM,YAAE,OAAO,EAAE,IAAI,CAAC;AAAA,MACtB,UAAU,YAAE,OAAO,EAAE,IAAI,CAAC;AAAA,MAC1B,UAAU,YAAE,OAAO,EAAE,IAAI,CAAC;AAAA,MAC1B,MAAM,YAAE,OAAO,EAAE,IAAI,EAAE,IAAI,GAAI,EAAE,IAAI,KAAK;AAAA,IAC5C,CAAC;AAAA,EACH,CAAC;AAAA,MACD,WAAAA,QAAO;AACT;;;AChBA,IAAAC,cAAqD;AACrD,gBAAe;;;ACVR,IAAM,aAAa,CAAC,UAA0B;AAAA;AAAA,qCAEhB,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qCAWL,KAAK;AAAA,eAC3B,KAAK;AAAA;AAAA;AAAA,8BAGU,KAAK;AAAA,eACpB,KAAK;AAAA;AAAA;AAAA,8BAGU,KAAK;AAAA,eACpB,KAAK;AAAA;AAAA;AAAA,8BAGU,KAAK;AAAA,eACpB,KAAK;AAAA;AAAA;AAAA;AAAA,qCAIiB,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,8BAYZ,KAAK;AAAA,eACpB,KAAK;AAAA;;;ACrCpB,IAAM,WACJ;AACK,IAAM,cAAc,CAAC,KAAa,UAAiC;AACxE,MAAI,OAAO,UAAU,YAAY,SAAS,KAAK,KAAK,GAAG;AACrD,WAAO,IAAI,KAAK,KAAK;AAAA,EACvB;AACA,SAAO;AACT;;;AFEA,IAAM,EAAE,MAAM,MAAM,IAAI,UAAAC;AACxB,MAAM;AAAA,EAAc,MAAM,SAAS;AAAA,EAAO,CAAC,QACzC,KAAK,MAAM,KAAK,WAAW;AAC7B;AAEO,IAAM,gBAAN,MAAqC;AAAA,EAG1C,YACW,OACA,cAAc,KACvB;AAFS;AACA;AAAA,EACR;AAAA,EALK,QAAQ,IAAI,KAAK,OAAO,EAAE;AAAA,EAMlC,MAAM,UAAU;AACd,UAAM,KAAK,MAAM,IAAI;AAAA,EACvB;AAAA,EAEA,MAAM,OAAO;AACX,UAAM,OAAO,WAAW,KAAK,KAAK;AAClC,UAAM,KAAK,MAAM,MAAM,IAAI;AAAA,EAC7B;AAAA,EAEA,MAAM,OAAO;AACX,UAAM,KAAK,MAAM,MAAM,yBAAyB,KAAK,KAAK,GAAG;AAC7D,UAAM,KAAK,MAAM,MAAM,yBAAyB,KAAK,KAAK,WAAW;AAAA,EACvE;AAAA,EAEA,MAAM,MACJ,UACA,OACA,YAAY,OACZ;AACA,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,IAAI,SAAS,CAAC;AAEd,QAAI,MAAM,kBAAkB,KAAK,KAAK;AACtC,UAAM,SAAgB,CAAC;AAEvB,QAAI;AACF,YAAM,IAAI;AAAA,QACR;AAAA,oBACY,KAAK,KAAK;AAAA,4BACF,MAAM,eAAe,sBAAU;AAAA;AAAA,0BAEjC,MAAM;AAAA,MAC1B;AAAA,aACO,OAAO;AACd,UAAI,OAAO,UAAU,aAAa;AAChC,eAAO,KAAK,KAAK;AACjB,cAAM,IAAI,OAAO,QAAQ;AAAA,MAC3B,MAAO,OAAM,IAAI,OAAO,QAAQ;AAChC,UAAI,QAAQ;AACV,eAAO,KAAK,MAAM;AAClB,cAAM,IAAI,OAAO,gBAAgB,OAAO,MAAM,EAAE;AAAA,MAClD;AACA,UAAI,SAAS,MAAM,QAAQ;AACzB,eAAO,KAAK,KAAK;AACjB,cAAM,IAAI,OAAO,oBAAoB,OAAO,MAAM,GAAG;AAAA,MACvD;AACA,UAAI,QAAQ;AACV,eAAO,KAAK,MAAM;AAClB,cAAM,IAAI,OAAO,YAAY,OAAO,MAAM,EAAE;AAAA,MAC9C;AACA,UAAI,eAAe;AACjB,eAAO,KAAK,cAAc,YAAY,CAAC;AACvC,cAAM,IAAI,OAAO,iBAAiB,OAAO,MAAM,EAAE;AAAA,MACnD;AACA,UAAI,gBAAgB;AAClB,eAAO,KAAK,eAAe,YAAY,CAAC;AACxC,cAAM,IAAI,OAAO,iBAAiB,OAAO,MAAM,EAAE;AAAA,MACnD;AACA,UAAI,aAAa;AACf,eAAO,KAAK,WAAW;AACvB,cAAM,IAAI,OAAO,8BAA8B,OAAO,MAAM,EAAE;AAAA,MAChE;AAAA,IACF;AACA,UAAM,IAAI,OAAO,gBAAgB,WAAW
,SAAS,KAAK,EAAE;AAC5D,QAAI,OAAO;AACT,aAAO,KAAK,KAAK;AACjB,YAAM,IAAI,OAAO,WAAW,OAAO,MAAM,EAAE;AAAA,IAC7C;AAEA,UAAM,SAAS,MAAM,KAAK,MAAM,MAA6B,KAAK,MAAM;AACxE,eAAW,OAAO,OAAO,KAAM,UAAS,GAAG;AAE3C,WAAO,OAAO,YAAY;AAAA,EAC5B;AAAA,EAEA,MAAM,OACJ,QACA,MACA,MACA,iBACA;AACA,UAAM,SAAS,MAAM,KAAK,MAAM,QAAQ;AACxC,QAAI,UAAU;AACd,QAAI;AACF,YAAM,OAAO,MAAM,OAAO;AAE1B,YAAM,OAAO,MAAM,OAAO;AAAA,QACxB,wBAAwB,KAAK,KAAK;AAAA,QAClC,CAAC,MAAM;AAAA,MACT;AACA,gBAAU,KAAK,WAAW,KAAK,KAAK,CAAC,EAAE,UAAU;AACjD,UAAI,mBAAmB,YAAY;AACjC,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAEF,YAAM,YAAY,MAAM,QAAQ;AAAA,QAC9B,KAAK,IAAI,OAAO,EAAE,MAAM,KAAK,MAAM;AACjC;AACA,gBAAM,MAAM;AAAA,yBACG,KAAK,KAAK;AAAA;AAEzB,gBAAM,OAAO,CAAC,MAAM,MAAM,QAAQ,SAAS,IAAI;AAC/C,gBAAM,EAAE,KAAK,IAAI,MAAM,OAAO,MAA6B,KAAK,IAAI;AACpE,iBAAO,KAAK,GAAG,CAAC;AAAA,QAClB,CAAC;AAAA,MACH;AAEA,YAAM,OACH;AAAA,QACC;AAAA,sBACY,KAAK,KAAK,OAAO,KAAK,UAAU;AAAA,UACxC,WAAW;AAAA,UACX,IAAI,UAAU,CAAC,EAAE;AAAA,UACjB,UAAU,UAAU,CAAC,EAAE;AAAA,QACzB,CAAC,CAAC;AAAA;AAAA;AAAA,MAGN,EACC,MAAM,CAAC,UAAU;AAChB,2BAAO,MAAM,KAAK;AAClB,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA,mBAAmB;AAAA,QACrB;AAAA,MACF,CAAC;AACH,aAAO;AAAA,IACT,SAAS,OAAO;AACd,YAAM,OAAO,MAAM,UAAU,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAC7C,YAAM;AAAA,IACR,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA,EAEA,MAAM,MAAyB,OAAe;AAC5C,UAAM,EAAE,KAAK,IAAI,MAAM,KAAK,MAAM;AAAA,MAChC;AAAA;AAAA,cAEQ,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA,MAKlB,CAAC,KAAK;AAAA,IACR;AAEA,UAAM,QAAQ,KAAK,SACf,KAAK,OAAO,CAAC,KAAK,MAAM,KAAK,IAAI,KAAK,EAAE,EAAE,GAAG,OAAO,gBAAgB,IACpE;AAEJ,UAAM,SAAkC,CAAC;AACzC,UAAM,KAAK,MAAS,CAAC,MAAM,OAAO,KAAK,CAAC,GAAG,EAAE,OAAO,MAAM,CAAC;AAC3D,WAAO,EAAE,SAAS,KAAK,IAAI,CAAC,EAAE,OAAO,MAAM,MAAM,GAAG,OAAO;AAAA,EAC7D;AAAA,EAEA,MAAM,MAAM,QAAiB;AAC3B,UAAM,EAAE,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC;AAC9B,UAAM,UAAU,OAAO,IAAI,CAAC,EAAE,OAAO,MAAM,MAAM;AACjD,UAAM,SAAS,MAAM,KAAK,MAAM,QAAQ;AAExC,QAAI;AACF,YAAM,OAAO,MAAM,OAAO;AAE1B,YAAM,OAAO;AAAA,QACX;AAAA,uBACe,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA,QAIzB,CAAC,OAAO;AAAA,MACV;AAEA,YAAM,EAAE,KAAK,IAAI,MAAM,OAAO;AAAA,QAK5B;AAAA;AAAA,2BAEmB,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA,kBAInB,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QASpB,CAAC,SAAS,IAAI,IAAI,KAAK,WAAW;AAAA,MACpC;AACA,YAAM,OAAO,MAAM,QAAQ;AAE3B,aAAO,KAAK,IAAI,CAAC,EAAE,QAAQ,WAAW,MAAM,OAAO;AAAA,QACjD;AAAA,QACA;AAAA,QACA,IAAI;AAAA,QACJ;AAAA,QACA,OAAO;AAAA,MACT,EAAE;AAAA,IACJ,SAAS,OAAO;AACd,YAAM,OAAO,MAAM,UAAU,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAC7C,YAAM;AAAA,IACR,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA,EAEA,MAAM,IAAI,QAAiB;AACzB,UAAM,SAAS,MAAM,KAAK,MAAM,QAAQ;AAExC,QAAI;AACF,YAAM,OAAO,MAAM,OAAO;AAC1B,iBAAW,EAAE,QAAQ,IAAI,IAAI,OAAO,MAAM,KAAK,QAAQ;AACrD,cAAM,OAAO;AAAA,UACX,WAAW,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAWrB,CAAC,QAAQ,IAAI,IAAI,OAAO,KAAK;AAAA,QAC/B;AAAA,MACF;AACA,YAAM,OAAO,MAAM,QAAQ;AAAA,IAC7B,QAAQ;AAEN,YAAM,OAAO,MAAM,UAAU,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAAA,IAC/C,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AACF;","names":["target","import_act","pg"]}
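A minimal usage sketch of the `PostgresStore` and `config` exports bundled above, assuming an illustrative table name, event name, and meta payload; the real `EventMeta` and `Message` types come from `@rotorsoft/act` and are only approximated here.

```ts
// Sketch only: "my_events", "Opened", and the meta payload are illustrative.
import { PostgresStore, config } from "@rotorsoft/act-pg";

async function main() {
  // config.pg is validated from PG_HOST / PG_USER / PG_PASSWORD / PG_DATABASE / PG_PORT
  console.log("connecting to", config.pg.host, config.pg.port);

  const store = new PostgresStore("my_events", 30_000); // table name, lease millis
  await store.seed(); // creates "my_events" and "my_events_streams" with their indexes

  // append one event; meta shape is approximated here (see @rotorsoft/act for the real type)
  const meta = { correlation: "corr-1", causation: {} } as any;
  await store.commit(
    "stream-A",
    [{ name: "Opened", data: { by: "alice" } }] as any,
    meta
  );

  // read the stream back through the query callback
  const events: unknown[] = [];
  await store.query((e) => events.push(e), { stream: "stream-A" });
  console.log(events.length, "event(s) read");

  await store.dispose(); // ends the pg pool
}

main().catch(console.error);
```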