@rotorsoft/act-pg 0.4.1 → 0.4.3

This diff shows the contents of publicly available package versions as released to their public registry. It is provided for informational purposes only and reflects the changes between those published versions.
package/dist/index.cjs CHANGED
@@ -66,13 +66,30 @@ var DEFAULT_CONFIG = {
  var PostgresStore = class {
  _pool;
  config;
+ _fqt;
+ _fqs;
+ /**
+ * Create a new PostgresStore instance.
+ * @param config Partial configuration (host, port, user, password, schema, table, etc.)
+ */
  constructor(config = {}) {
  this.config = { ...DEFAULT_CONFIG, ...config };
  this._pool = new Pool(this.config);
+ this._fqt = `"${this.config.schema}"."${this.config.table}"`;
+ this._fqs = `"${this.config.schema}"."${this.config.table}_streams"`;
  }
+ /**
+ * Dispose of the store and close all database connections.
+ * @returns Promise that resolves when all connections are closed
+ */
  async dispose() {
  await this._pool.end();
  }
+ /**
+ * Seed the database with required tables, indexes, and schema for event storage.
+ * @returns Promise that resolves when seeding is complete
+ * @throws Error if seeding fails
+ */
  async seed() {
  const client = await this._pool.connect();
  try {
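
The new `_fqt` and `_fqs` fields cache the fully qualified, double-quoted identifiers of the events table and its companion `<table>_streams` table, so the SQL in the rest of the file no longer rebuilds them from `config.schema`/`config.table` at every call site. A minimal usage sketch of the constructor and lifecycle methods documented above (connection values are illustrative; omitted keys fall back to DEFAULT_CONFIG):

    import { PostgresStore } from "@rotorsoft/act-pg";

    // Illustrative settings; omitted keys fall back to DEFAULT_CONFIG.
    // With schema "my_app" and table "events", _fqt caches '"my_app"."events"'
    // and _fqs caches '"my_app"."events_streams"'.
    const store = new PostgresStore({
      host: "localhost",
      port: 5432,
      schema: "my_app",
      table: "events",
    });

    await store.seed();    // create schema, tables, and indexes (DDL below)
    // ... use the store ...
    await store.dispose(); // close the underlying pg connection pool
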
@@ -81,7 +98,7 @@ var PostgresStore = class {
  `CREATE SCHEMA IF NOT EXISTS "${this.config.schema}";`
  );
  await client.query(
- `CREATE TABLE IF NOT EXISTS "${this.config.schema}"."${this.config.table}" (
+ `CREATE TABLE IF NOT EXISTS ${this._fqt} (
  id serial PRIMARY KEY,
  name varchar(100) COLLATE pg_catalog."default" NOT NULL,
  data jsonb,
@@ -93,22 +110,22 @@ var PostgresStore = class {
  );
  await client.query(
  `CREATE UNIQUE INDEX IF NOT EXISTS "${this.config.table}_stream_ix"
- ON "${this.config.schema}"."${this.config.table}" (stream COLLATE pg_catalog."default", version);`
+ ON ${this._fqt} (stream COLLATE pg_catalog."default", version);`
  );
  await client.query(
  `CREATE INDEX IF NOT EXISTS "${this.config.table}_name_ix"
- ON "${this.config.schema}"."${this.config.table}" (name COLLATE pg_catalog."default");`
+ ON ${this._fqt} (name COLLATE pg_catalog."default");`
  );
  await client.query(
  `CREATE INDEX IF NOT EXISTS "${this.config.table}_created_id_ix"
- ON "${this.config.schema}"."${this.config.table}" (created, id);`
+ ON ${this._fqt} (created, id);`
  );
  await client.query(
  `CREATE INDEX IF NOT EXISTS "${this.config.table}_correlation_ix"
- ON "${this.config.schema}"."${this.config.table}" ((meta ->> 'correlation') COLLATE pg_catalog."default");`
+ ON ${this._fqt} ((meta ->> 'correlation') COLLATE pg_catalog."default");`
  );
  await client.query(
- `CREATE TABLE IF NOT EXISTS "${this.config.schema}"."${this.config.table}_streams" (
+ `CREATE TABLE IF NOT EXISTS ${this._fqs} (
  stream varchar(100) COLLATE pg_catalog."default" PRIMARY KEY,
  at int NOT NULL DEFAULT -1,
  retry smallint NOT NULL DEFAULT 0,
@@ -120,7 +137,7 @@ var PostgresStore = class {
  );
  await client.query(
  `CREATE INDEX IF NOT EXISTS "${this.config.table}_streams_fetch_ix"
- ON "${this.config.schema}"."${this.config.table}_streams" (blocked, at);`
+ ON ${this._fqs} (blocked, at);`
  );
  await client.query("COMMIT");
  import_act.logger.info(
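
For orientation, the DDL above maps onto roughly these row shapes (a TypeScript sketch inferred from the CREATE TABLE statements; the package itself types rows through `Committed` and `Lease` from `@rotorsoft/act`, not these names):

    // Approximate row of "<schema>"."<table>" (the events table above).
    type EventRow = {
      id: number;      // serial PRIMARY KEY, global position
      name: string;    // varchar(100) event name
      data: unknown;   // jsonb payload
      stream: string;  // varchar(100) stream name
      version: number; // int, per-stream sequence
      created: Date;   // timestamptz DEFAULT now()
      meta: unknown;   // jsonb (correlation, causation, ...)
    };

    // Approximate row of "<schema>"."<table>_streams" (drain bookkeeping).
    type StreamRow = {
      stream: string;            // PRIMARY KEY
      at: number;                // last acked position, DEFAULT -1
      retry: number;             // smallint DEFAULT 0
      blocked: boolean;          // DEFAULT false
      leased_at: number | null;
      leased_by: string | null;  // uuid of the leasing worker
      leased_until: Date | null; // lease expiry (NOW() + leaseMillis)
    };
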
@@ -134,6 +151,10 @@ var PostgresStore = class {
  client.release();
  }
  }
+ /**
+ * Drop all tables and schema created by the store (for testing or cleanup).
+ * @returns Promise that resolves when the schema is dropped
+ */
  async drop() {
  await this._pool.query(
  `
@@ -142,8 +163,8 @@ var PostgresStore = class {
  IF EXISTS (SELECT 1 FROM information_schema.schemata
  WHERE schema_name = '${this.config.schema}'
  ) THEN
- EXECUTE 'DROP TABLE IF EXISTS "${this.config.schema}"."${this.config.table}"';
- EXECUTE 'DROP TABLE IF EXISTS "${this.config.schema}"."${this.config.table}_streams"';
+ EXECUTE 'DROP TABLE IF EXISTS ${this._fqt}';
+ EXECUTE 'DROP TABLE IF EXISTS ${this._fqs}';
  IF '${this.config.schema}' <> 'public' THEN
  EXECUTE 'DROP SCHEMA "${this.config.schema}" CASCADE';
  END IF;
@@ -153,6 +174,17 @@ var PostgresStore = class {
  `
  );
  }
+ /**
+ * Query events from the store, optionally filtered by stream, event name, time, etc.
+ *
+ * @param callback Function called for each event found
+ * @param query (Optional) Query filter (stream, names, before, after, etc.)
+ * @param withSnaps (Optional) If true, includes only events after the last snapshot
+ * @returns The number of events found
+ *
+ * @example
+ * await store.query((event) => console.log(event), { stream: "A" });
+ */
  async query(callback, query, withSnaps = false) {
  const {
  stream,
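
Expanding on the @example in the new doc comment, a hedged sketch of a filtered query; event names and positions are illustrative, and event schemas are elided with `<any>`:

    import { PostgresStore } from "@rotorsoft/act-pg";

    const store = new PostgresStore(); // defaults: "public"."events" on localhost

    // Collect up to 100 events named "ItemAdded"/"ItemRemoved" on stream "A"
    // with id greater than 10, oldest first.
    const found: any[] = [];
    const count = await store.query<any>((e) => found.push(e), {
      stream: "A",
      names: ["ItemAdded", "ItemRemoved"],
      after: 10,
      limit: 100,
      backward: false,
    });
    console.log(`matched ${count} events`);
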
@@ -165,68 +197,82 @@ var PostgresStore = class {
  backward,
  correlation
  } = query || {};
- let sql = `SELECT * FROM "${this.config.schema}"."${this.config.table}" WHERE`;
+ let sql = `SELECT * FROM ${this._fqt}`;
+ const conditions = [];
  const values = [];
- if (withSnaps)
- sql = sql.concat(
- ` id>=COALESCE((SELECT id
- FROM "${this.config.schema}"."${this.config.table}"
- WHERE stream='${stream}' AND name='${import_act.SNAP_EVENT}'
- ORDER BY id DESC LIMIT 1), 0)
- AND stream='${stream}'`
+ if (withSnaps) {
+ conditions.push(
+ `id>=COALESCE((SELECT id FROM ${this._fqt} WHERE stream='${stream}' AND name='${import_act.SNAP_EVENT}' ORDER BY id DESC LIMIT 1), 0)`
  );
- else if (query) {
+ conditions.push(`stream='${stream}'`);
+ } else if (query) {
  if (typeof after !== "undefined") {
  values.push(after);
- sql = sql.concat(" id>$1");
- } else sql = sql.concat(" id>-1");
+ conditions.push(`id>$${values.length}`);
+ } else {
+ conditions.push("id>-1");
+ }
  if (stream) {
  values.push(stream);
- sql = sql.concat(` AND stream=$${values.length}`);
+ conditions.push(`stream=$${values.length}`);
  }
  if (names && names.length) {
  values.push(names);
- sql = sql.concat(` AND name = ANY($${values.length})`);
+ conditions.push(`name = ANY($${values.length})`);
  }
  if (before) {
  values.push(before);
- sql = sql.concat(` AND id<$${values.length}`);
+ conditions.push(`id<$${values.length}`);
  }
  if (created_after) {
  values.push(created_after.toISOString());
- sql = sql.concat(` AND created>$${values.length}`);
+ conditions.push(`created>$${values.length}`);
  }
  if (created_before) {
  values.push(created_before.toISOString());
- sql = sql.concat(` AND created<$${values.length}`);
+ conditions.push(`created<$${values.length}`);
  }
  if (correlation) {
  values.push(correlation);
- sql = sql.concat(` AND meta->>'correlation'=$${values.length}`);
+ conditions.push(`meta->>'correlation'=$${values.length}`);
  }
  }
- sql = sql.concat(` ORDER BY id ${backward ? "DESC" : "ASC"}`);
+ if (conditions.length) {
+ sql += " WHERE " + conditions.join(" AND ");
+ }
+ sql += ` ORDER BY id ${backward ? "DESC" : "ASC"}`;
  if (limit) {
  values.push(limit);
- sql = sql.concat(` LIMIT $${values.length}`);
+ sql += ` LIMIT $${values.length}`;
  }
  const result = await this._pool.query(sql, values);
  for (const row of result.rows) callback(row);
  return result.rowCount ?? 0;
  }
+ /**
+ * Commit new events to the store for a given stream, with concurrency control.
+ *
+ * @param stream The stream name
+ * @param msgs Array of messages (event name and data)
+ * @param meta Event metadata (correlation, causation, etc.)
+ * @param expectedVersion (Optional) Expected stream version for concurrency control
+ * @returns Array of committed events
+ * @throws ConcurrencyError if the expected version does not match
+ */
  async commit(stream, msgs, meta, expectedVersion) {
+ if (msgs.length === 0) return [];
  const client = await this._pool.connect();
  let version = -1;
  try {
  await client.query("BEGIN");
  const last = await client.query(
  `SELECT version
- FROM "${this.config.schema}"."${this.config.table}"
+ FROM ${this._fqt}
  WHERE stream=$1 ORDER BY version DESC LIMIT 1`,
  [stream]
  );
  version = last.rowCount ? last.rows[0].version : -1;
- if (expectedVersion && version !== expectedVersion)
+ if (typeof expectedVersion === "number" && version !== expectedVersion)
  throw new import_act.ConcurrencyError(
  version,
  msgs,
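
Two behavior changes are visible in this hunk: `commit` now returns early with an empty array when called with no messages, and the optimistic-concurrency guard checks `typeof expectedVersion === "number"`, so an expected version of 0 is no longer skipped by the old truthiness test. A hedged sketch of how a caller might rely on that (event names, payloads, and meta fields are illustrative; the exact EventMeta shape is defined by @rotorsoft/act, and event schemas are elided with `<any>`):

    import { ConcurrencyError } from "@rotorsoft/act";
    import { PostgresStore } from "@rotorsoft/act-pg";

    const store = new PostgresStore();

    try {
      const committed = await store.commit<any>(
        "A",
        [
          { name: "ItemAdded", data: { sku: "x" } },
          { name: "ItemRemoved", data: { sku: "x" } },
        ],
        { correlation: "corr-1", causation: {} } as any, // illustrative meta
        0 // expectedVersion 0 is now honored thanks to the typeof check above
      );
      console.log(committed.map((e) => e.version)); // e.g. [1, 2]
    } catch (error) {
      if (error instanceof ConcurrencyError) {
        // stream "A" was not at version 0; reload state and retry
      }
    }
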
@@ -236,7 +282,7 @@ var PostgresStore = class {
  msgs.map(async ({ name, data }) => {
  version++;
  const sql = `
- INSERT INTO "${this.config.schema}"."${this.config.table}"(name, data, stream, version, meta)
+ INSERT INTO ${this._fqt}(name, data, stream, version, meta)
  VALUES($1, $2, $3, $4, $5) RETURNING *`;
  const vals = [name, data, stream, version, meta];
  const { rows } = await client.query(sql, vals);
@@ -269,11 +315,17 @@ var PostgresStore = class {
  client.release();
  }
  }
+ /**
+ * Fetch a batch of events and streams for processing (drain cycle).
+ *
+ * @param limit The maximum number of events to fetch
+ * @returns An object with arrays of streams and events
+ */
  async fetch(limit) {
  const { rows } = await this._pool.query(
  `
  SELECT stream, at
- FROM "${this.config.schema}"."${this.config.table}_streams"
+ FROM ${this._fqs}
  WHERE blocked=false
  ORDER BY at ASC
  LIMIT $1::integer
@@ -288,6 +340,12 @@ var PostgresStore = class {
  });
  return { streams: rows.map(({ stream }) => stream), events };
  }
+ /**
+ * Lease streams for reaction processing, marking them as in-progress.
+ *
+ * @param leases Array of lease objects (stream, at, etc.)
+ * @returns Array of leased objects with updated lease info
+ */
  async lease(leases) {
  const { by, at } = leases.at(0);
  const streams = leases.map(({ stream }) => stream);
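
The lease records passed through `lease` and `ack` are destructured in this file as `{ stream, by, at, retry, block }`, and `lease` returns them with `at` taken from the row's `leased_at` and `block: false`. A type sketch of that shape, inferred from this file only (the authoritative Lease type is exported by @rotorsoft/act):

    // Field set inferred from the destructuring in lease() and ack().
    type LeaseSketch = {
      stream: string; // stream being leased
      by: string;     // leasing worker id (uuid)
      at: number;     // position reached on the stream
      retry: number;  // retry counter persisted in the _streams table
      block: boolean; // when true, ack() marks the stream as blocked
    };
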
@@ -296,7 +354,7 @@ var PostgresStore = class {
  await client.query("BEGIN");
  await client.query(
  `
- INSERT INTO "${this.config.schema}"."${this.config.table}_streams" (stream)
+ INSERT INTO ${this._fqs} (stream)
  SELECT UNNEST($1::text[])
  ON CONFLICT (stream) DO NOTHING
  `,
@@ -305,11 +363,11 @@ var PostgresStore = class {
  const { rows } = await client.query(
  `
  WITH free AS (
- SELECT * FROM "${this.config.schema}"."${this.config.table}_streams"
+ SELECT * FROM ${this._fqs}
  WHERE stream = ANY($1::text[]) AND (leased_by IS NULL OR leased_until <= NOW())
  FOR UPDATE
  )
- UPDATE "${this.config.schema}"."${this.config.table}_streams" U
+ UPDATE ${this._fqs} U
  SET
  leased_by = $2::uuid,
  leased_at = $3::integer,
@@ -336,13 +394,19 @@ var PostgresStore = class {
  client.release();
  }
  }
+ /**
+ * Acknowledge and release leases after processing, updating stream positions.
+ *
+ * @param leases Array of lease objects to acknowledge
+ * @returns Promise that resolves when leases are acknowledged
+ */
  async ack(leases) {
  const client = await this._pool.connect();
  try {
  await client.query("BEGIN");
  for (const { stream, by, at, retry, block } of leases) {
  await client.query(
- `UPDATE "${this.config.schema}"."${this.config.table}_streams"
+ `UPDATE ${this._fqs}
  SET
  at = $3::integer,
  retry = $4::integer,
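
Taken together, `fetch`, `lease`, and `ack` make up the drain cycle the new doc comments refer to. The real scheduling lives in @rotorsoft/act, so the following is only a hedged, hand-rolled sketch of one pass (worker id, batch size, and the "processing" step are illustrative; schemas elided with `<any>`):

    import { randomUUID } from "node:crypto";
    import { PostgresStore } from "@rotorsoft/act-pg";

    const store = new PostgresStore();
    const by = randomUUID();

    // 1. Find unblocked streams and the events committed past their positions.
    const { streams, events } = await store.fetch<any>(100);

    // 2. Lease those streams for this worker (expiry controlled by leaseMillis).
    //    The field set here is the one inferred from this file.
    const leases = await store.lease(
      streams.map((stream) => ({ stream, by, at: -1, retry: 0, block: false })) as any[]
    );

    // 3. "Process" each leased stream's events and advance its position.
    for (const lease of leases) {
      const mine = events.filter((e) => e.stream === lease.stream);
      // ...react to `mine` here...
      if (mine.length) lease.at = mine[mine.length - 1].id;
    }

    // 4. Ack to persist the new positions and release the leases.
    await store.ack(leases);
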
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/PostgresStore.ts","../src/utils.ts"],"sourcesContent":["/** @module act-pg */\nexport * from \"./PostgresStore.js\";\n","import type {\n Committed,\n EventMeta,\n Lease,\n Message,\n Query,\n Schemas,\n Store,\n} from \"@rotorsoft/act\";\nimport { ConcurrencyError, SNAP_EVENT, logger } from \"@rotorsoft/act\";\nimport pg from \"pg\";\nimport { dateReviver } from \"./utils.js\";\n\nconst { Pool, types } = pg;\ntypes.setTypeParser(types.builtins.JSONB, (val) =>\n JSON.parse(val, dateReviver)\n);\n\ntype Config = Readonly<{\n host: string;\n port: number;\n database: string;\n user: string;\n password: string;\n schema: string;\n table: string;\n leaseMillis: number;\n}>;\n\nconst DEFAULT_CONFIG: Config = {\n host: \"localhost\",\n port: 5432,\n database: \"postgres\",\n user: \"postgres\",\n password: \"postgres\",\n schema: \"public\",\n table: \"events\",\n leaseMillis: 30_000,\n};\n\nexport class PostgresStore implements Store {\n private _pool;\n readonly config: Config;\n\n constructor(config: Partial<Config> = {}) {\n this.config = { ...DEFAULT_CONFIG, ...config };\n this._pool = new Pool(this.config);\n }\n\n async dispose() {\n await this._pool.end();\n }\n\n async seed() {\n const client = await this._pool.connect();\n\n try {\n await client.query(\"BEGIN\");\n\n // Create schema\n await client.query(\n `CREATE SCHEMA IF NOT EXISTS \"${this.config.schema}\";`\n );\n\n // Events table\n await client.query(\n `CREATE TABLE IF NOT EXISTS \"${this.config.schema}\".\"${this.config.table}\" (\n id serial PRIMARY KEY,\n name varchar(100) COLLATE pg_catalog.\"default\" NOT NULL,\n data jsonb,\n stream varchar(100) COLLATE pg_catalog.\"default\" NOT NULL,\n version int NOT NULL,\n created timestamptz NOT NULL DEFAULT now(),\n meta jsonb\n ) TABLESPACE pg_default;`\n );\n\n // Indexes on events\n await client.query(\n `CREATE UNIQUE INDEX IF NOT EXISTS \"${this.config.table}_stream_ix\" \n ON \"${this.config.schema}\".\"${this.config.table}\" (stream COLLATE pg_catalog.\"default\", version);`\n );\n await client.query(\n `CREATE INDEX IF NOT EXISTS \"${this.config.table}_name_ix\" \n ON \"${this.config.schema}\".\"${this.config.table}\" (name COLLATE pg_catalog.\"default\");`\n );\n await client.query(\n `CREATE INDEX IF NOT EXISTS \"${this.config.table}_created_id_ix\" \n ON \"${this.config.schema}\".\"${this.config.table}\" (created, id);`\n );\n await client.query(\n `CREATE INDEX IF NOT EXISTS \"${this.config.table}_correlation_ix\" \n ON \"${this.config.schema}\".\"${this.config.table}\" ((meta ->> 'correlation') COLLATE pg_catalog.\"default\");`\n );\n\n // Streams table\n await client.query(\n `CREATE TABLE IF NOT EXISTS \"${this.config.schema}\".\"${this.config.table}_streams\" (\n stream varchar(100) COLLATE pg_catalog.\"default\" PRIMARY KEY,\n at int NOT NULL DEFAULT -1,\n retry smallint NOT NULL DEFAULT 0,\n blocked boolean NOT NULL DEFAULT false,\n leased_at int,\n leased_by uuid,\n leased_until timestamptz\n ) TABLESPACE pg_default;`\n );\n\n // Index for fetching streams\n await client.query(\n `CREATE INDEX IF NOT EXISTS \"${this.config.table}_streams_fetch_ix\" \n ON \"${this.config.schema}\".\"${this.config.table}_streams\" (blocked, at);`\n );\n\n await client.query(\"COMMIT\");\n logger.info(\n `Seeded schema \"${this.config.schema}\" with table \"${this.config.table}\"`\n );\n } catch (error) {\n await client.query(\"ROLLBACK\");\n logger.error(\"Failed to seed store:\", error);\n throw error;\n } finally {\n 
client.release();\n }\n }\n\n async drop() {\n await this._pool.query(\n `\n DO $$\n BEGIN\n IF EXISTS (SELECT 1 FROM information_schema.schemata\n WHERE schema_name = '${this.config.schema}'\n ) THEN\n EXECUTE 'DROP TABLE IF EXISTS \"${this.config.schema}\".\"${this.config.table}\"';\n EXECUTE 'DROP TABLE IF EXISTS \"${this.config.schema}\".\"${this.config.table}_streams\"';\n IF '${this.config.schema}' <> 'public' THEN\n EXECUTE 'DROP SCHEMA \"${this.config.schema}\" CASCADE';\n END IF;\n END IF;\n END\n $$;\n `\n );\n }\n\n async query<E extends Schemas>(\n callback: (event: Committed<E, keyof E>) => void,\n query?: Query,\n withSnaps = false\n ) {\n const {\n stream,\n names,\n before,\n after,\n limit,\n created_before,\n created_after,\n backward,\n correlation,\n } = query || {};\n\n let sql = `SELECT * FROM \"${this.config.schema}\".\"${this.config.table}\" WHERE`;\n const values: any[] = [];\n\n if (withSnaps)\n sql = sql.concat(\n ` id>=COALESCE((SELECT id\n FROM \"${this.config.schema}\".\"${this.config.table}\"\n WHERE stream='${stream}' AND name='${SNAP_EVENT}'\n ORDER BY id DESC LIMIT 1), 0)\n AND stream='${stream}'`\n );\n else if (query) {\n if (typeof after !== \"undefined\") {\n values.push(after);\n sql = sql.concat(\" id>$1\");\n } else sql = sql.concat(\" id>-1\");\n if (stream) {\n values.push(stream);\n sql = sql.concat(` AND stream=$${values.length}`);\n }\n if (names && names.length) {\n values.push(names);\n sql = sql.concat(` AND name = ANY($${values.length})`);\n }\n if (before) {\n values.push(before);\n sql = sql.concat(` AND id<$${values.length}`);\n }\n if (created_after) {\n values.push(created_after.toISOString());\n sql = sql.concat(` AND created>$${values.length}`);\n }\n if (created_before) {\n values.push(created_before.toISOString());\n sql = sql.concat(` AND created<$${values.length}`);\n }\n if (correlation) {\n values.push(correlation);\n sql = sql.concat(` AND meta->>'correlation'=$${values.length}`);\n }\n }\n sql = sql.concat(` ORDER BY id ${backward ? \"DESC\" : \"ASC\"}`);\n if (limit) {\n values.push(limit);\n sql = sql.concat(` LIMIT $${values.length}`);\n }\n\n const result = await this._pool.query<Committed<E, keyof E>>(sql, values);\n for (const row of result.rows) callback(row);\n\n return result.rowCount ?? 0;\n }\n\n async commit<E extends Schemas>(\n stream: string,\n msgs: Message<E, keyof E>[],\n meta: EventMeta,\n expectedVersion?: number\n ) {\n const client = await this._pool.connect();\n let version = -1;\n try {\n await client.query(\"BEGIN\");\n\n const last = await client.query<Committed<E, keyof E>>(\n `SELECT version\n FROM \"${this.config.schema}\".\"${this.config.table}\"\n WHERE stream=$1 ORDER BY version DESC LIMIT 1`,\n [stream]\n );\n version = last.rowCount ? 
last.rows[0].version : -1;\n if (expectedVersion && version !== expectedVersion)\n throw new ConcurrencyError(\n version,\n msgs as unknown as Message<Schemas, string>[],\n expectedVersion\n );\n\n const committed = await Promise.all(\n msgs.map(async ({ name, data }) => {\n version++;\n const sql = `\n INSERT INTO \"${this.config.schema}\".\"${this.config.table}\"(name, data, stream, version, meta) \n VALUES($1, $2, $3, $4, $5) RETURNING *`;\n const vals = [name, data, stream, version, meta];\n const { rows } = await client.query<Committed<E, keyof E>>(sql, vals);\n return rows.at(0)!;\n })\n );\n\n await client\n .query(\n `\n NOTIFY \"${this.config.table}\", '${JSON.stringify({\n operation: \"INSERT\",\n id: committed[0].name,\n position: committed[0].id,\n })}';\n COMMIT;\n `\n )\n .catch((error) => {\n logger.error(error);\n throw new ConcurrencyError(\n version,\n msgs as unknown as Message<Schemas, string>[],\n expectedVersion || -1\n );\n });\n return committed;\n } catch (error) {\n await client.query(\"ROLLBACK\").catch(() => {});\n throw error;\n } finally {\n client.release();\n }\n }\n\n async fetch<E extends Schemas>(limit: number) {\n const { rows } = await this._pool.query<{ stream: string; at: number }>(\n `\n SELECT stream, at\n FROM \"${this.config.schema}\".\"${this.config.table}_streams\"\n WHERE blocked=false\n ORDER BY at ASC\n LIMIT $1::integer\n `,\n [limit]\n );\n\n const after = rows.length\n ? rows.reduce((min, r) => Math.min(min, r.at), Number.MAX_SAFE_INTEGER)\n : -1;\n\n const events: Committed<E, keyof E>[] = [];\n await this.query<E>((e) => e.name !== SNAP_EVENT && events.push(e), {\n after,\n limit,\n });\n return { streams: rows.map(({ stream }) => stream), events };\n }\n\n async lease(leases: Lease[]) {\n const { by, at } = leases.at(0)!;\n const streams = leases.map(({ stream }) => stream);\n const client = await this._pool.connect();\n\n try {\n await client.query(\"BEGIN\");\n // insert new streams\n await client.query(\n `\n INSERT INTO \"${this.config.schema}\".\"${this.config.table}_streams\" (stream)\n SELECT UNNEST($1::text[])\n ON CONFLICT (stream) DO NOTHING\n `,\n [streams]\n );\n // set leases\n const { rows } = await client.query<{\n stream: string;\n leased_at: number;\n retry: number;\n }>(\n `\n WITH free AS (\n SELECT * FROM \"${this.config.schema}\".\"${this.config.table}_streams\" \n WHERE stream = ANY($1::text[]) AND (leased_by IS NULL OR leased_until <= NOW())\n FOR UPDATE\n )\n UPDATE \"${this.config.schema}\".\"${this.config.table}_streams\" U\n SET\n leased_by = $2::uuid,\n leased_at = $3::integer,\n leased_until = NOW() + ($4::integer || ' milliseconds')::interval\n FROM free\n WHERE U.stream = free.stream\n RETURNING U.stream, U.leased_at, U.retry\n `,\n [streams, by, at, this.config.leaseMillis]\n );\n await client.query(\"COMMIT\");\n\n return rows.map(({ stream, leased_at, retry }) => ({\n stream,\n by,\n at: leased_at,\n retry,\n block: false,\n }));\n } catch (error) {\n await client.query(\"ROLLBACK\").catch(() => {});\n throw error;\n } finally {\n client.release();\n }\n }\n\n async ack(leases: Lease[]) {\n const client = await this._pool.connect();\n\n try {\n await client.query(\"BEGIN\");\n for (const { stream, by, at, retry, block } of leases) {\n await client.query(\n `UPDATE \"${this.config.schema}\".\"${this.config.table}_streams\"\n SET\n at = $3::integer,\n retry = $4::integer,\n blocked = $5::boolean,\n leased_by = NULL,\n leased_at = NULL,\n leased_until = NULL\n WHERE\n stream = $1::text\n AND leased_by = 
$2::uuid`,\n [stream, by, at, retry, block]\n );\n }\n await client.query(\"COMMIT\");\n } catch {\n // leased_until fallback\n await client.query(\"ROLLBACK\").catch(() => {});\n } finally {\n client.release();\n }\n }\n}\n","/**\n * Date reviver when parsing JSON strings with the following formats:\n * - YYYY-MM-DDTHH:MM:SS.sssZ\n * - YYYY-MM-DDTHH:MM:SS.sss+HH:MM\n * - YYYY-MM-DDTHH:MM:SS.sss-HH:MM\n */\nconst ISO_8601 =\n /^(\\d{4})-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(\\.\\d+)?(Z|[+-][0-2][0-9]:[0-5][0-9])?$/;\nexport const dateReviver = (key: string, value: string): string | Date => {\n if (typeof value === \"string\" && ISO_8601.test(value)) {\n return new Date(value);\n }\n return value;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACSA,iBAAqD;AACrD,gBAAe;;;ACJf,IAAM,WACJ;AACK,IAAM,cAAc,CAAC,KAAa,UAAiC;AACxE,MAAI,OAAO,UAAU,YAAY,SAAS,KAAK,KAAK,GAAG;AACrD,WAAO,IAAI,KAAK,KAAK;AAAA,EACvB;AACA,SAAO;AACT;;;ADAA,IAAM,EAAE,MAAM,MAAM,IAAI,UAAAA;AACxB,MAAM;AAAA,EAAc,MAAM,SAAS;AAAA,EAAO,CAAC,QACzC,KAAK,MAAM,KAAK,WAAW;AAC7B;AAaA,IAAM,iBAAyB;AAAA,EAC7B,MAAM;AAAA,EACN,MAAM;AAAA,EACN,UAAU;AAAA,EACV,MAAM;AAAA,EACN,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,aAAa;AACf;AAEO,IAAM,gBAAN,MAAqC;AAAA,EAClC;AAAA,EACC;AAAA,EAET,YAAY,SAA0B,CAAC,GAAG;AACxC,SAAK,SAAS,EAAE,GAAG,gBAAgB,GAAG,OAAO;AAC7C,SAAK,QAAQ,IAAI,KAAK,KAAK,MAAM;AAAA,EACnC;AAAA,EAEA,MAAM,UAAU;AACd,UAAM,KAAK,MAAM,IAAI;AAAA,EACvB;AAAA,EAEA,MAAM,OAAO;AACX,UAAM,SAAS,MAAM,KAAK,MAAM,QAAQ;AAExC,QAAI;AACF,YAAM,OAAO,MAAM,OAAO;AAG1B,YAAM,OAAO;AAAA,QACX,gCAAgC,KAAK,OAAO,MAAM;AAAA,MACpD;AAGA,YAAM,OAAO;AAAA,QACX,+BAA+B,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAS1E;AAGA,YAAM,OAAO;AAAA,QACX,sCAAsC,KAAK,OAAO,KAAK;AAAA,cACjD,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA,MACjD;AACA,YAAM,OAAO;AAAA,QACX,+BAA+B,KAAK,OAAO,KAAK;AAAA,cAC1C,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA,MACjD;AACA,YAAM,OAAO;AAAA,QACX,+BAA+B,KAAK,OAAO,KAAK;AAAA,cAC1C,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA,MACjD;AACA,YAAM,OAAO;AAAA,QACX,+BAA+B,KAAK,OAAO,KAAK;AAAA,cAC1C,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA,MACjD;AAGA,YAAM,OAAO;AAAA,QACX,+BAA+B,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAS1E;AAGA,YAAM,OAAO;AAAA,QACX,+BAA+B,KAAK,OAAO,KAAK;AAAA,cAC1C,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA,MACjD;AAEA,YAAM,OAAO,MAAM,QAAQ;AAC3B,wBAAO;AAAA,QACL,kBAAkB,KAAK,OAAO,MAAM,iBAAiB,KAAK,OAAO,KAAK;AAAA,MACxE;AAAA,IACF,SAAS,OAAO;AACd,YAAM,OAAO,MAAM,UAAU;AAC7B,wBAAO,MAAM,yBAAyB,KAAK;AAC3C,YAAM;AAAA,IACR,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA,EAEA,MAAM,OAAO;AACX,UAAM,KAAK,MAAM;AAAA,MACf;AAAA;AAAA;AAAA;AAAA,iCAI2B,KAAK,OAAO,MAAM;AAAA;AAAA,2CAER,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA,2CACzC,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA,gBACpE,KAAK,OAAO,MAAM;AAAA,oCACE,KAAK,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMlD;AAAA,EACF;AAAA,EAEA,MAAM,MACJ,UACA,OACA,YAAY,OACZ;AACA,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,IAAI,SAAS,CAAC;AAEd,QAAI,MAAM,kBAAkB,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AACrE,UAAM,SAAgB,CAAC;AAEvB,QAAI;AACF,YAAM,IAAI;AAAA,QACR;AAAA,oBACY,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA,4BACjC,MAAM,eAAe,qBAAU;AAAA;AAAA,0BAEjC,MAAM;AAAA,MAC1B;AAAA,aACO,OAAO;AACd,UAAI,OAAO,UAAU,aAAa;AAChC,eAAO,KAAK,KAAK;AACjB,cAAM,IAAI,OAAO,QAAQ;AAAA,MAC3B,MAAO,OAAM,IAAI,OAAO,QAAQ;AAChC,UAAI,QAAQ;AACV,eAAO,KAAK,MAAM;AAClB,cAAM,IAAI,OAAO,gBAAgB,OAAO,MAAM,EAAE;AAAA,MAClD;AACA,UAAI,SAAS,MAAM,QAAQ;AACzB,eAAO,KAAK
,KAAK;AACjB,cAAM,IAAI,OAAO,oBAAoB,OAAO,MAAM,GAAG;AAAA,MACvD;AACA,UAAI,QAAQ;AACV,eAAO,KAAK,MAAM;AAClB,cAAM,IAAI,OAAO,YAAY,OAAO,MAAM,EAAE;AAAA,MAC9C;AACA,UAAI,eAAe;AACjB,eAAO,KAAK,cAAc,YAAY,CAAC;AACvC,cAAM,IAAI,OAAO,iBAAiB,OAAO,MAAM,EAAE;AAAA,MACnD;AACA,UAAI,gBAAgB;AAClB,eAAO,KAAK,eAAe,YAAY,CAAC;AACxC,cAAM,IAAI,OAAO,iBAAiB,OAAO,MAAM,EAAE;AAAA,MACnD;AACA,UAAI,aAAa;AACf,eAAO,KAAK,WAAW;AACvB,cAAM,IAAI,OAAO,8BAA8B,OAAO,MAAM,EAAE;AAAA,MAChE;AAAA,IACF;AACA,UAAM,IAAI,OAAO,gBAAgB,WAAW,SAAS,KAAK,EAAE;AAC5D,QAAI,OAAO;AACT,aAAO,KAAK,KAAK;AACjB,YAAM,IAAI,OAAO,WAAW,OAAO,MAAM,EAAE;AAAA,IAC7C;AAEA,UAAM,SAAS,MAAM,KAAK,MAAM,MAA6B,KAAK,MAAM;AACxE,eAAW,OAAO,OAAO,KAAM,UAAS,GAAG;AAE3C,WAAO,OAAO,YAAY;AAAA,EAC5B;AAAA,EAEA,MAAM,OACJ,QACA,MACA,MACA,iBACA;AACA,UAAM,SAAS,MAAM,KAAK,MAAM,QAAQ;AACxC,QAAI,UAAU;AACd,QAAI;AACF,YAAM,OAAO,MAAM,OAAO;AAE1B,YAAM,OAAO,MAAM,OAAO;AAAA,QACxB;AAAA,gBACQ,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA;AAAA,QAEjD,CAAC,MAAM;AAAA,MACT;AACA,gBAAU,KAAK,WAAW,KAAK,KAAK,CAAC,EAAE,UAAU;AACjD,UAAI,mBAAmB,YAAY;AACjC,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAEF,YAAM,YAAY,MAAM,QAAQ;AAAA,QAC9B,KAAK,IAAI,OAAO,EAAE,MAAM,KAAK,MAAM;AACjC;AACA,gBAAM,MAAM;AAAA,yBACG,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA;AAExD,gBAAM,OAAO,CAAC,MAAM,MAAM,QAAQ,SAAS,IAAI;AAC/C,gBAAM,EAAE,KAAK,IAAI,MAAM,OAAO,MAA6B,KAAK,IAAI;AACpE,iBAAO,KAAK,GAAG,CAAC;AAAA,QAClB,CAAC;AAAA,MACH;AAEA,YAAM,OACH;AAAA,QACC;AAAA,sBACY,KAAK,OAAO,KAAK,OAAO,KAAK,UAAU;AAAA,UAC/C,WAAW;AAAA,UACX,IAAI,UAAU,CAAC,EAAE;AAAA,UACjB,UAAU,UAAU,CAAC,EAAE;AAAA,QACzB,CAAC,CAAC;AAAA;AAAA;AAAA,MAGN,EACC,MAAM,CAAC,UAAU;AAChB,0BAAO,MAAM,KAAK;AAClB,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA,mBAAmB;AAAA,QACrB;AAAA,MACF,CAAC;AACH,aAAO;AAAA,IACT,SAAS,OAAO;AACd,YAAM,OAAO,MAAM,UAAU,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAC7C,YAAM;AAAA,IACR,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA,EAEA,MAAM,MAAyB,OAAe;AAC5C,UAAM,EAAE,KAAK,IAAI,MAAM,KAAK,MAAM;AAAA,MAChC;AAAA;AAAA,cAEQ,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA,MAKjD,CAAC,KAAK;AAAA,IACR;AAEA,UAAM,QAAQ,KAAK,SACf,KAAK,OAAO,CAAC,KAAK,MAAM,KAAK,IAAI,KAAK,EAAE,EAAE,GAAG,OAAO,gBAAgB,IACpE;AAEJ,UAAM,SAAkC,CAAC;AACzC,UAAM,KAAK,MAAS,CAAC,MAAM,EAAE,SAAS,yBAAc,OAAO,KAAK,CAAC,GAAG;AAAA,MAClE;AAAA,MACA;AAAA,IACF,CAAC;AACD,WAAO,EAAE,SAAS,KAAK,IAAI,CAAC,EAAE,OAAO,MAAM,MAAM,GAAG,OAAO;AAAA,EAC7D;AAAA,EAEA,MAAM,MAAM,QAAiB;AAC3B,UAAM,EAAE,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC;AAC9B,UAAM,UAAU,OAAO,IAAI,CAAC,EAAE,OAAO,MAAM,MAAM;AACjD,UAAM,SAAS,MAAM,KAAK,MAAM,QAAQ;AAExC,QAAI;AACF,YAAM,OAAO,MAAM,OAAO;AAE1B,YAAM,OAAO;AAAA,QACX;AAAA,uBACe,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,QAIxD,CAAC,OAAO;AAAA,MACV;AAEA,YAAM,EAAE,KAAK,IAAI,MAAM,OAAO;AAAA,QAK5B;AAAA;AAAA,2BAEmB,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA,kBAIlD,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QASnD,CAAC,SAAS,IAAI,IAAI,KAAK,OAAO,WAAW;AAAA,MAC3C;AACA,YAAM,OAAO,MAAM,QAAQ;AAE3B,aAAO,KAAK,IAAI,CAAC,EAAE,QAAQ,WAAW,MAAM,OAAO;AAAA,QACjD;AAAA,QACA;AAAA,QACA,IAAI;AAAA,QACJ;AAAA,QACA,OAAO;AAAA,MACT,EAAE;AAAA,IACJ,SAAS,OAAO;AACd,YAAM,OAAO,MAAM,UAAU,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAC7C,YAAM;AAAA,IACR,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA,EAEA,MAAM,IAAI,QAAiB;AACzB,UAAM,SAAS,MAAM,KAAK,MAAM,QAAQ;AAExC,QAAI;AACF,YAAM,OAAO,MAAM,OAAO;AAC1B,iBAAW,EAAE,QAAQ,IAAI,IAAI,OAAO,MAAM,KAAK,QAAQ;AACrD,cAAM,OAAO;AAAA,UACX,WAAW,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAWpD,CAAC,QAAQ,IAAI,IAAI,OAAO,KAAK;AAAA,QAC/B;AAAA,MACF;AACA,YAAM,OAAO,MAAM,QAAQ;AAAA,IAC7B,QAAQ;AAEN,YAAM,OAAO,MAAM,UAAU,EAAE,MAA
M,MAAM;AAAA,MAAC,CAAC;AAAA,IAC/C,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AACF;","names":["pg"]}
+ {"version":3,"sources":["../src/index.ts","../src/PostgresStore.ts","../src/utils.ts"],"sourcesContent":["/**\n * @packageDocumentation\n * @module act-pg\n * Main entry point for the Act-PG framework. Re-exports all core APIs.\n */\nexport * from \"./PostgresStore.js\";\n","import type {\n Committed,\n EventMeta,\n Lease,\n Message,\n Query,\n Schemas,\n Store,\n} from \"@rotorsoft/act\";\nimport { ConcurrencyError, SNAP_EVENT, logger } from \"@rotorsoft/act\";\nimport pg from \"pg\";\nimport { dateReviver } from \"./utils.js\";\n\nconst { Pool, types } = pg;\ntypes.setTypeParser(types.builtins.JSONB, (val) =>\n JSON.parse(val, dateReviver)\n);\n\ntype Config = Readonly<{\n host: string;\n port: number;\n database: string;\n user: string;\n password: string;\n schema: string;\n table: string;\n leaseMillis: number;\n}>;\n\nconst DEFAULT_CONFIG: Config = {\n host: \"localhost\",\n port: 5432,\n database: \"postgres\",\n user: \"postgres\",\n password: \"postgres\",\n schema: \"public\",\n table: \"events\",\n leaseMillis: 30_000,\n};\n\n/**\n * @category Adapters\n * @see Store\n *\n * PostgresStore is a production-ready event store adapter for Act, using PostgreSQL as the backend.\n *\n * - Supports event sourcing, leasing, snapshots, and concurrency control.\n * - Designed for high-throughput, scalable, and reliable event storage.\n * - Implements the Act Store interface.\n *\n * @example\n * import { PostgresStore } from \"@act/pg\";\n * const store = new PostgresStore({ schema: \"my_schema\", table: \"events\" });\n * await store.seed();\n *\n * @see https://github.com/rotorsoft/act-root\n */\nexport class PostgresStore implements Store {\n private _pool;\n readonly config: Config;\n private _fqt: string;\n private _fqs: string;\n\n /**\n * Create a new PostgresStore instance.\n * @param config Partial configuration (host, port, user, password, schema, table, etc.)\n */\n constructor(config: Partial<Config> = {}) {\n this.config = { ...DEFAULT_CONFIG, ...config };\n this._pool = new Pool(this.config);\n this._fqt = `\"${this.config.schema}\".\"${this.config.table}\"`;\n this._fqs = `\"${this.config.schema}\".\"${this.config.table}_streams\"`;\n }\n\n /**\n * Dispose of the store and close all database connections.\n * @returns Promise that resolves when all connections are closed\n */\n async dispose() {\n await this._pool.end();\n }\n\n /**\n * Seed the database with required tables, indexes, and schema for event storage.\n * @returns Promise that resolves when seeding is complete\n * @throws Error if seeding fails\n */\n async seed() {\n const client = await this._pool.connect();\n\n try {\n await client.query(\"BEGIN\");\n\n // Create schema\n await client.query(\n `CREATE SCHEMA IF NOT EXISTS \"${this.config.schema}\";`\n );\n\n // Events table\n await client.query(\n `CREATE TABLE IF NOT EXISTS ${this._fqt} (\n id serial PRIMARY KEY,\n name varchar(100) COLLATE pg_catalog.\"default\" NOT NULL,\n data jsonb,\n stream varchar(100) COLLATE pg_catalog.\"default\" NOT NULL,\n version int NOT NULL,\n created timestamptz NOT NULL DEFAULT now(),\n meta jsonb\n ) TABLESPACE pg_default;`\n );\n\n // Indexes on events\n await client.query(\n `CREATE UNIQUE INDEX IF NOT EXISTS \"${this.config.table}_stream_ix\" \n ON ${this._fqt} (stream COLLATE pg_catalog.\"default\", version);`\n );\n await client.query(\n `CREATE INDEX IF NOT EXISTS \"${this.config.table}_name_ix\" \n ON ${this._fqt} (name COLLATE pg_catalog.\"default\");`\n );\n await client.query(\n `CREATE INDEX IF NOT EXISTS 
\"${this.config.table}_created_id_ix\" \n ON ${this._fqt} (created, id);`\n );\n await client.query(\n `CREATE INDEX IF NOT EXISTS \"${this.config.table}_correlation_ix\" \n ON ${this._fqt} ((meta ->> 'correlation') COLLATE pg_catalog.\"default\");`\n );\n\n // Streams table\n await client.query(\n `CREATE TABLE IF NOT EXISTS ${this._fqs} (\n stream varchar(100) COLLATE pg_catalog.\"default\" PRIMARY KEY,\n at int NOT NULL DEFAULT -1,\n retry smallint NOT NULL DEFAULT 0,\n blocked boolean NOT NULL DEFAULT false,\n leased_at int,\n leased_by uuid,\n leased_until timestamptz\n ) TABLESPACE pg_default;`\n );\n\n // Index for fetching streams\n await client.query(\n `CREATE INDEX IF NOT EXISTS \"${this.config.table}_streams_fetch_ix\" \n ON ${this._fqs} (blocked, at);`\n );\n\n await client.query(\"COMMIT\");\n logger.info(\n `Seeded schema \"${this.config.schema}\" with table \"${this.config.table}\"`\n );\n } catch (error) {\n await client.query(\"ROLLBACK\");\n logger.error(\"Failed to seed store:\", error);\n throw error;\n } finally {\n client.release();\n }\n }\n\n /**\n * Drop all tables and schema created by the store (for testing or cleanup).\n * @returns Promise that resolves when the schema is dropped\n */\n async drop() {\n await this._pool.query(\n `\n DO $$\n BEGIN\n IF EXISTS (SELECT 1 FROM information_schema.schemata\n WHERE schema_name = '${this.config.schema}'\n ) THEN\n EXECUTE 'DROP TABLE IF EXISTS ${this._fqt}';\n EXECUTE 'DROP TABLE IF EXISTS ${this._fqs}';\n IF '${this.config.schema}' <> 'public' THEN\n EXECUTE 'DROP SCHEMA \"${this.config.schema}\" CASCADE';\n END IF;\n END IF;\n END\n $$;\n `\n );\n }\n\n /**\n * Query events from the store, optionally filtered by stream, event name, time, etc.\n *\n * @param callback Function called for each event found\n * @param query (Optional) Query filter (stream, names, before, after, etc.)\n * @param withSnaps (Optional) If true, includes only events after the last snapshot\n * @returns The number of events found\n *\n * @example\n * await store.query((event) => console.log(event), { stream: \"A\" });\n */\n async query<E extends Schemas>(\n callback: (event: Committed<E, keyof E>) => void,\n query?: Query,\n withSnaps = false\n ) {\n const {\n stream,\n names,\n before,\n after,\n limit,\n created_before,\n created_after,\n backward,\n correlation,\n } = query || {};\n\n let sql = `SELECT * FROM ${this._fqt}`;\n const conditions: string[] = [];\n const values: any[] = [];\n\n if (withSnaps) {\n conditions.push(\n `id>=COALESCE((SELECT id FROM ${this._fqt} WHERE stream='${stream}' AND name='${SNAP_EVENT}' ORDER BY id DESC LIMIT 1), 0)`\n );\n conditions.push(`stream='${stream}'`);\n } else if (query) {\n if (typeof after !== \"undefined\") {\n values.push(after);\n conditions.push(`id>$${values.length}`);\n } else {\n conditions.push(\"id>-1\");\n }\n if (stream) {\n values.push(stream);\n conditions.push(`stream=$${values.length}`);\n }\n if (names && names.length) {\n values.push(names);\n conditions.push(`name = ANY($${values.length})`);\n }\n if (before) {\n values.push(before);\n conditions.push(`id<$${values.length}`);\n }\n if (created_after) {\n values.push(created_after.toISOString());\n conditions.push(`created>$${values.length}`);\n }\n if (created_before) {\n values.push(created_before.toISOString());\n conditions.push(`created<$${values.length}`);\n }\n if (correlation) {\n values.push(correlation);\n conditions.push(`meta->>'correlation'=$${values.length}`);\n }\n }\n if (conditions.length) {\n sql += \" WHERE \" 
+ conditions.join(\" AND \");\n }\n sql += ` ORDER BY id ${backward ? \"DESC\" : \"ASC\"}`;\n if (limit) {\n values.push(limit);\n sql += ` LIMIT $${values.length}`;\n }\n\n const result = await this._pool.query<Committed<E, keyof E>>(sql, values);\n for (const row of result.rows) callback(row);\n\n return result.rowCount ?? 0;\n }\n\n /**\n * Commit new events to the store for a given stream, with concurrency control.\n *\n * @param stream The stream name\n * @param msgs Array of messages (event name and data)\n * @param meta Event metadata (correlation, causation, etc.)\n * @param expectedVersion (Optional) Expected stream version for concurrency control\n * @returns Array of committed events\n * @throws ConcurrencyError if the expected version does not match\n */\n async commit<E extends Schemas>(\n stream: string,\n msgs: Message<E, keyof E>[],\n meta: EventMeta,\n expectedVersion?: number\n ) {\n if (msgs.length === 0) return [];\n const client = await this._pool.connect();\n let version = -1;\n try {\n await client.query(\"BEGIN\");\n\n const last = await client.query<Committed<E, keyof E>>(\n `SELECT version\n FROM ${this._fqt}\n WHERE stream=$1 ORDER BY version DESC LIMIT 1`,\n [stream]\n );\n version = last.rowCount ? last.rows[0].version : -1;\n if (typeof expectedVersion === \"number\" && version !== expectedVersion)\n throw new ConcurrencyError(\n version,\n msgs as unknown as Message<Schemas, string>[],\n expectedVersion\n );\n\n const committed = await Promise.all(\n msgs.map(async ({ name, data }) => {\n version++;\n const sql = `\n INSERT INTO ${this._fqt}(name, data, stream, version, meta) \n VALUES($1, $2, $3, $4, $5) RETURNING *`;\n const vals = [name, data, stream, version, meta];\n const { rows } = await client.query<Committed<E, keyof E>>(sql, vals);\n return rows.at(0)!;\n })\n );\n\n await client\n .query(\n `\n NOTIFY \"${this.config.table}\", '${JSON.stringify({\n operation: \"INSERT\",\n id: committed[0].name,\n position: committed[0].id,\n })}';\n COMMIT;\n `\n )\n .catch((error) => {\n logger.error(error);\n throw new ConcurrencyError(\n version,\n msgs as unknown as Message<Schemas, string>[],\n expectedVersion || -1\n );\n });\n return committed;\n } catch (error) {\n await client.query(\"ROLLBACK\").catch(() => {});\n throw error;\n } finally {\n client.release();\n }\n }\n\n /**\n * Fetch a batch of events and streams for processing (drain cycle).\n *\n * @param limit The maximum number of events to fetch\n * @returns An object with arrays of streams and events\n */\n async fetch<E extends Schemas>(limit: number) {\n const { rows } = await this._pool.query<{ stream: string; at: number }>(\n `\n SELECT stream, at\n FROM ${this._fqs}\n WHERE blocked=false\n ORDER BY at ASC\n LIMIT $1::integer\n `,\n [limit]\n );\n\n const after = rows.length\n ? 
rows.reduce((min, r) => Math.min(min, r.at), Number.MAX_SAFE_INTEGER)\n : -1;\n\n const events: Committed<E, keyof E>[] = [];\n await this.query<E>((e) => e.name !== SNAP_EVENT && events.push(e), {\n after,\n limit,\n });\n return { streams: rows.map(({ stream }) => stream), events };\n }\n\n /**\n * Lease streams for reaction processing, marking them as in-progress.\n *\n * @param leases Array of lease objects (stream, at, etc.)\n * @returns Array of leased objects with updated lease info\n */\n async lease(leases: Lease[]) {\n const { by, at } = leases.at(0)!;\n const streams = leases.map(({ stream }) => stream);\n const client = await this._pool.connect();\n\n try {\n await client.query(\"BEGIN\");\n // insert new streams\n await client.query(\n `\n INSERT INTO ${this._fqs} (stream)\n SELECT UNNEST($1::text[])\n ON CONFLICT (stream) DO NOTHING\n `,\n [streams]\n );\n // set leases\n const { rows } = await client.query<{\n stream: string;\n leased_at: number;\n retry: number;\n }>(\n `\n WITH free AS (\n SELECT * FROM ${this._fqs} \n WHERE stream = ANY($1::text[]) AND (leased_by IS NULL OR leased_until <= NOW())\n FOR UPDATE\n )\n UPDATE ${this._fqs} U\n SET\n leased_by = $2::uuid,\n leased_at = $3::integer,\n leased_until = NOW() + ($4::integer || ' milliseconds')::interval\n FROM free\n WHERE U.stream = free.stream\n RETURNING U.stream, U.leased_at, U.retry\n `,\n [streams, by, at, this.config.leaseMillis]\n );\n await client.query(\"COMMIT\");\n\n return rows.map(({ stream, leased_at, retry }) => ({\n stream,\n by,\n at: leased_at,\n retry,\n block: false,\n }));\n } catch (error) {\n await client.query(\"ROLLBACK\").catch(() => {});\n throw error;\n } finally {\n client.release();\n }\n }\n\n /**\n * Acknowledge and release leases after processing, updating stream positions.\n *\n * @param leases Array of lease objects to acknowledge\n * @returns Promise that resolves when leases are acknowledged\n */\n async ack(leases: Lease[]) {\n const client = await this._pool.connect();\n\n try {\n await client.query(\"BEGIN\");\n for (const { stream, by, at, retry, block } of leases) {\n await client.query(\n `UPDATE ${this._fqs}\n SET\n at = $3::integer,\n retry = $4::integer,\n blocked = $5::boolean,\n leased_by = NULL,\n leased_at = NULL,\n leased_until = NULL\n WHERE\n stream = $1::text\n AND leased_by = $2::uuid`,\n [stream, by, at, retry, block]\n );\n }\n await client.query(\"COMMIT\");\n } catch {\n // leased_until fallback\n await client.query(\"ROLLBACK\").catch(() => {});\n } finally {\n client.release();\n }\n }\n}\n","/**\n * @module act-pg\n * Date reviver for JSON.parse to automatically convert ISO 8601 date strings to Date objects.\n *\n * Recognizes the following formats:\n * - YYYY-MM-DDTHH:MM:SS.sssZ\n * - YYYY-MM-DDTHH:MM:SS.sss+HH:MM\n * - YYYY-MM-DDTHH:MM:SS.sss-HH:MM\n *\n * @param key The key being parsed\n * @param value The value being parsed\n * @returns A Date object if the value matches ISO 8601, otherwise the original value\n *\n * @example\n * const obj = JSON.parse(jsonString, dateReviver);\n */\nconst ISO_8601 =\n /^(\\d{4})-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(\\.\\d+)?(Z|[+-][0-2][0-9]:[0-5][0-9])?$/;\nexport const dateReviver = (key: string, value: string): string | Date => {\n if (typeof value === \"string\" && ISO_8601.test(value)) {\n return new Date(value);\n }\n return 
value;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACSA,iBAAqD;AACrD,gBAAe;;;ACMf,IAAM,WACJ;AACK,IAAM,cAAc,CAAC,KAAa,UAAiC;AACxE,MAAI,OAAO,UAAU,YAAY,SAAS,KAAK,KAAK,GAAG;AACrD,WAAO,IAAI,KAAK,KAAK;AAAA,EACvB;AACA,SAAO;AACT;;;ADVA,IAAM,EAAE,MAAM,MAAM,IAAI,UAAAA;AACxB,MAAM;AAAA,EAAc,MAAM,SAAS;AAAA,EAAO,CAAC,QACzC,KAAK,MAAM,KAAK,WAAW;AAC7B;AAaA,IAAM,iBAAyB;AAAA,EAC7B,MAAM;AAAA,EACN,MAAM;AAAA,EACN,UAAU;AAAA,EACV,MAAM;AAAA,EACN,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,aAAa;AACf;AAmBO,IAAM,gBAAN,MAAqC;AAAA,EAClC;AAAA,EACC;AAAA,EACD;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMR,YAAY,SAA0B,CAAC,GAAG;AACxC,SAAK,SAAS,EAAE,GAAG,gBAAgB,GAAG,OAAO;AAC7C,SAAK,QAAQ,IAAI,KAAK,KAAK,MAAM;AACjC,SAAK,OAAO,IAAI,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AACzD,SAAK,OAAO,IAAI,KAAK,OAAO,MAAM,MAAM,KAAK,OAAO,KAAK;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,UAAU;AACd,UAAM,KAAK,MAAM,IAAI;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,OAAO;AACX,UAAM,SAAS,MAAM,KAAK,MAAM,QAAQ;AAExC,QAAI;AACF,YAAM,OAAO,MAAM,OAAO;AAG1B,YAAM,OAAO;AAAA,QACX,gCAAgC,KAAK,OAAO,MAAM;AAAA,MACpD;AAGA,YAAM,OAAO;AAAA,QACX,8BAA8B,KAAK,IAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASzC;AAGA,YAAM,OAAO;AAAA,QACX,sCAAsC,KAAK,OAAO,KAAK;AAAA,aAClD,KAAK,IAAI;AAAA,MAChB;AACA,YAAM,OAAO;AAAA,QACX,+BAA+B,KAAK,OAAO,KAAK;AAAA,aAC3C,KAAK,IAAI;AAAA,MAChB;AACA,YAAM,OAAO;AAAA,QACX,+BAA+B,KAAK,OAAO,KAAK;AAAA,aAC3C,KAAK,IAAI;AAAA,MAChB;AACA,YAAM,OAAO;AAAA,QACX,+BAA+B,KAAK,OAAO,KAAK;AAAA,aAC3C,KAAK,IAAI;AAAA,MAChB;AAGA,YAAM,OAAO;AAAA,QACX,8BAA8B,KAAK,IAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MASzC;AAGA,YAAM,OAAO;AAAA,QACX,+BAA+B,KAAK,OAAO,KAAK;AAAA,aAC3C,KAAK,IAAI;AAAA,MAChB;AAEA,YAAM,OAAO,MAAM,QAAQ;AAC3B,wBAAO;AAAA,QACL,kBAAkB,KAAK,OAAO,MAAM,iBAAiB,KAAK,OAAO,KAAK;AAAA,MACxE;AAAA,IACF,SAAS,OAAO;AACd,YAAM,OAAO,MAAM,UAAU;AAC7B,wBAAO,MAAM,yBAAyB,KAAK;AAC3C,YAAM;AAAA,IACR,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,OAAO;AACX,UAAM,KAAK,MAAM;AAAA,MACf;AAAA;AAAA;AAAA;AAAA,iCAI2B,KAAK,OAAO,MAAM;AAAA;AAAA,0CAET,KAAK,IAAI;AAAA,0CACT,KAAK,IAAI;AAAA,gBACnC,KAAK,OAAO,MAAM;AAAA,oCACE,KAAK,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMlD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,MAAM,MACJ,UACA,OACA,YAAY,OACZ;AACA,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,IAAI,SAAS,CAAC;AAEd,QAAI,MAAM,iBAAiB,KAAK,IAAI;AACpC,UAAM,aAAuB,CAAC;AAC9B,UAAM,SAAgB,CAAC;AAEvB,QAAI,WAAW;AACb,iBAAW;AAAA,QACT,gCAAgC,KAAK,IAAI,kBAAkB,MAAM,eAAe,qBAAU;AAAA,MAC5F;AACA,iBAAW,KAAK,WAAW,MAAM,GAAG;AAAA,IACtC,WAAW,OAAO;AAChB,UAAI,OAAO,UAAU,aAAa;AAChC,eAAO,KAAK,KAAK;AACjB,mBAAW,KAAK,OAAO,OAAO,MAAM,EAAE;AAAA,MACxC,OAAO;AACL,mBAAW,KAAK,OAAO;AAAA,MACzB;AACA,UAAI,QAAQ;AACV,eAAO,KAAK,MAAM;AAClB,mBAAW,KAAK,WAAW,OAAO,MAAM,EAAE;AAAA,MAC5C;AACA,UAAI,SAAS,MAAM,QAAQ;AACzB,eAAO,KAAK,KAAK;AACjB,mBAAW,KAAK,eAAe,OAAO,MAAM,GAAG;AAAA,MACjD;AACA,UAAI,QAAQ;AACV,eAAO,KAAK,MAAM;AAClB,mBAAW,KAAK,OAAO,OAAO,MAAM,EAAE;AAAA,MACxC;AACA,UAAI,eAAe;AACjB,eAAO,KAAK,cAAc,YAAY,CAAC;AACvC,mBAAW,KAAK,YAAY,OAAO,MAAM,EAAE;AAAA,MAC7C;AACA,UAAI,gBAAgB;AAClB,eAAO,KAAK,eAAe,YAAY,CAAC;AACxC,mBAAW,KAAK,YAAY,OAAO,MAAM,EAAE;AAAA,MAC7C;AACA,UAAI,aAAa;AACf,eAAO,KAAK,WAAW;AACvB,mBAAW,KAAK,yBAAyB,OAAO,MAAM,EAAE;AAAA,MAC1D;AAAA,IACF;AACA,QAAI,WAAW,QAAQ;AACrB,aAAO,YAAY,WAAW,KAAK,OAAO;AAAA,IAC5C;AACA,WAAO,gBAAgB,WAAW,SAAS,KAAK;AAChD,QAAI,OAAO;AACT,aAAO,KAAK,KAAK;AACjB,aAAO,WAAW,OAAO,MAAM;AAAA,IACjC;AAEA,UAAM,SAAS,MAAM,KAAK,MAAM,MAA6B,KAAK,MAAM;AACxE,eAAW,OAAO,OAAO,KAAM,UAAS,GAAG;AAE3C,WAAO,OAAO,YAAY;AAAA,EAC5B;AAA
A;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,OACJ,QACA,MACA,MACA,iBACA;AACA,QAAI,KAAK,WAAW,EAAG,QAAO,CAAC;AAC/B,UAAM,SAAS,MAAM,KAAK,MAAM,QAAQ;AACxC,QAAI,UAAU;AACd,QAAI;AACF,YAAM,OAAO,MAAM,OAAO;AAE1B,YAAM,OAAO,MAAM,OAAO;AAAA,QACxB;AAAA,eACO,KAAK,IAAI;AAAA;AAAA,QAEhB,CAAC,MAAM;AAAA,MACT;AACA,gBAAU,KAAK,WAAW,KAAK,KAAK,CAAC,EAAE,UAAU;AACjD,UAAI,OAAO,oBAAoB,YAAY,YAAY;AACrD,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAEF,YAAM,YAAY,MAAM,QAAQ;AAAA,QAC9B,KAAK,IAAI,OAAO,EAAE,MAAM,KAAK,MAAM;AACjC;AACA,gBAAM,MAAM;AAAA,wBACE,KAAK,IAAI;AAAA;AAEvB,gBAAM,OAAO,CAAC,MAAM,MAAM,QAAQ,SAAS,IAAI;AAC/C,gBAAM,EAAE,KAAK,IAAI,MAAM,OAAO,MAA6B,KAAK,IAAI;AACpE,iBAAO,KAAK,GAAG,CAAC;AAAA,QAClB,CAAC;AAAA,MACH;AAEA,YAAM,OACH;AAAA,QACC;AAAA,sBACY,KAAK,OAAO,KAAK,OAAO,KAAK,UAAU;AAAA,UAC/C,WAAW;AAAA,UACX,IAAI,UAAU,CAAC,EAAE;AAAA,UACjB,UAAU,UAAU,CAAC,EAAE;AAAA,QACzB,CAAC,CAAC;AAAA;AAAA;AAAA,MAGN,EACC,MAAM,CAAC,UAAU;AAChB,0BAAO,MAAM,KAAK;AAClB,cAAM,IAAI;AAAA,UACR;AAAA,UACA;AAAA,UACA,mBAAmB;AAAA,QACrB;AAAA,MACF,CAAC;AACH,aAAO;AAAA,IACT,SAAS,OAAO;AACd,YAAM,OAAO,MAAM,UAAU,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAC7C,YAAM;AAAA,IACR,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,MAAyB,OAAe;AAC5C,UAAM,EAAE,KAAK,IAAI,MAAM,KAAK,MAAM;AAAA,MAChC;AAAA;AAAA,aAEO,KAAK,IAAI;AAAA;AAAA;AAAA;AAAA;AAAA,MAKhB,CAAC,KAAK;AAAA,IACR;AAEA,UAAM,QAAQ,KAAK,SACf,KAAK,OAAO,CAAC,KAAK,MAAM,KAAK,IAAI,KAAK,EAAE,EAAE,GAAG,OAAO,gBAAgB,IACpE;AAEJ,UAAM,SAAkC,CAAC;AACzC,UAAM,KAAK,MAAS,CAAC,MAAM,EAAE,SAAS,yBAAc,OAAO,KAAK,CAAC,GAAG;AAAA,MAClE;AAAA,MACA;AAAA,IACF,CAAC;AACD,WAAO,EAAE,SAAS,KAAK,IAAI,CAAC,EAAE,OAAO,MAAM,MAAM,GAAG,OAAO;AAAA,EAC7D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,MAAM,QAAiB;AAC3B,UAAM,EAAE,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC;AAC9B,UAAM,UAAU,OAAO,IAAI,CAAC,EAAE,OAAO,MAAM,MAAM;AACjD,UAAM,SAAS,MAAM,KAAK,MAAM,QAAQ;AAExC,QAAI;AACF,YAAM,OAAO,MAAM,OAAO;AAE1B,YAAM,OAAO;AAAA,QACX;AAAA,sBACc,KAAK,IAAI;AAAA;AAAA;AAAA;AAAA,QAIvB,CAAC,OAAO;AAAA,MACV;AAEA,YAAM,EAAE,KAAK,IAAI,MAAM,OAAO;AAAA,QAK5B;AAAA;AAAA,0BAEkB,KAAK,IAAI;AAAA;AAAA;AAAA;AAAA,iBAIlB,KAAK,IAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QASlB,CAAC,SAAS,IAAI,IAAI,KAAK,OAAO,WAAW;AAAA,MAC3C;AACA,YAAM,OAAO,MAAM,QAAQ;AAE3B,aAAO,KAAK,IAAI,CAAC,EAAE,QAAQ,WAAW,MAAM,OAAO;AAAA,QACjD;AAAA,QACA;AAAA,QACA,IAAI;AAAA,QACJ;AAAA,QACA,OAAO;AAAA,MACT,EAAE;AAAA,IACJ,SAAS,OAAO;AACd,YAAM,OAAO,MAAM,UAAU,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAC7C,YAAM;AAAA,IACR,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,IAAI,QAAiB;AACzB,UAAM,SAAS,MAAM,KAAK,MAAM,QAAQ;AAExC,QAAI;AACF,YAAM,OAAO,MAAM,OAAO;AAC1B,iBAAW,EAAE,QAAQ,IAAI,IAAI,OAAO,MAAM,KAAK,QAAQ;AACrD,cAAM,OAAO;AAAA,UACX,UAAU,KAAK,IAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAWnB,CAAC,QAAQ,IAAI,IAAI,OAAO,KAAK;AAAA,QAC/B;AAAA,MACF;AACA,YAAM,OAAO,MAAM,QAAQ;AAAA,IAC7B,QAAQ;AAEN,YAAM,OAAO,MAAM,UAAU,EAAE,MAAM,MAAM;AAAA,MAAC,CAAC;AAAA,IAC/C,UAAE;AACA,aAAO,QAAQ;AAAA,IACjB;AAAA,EACF;AACF;","names":["pg"]}