@rotorsoft/act-pg 0.4.7 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -60,8 +60,7 @@ var DEFAULT_CONFIG = {
   user: "postgres",
   password: "postgres",
   schema: "public",
-  table: "events",
-  leaseMillis: 3e4
+  table: "events"
 };
 var PostgresStore = class {
   _pool;
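The removed `leaseMillis` default reflects an API change: the lease duration is no longer part of the store configuration and is instead passed per call to `lease()` (see the lease hunk further down). A minimal usage sketch against 0.5.x; the schema name is an arbitrary example:

```ts
import { PostgresStore } from "@rotorsoft/act-pg";

// 0.4.x: new PostgresStore({ schema: "my_app", leaseMillis: 30_000 })
// 0.5.x: leaseMillis is gone from the config; the duration moves to lease().
const store = new PostgresStore({ schema: "my_app", table: "events" });
await store.seed(); // creates the events and events_streams tables
```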
@@ -127,11 +126,13 @@ var PostgresStore = class {
       await client.query(
         `CREATE TABLE IF NOT EXISTS ${this._fqs} (
         stream varchar(100) COLLATE pg_catalog."default" PRIMARY KEY,
+        source varchar(100) COLLATE pg_catalog."default",
         at int NOT NULL DEFAULT -1,
         retry smallint NOT NULL DEFAULT 0,
         blocked boolean NOT NULL DEFAULT false,
+        error text,
         leased_at int,
-        leased_by uuid,
+        leased_by text,
         leased_until timestamptz
       ) TABLESPACE pg_default;`
       );
@@ -179,13 +180,12 @@ var PostgresStore = class {
    *
    * @param callback Function called for each event found
    * @param query (Optional) Query filter (stream, names, before, after, etc.)
-   * @param withSnaps (Optional) If true, includes only events after the last snapshot
    * @returns The number of events found
    *
    * @example
    * await store.query((event) => console.log(event), { stream: "A" });
    */
-  async query(callback, query, withSnaps = false) {
+  async query(callback, query) {
     const {
       stream,
       names,
@@ -195,17 +195,13 @@ var PostgresStore = class {
       created_before,
       created_after,
       backward,
-      correlation
+      correlation,
+      with_snaps = false
     } = query || {};
     let sql = `SELECT * FROM ${this._fqt}`;
     const conditions = [];
     const values = [];
-    if (withSnaps) {
-      conditions.push(
-        `id>=COALESCE((SELECT id FROM ${this._fqt} WHERE stream='${stream}' AND name='${import_act.SNAP_EVENT}' ORDER BY id DESC LIMIT 1), 0)`
-      );
-      conditions.push(`stream='${stream}'`);
-    } else if (query) {
+    if (query) {
       if (typeof after !== "undefined") {
         values.push(after);
         conditions.push(`id>$${values.length}`);
@@ -214,7 +210,7 @@ var PostgresStore = class {
       }
       if (stream) {
         values.push(stream);
-        conditions.push(`stream=$${values.length}`);
+        conditions.push(`stream ~ $${values.length}`);
       }
       if (names && names.length) {
         values.push(names);
@@ -236,6 +232,9 @@ var PostgresStore = class {
         values.push(correlation);
         conditions.push(`meta->>'correlation'=$${values.length}`);
       }
+      if (!with_snaps) {
+        conditions.push(`name <> '${import_act.SNAP_EVENT}'`);
+      }
     }
     if (conditions.length) {
       sql += " WHERE " + conditions.join(" AND ");
@@ -274,6 +273,7 @@ var PostgresStore = class {
       version = last.rowCount ? last.rows[0].version : -1;
       if (typeof expectedVersion === "number" && version !== expectedVersion)
         throw new import_act.ConcurrencyError(
+          stream,
           version,
           msgs,
           expectedVersion
@@ -301,6 +301,7 @@ var PostgresStore = class {
       ).catch((error) => {
         import_act.logger.error(error);
         throw new import_act.ConcurrencyError(
+          stream,
           version,
           msgs,
           expectedVersion || -1
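Both `ConcurrencyError` sites now pass the stream as the first constructor argument, so an optimistic-concurrency failure can identify the stream that collided. A hedged sketch of handling it on commit; the metadata fields are placeholders, not taken from this diff:

```ts
import { ConcurrencyError } from "@rotorsoft/act";

try {
  await store.commit(
    "order-1",                                   // stream
    [{ name: "OrderCreated", data: { id: 1 } }], // messages
    { correlation: "c-1", causation: {} },       // hypothetical EventMeta
    -1                                           // expect an empty stream
  );
} catch (err) {
  if (err instanceof ConcurrencyError) {
    // 0.5.x: new ConcurrencyError(stream, version, msgs, expectedVersion)
  }
}
```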
@@ -316,80 +317,83 @@ var PostgresStore = class {
     }
   }
   /**
-   * Fetch a batch of events and streams for processing (drain cycle).
-   *
-   * @param limit The maximum number of events to fetch
-   * @returns An object with arrays of streams and events
+   * Polls the store for unblocked streams needing processing, ordered by lease watermark ascending.
+   * @param limit - Maximum number of streams to poll.
+   * @param descending - Whether to poll streams in descending order (aka poll the most advanced first).
+   * @returns The polled streams.
    */
-  async fetch(limit) {
+  async poll(limit, descending = false) {
     const { rows } = await this._pool.query(
       `
       SELECT stream, at
       FROM ${this._fqs}
-      WHERE blocked=false
-      ORDER BY at ASC
+      WHERE blocked=false AND (leased_by IS NULL OR leased_until <= NOW())
+      ORDER BY at ${descending ? "DESC" : "ASC"}
       LIMIT $1::integer
       `,
       [limit]
     );
-    const after = rows.length ? rows.reduce((min, r) => Math.min(min, r.at), Number.MAX_SAFE_INTEGER) : -1;
-    const events = [];
-    await this.query((e) => e.name !== import_act.SNAP_EVENT && events.push(e), {
-      after,
-      limit
-    });
-    return { streams: rows.map(({ stream }) => stream), events };
+    return rows;
   }
   /**
    * Lease streams for reaction processing, marking them as in-progress.
    *
-   * @param leases Array of lease objects (stream, at, etc.)
+   * @param leases - Lease requests for streams, including end-of-lease watermark, lease holder, and source stream.
+   * @param millis - Lease duration in milliseconds.
    * @returns Array of leased objects with updated lease info
    */
-  async lease(leases) {
-    const { by, at } = leases.at(0);
-    const streams = leases.map(({ stream }) => stream);
+  async lease(leases, millis) {
     const client = await this._pool.connect();
     try {
       await client.query("BEGIN");
       await client.query(
         `
-        INSERT INTO ${this._fqs} (stream)
-        SELECT UNNEST($1::text[])
+        INSERT INTO ${this._fqs} (stream, source)
+        SELECT lease->>'stream', lease->>'source'
+        FROM jsonb_array_elements($1::jsonb) AS lease
         ON CONFLICT (stream) DO NOTHING
         `,
-        [streams]
+        [JSON.stringify(leases)]
       );
       const { rows } = await client.query(
         `
-        WITH free AS (
-          SELECT * FROM ${this._fqs}
-          WHERE stream = ANY($1::text[]) AND (leased_by IS NULL OR leased_until <= NOW())
-          FOR UPDATE
-        )
-        UPDATE ${this._fqs} U
-        SET
-          leased_by = $2::uuid,
-          leased_at = $3::integer,
-          leased_until = NOW() + ($4::integer || ' milliseconds')::interval
-        FROM free
-        WHERE U.stream = free.stream
-        RETURNING U.stream, U.leased_at, U.retry
-        `,
-        [streams, by, at, this.config.leaseMillis]
+        WITH input AS (
+          SELECT * FROM jsonb_to_recordset($1::jsonb)
+          AS x(stream text, at int, by text)
+        ), free AS (
+          SELECT s.stream FROM ${this._fqs} s
+          JOIN input i ON s.stream = i.stream
+          WHERE s.leased_by IS NULL OR s.leased_until <= NOW()
+          FOR UPDATE
+        )
+        UPDATE ${this._fqs} s
+        SET
+          leased_by = i.by,
+          leased_at = i.at,
+          leased_until = NOW() + ($2::integer || ' milliseconds')::interval,
+          retry = CASE WHEN $2::integer > 0 THEN s.retry + 1 ELSE s.retry END
+        FROM input i, free f
+        WHERE s.stream = f.stream AND s.stream = i.stream
+        RETURNING s.stream, s.source, s.leased_at, s.leased_by, s.leased_until, s.retry
+        `,
+        [JSON.stringify(leases), millis]
       );
       await client.query("COMMIT");
-      return rows.map(({ stream, leased_at, retry }) => ({
-        stream,
-        by,
-        at: leased_at,
-        retry,
-        block: false
-      }));
+      return rows.map(
+        ({ stream, source, leased_at, leased_by, leased_until, retry }) => ({
+          stream,
+          source: source ?? void 0,
+          at: leased_at,
+          by: leased_by,
+          until: new Date(leased_until),
+          retry
+        })
+      );
     } catch (error) {
       await client.query("ROLLBACK").catch(() => {
       });
-      throw error;
+      import_act.logger.error(error);
+      return [];
     } finally {
       client.release();
     }
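`fetch()` is replaced by `poll()`, which returns only stream positions, and `lease()` now takes the lease duration explicitly, identifies lease holders by plain text instead of a uuid, and bumps `retry` inside the SQL. A rough sketch of a drain step under these assumptions; the worker id, batch size, and duration are arbitrary, and the exact `Lease` shape comes from `@rotorsoft/act`:

```ts
// 1. Poll up to 10 unblocked, unleased streams (lowest watermark first).
const polled = await store.poll(10); // [{ stream, at }, ...]

// 2. Try to lease them for 30 seconds.
const leased = await store.lease(
  polled.map(({ stream, at }) => ({ stream, at, by: "worker-1" })),
  30_000
);
// Each granted lease comes back as { stream, source?, at, by, until, retry }.
```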
@@ -397,33 +401,86 @@ var PostgresStore = class {
   /**
    * Acknowledge and release leases after processing, updating stream positions.
    *
-   * @param leases Array of lease objects to acknowledge
-   * @returns Promise that resolves when leases are acknowledged
+   * @param leases - Leases to acknowledge, including last processed watermark and lease holder.
+   * @returns Acked leases.
    */
   async ack(leases) {
     const client = await this._pool.connect();
     try {
       await client.query("BEGIN");
-      for (const { stream, by, at, retry, block } of leases) {
-        await client.query(
-          `UPDATE ${this._fqs}
-          SET
-            at = $3::integer,
-            retry = $4::integer,
-            blocked = $5::boolean,
-            leased_by = NULL,
-            leased_at = NULL,
-            leased_until = NULL
-          WHERE
-            stream = $1::text
-            AND leased_by = $2::uuid`,
-          [stream, by, at, retry, block]
-        );
-      }
+      const { rows } = await client.query(
+        `
+        WITH input AS (
+          SELECT * FROM jsonb_to_recordset($1::jsonb)
+          AS x(stream text, by text, at int)
+        )
+        UPDATE ${this._fqs} AS s
+        SET
+          at = i.at,
+          retry = -1,
+          leased_by = NULL,
+          leased_at = NULL,
+          leased_until = NULL
+        FROM input i
+        WHERE s.stream = i.stream AND s.leased_by = i.by
+        RETURNING s.stream, s.source, s.at, s.retry
+        `,
+        [JSON.stringify(leases)]
+      );
+      await client.query("COMMIT");
+      return rows.map((row) => ({
+        stream: row.stream,
+        source: row.source ?? void 0,
+        at: row.at,
+        by: "",
+        retry: row.retry
+      }));
+    } catch (error) {
+      await client.query("ROLLBACK").catch(() => {
+      });
+      import_act.logger.error(error);
+      return [];
+    } finally {
+      client.release();
+    }
+  }
+  /**
+   * Block a stream for processing after failing to process and reaching max retries with blocking enabled.
+   * @param leases - Leases to block, including lease holder and last error message.
+   * @returns Blocked leases.
+   */
+  async block(leases) {
+    const client = await this._pool.connect();
+    try {
+      await client.query("BEGIN");
+      const { rows } = await client.query(
+        `
+        WITH input AS (
+          SELECT * FROM jsonb_to_recordset($1::jsonb)
+          AS x(stream text, by text, error text)
+        )
+        UPDATE ${this._fqs} AS s
+        SET blocked = true, error = i.error
+        FROM input i
+        WHERE s.stream = i.stream AND s.leased_by = i.by AND s.blocked = false
+        RETURNING s.stream, s.source, s.at, i.by, s.retry, s.error
+        `,
+        [JSON.stringify(leases)]
+      );
       await client.query("COMMIT");
-    } catch {
+      return rows.map((row) => ({
+        stream: row.stream,
+        source: row.source ?? void 0,
+        at: row.at,
+        by: row.by,
+        retry: row.retry,
+        error: row.error
+      }));
+    } catch (error) {
       await client.query("ROLLBACK").catch(() => {
       });
+      import_act.logger.error(error);
+      return [];
     } finally {
       client.release();
     }
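`ack()` now batches all leases into a single jsonb-driven UPDATE and resets `retry` to -1, and the new `block()` method marks a stream as blocked and records the last error; `lease()`, `ack()`, and `block()` all log the error and return an empty array on database failure instead of rethrowing. A hedged continuation of the sketch above; the watermark variable is hypothetical:

```ts
// Success path: advance the stream watermarks and release the leases.
const acked = await store.ack(
  leased.map((l) => ({ ...l, at: lastHandledId })) // lastHandledId: position reached by the handler
);

// Failure path (after max retries with blocking enabled): block the streams
// and persist the last error message in the new `error` column.
const blocked = await store.block(
  leased.map((l) => ({ ...l, error: "reaction handler failed" }))
);
```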
package/dist/index.cjs.map CHANGED
@@ -1 +1 @@
- (single-line generated source map for dist/index.cjs, embedding the 0.4.7 TypeScript sources)
+ (single-line generated source map for dist/index.cjs, embedding the 0.5.1 TypeScript sources with the updated config, query, poll, lease, ack, and block implementations)