@haathie/pgmb 0.2.8 → 0.2.10

package/README.md CHANGED
@@ -32,8 +32,7 @@ Install PGMB by running the following command:
  npm install @haathie/pgmb
  ```
 
- Note: PGMB directly exports typescript files, so if you're using a bundler -- ensure it can handle typescript files. NodeJs (v22+), Deno, Bun can all run typescript files natively -- so about time we utilised this!
- For commonjs compatibility, the compiled JS files are also exported.
+ Note: PGMB is an ESM module.
 
  Before using PGMB, you'll need to run the setup script to create the required tables, functions & triggers in your database. You can do this by running:
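The note above replaces the old TypeScript-source guidance: from this version the published files are ESM only. A minimal consumption sketch, assuming a node-postgres `Pool` satisfies `PgClientLike`; the `PgmbClient` named export itself is confirmed by `lib/index.js` and `lib/client.js` later in this diff.

```ts
// ESM-only usage sketch; the pg Pool wiring is illustrative, not part of this diff.
import { Pool } from 'pg'
import { PgmbClient } from '@haathie/pgmb'

const pool = new Pool({ connectionString: process.env.DATABASE_URL })
const client = new PgmbClient({ client: pool, groupId: 'my-service' })
await client.init()
```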
 
package/lib/abortable-async-iterator.d.ts ADDED
@@ -0,0 +1,14 @@
+ type AAResult<T> = IteratorResult<T>;
+ export declare class AbortableAsyncIterator<T> implements AsyncIterableIterator<T> {
+ #private;
+ readonly signal: AbortSignal;
+ readonly onEnd: () => void;
+ ended: boolean;
+ constructor(signal: AbortSignal, onEnd?: () => void);
+ next(): Promise<AAResult<T>>;
+ enqueue(value: T): void;
+ throw(reason?: unknown): Promise<AAResult<T>>;
+ return(value?: any): Promise<AAResult<T>>;
+ [Symbol.asyncIterator](): this;
+ }
+ export {};
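A rough sketch of the iterator contract declared above. The class is not re-exported from the package root, so the relative import mirrors how `lib/client.js` consumes it; the producer/consumer flow is illustrative only.

```ts
// Illustrative driver for AbortableAsyncIterator, based only on the signatures above.
import { AbortableAsyncIterator } from './abortable-async-iterator.js'

const ac = new AbortController()
const events = new AbortableAsyncIterator<string>(ac.signal, () => console.log('iterator ended'))

events.enqueue('first')
events.enqueue('second')
setTimeout(() => ac.abort(), 1_000) // aborting the signal completes the iteration

for await (const value of events) {
  console.log('received', value)
}
```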
package/lib/abortable-async-iterator.js CHANGED
@@ -1,11 +1,5 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.AbortableAsyncIterator = void 0;
- const assert_1 = __importDefault(require("assert"));
- class AbortableAsyncIterator {
+ import assert from 'assert';
+ export class AbortableAsyncIterator {
  signal;
  onEnd;
  ended = false;
@@ -19,8 +13,8 @@ class AbortableAsyncIterator {
  signal.addEventListener('abort', this.#onAbort);
  }
  async next() {
- (0, assert_1.default)(!this.ended, 'Iterator has already been completed');
- (0, assert_1.default)(!this.#locked, 'Concurrent calls to next() are not allowed');
+ assert(!this.ended, 'Iterator has already been completed');
+ assert(!this.#locked, 'Concurrent calls to next() are not allowed');
  let nextItem = this.#queue.shift();
  if (nextItem) {
  return { value: nextItem, done: false };
@@ -39,7 +33,7 @@ class AbortableAsyncIterator {
  return { value: undefined, done: true };
  }
  enqueue(value) {
- (0, assert_1.default)(!this.ended, 'Iterator has already been completed');
+ assert(!this.ended, 'Iterator has already been completed');
  this.#queue.push(value);
  this.#resolve?.();
  }
@@ -83,4 +77,3 @@ class AbortableAsyncIterator {
  return this;
  }
  }
- exports.AbortableAsyncIterator = AbortableAsyncIterator;
package/lib/batcher.d.ts ADDED
@@ -0,0 +1,12 @@
+ import type { IEventData, PGMBEventBatcherOpts } from './types.ts';
+ export declare class PGMBEventBatcher<T extends IEventData> {
+ #private;
+ constructor({ shouldLog, publish, flushIntervalMs, maxBatchSize, logger }: PGMBEventBatcherOpts<T>);
+ end(): Promise<void>;
+ /**
+ * Enqueue a message to be published, will be flushed to the database
+ * when flush() is called (either manually or via interval)
+ */
+ enqueue(msg: T): void;
+ flush(): Promise<void>;
+ }
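`PGMBEventBatcher` is the batching base class that `PgmbClient` (below) extends, so the `enqueue()`/`flush()`/`end()` surface declared here is also inherited by the client. A sketch of that flow, reusing a constructed client; the field names on the queued event follow the `publish()` mapping in `lib/client.js`, and the flush-interval semantics are assumed from the option names, since `PGMBEventBatcherOpts` is not part of this diff.

```ts
// Sketch only: `client` stands in for a constructed PgmbClient (see the README sketch above).
import type { PgmbClient } from '@haathie/pgmb'

declare const client: PgmbClient

client.enqueue({ topic: 'user.created', payload: { id: 'u_123' } })
client.enqueue({ topic: 'user.created', payload: { id: 'u_456' } })
await client.flush() // push the queued batch to the database now…
await client.end()   // …or end(), which presumably flushes and stops the interval
```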
package/lib/batcher.js CHANGED
@@ -1,7 +1,4 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.PGMBEventBatcher = void 0;
- class PGMBEventBatcher {
+ export class PGMBEventBatcher {
  #publish;
  #flushIntervalMs;
  #maxBatchSize;
@@ -68,4 +65,3 @@ class PGMBEventBatcher {
  }
  }
  }
- exports.PGMBEventBatcher = PGMBEventBatcher;
package/lib/client.d.ts ADDED
@@ -0,0 +1,77 @@
+ import { type Logger } from 'pino';
+ import { PGMBEventBatcher } from './batcher.ts';
+ import type { PgClientLike } from './query-types.ts';
+ import type { GetWebhookInfoFn, IEphemeralListener, IEventData, IEventHandler, IFindEventsFn, IReadEvent, IReadNextEventsFn, ISplitFn, Pgmb2ClientOpts, registerReliableHandlerParams, RegisterSubscriptionParams } from './types.ts';
+ type IReliableListener<T extends IEventData> = {
+ type: 'reliable';
+ handler: IEventHandler<T>;
+ removeOnEmpty?: boolean;
+ extra?: unknown;
+ splitBy?: ISplitFn<T>;
+ queue: {
+ item: IReadEvent<T>;
+ checkpoint: Checkpoint;
+ }[];
+ };
+ type IFireAndForgetListener<T extends IEventData> = {
+ type: 'fire-and-forget';
+ stream: IEphemeralListener<T>;
+ };
+ type IListener<T extends IEventData> = IFireAndForgetListener<T> | IReliableListener<T>;
+ type Checkpoint = {
+ activeTasks: number;
+ nextCursor: string;
+ cancelled?: boolean;
+ };
+ export type IListenerStore<T extends IEventData> = {
+ values: {
+ [id: string]: IListener<T>;
+ };
+ };
+ export declare class PgmbClient<T extends IEventData = IEventData> extends PGMBEventBatcher<T> {
+ #private;
+ readonly client: PgClientLike;
+ readonly logger: Logger;
+ readonly groupId: string;
+ readonly readEventsIntervalMs: number;
+ readonly pollEventsIntervalMs: number;
+ readonly readChunkSize: number;
+ readonly subscriptionMaintenanceMs: number;
+ readonly tableMaintenanceMs: number;
+ readonly maxActiveCheckpoints: number;
+ readonly readNextEvents: IReadNextEventsFn;
+ readonly findEvents?: IFindEventsFn;
+ readonly getWebhookInfo: GetWebhookInfoFn;
+ readonly webhookHandler: IEventHandler<T>;
+ readonly listeners: {
+ [subId: string]: IListenerStore<T>;
+ };
+ constructor({ client, groupId, logger, readEventsIntervalMs, readChunkSize, maxActiveCheckpoints, pollEventsIntervalMs, subscriptionMaintenanceMs, tableMaintainanceMs, webhookHandlerOpts: { splitBy: whSplitBy, ...whHandlerOpts }, getWebhookInfo, readNextEvents, findEvents, ...batcherOpts }: Pgmb2ClientOpts<T>);
+ init(): Promise<void>;
+ end(): Promise<void>;
+ publish(events: T[], client?: PgClientLike): Promise<import("./queries.ts").IWriteEventsResult[]>;
+ assertSubscription(opts: RegisterSubscriptionParams, client?: PgClientLike): Promise<import("./queries.ts").IAssertSubscriptionResult>;
+ /**
+ * Registers a fire-and-forget handler, returning an async iterator
+ * that yields events as they arrive. The client does not wait for event
+ * processing acknowledgements. Useful for cases where data is eventually
+ * consistent, or when event delivery isn't critical
+ * (eg. http SSE, websockets).
+ */
+ registerFireAndForgetHandler(opts: RegisterSubscriptionParams): Promise<IEphemeralListener<T>>;
+ /**
+ * Registers a reliable handler for the given subscription params.
+ * If the handler throws an error, client will rollback to the last known
+ * good cursor, and re-deliver events.
+ * To avoid a full redelivery of a batch, a retry strategy can be provided
+ * to retry failed events by the handler itself, allowing for delayed retries
+ * with backoff, and without disrupting the overall event flow.
+ */
+ registerReliableHandler({ retryOpts, name, splitBy, ...opts }: registerReliableHandlerParams<T>, handler: IEventHandler<T>): Promise<{
+ subscriptionId: string;
+ cancel: () => void;
+ }>;
+ removeSubscription(subId: string): Promise<void>;
+ readChanges(): Promise<number>;
+ }
+ export {};
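The doc comments above describe two consumption styles. A sketch of both follows, with the caveat that `RegisterSubscriptionParams` and the exact `IEventHandler` argument shape are not shown in this diff, so `subOpts` and the handler parameter are left abstract.

```ts
import type { RegisterSubscriptionParams } from '@haathie/pgmb'
import { PgmbClient } from '@haathie/pgmb'

declare const client: PgmbClient
declare const subOpts: RegisterSubscriptionParams

// 1. Fire-and-forget: an async iterator of events, no processing acknowledgements.
const stream = await client.registerFireAndForgetHandler(subOpts)
void (async () => {
  for await (const event of stream) {
    console.log('got event', event)
  }
})()

// 2. Reliable: the cursor only advances after the handler resolves; a thrown error
// rolls back to the last good cursor and events are re-delivered (optionally via retryOpts).
const { subscriptionId, cancel } = await client.registerReliableHandler(
  { ...subOpts, name: 'audit-log' },
  async batch => {
    // process the batch here; throw to trigger redelivery
    console.log('processing', batch)
  },
)
console.log('listening on subscription', subscriptionId)
// later, to stop this handler:
cancel()
```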
package/lib/client.js CHANGED
@@ -1,19 +1,13 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.PgmbClient = void 0;
- const assert_1 = __importDefault(require("assert"));
- const pino_1 = require("pino");
- const promises_1 = require("timers/promises");
- const abortable_async_iterator_ts_1 = require("./abortable-async-iterator.js");
- const batcher_ts_1 = require("./batcher.js");
- const queries_ts_1 = require("./queries.js");
- const retry_handler_ts_1 = require("./retry-handler.js");
- const utils_ts_1 = require("./utils.js");
- const webhook_handler_ts_1 = require("./webhook-handler.js");
- class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
+ import assert from 'assert';
+ import { pino } from 'pino';
+ import { setTimeout } from 'timers/promises';
+ import { AbortableAsyncIterator } from "./abortable-async-iterator.js";
+ import { PGMBEventBatcher } from "./batcher.js";
+ import { assertGroup, assertSubscription, deleteSubscriptions, getConfigValue, maintainEventsTable, markSubscriptionsActive, pollForEvents, readNextEvents as defaultReadNextEvents, releaseGroupLock, removeExpiredSubscriptions, setGroupCursor, writeEvents, } from "./queries.js";
+ import { createRetryHandler, normaliseRetryEventsInReadEventMap, } from "./retry-handler.js";
+ import { getEnvNumber } from "./utils.js";
+ import { createWebhookHandler } from "./webhook-handler.js";
+ export class PgmbClient extends PGMBEventBatcher {
  client;
  logger;
  groupId;
@@ -37,7 +31,7 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  #tableMaintainTask;
  #inMemoryCursor = null;
  #activeCheckpoints = [];
- constructor({ client, groupId, logger = (0, pino_1.pino)(), readEventsIntervalMs = (0, utils_ts_1.getEnvNumber)('PGMB_READ_EVENTS_INTERVAL_MS', 1000), readChunkSize = (0, utils_ts_1.getEnvNumber)('PGMB_READ_CHUNK_SIZE', 1000), maxActiveCheckpoints = (0, utils_ts_1.getEnvNumber)('PGMB_MAX_ACTIVE_CHECKPOINTS', 10), pollEventsIntervalMs = (0, utils_ts_1.getEnvNumber)('PGMB_POLL_EVENTS_INTERVAL_MS', 1000), subscriptionMaintenanceMs = (0, utils_ts_1.getEnvNumber)('PGMB_SUBSCRIPTION_MAINTENANCE_S', 60) * 1000, tableMaintainanceMs = (0, utils_ts_1.getEnvNumber)('PGMB_TABLE_MAINTENANCE_M', 15) * 60 * 1000, webhookHandlerOpts: { splitBy: whSplitBy, ...whHandlerOpts } = {}, getWebhookInfo = () => ({}), readNextEvents = queries_ts_1.readNextEvents.run.bind(queries_ts_1.readNextEvents), findEvents, ...batcherOpts }) {
+ constructor({ client, groupId, logger = pino(), readEventsIntervalMs = getEnvNumber('PGMB_READ_EVENTS_INTERVAL_MS', 1000), readChunkSize = getEnvNumber('PGMB_READ_CHUNK_SIZE', 1000), maxActiveCheckpoints = getEnvNumber('PGMB_MAX_ACTIVE_CHECKPOINTS', 10), pollEventsIntervalMs = getEnvNumber('PGMB_POLL_EVENTS_INTERVAL_MS', 1000), subscriptionMaintenanceMs = getEnvNumber('PGMB_SUBSCRIPTION_MAINTENANCE_S', 60) * 1000, tableMaintainanceMs = getEnvNumber('PGMB_TABLE_MAINTENANCE_M', 15) * 60 * 1000, webhookHandlerOpts: { splitBy: whSplitBy, ...whHandlerOpts } = {}, getWebhookInfo = () => ({}), readNextEvents = defaultReadNextEvents.run.bind(defaultReadNextEvents), findEvents, ...batcherOpts }) {
  super({
  ...batcherOpts,
  logger,
@@ -51,7 +45,7 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  this.pollEventsIntervalMs = pollEventsIntervalMs;
  this.subscriptionMaintenanceMs = subscriptionMaintenanceMs;
  this.maxActiveCheckpoints = maxActiveCheckpoints;
- this.webhookHandler = (0, webhook_handler_ts_1.createWebhookHandler)(whHandlerOpts);
+ this.webhookHandler = createWebhookHandler(whHandlerOpts);
  this.#webhookHandlerOpts = { splitBy: whSplitBy };
  this.getWebhookInfo = getWebhookInfo;
  this.tableMaintenanceMs = tableMaintainanceMs;
@@ -63,25 +57,25 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  if ('connect' in this.client) {
  this.client.on('remove', this.#onPoolClientRemoved);
  }
- const [pgCronRslt] = await queries_ts_1.getConfigValue
+ const [pgCronRslt] = await getConfigValue
  .run({ key: 'use_pg_cron' }, this.client);
  const isPgCronEnabled = pgCronRslt?.value === 'true';
  if (!isPgCronEnabled) {
  // maintain event table
- await queries_ts_1.maintainEventsTable.run(undefined, this.client);
+ await maintainEventsTable.run(undefined, this.client);
  this.logger.debug('maintained events table');
  if (this.pollEventsIntervalMs) {
- this.#pollTask = this.#startLoop(queries_ts_1.pollForEvents.run.bind(queries_ts_1.pollForEvents, undefined, this.client), this.pollEventsIntervalMs);
+ this.#pollTask = this.#startLoop(pollForEvents.run.bind(pollForEvents, undefined, this.client), this.pollEventsIntervalMs);
  }
  if (this.tableMaintenanceMs) {
- this.#tableMaintainTask = this.#startLoop(queries_ts_1.maintainEventsTable.run
- .bind(queries_ts_1.maintainEventsTable, undefined, this.client), this.tableMaintenanceMs);
+ this.#tableMaintainTask = this.#startLoop(maintainEventsTable.run
+ .bind(maintainEventsTable, undefined, this.client), this.tableMaintenanceMs);
  }
  }
- await queries_ts_1.assertGroup.run({ id: this.groupId }, this.client);
+ await assertGroup.run({ id: this.groupId }, this.client);
  this.logger.debug({ groupId: this.groupId }, 'asserted group exists');
  // clean up expired subscriptions on start
- const [{ deleted }] = await queries_ts_1.removeExpiredSubscriptions.run({ groupId: this.groupId, activeIds: [] }, this.client);
+ const [{ deleted }] = await removeExpiredSubscriptions.run({ groupId: this.groupId, activeIds: [] }, this.client);
  this.logger.debug({ deleted }, 'removed expired subscriptions');
  this.#readTask = this.#startLoop(this.readChanges.bind(this), this.readEventsIntervalMs);
  if (this.subscriptionMaintenanceMs) {
@@ -93,7 +87,7 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  await super.end();
  this.#endAc.abort();
  while (this.#activeCheckpoints.length) {
- await (0, promises_1.setTimeout)(100);
+ await setTimeout(100);
  }
  for (const id in this.listeners) {
  delete this.listeners[id];
@@ -111,14 +105,14 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  this.#activeCheckpoints = [];
  }
  publish(events, client = this.client) {
- return queries_ts_1.writeEvents.run({
+ return writeEvents.run({
  topics: events.map((e) => e.topic),
  payloads: events.map((e) => e.payload),
  metadatas: events.map((e) => e.metadata || null),
  }, client);
  }
  async assertSubscription(opts, client = this.client) {
- const [rslt] = await queries_ts_1.assertSubscription.run({ ...opts, groupId: this.groupId }, client);
+ const [rslt] = await assertSubscription.run({ ...opts, groupId: this.groupId }, client);
  this.logger.debug({ ...opts, ...rslt }, 'asserted subscription');
  return rslt;
  }
@@ -144,10 +138,10 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  async registerReliableHandler({ retryOpts, name = createListenerId(), splitBy, ...opts }, handler) {
  const { id: subId } = await this.assertSubscription(opts);
  if (retryOpts) {
- handler = (0, retry_handler_ts_1.createRetryHandler)(retryOpts, handler);
+ handler = createRetryHandler(retryOpts, handler);
  }
  const lts = (this.listeners[subId] ||= { values: {} });
- (0, assert_1.default)(!lts.values[name], `Handler with id ${name} already registered for subscription ${subId}.` +
+ assert(!lts.values[name], `Handler with id ${name} already registered for subscription ${subId}.` +
  ' Cancel the existing one or use a different id.');
  this.listeners[subId].values[name] = {
  type: 'reliable',
@@ -161,7 +155,7 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  };
  }
  async removeSubscription(subId) {
- await queries_ts_1.deleteSubscriptions.run({ ids: [subId] }, this.client);
+ await deleteSubscriptions.run({ ids: [subId] }, this.client);
  this.logger.debug({ subId }, 'deleted subscription');
  const existingSubs = this.listeners[subId]?.values;
  delete this.listeners[subId];
@@ -173,7 +167,7 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  }
  #listenForEvents(subId) {
  const lid = createListenerId();
- const iterator = new abortable_async_iterator_ts_1.AbortableAsyncIterator(this.#endAc.signal, () => this.#removeListener(subId, lid));
+ const iterator = new AbortableAsyncIterator(this.#endAc.signal, () => this.#removeListener(subId, lid));
  const stream = iterator;
  stream.id = subId;
  this.listeners[subId] ||= { values: {} };
@@ -191,9 +185,9 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  }
  async #maintainSubscriptions() {
  const activeIds = Object.keys(this.listeners);
- await queries_ts_1.markSubscriptionsActive.run({ ids: activeIds }, this.client);
+ await markSubscriptionsActive.run({ ids: activeIds }, this.client);
  this.logger.trace({ activeSubscriptions: activeIds.length }, 'marked subscriptions as active');
- const [{ deleted }] = await queries_ts_1.removeExpiredSubscriptions.run({ groupId: this.groupId, activeIds }, this.client);
+ const [{ deleted }] = await removeExpiredSubscriptions.run({ groupId: this.groupId, activeIds }, this.client);
  this.logger.trace({ deleted }, 'removed expired subscriptions');
  }
  async readChanges() {
@@ -240,7 +234,7 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  webhookCount++;
  }
  }
- const { map: subToEventMap, retryEvents, retryItemCount } = await (0, retry_handler_ts_1.normaliseRetryEventsInReadEventMap)(rows, this.client, this.findEvents);
+ const { map: subToEventMap, retryEvents, retryItemCount } = await normaliseRetryEventsInReadEventMap(rows, this.client, this.findEvents);
  const subs = Object.entries(subToEventMap);
  const checkpoint = {
  activeTasks: 0,
@@ -289,7 +283,7 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  */
  async #enqueueEventInReliableListener(subId, lid, item, checkpoint) {
  const lt = this.listeners[subId]?.values?.[lid];
- (0, assert_1.default)(lt?.type === 'reliable', 'invalid listener type: ' + lt.type);
+ assert(lt?.type === 'reliable', 'invalid listener type: ' + lt.type);
  const { handler, queue, removeOnEmpty, extra, splitBy = defaultSplitBy } = lt;
  queue.push({ item, checkpoint });
  checkpoint.activeTasks++;
@@ -328,7 +322,7 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  });
  }
  checkpoint.activeTasks--;
- (0, assert_1.default)(checkpoint.activeTasks >= 0, 'internal: checkpoint.activeTasks < 0');
+ assert(checkpoint.activeTasks >= 0, 'internal: checkpoint.activeTasks < 0');
  if (!checkpoint.activeTasks) {
  await this.#updateCursorFromCompletedCheckpoints();
  }
@@ -371,7 +365,7 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  return;
  }
  const releaseLock = !this.#activeCheckpoints.length;
- await queries_ts_1.setGroupCursor.run({
+ await setGroupCursor.run({
  groupId: this.groupId,
  cursor: latestMaxCursor,
  releaseLock: releaseLock,
@@ -400,7 +394,7 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  return;
  }
  try {
- await queries_ts_1.releaseGroupLock.run({ groupId: this.groupId }, this.#readClient);
+ await releaseGroupLock.run({ groupId: this.groupId }, this.#readClient);
  }
  catch (err) {
  this.logger.error({ err }, 'error releasing read client');
@@ -437,7 +431,7 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  const signal = this.#endAc.signal;
  while (!signal.aborted) {
  try {
- await (0, promises_1.setTimeout)(sleepDurationMs, undefined, { signal });
+ await setTimeout(sleepDurationMs, undefined, { signal });
  await fn.call(this);
  }
  catch (err) {
@@ -449,7 +443,6 @@ class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
  }
  }
  }
- exports.PgmbClient = PgmbClient;
  function createListenerId() {
  return Math.random().toString(16).slice(2, 10);
  }
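The rewritten constructor above also shows the tunables that fall back to environment variables via `getEnvNumber()` whenever the corresponding constructor option is not passed. The names and built-in defaults below are taken verbatim from the constructor; the unit comments follow its `* 1000` and `* 60 * 1000` conversions.

```ts
// Environment fallbacks read by the PgmbClient constructor (value shown = default when unset).
process.env.PGMB_READ_EVENTS_INTERVAL_MS = '1000'  // readEventsIntervalMs
process.env.PGMB_READ_CHUNK_SIZE = '1000'          // readChunkSize
process.env.PGMB_MAX_ACTIVE_CHECKPOINTS = '10'     // maxActiveCheckpoints
process.env.PGMB_POLL_EVENTS_INTERVAL_MS = '1000'  // pollEventsIntervalMs
process.env.PGMB_SUBSCRIPTION_MAINTENANCE_S = '60' // subscriptionMaintenanceMs, in seconds
process.env.PGMB_TABLE_MAINTENANCE_M = '15'        // tableMaintainanceMs, in minutes
```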
package/lib/consts.d.ts ADDED
@@ -0,0 +1 @@
+ export declare const RETRY_EVENT = "pgmb-retry";
package/lib/consts.js CHANGED
@@ -1,4 +1 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.RETRY_EVENT = void 0;
- exports.RETRY_EVENT = 'pgmb-retry';
+ export const RETRY_EVENT = 'pgmb-retry';
package/lib/index.d.ts ADDED
@@ -0,0 +1,6 @@
+ export * from './client.ts';
+ export type * from './types.ts';
+ export type * from './query-types.ts';
+ export * from './utils.ts';
+ export * from './sse.ts';
+ export * from './queries.ts';
package/lib/index.js CHANGED
@@ -1,20 +1,4 @@
- "use strict";
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
- }) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
- }));
- var __exportStar = (this && this.__exportStar) || function(m, exports) {
- for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- __exportStar(require("./client.js"), exports);
- __exportStar(require("./utils.js"), exports);
- __exportStar(require("./sse.js"), exports);
- __exportStar(require("./queries.js"), exports);
+ export * from "./client.js";
+ export * from "./utils.js";
+ export * from "./sse.js";
+ export * from "./queries.js";