@haathie/pgmb 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/lib/abortable-async-iterator.d.ts +14 -0
- package/lib/abortable-async-iterator.js +86 -0
- package/lib/batcher.d.ts +12 -0
- package/lib/batcher.js +71 -0
- package/lib/client.d.ts +73 -0
- package/lib/client.js +432 -0
- package/lib/consts.d.ts +1 -0
- package/lib/consts.js +4 -0
- package/lib/index.d.ts +5 -0
- package/lib/index.js +19 -0
- package/lib/queries.d.ts +453 -0
- package/lib/queries.js +235 -0
- package/lib/query-types.d.ts +17 -0
- package/lib/query-types.js +2 -0
- package/lib/retry-handler.d.ts +11 -0
- package/lib/retry-handler.js +93 -0
- package/lib/sse.d.ts +4 -0
- package/lib/sse.js +137 -0
- package/lib/types.d.ts +202 -0
- package/lib/types.js +2 -0
- package/lib/utils.d.ts +15 -0
- package/lib/utils.js +52 -0
- package/lib/webhook-handler.d.ts +6 -0
- package/lib/webhook-handler.js +68 -0
- package/package.json +52 -0
- package/readme.md +493 -0
- package/sql/pgmb-0.1.12-0.2.0.sql +1018 -0
- package/sql/pgmb-0.1.12.sql +612 -0
- package/sql/pgmb-0.1.5-0.1.6.sql +256 -0
- package/sql/pgmb-0.1.6-0.1.12.sql +95 -0
- package/sql/pgmb.sql +1030 -0
- package/sql/queries.sql +154 -0
package/lib/client.js
ADDED
|
@@ -0,0 +1,432 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.PgmbClient = void 0;
|
|
7
|
+
const assert_1 = __importDefault(require("assert"));
|
|
8
|
+
const pino_1 = require("pino");
|
|
9
|
+
const promises_1 = require("timers/promises");
|
|
10
|
+
const abortable_async_iterator_ts_1 = require("./abortable-async-iterator.js");
|
|
11
|
+
const batcher_ts_1 = require("./batcher.js");
|
|
12
|
+
const queries_ts_1 = require("./queries.js");
|
|
13
|
+
const retry_handler_ts_1 = require("./retry-handler.js");
|
|
14
|
+
const webhook_handler_ts_1 = require("./webhook-handler.js");
|
|
15
|
+
/**
 * Postgres-backed message-bus client.
 *
 * Extends PGMBEventBatcher so batched events are flushed through `publish`.
 * Lifecycle: `init()` prepares DB state and starts the background loops
 * (read, optional poll, subscription & table maintenance); `end()` aborts
 * them, drains in-flight checkpoints and releases the dedicated read client.
 */
class PgmbClient extends batcher_ts_1.PGMBEventBatcher {
    client;
    logger;
    groupId;
    sleepDurationMs;
    readChunkSize;
    subscriptionMaintenanceMs;
    tableMaintenanceMs;
    maxActiveCheckpoints;
    getWebhookInfo;
    webhookHandler;
    // subscriptionId -> { values: { listenerName -> listener } }
    listeners = {};
    // dedicated pooled client that holds the group read lock (pool mode only)
    #readClient;
    #endAc = new AbortController();
    #shouldPoll;
    #readTask;
    #pollTask;
    #subMaintainTask;
    #tableMaintainTask;
    // cursor of the last chunk read by this process; null => read DB cursor
    #inMemoryCursor = null;
    // FIFO of read checkpoints whose handler tasks may still be in flight
    #activeCheckpoints = [];
    // NOTE: `tableMaintainanceMs` is misspelled, but it is part of the public
    // options interface and is kept for backwards compatibility.
    constructor({ client, groupId, logger = (0, pino_1.pino)(), sleepDurationMs = 750, readChunkSize = 1000, maxActiveCheckpoints = 10, poll, subscriptionMaintenanceMs = 60 * 1000, webhookHandlerOpts = {}, getWebhookInfo = () => ({}), tableMaintainanceMs = 5 * 60 * 1000, ...batcherOpts }) {
        super({
            ...batcherOpts,
            logger,
            // flush batched events through this client
            publish: (...e) => this.publish(e)
        });
        this.client = client;
        this.logger = logger;
        this.groupId = groupId;
        this.sleepDurationMs = sleepDurationMs;
        this.readChunkSize = readChunkSize;
        this.#shouldPoll = !!poll;
        this.subscriptionMaintenanceMs = subscriptionMaintenanceMs;
        this.maxActiveCheckpoints = maxActiveCheckpoints;
        this.webhookHandler = (0, webhook_handler_ts_1.createWebhookHandler)(webhookHandlerOpts);
        this.getWebhookInfo = getWebhookInfo;
        this.tableMaintenanceMs = tableMaintainanceMs;
    }
    /**
     * Prepares DB state (events table, group row, expired subscriptions)
     * and starts the background loops. Call again only after `end()`.
     */
    async init() {
        this.#endAc = new AbortController();
        if ('connect' in this.client) {
            // pool mode: watch for removal of our dedicated read client
            this.client.on('remove', this.#onPoolClientRemoved);
        }
        // maintain event table
        await queries_ts_1.maintainEventsTable.run(undefined, this.client);
        this.logger.debug('maintained events table');
        await queries_ts_1.assertGroup.run({ id: this.groupId }, this.client);
        this.logger.debug({ groupId: this.groupId }, 'asserted group exists');
        // clean up expired subscriptions on start
        const [{ deleted }] = await queries_ts_1.removeExpiredSubscriptions
            .run({ groupId: this.groupId, activeIds: [] }, this.client);
        this.logger.debug({ deleted }, 'removed expired subscriptions');
        this.#readTask
            = this.#startLoop(this.readChanges.bind(this), this.sleepDurationMs);
        if (this.#shouldPoll) {
            this.#pollTask = this.#startLoop(queries_ts_1.pollForEvents.run.bind(queries_ts_1.pollForEvents, undefined, this.client), this.sleepDurationMs);
        }
        if (this.subscriptionMaintenanceMs) {
            this.#subMaintainTask = this.#startLoop(this.#maintainSubscriptions, this.subscriptionMaintenanceMs);
        }
        if (this.tableMaintenanceMs) {
            this.#tableMaintainTask = this.#startLoop(queries_ts_1.maintainEventsTable.run
                .bind(queries_ts_1.maintainEventsTable, undefined, this.client), this.tableMaintenanceMs);
        }
    }
    /**
     * Stops the client: aborts all loops, waits for in-flight checkpoints
     * to drain, drops all listeners and releases the read client.
     */
    async end() {
        await super.end();
        this.#endAc.abort();
        // wait for outstanding handler tasks to drain their checkpoints
        while (this.#activeCheckpoints.length) {
            await (0, promises_1.setTimeout)(100);
        }
        for (const id in this.listeners) {
            delete this.listeners[id];
        }
        await Promise.all([
            this.#readTask,
            this.#pollTask,
            this.#subMaintainTask,
            this.#tableMaintainTask
        ]);
        await this.#unlockAndReleaseReadClient();
        if ('connect' in this.client) {
            // fix: detach the pool listener added in init(); otherwise repeated
            // init()/end() cycles accumulate 'remove' listeners on the pool
            this.client.off?.('remove', this.#onPoolClientRemoved);
        }
        this.#readTask = undefined;
        this.#pollTask = undefined;
        this.#subMaintainTask = undefined;
        // fix: also clear the table-maintenance task (was left dangling,
        // unlike its sibling tasks above)
        this.#tableMaintainTask = undefined;
        this.#activeCheckpoints = [];
        // fix: drop the in-memory cursor so a subsequent init() reads from the
        // DB cursor instead of a stale in-process one
        this.#inMemoryCursor = null;
    }
    /** Writes the given events to the events table in a single statement. */
    publish(events, client = this.client) {
        return queries_ts_1.writeEvents.run({
            topics: events.map(e => e.topic),
            payloads: events.map(e => e.payload),
            metadatas: events.map(e => e.metadata || null),
        }, client);
    }
    /** Creates or updates a subscription scoped to this client's group. */
    async assertSubscription(opts, client = this.client) {
        const [rslt] = await queries_ts_1.assertSubscription
            .run({ ...opts, groupId: this.groupId }, client);
        this.logger.debug({ ...opts, ...rslt }, 'asserted subscription');
        return rslt;
    }
    /**
     * Registers a fire-and-forget handler, returning an async iterator
     * that yields events as they arrive. The client does not wait for event
     * processing acknowledgements. Useful for cases where data is eventually
     * consistent, or when event delivery isn't critical
     * (eg. http SSE, websockets).
     */
    async registerFireAndForgetHandler(opts) {
        const { id: subId } = await this.assertSubscription(opts);
        return this.#listenForEvents(subId);
    }
    /**
     * Registers a reliable handler for the given subscription params.
     * If the handler throws an error, client will rollback to the last known
     * good cursor, and re-deliver events.
     * To avoid a full redelivery of a batch, a retry strategy can be provided
     * to retry failed events by the handler itself, allowing for delayed retries
     * with backoff, and without disrupting the overall event flow.
     */
    async registerReliableHandler({ retryOpts, name = createListenerId(), ...opts }, handler) {
        const { id: subId } = await this.assertSubscription(opts);
        if (retryOpts) {
            // wrap the handler so failed items are re-published as retry events
            handler = (0, retry_handler_ts_1.createRetryHandler)(retryOpts, handler);
        }
        const lts = (this.listeners[subId] ||= { values: {} });
        (0, assert_1.default)(!lts.values[name], `Handler with id ${name} already registered for subscription ${subId}.`
            + ' Cancel the existing one or use a different id.');
        this.listeners[subId].values[name] = { type: 'reliable', handler, queue: [] };
        return {
            subscriptionId: subId,
            cancel: () => this.#removeListener(subId, name)
        };
    }
    /** Deletes the subscription and tears down any attached streams. */
    async removeSubscription(subId) {
        await queries_ts_1.deleteSubscriptions.run({ ids: [subId] }, this.client);
        this.logger.debug({ subId }, 'deleted subscription');
        const existingSubs = this.listeners[subId]?.values;
        delete this.listeners[subId];
        if (!existingSubs) {
            return;
        }
        // end fire-and-forget streams; allSettled so one failing stream
        // doesn't prevent the rest from being closed
        await Promise.allSettled(Object.values(existingSubs).map(e => (e.type === 'fire-and-forget'
            && e.stream.throw(new Error('subscription removed')))));
    }
    /** Attaches an abortable event stream listener for the subscription. */
    #listenForEvents(subId) {
        const lid = createListenerId();
        const iterator = new abortable_async_iterator_ts_1.AbortableAsyncIterator(this.#endAc.signal, () => this.#removeListener(subId, lid));
        const stream = iterator;
        stream.id = subId;
        this.listeners[subId] ||= { values: {} };
        this.listeners[subId].values[lid] = { type: 'fire-and-forget', stream };
        return stream;
    }
    #removeListener(subId, lid) {
        const existingSubs = this.listeners[subId]?.values;
        delete existingSubs?.[lid];
        if (existingSubs && Object.keys(existingSubs).length) {
            return;
        }
        // no listeners left for this subscription -- drop the entry entirely
        delete this.listeners[subId];
        this.logger.debug({ subId }, 'removed last subscriber for sub');
    }
    /** Marks in-use subscriptions as active and prunes expired ones. */
    async #maintainSubscriptions() {
        const activeIds = Object.keys(this.listeners);
        await queries_ts_1.markSubscriptionsActive.run({ ids: activeIds }, this.client);
        this.logger.trace({ activeSubscriptions: activeIds.length }, 'marked subscriptions as active');
        const [{ deleted }] = await queries_ts_1.removeExpiredSubscriptions
            .run({ groupId: this.groupId, activeIds }, this.client);
        this.logger.trace({ deleted }, 'removed expired subscriptions');
    }
    /**
     * Reads the next chunk of events for the group and fans them out to
     * registered listeners, tracking completion via a checkpoint.
     * @returns the number of rows read
     */
    async readChanges() {
        // backpressure: don't read ahead of unfinished checkpoints
        if (this.#activeCheckpoints.length >= this.maxActiveCheckpoints) {
            return 0;
        }
        const now = Date.now();
        await this.#connectReadClient();
        const rows = await queries_ts_1.readNextEvents.run({
            groupId: this.groupId,
            cursor: this.#inMemoryCursor,
            chunkSize: this.readChunkSize
        }, this.#readClient || this.client)
            .catch(async (err) => {
            if (err instanceof Error && err.message.includes('connection error')) {
                await this.#unlockAndReleaseReadClient();
            }
            throw err;
        });
        if (!rows.length) {
            // if nothing is happening and there are no active checkpoints,
            // we can just let the read client go
            if (!this.#activeCheckpoints.length) {
                await this.#unlockAndReleaseReadClient();
            }
            return 0;
        }
        const uqSubIds = Array.from(new Set(rows.flatMap(r => r.subscriptionIds)));
        const webhookSubs = await this.getWebhookInfo(uqSubIds);
        let webhookCount = 0;
        for (const sid in webhookSubs) {
            const webhooks = webhookSubs[sid];
            const lts = (this.listeners[sid] ||= { values: {} });
            for (const wh of webhooks) {
                // add reliable listener for each webhook
                lts.values[wh.id] ||= {
                    type: 'reliable',
                    queue: [],
                    extra: wh,
                    removeOnEmpty: true,
                    handler: this.webhookHandler
                };
                webhookCount++;
            }
        }
        const { map: subToEventMap, retryEvents, retryItemCount } = await (0, retry_handler_ts_1.normaliseRetryEventsInReadEventMap)(rows, this.client);
        const subs = Object.entries(subToEventMap);
        const checkpoint = { activeTasks: 0, nextCursor: rows[0].nextCursor };
        for (const [subId, evs] of subs) {
            const listeners = this.listeners[subId]?.values;
            if (!listeners) {
                continue;
            }
            for (const ev of evs) {
                for (const lid in listeners) {
                    // retry events are targeted at one specific handler
                    if (ev.retry?.handlerName && lid !== ev.retry.handlerName) {
                        continue;
                    }
                    const lt = listeners[lid];
                    if (lt.type === 'fire-and-forget') {
                        lt.stream.enqueue(ev);
                        continue;
                    }
                    // deliberately not awaited: each listener drains its own
                    // queue; completion is tracked via checkpoint.activeTasks
                    this.#enqueueEventInReliableListener(subId, lid, ev, checkpoint);
                }
            }
        }
        this.#activeCheckpoints.push(checkpoint);
        this.#inMemoryCursor = checkpoint.nextCursor;
        this.logger.debug({
            rowsRead: rows.length,
            subscriptions: subs.length,
            durationMs: Date.now() - now,
            checkpoint,
            activeCheckpoints: this.#activeCheckpoints.length,
            webhookCount,
            retryEvents,
            retryItemCount
        }, 'read rows');
        // no reliable tasks were queued -- the cursor can advance immediately
        if (!checkpoint.activeTasks && this.#activeCheckpoints.length === 1) {
            await this.#updateCursorFromCompletedCheckpoints();
        }
        return rows.length;
    }
    /**
     * Runs the reliable listener's handler for each item in its queue,
     * one after the other, till the queue is empty or the client has ended.
     * Any errors are logged, swallowed, and processing continues.
     */
    async #enqueueEventInReliableListener(subId, lid, item, checkpoint) {
        const lt = this.listeners[subId]?.values?.[lid];
        // fix: use optional chaining in the message, so a missing listener
        // surfaces as the assertion error rather than a bare TypeError
        (0, assert_1.default)(lt?.type === 'reliable', 'invalid listener type: ' + lt?.type);
        const { handler, queue, removeOnEmpty, extra } = lt;
        queue.push({ item, checkpoint });
        checkpoint.activeTasks++;
        // a drain loop is already running for this queue
        if (queue.length > 1) {
            return;
        }
        while (queue.length) {
            const { item, checkpoint } = queue[0];
            if (checkpoint.cancelled) {
                queue.shift();
                continue;
            }
            const logger = this.logger.child({
                subId,
                items: item.items.map(i => i.id),
                extra,
                retryNumber: item.retry?.retryNumber,
            });
            logger.trace({
                cpActiveTasks: checkpoint.activeTasks,
                queue: queue.length,
            }, 'processing handler queue');
            try {
                await handler(item, {
                    client: this.client,
                    logger,
                    subscriptionId: subId,
                    extra,
                    name: lid,
                });
                checkpoint.activeTasks--;
                (0, assert_1.default)(checkpoint.activeTasks >= 0, 'internal: checkpoint.activeTasks < 0');
                if (!checkpoint.activeTasks) {
                    await this.#updateCursorFromCompletedCheckpoints();
                }
                logger.trace({
                    cpActiveTasks: checkpoint.activeTasks,
                    queue: queue.length,
                }, 'completed handler task');
            }
            catch (err) {
                logger.error({ err }, 'error in handler,'
                    + 'cancelling all active checkpoints'
                    + '. Restarting from last known good cursor.');
                this.#cancelAllActiveCheckpoints();
            }
            finally {
                queue.shift();
            }
        }
        if (removeOnEmpty) {
            return this.#removeListener(subId, lid);
        }
    }
    /**
     * Goes through all checkpoints, and sets the group cursor to the latest
     * completed checkpoint. If a checkpoint has active tasks, stops there.
     * This ensures that we don't accidentally move the cursor forward while
     * there are still pending tasks for earlier checkpoints.
     */
    async #updateCursorFromCompletedCheckpoints() {
        let latestMaxCursor;
        while (this.#activeCheckpoints.length) {
            const cp = this.#activeCheckpoints[0];
            if (cp.activeTasks > 0) {
                break;
            }
            latestMaxCursor = cp.nextCursor;
            this.#activeCheckpoints.shift();
        }
        if (!latestMaxCursor) {
            return;
        }
        const releaseLock = !this.#activeCheckpoints.length;
        await queries_ts_1.setGroupCursor.run({
            groupId: this.groupId,
            cursor: latestMaxCursor,
            releaseLock: releaseLock
        }, this.#readClient || this.client);
        this.logger.debug({
            cursor: latestMaxCursor,
            activeCheckpoints: this.#activeCheckpoints.length
        }, 'set cursor');
        // if there are no more active checkpoints,
        // clear in-memory cursor, so in case another process takes
        // over, if & when we start reading again, we read from the DB cursor
        if (releaseLock) {
            this.#inMemoryCursor = null;
            this.#releaseReadClient();
        }
    }
    /** Flags every pending checkpoint as cancelled and resets the cursor. */
    #cancelAllActiveCheckpoints() {
        for (const cp of this.#activeCheckpoints) {
            cp.cancelled = true;
        }
        this.#activeCheckpoints = [];
        this.#inMemoryCursor = null;
    }
    /** Releases the group lock (best-effort) then frees the read client. */
    async #unlockAndReleaseReadClient() {
        if (!this.#readClient) {
            return;
        }
        try {
            await queries_ts_1.releaseGroupLock
                .run({ groupId: this.groupId }, this.#readClient);
        }
        catch (err) {
            // best-effort: the lock expires server-side anyway
            this.logger.error({ err }, 'error releasing read client');
        }
        finally {
            this.#releaseReadClient();
        }
    }
    /**
     * Acquires a dedicated client from the pool for reads, if `client`
     * is a pool. Returns false when `client` is a plain connection.
     */
    async #connectReadClient() {
        if (!('connect' in this.client)) {
            return false;
        }
        if (this.#readClient) {
            return true;
        }
        this.#readClient = await this.client.connect();
        this.logger.trace('acquired dedicated read client');
        return true;
    }
    #onPoolClientRemoved = async (cl) => {
        if (cl !== this.#readClient) {
            return;
        }
        // the lock held by that client is gone; another process may pick up
        // the same events before our next read, hence possible duplicates
        this.logger
            .info('dedicated read client disconnected, may have dup event processing');
    };
    #releaseReadClient() {
        try {
            this.#readClient?.release();
        }
        catch { }
        this.#readClient = undefined;
    }
    /**
     * Repeatedly sleeps then runs `fn` until the end signal aborts.
     * AbortError ends the loop quietly; other errors are logged & swallowed.
     */
    async #startLoop(fn, sleepDurationMs) {
        const signal = this.#endAc.signal;
        while (!signal.aborted) {
            try {
                await (0, promises_1.setTimeout)(sleepDurationMs, undefined, { signal });
                await fn.call(this);
            }
            catch (err) {
                if (err instanceof Error && err.name === 'AbortError') {
                    return;
                }
                this.logger.error({ err, fn: fn.name }, 'error in task');
            }
        }
    }
}
|
|
429
|
+
exports.PgmbClient = PgmbClient;
|
|
430
|
+
/**
 * Generates a short random identifier (up to 8 lowercase hex characters)
 * used to key handlers registered against a subscription.
 */
function createListenerId() {
    const hex = Math.random().toString(16);
    return hex.substring(2, 10);
}
|
package/lib/consts.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/** Reserved topic name used internally for retry events — presumably consumed by the retry handler; confirm against retry-handler.js. */
export declare const RETRY_EVENT = "pgmb-retry";
|
package/lib/consts.js
ADDED
package/lib/index.d.ts
ADDED
package/lib/index.js
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
"use strict";
// TypeScript-emitted CommonJS interop helpers (equivalent to tslib's).
// __createBinding re-exports property `k` of module `m` as `k2` on `o`.
// When Object.create exists, it installs a getter so the re-export stays a
// live binding; otherwise it falls back to a plain (snapshot) assignment.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    // Re-wrap in a getter when the source is an ES-module namespace or the
    // property is writable/configurable, so later mutations remain visible.
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __exportStar copies every named (non-default) export of `m` onto
// `exports`, skipping names that are already present.
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
// Public package surface: the client, utility helpers, and SSE helpers.
__exportStar(require("./client.js"), exports);
__exportStar(require("./utils.js"), exports);
__exportStar(require("./sse.js"), exports);
|