@haathie/pgmb 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,17 @@
1
/** Result of a SQL query: number of rows affected/returned plus the typed rows. */
export type QueryResult<T> = {
    rowCount: number;
    rows: T[];
};
/**
 * Minimal Postgres client surface the library depends on.
 * NOTE(review): shaped to be satisfied by `pg`'s Client/Pool — confirm against
 * the `pg` typings this package is tested with.
 */
export interface PgClient {
    /** Run a parameterised query and return typed rows. */
    query<T = any>(query: string, params?: unknown[]): Promise<QueryResult<T>>;
    /** Optional raw-script execution; result is ignored by callers. */
    exec?(query: string): Promise<unknown>;
}
/** A client checked out of a pool; must be released back when done. */
export interface PgReleasableClient extends PgClient {
    release: () => void;
}
/** Pool-shaped client: hands out dedicated connections and emits 'remove'. */
export interface PgPoolLike extends PgClient {
    /** Check out a dedicated connection from the pool. */
    connect: () => Promise<PgReleasableClient>;
    /** Listen for a client being removed from the pool. */
    on(ev: 'remove', handler: (cl: PgReleasableClient) => void): this;
    /** Remove a previously registered 'remove' listener. */
    off(ev: 'remove', handler: (cl: PgReleasableClient) => void): this;
}
/** Either a plain client or a pool. */
export type PgClientLike = PgClient | PgPoolLike;
@@ -0,0 +1,2 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
@@ -0,0 +1,11 @@
1
import type { IReadNextEventsResult } from './queries.ts';
import type { PgClientLike } from './query-types.ts';
import type { IEventData, IEventHandler, IReadEvent, IRetryHandlerOpts } from './types.ts';
/**
 * Wrap `handler` so that, on failure, the failed batch is re-scheduled for a
 * later retry according to the `retriesS` gaps (in seconds). Once the gaps
 * are exhausted the batch is dropped.
 */
export declare function createRetryHandler<T extends IEventData>({ retriesS }: IRetryHandlerOpts, handler: IEventHandler<T>): IEventHandler<T>;
/**
 * Group raw read-rows into per-subscription batches, expanding internal
 * retry-marker rows by re-fetching the original events from the DB.
 * @returns map of subscriptionId -> event batches, plus counts of retry
 *          batches and total retried item IDs
 */
export declare function normaliseRetryEventsInReadEventMap<T extends IEventData>(rows: IReadNextEventsResult[], client: PgClientLike): Promise<{
    map: {
        [sid: string]: IReadEvent<T>[];
    };
    retryEvents: number;
    retryItemCount: number;
}>;
@@ -0,0 +1,93 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.createRetryHandler = createRetryHandler;
4
+ exports.normaliseRetryEventsInReadEventMap = normaliseRetryEventsInReadEventMap;
5
+ const consts_ts_1 = require("./consts.js");
6
+ const queries_ts_1 = require("./queries.js");
7
/**
 * Wrap `handler` with retry scheduling: when it throws, the batch is queued
 * for redelivery after the gap configured for the current attempt number.
 * When no gap remains for the attempt, the batch is dropped (after logging).
 */
function createRetryHandler({ retriesS }, handler) {
    return async (ev, ctx) => {
        try {
            await handler(ev, ctx);
            return;
        }
        catch (err) {
            const { name, client, subscriptionId, logger } = ctx;
            // how many times this batch has already been attempted
            const attempt = ev.retry?.retryNumber ?? 0;
            const gapS = retriesS[attempt];
            logger.error({ err, nextRetryGapS: gapS }, 'error in event handler');
            if (!gapS) {
                // retry schedule exhausted (or not configured) -- give up
                return;
            }
            await queries_ts_1.scheduleEventRetry.run({
                subscriptionId,
                ids: ev.items.map(item => item.id),
                retryNumber: attempt + 1,
                delayInterval: `${gapS} seconds`,
                handlerName: name,
            }, client);
        }
    };
}
30
/**
 * Normalise a page of raw read-rows into per-subscription event batches.
 *
 * Rows whose topic is the internal RETRY_EVENT marker are pulled out of the
 * normal flow: their payload lists IDs of the original events, which are
 * re-fetched from the DB and delivered as a batch tagged with the retry info.
 * All remaining rows are delivered as one ordinary batch per subscription.
 *
 * @param rows read rows; each may belong to multiple subscriptions
 * @param client DB client used to load the original events being retried
 * @returns map of subscriptionId -> batches, plus counts of retry batches
 *          and total retried item IDs
 */
async function normaliseRetryEventsInReadEventMap(rows, client) {
    const map = {};
    const evsToPopulate = [];
    const idsToLoad = [];
    // reverse the map, do subscriptionId -> events
    const subToEventMap = {};
    for (const row of rows) {
        for (const subId of row.subscriptionIds) {
            subToEventMap[subId] ||= [];
            subToEventMap[subId].push(row);
        }
    }
    const subEventList = Object.entries(subToEventMap);
    for (const [subscriptionId, items] of subEventList) {
        // i only advances past non-retry rows; retry rows are spliced out,
        // which shifts the next row into position i
        for (let i = 0; i < items.length;) {
            const item = items[i];
            if (item.topic !== consts_ts_1.RETRY_EVENT) {
                i++;
                continue;
            }
            const retry = item.payload;
            // Remove the retry marker from the ordinary flow in all cases.
            // BUGFIX: previously a retry row with an empty `ids` list was
            // neither advanced past nor removed, looping here forever.
            items.splice(i, 1);
            if (!retry.ids?.length) {
                // marker carries nothing to replay -- just drop it
                continue;
            }
            idsToLoad.push(...retry.ids);
            map[subscriptionId] ||= [];
            const ev = { items: [], retry };
            map[subscriptionId].push(ev);
            evsToPopulate.push(ev);
        }
        if (!items.length) {
            continue;
        }
        map[subscriptionId] ||= [];
        map[subscriptionId].push({ items: items });
    }
    if (!idsToLoad.length) {
        return { map, retryEvents: 0, retryItemCount: 0 };
    }
    const fetchedEvents = await queries_ts_1.findEvents.run({ ids: idsToLoad }, client);
    // index fetched events by id for O(1) lookup below
    const fetchedEventMap = fetchedEvents.reduce((acc, ev) => {
        acc[ev.id] = ev;
        return acc;
    }, {});
    // populate the events
    for (const { items, retry } of evsToPopulate) {
        if (!retry) {
            continue;
        }
        for (const id of retry.ids) {
            const ev = fetchedEventMap[id];
            if (!ev) {
                // original event may have been pruned already; skip it
                continue;
            }
            items.push(ev);
        }
    }
    return {
        map,
        retryEvents: evsToPopulate.length,
        retryItemCount: idsToLoad.length,
    };
}
package/lib/sse.d.ts ADDED
@@ -0,0 +1,4 @@
1
import type { IncomingMessage, ServerResponse } from 'node:http';
import type { PgmbClient } from './client.ts';
import type { IEventData, SSERequestHandlerOpts } from './types.ts';
/**
 * Build an HTTP request handler that streams a subscription to the browser as
 * Server-Sent Events. Must be bound to a PgmbClient (`this`). Supports
 * replaying missed events via the `last-event-id` request header, bounded by
 * `maxReplayEvents`/`maxReplayIntervalMs`.
 */
export declare function createSSERequestHandler<T extends IEventData>(this: PgmbClient<T>, { getSubscriptionOpts, maxReplayEvents, maxReplayIntervalMs, jsonifier }: SSERequestHandlerOpts): (req: IncomingMessage, res: ServerResponse<IncomingMessage>) => Promise<void>;
package/lib/sse.js ADDED
@@ -0,0 +1,137 @@
1
+ "use strict";
2
// --- tsc-emitted CommonJS interop helpers (auto-generated; do not edit) ---
// They emulate `import * as ns` semantics over require()'d modules for both
// ESM-flagged and plain CJS dependencies.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
35
+ Object.defineProperty(exports, "__esModule", { value: true });
36
+ exports.createSSERequestHandler = createSSERequestHandler;
37
+ const node_assert_1 = __importStar(require("node:assert"));
38
+ const queries_ts_1 = require("./queries.js");
39
+ const utils_ts_1 = require("./utils.js");
40
/**
 * Build an HTTP handler that streams a pgmb subscription to the client as
 * Server-Sent Events (`text/event-stream`).
 *
 * Must be invoked with a PgmbClient as `this` (the handler is bound to it).
 * Replay of missed events is driven by the standard `last-event-id` header
 * and disabled entirely when `maxReplayEvents` is 0 (events are then sent
 * without `id:` fields, so clients cannot resume).
 */
function createSSERequestHandler({ getSubscriptionOpts, maxReplayEvents = 1000, maxReplayIntervalMs = 5 * 60 * 1000, jsonifier = JSON }) {
    const replayEnabled = maxReplayEvents > 0;
    return handleSSERequest.bind(this);
    async function handleSSERequest(req, res) {
        let sub;
        let eventsToReplay = [];
        try {
            (0, node_assert_1.default)(req.method?.toLowerCase() === 'get', 'SSE only supports GET requests');
            // validate last-event-id header
            const fromEventId = req.headers['last-event-id'];
            if (fromEventId) {
                (0, node_assert_1.default)(replayEnabled, 'replay disabled on server');
                // an array here would mean the header was sent multiple times
                (0, node_assert_1.default)(typeof fromEventId === 'string', 'invalid last-event-id header');
                const fromDt = (0, utils_ts_1.getDateFromMessageId)(fromEventId);
                (0, node_assert_1.default)(fromDt, 'invalid last-event-id header value');
                (0, node_assert_1.default)(fromDt.getTime() >= (Date.now() - maxReplayIntervalMs), 'last-event-id is too old to replay');
            }
            // create the live subscription before querying replay events, so no
            // event can fall between the replay window and the live stream
            sub = await this.registerFireAndForgetHandler({
                ...await getSubscriptionOpts(req),
                expiryInterval: `${maxReplayIntervalMs * 2} milliseconds`
            });
            if (fromEventId) {
                const fromDt = (0, utils_ts_1.getDateFromMessageId)(fromEventId);
                const subDt = (0, utils_ts_1.getCreateDateFromSubscriptionId)(sub.id);
                (0, node_assert_1.default)(subDt, 'internal: invalid subscription id format');
                (0, node_assert_1.default)(fromDt >= subDt, 'last-event-id is before subscription creation, cannot replay');
                eventsToReplay = await queries_ts_1.replayEvents.run({
                    groupId: this.groupId,
                    subscriptionId: sub.id,
                    fromEventId: fromEventId,
                    maxEvents: maxReplayEvents
                }, this.client);
                this.logger.trace({ subId: sub.id, count: eventsToReplay.length }, 'got events to replay');
            }
            // client may have disconnected during the async setup above
            if (res.writableEnded) {
                throw new Error('response already ended');
            }
        }
        catch (err) {
            this.logger
                .error({ subId: sub?.id, err }, 'error in sse subscription setup');
            // tear down the subscription if it was created
            await sub?.throw(err).catch(() => { });
            if (res.writableEnded) {
                return;
            }
            const message = err instanceof Error ? err.message : String(err);
            // if an assertion failed, we cannot connect with these parameters
            // so use 204 No Content
            const code = err instanceof node_assert_1.AssertionError ? 204 : 500;
            // `message` becomes the HTTP reason phrase -- no body is sent
            res
                .writeHead(code, message)
                .end();
            return;
        }
        // end the subscription when the client goes away
        res.once('close', () => {
            sub?.return();
        });
        res.once('error', err => {
            sub?.throw(err).catch(() => { });
        });
        res.writeHead(200, {
            'content-type': 'text/event-stream',
            'cache-control': 'no-cache',
            'connection': 'keep-alive',
            'transfer-encoding': 'chunked',
        });
        // push headers out immediately so the EventSource connects
        res.flushHeaders();
        try {
            // send replayed events first
            writeSseEvents(res, eventsToReplay);
            // then stream live batches until the subscription ends
            for await (const { items } of sub) {
                writeSseEvents(res, items);
            }
        }
        catch (err) {
            this.logger.error({ err }, 'error in sse subscription');
            if (res.writableEnded) {
                return;
            }
            // send error event
            const message = err instanceof Error ? err.message : String(err);
            const errData = jsonifier.stringify({ message });
            // `retry: 250` asks the client to reconnect after 250ms
            res.write(`event: error\ndata: ${errData}\nretry: 250\n\n`);
            res.end();
        }
    }
    // Serialise a list of events in SSE wire format onto the response.
    function writeSseEvents(res, items) {
        for (const { id, payload, topic } of items) {
            const data = jsonifier.stringify(payload);
            if (!replayEnabled) {
                // if replay is disabled, do not send an id field
                res.write(`event: ${topic}\ndata: ${data}\n\n`);
                continue;
            }
            res.write(`id: ${id}\nevent: ${topic}\ndata: ${data}\n\n`);
        }
    }
}
package/lib/types.d.ts ADDED
@@ -0,0 +1,202 @@
1
import type { IncomingMessage } from 'node:http';
import type { Logger } from 'pino';
import type { HeaderRecord } from 'undici-types/header.js';
import type { AbortableAsyncIterator } from './abortable-async-iterator.ts';
import type { IAssertSubscriptionParams } from './queries.ts';
import type { PgClientLike } from './query-types.ts';
/** A webhook request body plus the content-type it should be sent with. */
export type SerialisedEvent = {
    body: Buffer | string;
    contentType: string;
};
/** A webhook delivery target. */
export type WebhookInfo = {
    id: string;
    url: string | URL;
};
/** Resolves subscription IDs to the webhooks their events go to. */
export type GetWebhookInfoFn = (subscriptionIds: string[]) => Promise<{
    [id: string]: WebhookInfo[];
}> | {
    [id: string]: WebhookInfo[];
};
export type PgmbWebhookOpts = {
    /**
     * Maximum time to wait for webhook request to complete
     * @default 5 seconds
     */
    timeoutMs?: number;
    /** Extra headers sent with every webhook request */
    headers?: HeaderRecord;
    /**
     * Configure retry intervals in seconds for failed webhook requests.
     * If null, a failed handler will fail the event processor. Use carefully.
     */
    retryOpts?: IRetryHandlerOpts | null;
    /** Custom JSON implementation used to serialise payloads */
    jsonifier?: JSONifier;
    /** Override how an event batch becomes a request body */
    serialiseEvent?(ev: IReadEvent): SerialisedEvent;
};
/** Shape of every message published on the broker. */
export interface IEventData {
    topic: string;
    payload: unknown;
    metadata?: unknown;
}
/** A published event: its data plus the broker-assigned ID. */
export type IEvent<T extends IEventData> = (T & {
    id: string;
});
export type PGMBEventBatcherOpts<T extends IEventData> = {
    /**
     * Whether a particular published message should be logged.
     * By default, all messages are logged -- in case of certain
     * failures, the logs can be used to replay the messages.
     */
    shouldLog?(msg: T): boolean;
    /** Publish the batched messages; returns their assigned IDs */
    publish(...msgs: T[]): Promise<{
        id: string;
    }[]>;
    logger?: Logger;
    /**
     * Automatically flush after this interval.
     * Set to undefined or 0 to disable. Will need to
     * manually call `flush()` to publish messages.
     * @default undefined
     */
    flushIntervalMs?: number;
    /**
     * Max number of messages to send in a batch
     * @default 2500
     */
    maxBatchSize?: number;
};
export type Pgmb2ClientOpts = {
    client: PgClientLike;
    /**
     * Globally unique identifier for this Pgmb2Client instance. All subs
     * registered with this client will use this groupId.
     */
    groupId: string;
    logger?: Logger;
    /** How long to sleep between polls & read fn calls */
    sleepDurationMs?: number;
    /**
     * How often to mark subscriptions as active,
     * and remove expired ones.
     * @default 1 minute
     */
    subscriptionMaintenanceMs?: number;
    /** How often to maintain the events tables
     * (drop old partitions, create new ones, etc)
     * NOTE(review): "Maintainance" misspells "maintenance", but the property
     * name is public API -- renaming would be a breaking change.
     * @default 5 minutes
     */
    tableMaintainanceMs?: number;
    /** Max rows fetched per read call */
    readChunkSize?: number;
    /**
     * As we process in batches, a single handler taking time to finish
     * can lead to buildup of unprocessed checkpoints. To avoid this,
     * we keep moving forward while handlers run in the background, but
     * to avoid an unbounded number of items being backlogged, we limit
     * how much further we can go ahead from the earliest uncompleted checkpoint.
     * @default 10
     */
    maxActiveCheckpoints?: number;
    /**
     * Should this client poll for new events?
     * @default true
     */
    poll?: boolean;
    webhookHandlerOpts?: Partial<PgmbWebhookOpts>;
    getWebhookInfo?: GetWebhookInfoFn;
} & Pick<PGMBEventBatcherOpts<IEventData>, 'flushIntervalMs' | 'maxBatchSize' | 'shouldLog'>;
/** A batch of events as read; `retry` is set on replayed (retried) batches. */
export type IReadEvent<T extends IEventData = IEventData> = {
    items: IEvent<T>[];
    retry?: IRetryEventPayload;
};
export type RegisterSubscriptionParams = Omit<IAssertSubscriptionParams, 'groupId'>;
// NOTE(review): lower-camel-case name is inconsistent with the other
// PascalCase types, but it is exported public API, so it is left as-is.
export type registerReliableHandlerParams = RegisterSubscriptionParams & {
    /**
     * Name for the retry handler, used to ensure retries for a particular
     * handler are not mixed with another handler. This name need only be
     * unique for a particular subscription.
     */
    name?: string;
    retryOpts?: IRetryHandlerOpts;
};
export type CreateTopicalSubscriptionOpts<T extends IEventData> = {
    /**
     * The topics to subscribe to.
     */
    topics: T['topic'][];
    /**
     * To scale out processing, you can partition the subscriptions.
     * For example, with `current: 0, total: 3`, only messages
     * where `hashtext(e.id) % 3 == 0` will be received by this subscription.
     */
    partition?: {
        current: number;
        total: number;
    };
    /**
     * Add any additional params to filter by.
     * i.e "s.params @> jsonb_build_object(...additionalFilters)"
     * The value should be a valid SQL snippet.
     */
    additionalFilters?: Record<string, string>;
    /** JSON to populate params */
    additionalParams?: Record<string, any>;
    expiryInterval?: RegisterSubscriptionParams['expiryInterval'];
};
/** A live, abortable stream of event batches for an ephemeral subscription. */
export interface IEphemeralListener<T extends IEventData> extends AbortableAsyncIterator<IReadEvent<T>> {
    id: string;
}
/** Context passed to every event-handler invocation. */
export type IEventHandlerContext = {
    logger: Logger;
    client: PgClientLike;
    subscriptionId: string;
    /** registered name of the handler */
    name: string;
    /** handler-specific data, e.g. `{ url }` for the webhook handler */
    extra?: unknown;
};
export type IEventHandler<T extends IEventData = IEventData> = (item: IReadEvent<T>, ctx: IEventHandlerContext) => Promise<void>;
/** Payload of the internal retry-marker event (see retry-handler). */
export type IRetryEventPayload = {
    ids: string[];
    handlerName: string;
    retryNumber: number;
};
type SSESubscriptionOpts = Pick<RegisterSubscriptionParams, 'conditionsSql' | 'params'>;
export type SSERequestHandlerOpts = {
    /** Derive the subscription's filter & params from the incoming request. */
    getSubscriptionOpts(req: IncomingMessage): Promise<SSESubscriptionOpts> | SSESubscriptionOpts;
    /**
     * Maximum interval to replay events for an SSE subscription.
     * @default 5 minutes
     */
    maxReplayIntervalMs?: number;
    /**
     * Max number of events to replay for an SSE subscription.
     * Set to 0 to disable replaying events.
     * @default 1000
     */
    maxReplayEvents?: number;
    jsonifier?: JSONifier;
};
/** Retry gaps in seconds; the index is the retry attempt number. */
export type IRetryHandlerOpts = {
    retriesS: number[];
};
/** Minimal JSON codec interface (the global `JSON` satisfies it). */
export interface JSONifier {
    stringify(data: unknown): string;
    parse(data: string): unknown;
}
/** Events emitted for row mutations of a table `N` holding rows of type `T`. */
export type ITableMutationEventData<T, N extends string> = {
    topic: `${N}.insert`;
    payload: T;
    metadata: {};
} | {
    topic: `${N}.delete`;
    payload: T;
    metadata: {};
} | {
    topic: `${N}.update`;
    /**
     * The fields that were updated in the row
     */
    payload: Partial<T>;
    metadata: {
        old: T;
    };
};
export {};
package/lib/types.js ADDED
@@ -0,0 +1,2 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
package/lib/utils.d.ts ADDED
@@ -0,0 +1,15 @@
1
import type { CreateTopicalSubscriptionOpts, IEventData, RegisterSubscriptionParams } from './types';
/**
 * Extract the date from a message ID, same as the PG function
 * @returns the creation date, or undefined when the ID is malformed
 */
export declare function getDateFromMessageId(messageId: string): Date | undefined;
/**
 * Extract the date from a subscription ID
 * @returns the creation date, or undefined when the ID is malformed
 */
export declare function getCreateDateFromSubscriptionId(id: string): Date | undefined;
/**
 * Creates subscription params for a subscription that matches
 * 1 or more topics. Also supports partitioning the subscription
 * such that only a subset of messages are received.
 */
export declare function createTopicalSubscriptionParams<T extends IEventData>({ topics, partition, additionalFilters, additionalParams, ...rest }: CreateTopicalSubscriptionOpts<T>): RegisterSubscriptionParams;
package/lib/utils.js ADDED
@@ -0,0 +1,52 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.getDateFromMessageId = getDateFromMessageId;
7
+ exports.getCreateDateFromSubscriptionId = getCreateDateFromSubscriptionId;
8
+ exports.createTopicalSubscriptionParams = createTopicalSubscriptionParams;
9
+ const node_assert_1 = __importDefault(require("node:assert"));
10
/**
 * Extract the creation date encoded in a message ID, same as the PG function.
 * IDs carry a 'pm' prefix followed by 13 hex chars of microseconds.
 */
function getDateFromMessageId(messageId) {
    // every message ID starts with the 'pm' prefix
    if (!messageId.startsWith('pm')) {
        return undefined;
    }
    // chars 2-14 hold the creation time in microseconds, hex-encoded
    const hexTimestamp = messageId.slice(2, 15);
    const microseconds = Number.parseInt(hexTimestamp, 16);
    return Number.isNaN(microseconds)
        ? undefined
        // Date takes milliseconds
        : new Date(microseconds / 1000);
}
24
/**
 * Extract the creation date from a subscription ID ('su' prefix). The ID
 * shares the message-ID timestamp layout, so we swap the prefix and delegate.
 */
function getCreateDateFromSubscriptionId(id) {
    return id.startsWith('su')
        ? getDateFromMessageId(`pm${id.slice(2)}`)
        : undefined;
}
33
/**
 * Creates subscription params for a subscription matching 1+ topics, built
 * as a `s.params @> jsonb_build_object(...)` condition. Optionally
 * partitions the stream so only a subset of messages are received.
 */
function createTopicalSubscriptionParams({ topics, partition, additionalFilters = {}, additionalParams = {}, ...rest }) {
    (0, node_assert_1.default)(topics.length > 0, 'At least one topic must be provided');
    // start from caller-supplied SQL filter snippets;
    // default the topic match unless the caller overrode it
    const filterSnippets = { ...additionalFilters };
    filterSnippets['topics'] ||= 'ARRAY[e.topic]';
    if (partition) {
        // match only events whose hashed id falls in this partition
        filterSnippets['partition'] = `hashtext(e.id) % ${partition.total}`;
    }
    const pairs = [];
    for (const [key, sqlSnippet] of Object.entries(filterSnippets)) {
        pairs.push(`'${key}',${sqlSnippet}`);
    }
    return {
        conditionsSql: `s.params @> jsonb_build_object(${pairs.join(',')})`,
        params: { topics, partition: partition?.current, ...additionalParams },
        ...rest
    };
}
@@ -0,0 +1,6 @@
1
import type { IEventHandler, PgmbWebhookOpts } from './types.ts';
/**
 * Create a handler that sends events to a webhook URL via HTTP POST.
 * The destination URL is read at dispatch time from the handler context's
 * `extra.url` field, not from these options.
 * @param opts timeout, headers, retry schedule & serialisation overrides
 */
export declare function createWebhookHandler({ timeoutMs, headers, retryOpts, jsonifier, serialiseEvent }: Partial<PgmbWebhookOpts>): IEventHandler;
@@ -0,0 +1,68 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.createWebhookHandler = createWebhookHandler;
7
+ const node_assert_1 = __importDefault(require("node:assert"));
8
+ const node_crypto_1 = require("node:crypto");
9
+ const retry_handler_ts_1 = require("./retry-handler.js");
10
/**
 * Create a handler that POSTs each event batch to a webhook URL taken from
 * the handler context's `extra.url`. Unless retries are disabled
 * (retryOpts: null), the handler is wrapped so failed deliveries are
 * re-scheduled per the configured retry gaps.
 */
function createWebhookHandler({ timeoutMs = 5_000, headers, retryOpts = {
    // retry after 5 minutes, then after 30 minutes
    retriesS: [5 * 60, 30 * 60]
}, jsonifier = JSON, serialiseEvent = createSimpleSerialiser(jsonifier) }) {
    const postToWebhook = async (ev, { logger, extra }) => {
        const hasUrl = typeof extra === 'object'
            && extra !== null
            && 'url' in extra
            && (typeof extra.url === 'string' || extra.url instanceof URL);
        (0, node_assert_1.default)(hasUrl, 'webhook handler requires extra.url parameter');
        const { url } = extra;
        const { body, contentType } = serialiseEvent(ev);
        const response = await fetch(url, {
            method: 'POST',
            headers: {
                'content-type': contentType,
                'idempotency-key': getIdempotencyKeyHeader(ev),
                ...headers
            },
            body,
            // never follow redirects automatically
            redirect: 'manual',
            signal: AbortSignal.timeout(timeoutMs)
        });
        const { status, statusText, body: responseBody } = response;
        // don't care about response body
        await responseBody?.cancel().catch(() => { });
        // see: https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/Idempotency-Key
        if (status === 422) { // unprocessable request, do not retry
            logger.warn('webhook returned 422, dropping event');
            return;
        }
        if (status < 200 || status >= 300) {
            throw new Error(`Non-2xx response: ${status} (${statusText})`);
        }
    };
    return retryOpts
        ? (0, retry_handler_ts_1.createRetryHandler)(retryOpts, postToWebhook)
        : postToWebhook;
}
53
/**
 * Deterministic idempotency key for a batch: the first 16 hex chars of the
 * SHA-256 over the concatenated item IDs.
 */
function getIdempotencyKeyHeader(ev) {
    const digest = ev.items
        .reduce((hash, { id }) => hash.update(id), (0, node_crypto_1.createHash)('sha256'))
        .digest('hex');
    return digest.slice(0, 16);
}
60
/**
 * Default event serialiser: a JSON body containing only the id/payload/topic
 * of each item, marked as application/json.
 */
function createSimpleSerialiser(jsonifier) {
    return function serialise(ev) {
        const items = [];
        // strip each item down to the publicly relevant fields
        for (const { id, payload, topic } of ev.items) {
            items.push({ id, payload, topic });
        }
        return {
            body: jsonifier.stringify({ items }),
            contentType: 'application/json'
        };
    };
}
package/package.json ADDED
@@ -0,0 +1,52 @@
1
+ {
2
+ "name": "@haathie/pgmb",
3
+ "version": "0.2.0",
4
+ "description": "Postgres Message Broker",
5
+ "main": "lib/index.js",
6
+ "publishConfig": {
7
+ "registry": "https://registry.npmjs.org",
8
+ "access": "public"
9
+ },
10
+ "repository": "https://github.com/haathie/pgmb",
11
+ "scripts": {
12
+ "test": "TZ=UTC NODE_ENV=test node --env-file ./.env.test --test tests/*.test.ts",
13
+ "prepare": "tsc",
14
+ "build": "tsc",
15
+ "lint": "eslint ./ --ext .js,.ts,.jsx,.tsx",
16
+ "lint:fix": "eslint ./ --fix --ext .js,.ts,.jsx,.tsx",
17
+ "benchmark": "TZ=utc node --env-file ./.env.test src/benchmark/run.ts",
18
+ "gen:pgtyped": "pgtyped --config ./pgtyped.config.json"
19
+ },
20
+ "devDependencies": {
21
+ "@adiwajshing/eslint-config": "git+https://github.com/adiwajshing/eslint-config",
22
+ "@pgtyped/cli": "^2.4.3",
23
+ "@types/amqplib": "^0.10.0",
24
+ "@types/chance": "^1.1.6",
25
+ "@types/jest": "^27.0.0",
26
+ "@types/node": "^24.0.0",
27
+ "@types/pg": "^8.11.14",
28
+ "amqplib": "^0.10.7",
29
+ "chance": "^1.1.12",
30
+ "eslint": "^8.19.0",
31
+ "eventsource": "^4.1.0",
32
+ "pg": "^8.16.3",
33
+ "typescript": "^5.0.0"
34
+ },
35
+ "files": [
36
+ "lib",
37
+ "sql"
38
+ ],
39
+ "keywords": [
40
+ "postgres",
41
+ "message-broker",
42
+ "pgmb",
43
+ "pg-mb",
44
+ "postgres-message-broker"
45
+ ],
46
+ "author": "Adhiraj Singh",
47
+ "license": "MIT",
48
+ "dependencies": {
49
+ "@pgtyped/runtime": "^2.4.2",
50
+ "pino": "^9.0.0"
51
+ }
52
+ }