@haathie/pgmb 0.2.6 → 0.2.7
This diff shows the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/README.md +31 -0
- package/lib/client.js +9 -8
- package/lib/utils.js +11 -0
- package/package.json +12 -5
- package/src/abortable-async-iterator.ts +98 -0
- package/src/batcher.ts +90 -0
- package/src/client.ts +699 -0
- package/src/consts.ts +1 -0
- package/src/index.ts +6 -0
- package/src/queries.ts +570 -0
- package/src/query-types.ts +21 -0
- package/src/retry-handler.ts +125 -0
- package/src/sse.ts +148 -0
- package/src/types.ts +267 -0
- package/src/utils.ts +71 -0
- package/src/webhook-handler.ts +91 -0
- package/lib/abortable-async-iterator.d.ts +0 -14
- package/lib/batcher.d.ts +0 -12
- package/lib/client.d.ts +0 -76
- package/lib/consts.d.ts +0 -1
- package/lib/index.d.ts +0 -6
- package/lib/queries.d.ts +0 -453
- package/lib/query-types.d.ts +0 -17
- package/lib/retry-handler.d.ts +0 -11
- package/lib/sse.d.ts +0 -4
- package/lib/types.d.ts +0 -223
- package/lib/utils.d.ts +0 -15
- package/lib/webhook-handler.d.ts +0 -6
package/src/retry-handler.ts
ADDED
@@ -0,0 +1,125 @@
+import { RETRY_EVENT } from './consts.ts'
+import type { IReadNextEventsResult } from './queries.ts'
+import { findEvents, scheduleEventRetry } from './queries.ts'
+import type { PgClientLike } from './query-types.ts'
+import type { IEvent, IEventData, IEventHandler, IFindEventsFn, IReadEvent, IRetryEventPayload, IRetryHandlerOpts } from './types.ts'
+
+const defaultFindEvents = findEvents.run.bind(findEvents)
+
+export function createRetryHandler<T extends IEventData>(
+  { retriesS }: IRetryHandlerOpts,
+  handler: IEventHandler<T>,
+): IEventHandler<T> {
+  return async(ev, ctx) => {
+    const { name, client, subscriptionId, logger } = ctx
+
+    try {
+      await handler(ev, ctx)
+    } catch(err) {
+      const retryNumber = (ev.retry?.retryNumber ?? 0)
+      const nextRetryGapS = retriesS[retryNumber]
+      logger.error({ err, nextRetryGapS }, 'error in event handler')
+
+      // no more retry intervals configured -- drop the event
+      if(!nextRetryGapS) {
+        return
+      }
+
+      await scheduleEventRetry.run(
+        {
+          subscriptionId,
+          ids: ev.items.map(i => i.id),
+          retryNumber: retryNumber + 1,
+          delayInterval: `${nextRetryGapS} seconds`,
+          handlerName: name,
+        },
+        client
+      )
+    }
+  }
+}
+
+export async function normaliseRetryEventsInReadEventMap<T extends IEventData>(
+  rows: IReadNextEventsResult[],
+  client: PgClientLike,
+  findEvents: IFindEventsFn = defaultFindEvents,
+) {
+  const map: { [sid: string]: IReadEvent<T>[] } = {}
+  const evsToPopulate: IReadEvent<T>[] = []
+  const idsToLoad: string[] = []
+
+  // reverse the map, do subscriptionId -> events
+  const subToEventMap: { [sid: string]: IReadNextEventsResult[] } = {}
+  for(const row of rows) {
+    for(const subId of row.subscriptionIds) {
+      subToEventMap[subId] ||= []
+      subToEventMap[subId].push(row)
+    }
+  }
+
+  const subEventList = Object.entries(subToEventMap)
+  for(const [subscriptionId, items] of subEventList) {
+    // the index is advanced manually, as retry events
+    // are spliced out of the list
+    for(let i = 0;i < items.length;) {
+      const item = items[i]
+      if(item.topic !== RETRY_EVENT) {
+        i++
+        continue
+      }
+
+      const retry = item.payload as IRetryEventPayload
+      if(!retry.ids?.length) {
+        // malformed retry event: splice it out so the loop
+        // makes progress instead of spinning forever
+        items.splice(i, 1)
+        continue
+      }
+
+      idsToLoad.push(...retry.ids)
+
+      map[subscriptionId] ||= []
+
+      const ev: IReadEvent<T> = { items: [], retry }
+      map[subscriptionId].push(ev)
+      evsToPopulate.push(ev)
+      items.splice(i, 1)
+    }
+
+    if(!items.length) {
+      continue
+    }
+
+    map[subscriptionId] ||= []
+    map[subscriptionId].push({ items: items as unknown as IEvent<T>[] })
+  }
+
+  if(!idsToLoad.length) {
+    return { map, retryEvents: 0, retryItemCount: 0 }
+  }
+
+  const fetchedEvents = await findEvents({ ids: idsToLoad }, client)
+  const fetchedEventMap = fetchedEvents.reduce(
+    (map, ev) => {
+      map[ev.id] = ev as IEvent<T>
+      return map
+    },
+    {} as { [id: string]: IEvent<T> }
+  )
+
+  // populate the events
+  for(const { items, retry } of evsToPopulate) {
+    if(!retry) {
+      continue
+    }
+
+    for(const id of retry.ids) {
+      const ev = fetchedEventMap[id]
+      if(!ev) {
+        continue
+      }
+
+      items.push(ev)
+    }
+  }
+
+  return {
+    map,
+    retryEvents: evsToPopulate.length,
+    retryItemCount: idsToLoad.length,
+  }
+}
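The retry handler wraps any IEventHandler: when the wrapped handler throws, the whole batch is scheduled for redelivery after retriesS[retryNumber] seconds, and dropped once the configured intervals are exhausted (the `if(!nextRetryGapS) return` branch above). A minimal usage sketch; the event type, handler body and intervals here are illustrative assumptions, not part of the package:

import { createRetryHandler } from './retry-handler.ts'
import type { IEventHandler } from './types.ts'

type OrderEvent = { topic: 'order.created', payload: { orderId: string } }

const processOrder: IEventHandler<OrderEvent> = async(ev, ctx) => {
  for(const { payload } of ev.items) {
    ctx.logger.info({ orderId: payload.orderId }, 'processing order')
    // ...business logic that may throw...
  }
}

// a first failure retries after 60s, a second after 10 minutes,
// and a third drops the batch
const reliableProcessOrder = createRetryHandler(
  { retriesS: [60, 10 * 60] },
  processOrder
)

The wrapped handler would then be registered against a subscription (see registerReliableHandlerParams in types.ts for the options the client accepts).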
package/src/sse.ts
ADDED
@@ -0,0 +1,148 @@
+import assert, { AssertionError } from 'node:assert'
+import type { IncomingMessage, ServerResponse } from 'node:http'
+import type { PgmbClient } from './client.ts'
+import type { IReplayEventsResult } from './queries.ts'
+import { replayEvents } from './queries.ts'
+import type { IEphemeralListener, IEvent, IEventData, SSERequestHandlerOpts } from './types.ts'
+import { getCreateDateFromSubscriptionId, getDateFromMessageId } from './utils.ts'
+
+export function createSSERequestHandler<T extends IEventData>(
+  this: PgmbClient<T>,
+  {
+    getSubscriptionOpts,
+    maxReplayEvents = 1000,
+    maxReplayIntervalMs = 5 * 60 * 1000,
+    jsonifier = JSON
+  }: SSERequestHandlerOpts,
+) {
+  const replayEnabled = maxReplayEvents > 0
+  return handleSSERequest.bind(this)
+
+  async function handleSSERequest(
+    this: PgmbClient<T>,
+    req: IncomingMessage,
+    res: ServerResponse
+  ) {
+    let sub: IEphemeralListener<T> | undefined
+    let eventsToReplay: IReplayEventsResult[] = []
+
+    try {
+      assert(
+        req.method?.toLowerCase() === 'get',
+        'SSE only supports GET requests'
+      )
+      // validate last-event-id header
+      const fromEventId = req.headers['last-event-id']
+      if(fromEventId) {
+        assert(replayEnabled, 'replay disabled on server')
+        assert(typeof fromEventId === 'string', 'invalid last-event-id header')
+        const fromDt = getDateFromMessageId(fromEventId)
+        assert(fromDt, 'invalid last-event-id header value')
+        assert(
+          fromDt.getTime() >= (Date.now() - maxReplayIntervalMs),
+          'last-event-id is too old to replay'
+        )
+      }
+
+      sub = await this.registerFireAndForgetHandler({
+        ...await getSubscriptionOpts(req),
+        expiryInterval: `${maxReplayIntervalMs * 2} milliseconds`
+      })
+
+      if(fromEventId) {
+        const fromDt = getDateFromMessageId(fromEventId)!
+        const subDt = getCreateDateFromSubscriptionId(sub.id)
+        assert(subDt, 'internal: invalid subscription id format')
+        assert(
+          fromDt >= subDt,
+          'last-event-id is before subscription creation, cannot replay'
+        )
+
+        eventsToReplay = await replayEvents.run(
+          {
+            groupId: this.groupId,
+            subscriptionId: sub.id,
+            fromEventId: fromEventId,
+            maxEvents: maxReplayEvents
+          },
+          this.client
+        )
+
+        this.logger.trace(
+          { subId: sub.id, count: eventsToReplay.length },
+          'got events to replay'
+        )
+      }
+
+      if(res.writableEnded) {
+        throw new Error('response already ended')
+      }
+    } catch(err) {
+      this.logger
+        .error({ subId: sub?.id, err }, 'error in sse subscription setup')
+
+      await sub?.throw(err).catch(() => { })
+
+      if(res.writableEnded) {
+        return
+      }
+
+      const message = err instanceof Error ? err.message : String(err)
+      // if an assertion failed, we cannot connect with these parameters
+      // so use 204 No Content
+      const code = err instanceof AssertionError ? 204 : 500
+      res
+        .writeHead(code, message)
+        .end()
+      return
+    }
+
+    res.once('close', () => {
+      sub?.return()
+    })
+    res.once('error', err => {
+      sub?.throw(err).catch(() => {})
+    })
+
+    res.writeHead(200, {
+      'content-type': 'text/event-stream',
+      'cache-control': 'no-cache',
+      'connection': 'keep-alive',
+      'transfer-encoding': 'chunked',
+    })
+    res.flushHeaders()
+
+    try {
+      // send replayed events first
+      writeSseEvents(res, eventsToReplay as IEvent<T>[])
+
+      for await (const { items } of sub) {
+        writeSseEvents(res, items)
+      }
+    } catch(err) {
+      this.logger.error({ err }, 'error in sse subscription')
+      if(res.writableEnded) {
+        return
+      }
+
+      // send error event
+      const message = err instanceof Error ? err.message : String(err)
+      const errData = jsonifier.stringify({ message })
+      res.write(`event: error\ndata: ${errData}\nretry: 250\n\n`)
+      res.end()
+    }
+  }
+
+  function writeSseEvents(res: ServerResponse, items: IEvent<T>[]) {
+    for(const { id, payload, topic } of items) {
+      const data = jsonifier.stringify(payload)
+      if(!replayEnabled) {
+        // if replay is disabled, do not send an id field
+        res.write(`event: ${topic}\ndata: ${data}\n\n`)
+        continue
+      }
+
+      res.write(`id: ${id}\nevent: ${topic}\ndata: ${data}\n\n`)
+    }
+  }
+}
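createSSERequestHandler registers one ephemeral subscription per connection, optionally replays missed events via the standard Last-Event-ID header, and then streams events in text/event-stream format. A wiring sketch over node:http; the client construction is elided with a declare and the topic is an illustrative assumption:

import { createServer } from 'node:http'
import type { PgmbClient } from './client.ts'
import { createSSERequestHandler } from './sse.ts'
import { createTopicalSubscriptionParams } from './utils.ts'

// assume a constructed client; see client.ts for its options
declare const pgmb: PgmbClient<{ topic: string, payload: unknown }>

const handleSse = createSSERequestHandler.call(pgmb, {
  // one subscription per connected client, each listening
  // to the same illustrative topic
  getSubscriptionOpts: () => createTopicalSubscriptionParams({
    topics: ['order.created']
  }),
  maxReplayEvents: 500,
})

createServer(handleSse).listen(8080)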
package/src/types.ts
ADDED
@@ -0,0 +1,267 @@
+import type { IDatabaseConnection } from '@pgtyped/runtime'
+import type { IncomingMessage } from 'node:http'
+import type { Logger } from 'pino'
+import type { HeaderRecord } from 'undici-types/header.js'
+import type { AbortableAsyncIterator } from './abortable-async-iterator.ts'
+import type { IAssertSubscriptionParams, IFindEventsParams, IFindEventsResult, IReadNextEventsParams, IReadNextEventsResult } from './queries.ts'
+import type { PgClientLike } from './query-types.ts'
+
+export type ISplitFn<T extends IEventData>
+  = (event: IReadEvent<T>) => IReadEvent<T>[]
+
+export type SerialisedEvent = {
+  body: Buffer | string
+  contentType: string
+}
+
+export type WebhookInfo = {
+  id: string
+  url: string | URL
+}
+
+export type GetWebhookInfoFn = (
+  subscriptionIds: string[]
+) => Promise<{ [id: string]: WebhookInfo[] }> | { [id: string]: WebhookInfo[] }
+
+export type PgmbWebhookOpts<T extends IEventData> = {
+  /**
+   * Maximum time to wait for a webhook request to complete
+   * @default 5 seconds
+   */
+  timeoutMs?: number
+  headers?: HeaderRecord
+  /**
+   * Configure retry intervals in seconds for failed webhook requests.
+   * If null, a failed handler will fail the event processor. Use carefully.
+   */
+  retryOpts?: IRetryHandlerOpts | null
+  splitBy?: ISplitFn<T>
+  jsonifier?: JSONifier
+  serialiseEvent?(ev: IReadEvent, logger: Logger): SerialisedEvent
+}
+
+export interface IEventData {
+  topic: string
+  payload: unknown
+  metadata?: unknown
+}
+
+export type IEvent<T extends IEventData> = (T & { id: string })
+
+export type PGMBEventBatcherOpts<T extends IEventData> = {
+  /**
+   * Whether a particular published message should be logged.
+   * By default, all messages are logged -- in case of certain
+   * failures, the logs can be used to replay the messages.
+   */
+  shouldLog?(msg: T): boolean
+
+  publish(...msgs: T[]): Promise<{ id: string }[]>
+
+  logger?: Logger
+  /**
+   * Automatically flush after this interval.
+   * Set to undefined or 0 to disable; you will then need to
+   * call `flush()` manually to publish messages.
+   * @default undefined
+   */
+  flushIntervalMs?: number
+  /**
+   * Max number of messages to send in a batch
+   * @default 2500
+   */
+  maxBatchSize?: number
+}
+
+export type IReadNextEventsFn = (params: IReadNextEventsParams, db: IDatabaseConnection)
+  => Promise<IReadNextEventsResult[]>
+
+export type IFindEventsFn = (params: IFindEventsParams, db: IDatabaseConnection)
+  => Promise<IFindEventsResult[]>
+
+export type Pgmb2ClientOpts<T extends IEventData> = {
+  client: PgClientLike
+  /**
+   * Globally unique identifier for this Pgmb2Client instance. All subs
+   * registered with this client will use this groupId.
+   */
+  groupId: string
+  logger?: Logger
+  /**
+   * How long to sleep between polling for new events from
+   * the global events table.
+   * Only one global call is required across all clients.
+   * Set to 0 to disable polling.
+   *
+   * @default 1 second
+   */
+  pollEventsIntervalMs?: number
+  /**
+   * Group level configuration for how often to read new events
+   * relevant to the group's subscriptions.
+   * @default 1 second
+   */
+  readEventsIntervalMs?: number
+  /**
+   * How often to mark subscriptions as active,
+   * and remove expired ones.
+   * @default 1 minute
+   */
+  subscriptionMaintenanceMs?: number
+  /**
+   * How often to maintain the events tables
+   * (drop old partitions, create new ones, etc.)
+   * Set to 0 to disable automatic maintenance.
+   *
+   * @default 5 minutes
+   */
+  tableMaintainanceMs?: number
+
+  readChunkSize?: number
+  /**
+   * As we process in batches, a single slow handler can lead to
+   * a buildup of unprocessed checkpoints. To avoid this,
+   * we keep moving forward while handlers run in the background, but
+   * to avoid an unbounded number of items being backlogged, we limit
+   * how far ahead we can go from the earliest uncompleted checkpoint.
+   * @default 10
+   */
+  maxActiveCheckpoints?: number
+  webhookHandlerOpts?: Partial<PgmbWebhookOpts<T>>
+  getWebhookInfo?: GetWebhookInfoFn
+  /**
+   * Override the default readNextEvents implementation
+   */
+  readNextEvents?: IReadNextEventsFn
+  /**
+   * Override the default findEvents implementation
+   */
+  findEvents?: IFindEventsFn
+} & Pick<
+  PGMBEventBatcherOpts<IEventData>,
+  'flushIntervalMs' | 'maxBatchSize' | 'shouldLog'
+>
+
+export type IReadEvent<T extends IEventData = IEventData> = {
+  items: IEvent<T>[]
+  retry?: IRetryEventPayload
+}
+
+export type RegisterSubscriptionParams
+  = Omit<IAssertSubscriptionParams, 'groupId'>
+
+export type registerReliableHandlerParams<T extends IEventData = IEventData> = RegisterSubscriptionParams & {
+  /**
+   * Name for the retry handler, used to ensure retries for a particular
+   * handler are not mixed with another handler's. This name need only be
+   * unique within a particular subscription.
+   */
+  name?: string
+  retryOpts?: IRetryHandlerOpts
+  /**
+   * If provided, will split an incoming event into multiple events
+   * as determined by the function.
+   */
+  splitBy?: ISplitFn<T>
+}
+
+export type CreateTopicalSubscriptionOpts<T extends IEventData> = {
+  /**
+   * The topics to subscribe to.
+   */
+  topics: T['topic'][]
+  /**
+   * To scale out processing, you can partition the subscriptions.
+   * For example, with `current: 0, total: 3`, only messages
+   * where `hashtext(e.id) % 3 == 0` will be received by this subscription.
+   * This results in an approximately even split across all processors; the
+   * only caveat is that the number of event processors on this
+   * subscription must be known beforehand.
+   */
+  partition?: {
+    current: number
+    total: number
+  }
+  /**
+   * Add any additional params to filter by,
+   * i.e. "s.params @> jsonb_build_object(...additionalFilters)".
+   * Each value should be a valid SQL snippet.
+   */
+  additionalFilters?: Record<string, string>
+  /** JSON to populate params */
+  additionalParams?: Record<string, any>
+
+  expiryInterval?: RegisterSubscriptionParams['expiryInterval']
+}
+
+export interface IEphemeralListener<T extends IEventData>
+  extends AbortableAsyncIterator<IReadEvent<T>> {
+  id: string
+}
+
+export type IEventHandlerContext = {
+  logger: Logger
+  client: PgClientLike
+  subscriptionId: string
+  /** registered name of the handler */
+  name: string
+  extra?: unknown
+}
+
+export type IEventHandler<T extends IEventData = IEventData>
+  = (item: IReadEvent<T>, ctx: IEventHandlerContext) => Promise<void>
+
+export type IRetryEventPayload = {
+  ids: string[]
+  handlerName: string
+  retryNumber: number
+}
+
+type SSESubscriptionOpts
+  = Pick<RegisterSubscriptionParams, 'conditionsSql' | 'params'>
+
+export type SSERequestHandlerOpts = {
+  getSubscriptionOpts(req: IncomingMessage):
+    Promise<SSESubscriptionOpts> | SSESubscriptionOpts
+  /**
+   * Maximum interval to replay events for an SSE subscription.
+   * @default 5 minutes
+   */
+  maxReplayIntervalMs?: number
+  /**
+   * Max number of events to replay for an SSE subscription.
+   * Set to 0 to disable replaying events.
+   * @default 1000
+   */
+  maxReplayEvents?: number
+
+  jsonifier?: JSONifier
+}
+
+export type IRetryHandlerOpts = {
+  retriesS: number[]
+}
+
+export interface JSONifier {
+  stringify(data: unknown): string
+  parse(data: string): unknown
+}
+
+export type ITableMutationEventData<T, N extends string> = {
+  topic: `${N}.insert`
+  payload: T
+  metadata: {}
+} | {
+  topic: `${N}.delete`
+  payload: T
+  metadata: {}
+} | {
+  topic: `${N}.update`
+  /**
+   * The fields that were updated in the row
+   */
+  payload: Partial<T>
+  metadata: {
+    old: T
+  }
+}
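ITableMutationEventData is a discriminated union keyed on the `${N}.insert` / `${N}.delete` / `${N}.update` topic: inserts and deletes carry the full row, while updates carry only the changed fields plus the previous row under metadata.old. A sketch of how the union narrows, with a hypothetical User row type that is not part of the package:

import type { ITableMutationEventData } from './types.ts'

type User = { id: string, name: string }
type UserMutation = ITableMutationEventData<User, 'users'>

function describe(ev: UserMutation) {
  switch (ev.topic) {
    case 'users.insert':
    case 'users.delete':
      // the full row is available on insert & delete
      return ev.payload.name
    case 'users.update':
      // payload holds only the changed fields; the previous
      // row is available under metadata.old
      return ev.metadata.old.name
  }
}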
package/src/utils.ts
ADDED
@@ -0,0 +1,71 @@
+import assert from 'node:assert'
+import type { CreateTopicalSubscriptionOpts, IEventData, RegisterSubscriptionParams } from './types'
+
+/**
+ * Extract the date from a message ID, same as the PG function
+ */
+export function getDateFromMessageId(messageId: string) {
+  if(!messageId.startsWith('pm')) {
+    return undefined
+  }
+
+  const micros = parseInt(messageId.slice(2, 15), 16)
+  if(isNaN(micros)) {
+    return undefined
+  }
+
+  const date = new Date(micros / 1000)
+  return date
+}
+
+/**
+ * Extract the date from a subscription ID
+ */
+export function getCreateDateFromSubscriptionId(id: string) {
+  if(!id.startsWith('su')) {
+    return undefined
+  }
+
+  return getDateFromMessageId('pm' + id.slice(2))
+}
+
+/**
+ * Creates subscription params for a subscription that matches
+ * 1 or more topics. Also supports partitioning the subscription
+ * such that only a subset of messages are received.
+ */
+export function createTopicalSubscriptionParams<T extends IEventData>({
+  topics,
+  partition,
+  additionalFilters = {},
+  additionalParams = {},
+  ...rest
+}: CreateTopicalSubscriptionOpts<T>): RegisterSubscriptionParams {
+  assert(topics.length > 0, 'At least one topic must be provided')
+
+  const filters = { ...additionalFilters }
+  filters['topics'] ||= 'ARRAY[e.topic]'
+  if(partition) {
+    filters['partition'] = `hashtext(e.id) % ${partition.total}`
+  }
+
+  const strs = Object.entries(filters)
+    .map(([k, v]) => `'${k}',${v}`)
+  return {
+    conditionsSql: `s.params @> jsonb_build_object(${strs.join(',')})`,
+    params: { topics, partition: partition?.current, ...additionalParams },
+    ...rest
+  }
+}
+
+/**
+ * Get an environment variable as a number
+ */
+export function getEnvNumber(key: string, defaultValue = 0) {
+  const num = +(process.env[key] || defaultValue)
+  if(isNaN(num) || !isFinite(num)) {
+    return defaultValue
+  }
+
+  return num
+}
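getDateFromMessageId relies on the ID layout implied by the parsing above: a 'pm' prefix followed by at least 13 hex characters encoding microseconds since the Unix epoch, with any trailing characters ignored. A worked example with a hand-built ID (the trailing suffix is an illustrative stand-in for the random portion of a real ID):

import { getDateFromMessageId } from './utils.ts'

const micros = Date.UTC(2024, 0, 1) * 1000 // ms -> microseconds
const id = 'pm' + micros.toString(16).padStart(13, '0') + '0a1b2c'

// slice(2, 15) takes the 13 hex chars -> 1704067200000000 microseconds
console.log(getDateFromMessageId(id)) // 2024-01-01T00:00:00.000Z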
package/src/webhook-handler.ts
ADDED
@@ -0,0 +1,91 @@
+import assert from 'node:assert'
+import { createHash } from 'node:crypto'
+import { createRetryHandler } from './retry-handler.ts'
+import type { IEventData, IEventHandler, IReadEvent, JSONifier, PgmbWebhookOpts, SerialisedEvent } from './types.ts'
+
+/**
+ * Create a handler that sends events to a webhook URL via HTTP POST.
+ * The destination URL is read from the handler context's `extra.url`.
+ */
+export function createWebhookHandler<T extends IEventData>(
+  {
+    timeoutMs = 5_000,
+    headers,
+    retryOpts = {
+      // retry after 5 minutes, then after 30 minutes
+      retriesS: [5 * 60, 30 * 60]
+    },
+    jsonifier = JSON,
+    serialiseEvent = createSimpleSerialiser(jsonifier)
+  }: Partial<PgmbWebhookOpts<T>>
+) {
+  const handler: IEventHandler = async(ev, { logger, extra }) => {
+    assert(
+      typeof extra === 'object'
+      && extra !== null
+      && 'url' in extra
+      && (
+        typeof extra.url === 'string'
+        || extra.url instanceof URL
+      ),
+      'webhook handler requires extra.url parameter'
+    )
+    const { url } = extra
+    const idempotencyKey = getIdempotencyKeyHeader(ev)
+    logger = logger.child({ idempotencyKey })
+
+    const { body, contentType } = serialiseEvent(ev, logger)
+
+    const { status, statusText, body: res } = await fetch(url, {
+      method: 'POST',
+      headers: {
+        'content-type': contentType,
+        'idempotency-key': idempotencyKey,
+        ...headers
+      },
+      body,
+      redirect: 'manual',
+      signal: AbortSignal.timeout(timeoutMs)
+    })
+    // don't care about the response body
+    await res?.cancel().catch(() => { })
+    // see: https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/Idempotency-Key
+    if(status === 422) { // unprocessable request, do not retry
+      logger.warn('webhook returned 422, dropping event')
+      return
+    }
+
+    if(status < 200 || status >= 300) {
+      throw new Error(`Non-2xx response: ${status} (${statusText})`)
+    }
+
+    logger.info({ status }, 'webhook sent successfully')
+  }
+
+  if(!retryOpts) {
+    return handler
+  }
+
+  return createRetryHandler(retryOpts, handler)
+}
+
+function getIdempotencyKeyHeader(ev: IReadEvent) {
+  const hasher = createHash('sha256')
+  for(const item of ev.items) {
+    hasher.update(item.id)
+  }
+
+  return hasher.digest('hex').slice(0, 16)
+}
+
+function createSimpleSerialiser(
+  jsonifier: JSONifier
+): ((ev: IReadEvent) => SerialisedEvent) {
+  return ev => ({
+    body: jsonifier.stringify({
+      items: ev.items
+        .map(({ id, payload, topic }) => ({ id, payload, topic }))
+    }),
+    contentType: 'application/json'
+  })
+}
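Note that the webhook URL is not an argument to createWebhookHandler: it is read from ctx.extra.url at dispatch time (per the assert above), which lets one handler serve many per-subscription URLs supplied through GetWebhookInfoFn in types.ts. A usage sketch with a hand-built context; the URL, IDs and stub client here are illustrative, not real values:

import pino from 'pino'
import type { PgClientLike } from './query-types.ts'
import { createWebhookHandler } from './webhook-handler.ts'

const handler = createWebhookHandler({
  timeoutMs: 10_000,
  retryOpts: { retriesS: [60, 5 * 60] },
})

await handler(
  { items: [{ id: 'pm0123456789abcde', topic: 'order.created', payload: {} }] },
  {
    logger: pino(),
    client: {} as PgClientLike, // stand-in; the client supplies a real connection
    subscriptionId: 'su0123456789abcde',
    name: 'order-webhook',
    extra: { url: 'https://example.com/hooks/orders' },
  }
)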
package/lib/abortable-async-iterator.d.ts
REMOVED
@@ -1,14 +0,0 @@
-type AAResult<T> = IteratorResult<T>;
-export declare class AbortableAsyncIterator<T> implements AsyncIterableIterator<T> {
-    #private;
-    readonly signal: AbortSignal;
-    readonly onEnd: () => void;
-    ended: boolean;
-    constructor(signal: AbortSignal, onEnd?: () => void);
-    next(): Promise<AAResult<T>>;
-    enqueue(value: T): void;
-    throw(reason?: unknown): Promise<AAResult<T>>;
-    return(value?: any): Promise<AAResult<T>>;
-    [Symbol.asyncIterator](): this;
-}
-export {};