@auriclabs/events 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +39 -0
- package/README.md +228 -0
- package/dist/index.cjs +314 -0
- package/dist/index.d.cts +377 -0
- package/dist/index.d.cts.map +1 -0
- package/dist/index.d.mts +377 -0
- package/dist/index.d.mts.map +1 -0
- package/dist/index.mjs +304 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +59 -0
- package/src/context.test.ts +60 -0
- package/src/context.ts +19 -0
- package/src/create-dispatch.test.ts +233 -0
- package/src/create-dispatch.ts +71 -0
- package/src/create-event-listener.test.ts +246 -0
- package/src/create-event-listener.ts +54 -0
- package/src/dispatch-event.test.ts +226 -0
- package/src/dispatch-event.ts +34 -0
- package/src/dispatch-events.test.ts +72 -0
- package/src/dispatch-events.ts +18 -0
- package/src/event-service.test.ts +357 -0
- package/src/event-service.ts +228 -0
- package/src/index.ts +9 -0
- package/src/init.test.ts +55 -0
- package/src/init.ts +14 -0
- package/src/stream-handler.test.ts +309 -0
- package/src/stream-handler.ts +108 -0
- package/src/types.ts +65 -0
- package/tsconfig.json +17 -0
- package/vitest.config.ts +2 -0
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
|
|
2
|
+
> @auriclabs/events@0.1.0 build /home/runner/work/packages/packages/packages/events
|
|
3
|
+
> tsdown src/index.ts --format cjs,esm --dts --no-hash
|
|
4
|
+
|
|
5
|
+
[33m[tsdown] Node.js v20.20.1 is deprecated. Support will be removed in the next minor release. Please upgrade to Node.js 22.18.0 or later.[39m
|
|
6
|
+
[34mℹ[39m tsdown [2mv0.21.4[22m powered by rolldown [2mv1.0.0-rc.9[22m
|
|
7
|
+
[34mℹ[39m entry: [34msrc/index.ts[39m
|
|
8
|
+
[34mℹ[39m tsconfig: [34mtsconfig.json[39m
|
|
9
|
+
[34mℹ[39m Build start
|
|
10
|
+
[34mℹ[39m [33m[CJS][39m [2mdist/[22m[1mindex.cjs[22m [2m11.06 kB[22m [2m│ gzip: 3.31 kB[22m
|
|
11
|
+
[34mℹ[39m [33m[CJS][39m 1 files, total: 11.06 kB
|
|
12
|
+
[34mℹ[39m Hint: consider adding [34mdeps.onlyBundle[39m option to avoid unintended bundling of dependencies, or set [34mdeps.onlyBundle: false[39m to disable this hint.
|
|
13
|
+
See more at [4mhttps://tsdown.dev/options/dependencies#deps-onlybundle[24m
|
|
14
|
+
Detected dependencies in bundle:
|
|
15
|
+
- [34m@types/aws-lambda[39m
|
|
16
|
+
[34mℹ[39m Hint: consider adding [34mdeps.onlyBundle[39m option to avoid unintended bundling of dependencies, or set [34mdeps.onlyBundle: false[39m to disable this hint.
|
|
17
|
+
See more at [4mhttps://tsdown.dev/options/dependencies#deps-onlybundle[24m
|
|
18
|
+
Detected dependencies in bundle:
|
|
19
|
+
- [34m@types/aws-lambda[39m
|
|
20
|
+
[34mℹ[39m [34m[ESM][39m [2mdist/[22m[1mindex.mjs[22m [2m10.31 kB[22m [2m│ gzip: 3.19 kB[22m
|
|
21
|
+
[34mℹ[39m [34m[ESM][39m [2mdist/[22mindex.mjs.map [2m23.73 kB[22m [2m│ gzip: 6.75 kB[22m
|
|
22
|
+
[34mℹ[39m [34m[ESM][39m [2mdist/[22mindex.d.mts.map [2m 7.93 kB[22m [2m│ gzip: 2.65 kB[22m
|
|
23
|
+
[34mℹ[39m [34m[ESM][39m [2mdist/[22m[32m[1mindex.d.mts[22m[39m [2m15.74 kB[22m [2m│ gzip: 4.28 kB[22m
|
|
24
|
+
[34mℹ[39m [34m[ESM][39m 4 files, total: 57.71 kB
|
|
25
|
+
[33m[PLUGIN_TIMINGS] Warning:[0m Your build spent significant time in plugins. Here is a breakdown:
|
|
26
|
+
- tsdown:external (41%)
|
|
27
|
+
- rolldown-plugin-dts:generate (39%)
|
|
28
|
+
See https://rolldown.rs/options/checks#plugintimings for more details.
|
|
29
|
+
|
|
30
|
+
[32m✔[39m Build complete in [32m5273ms[39m
|
|
31
|
+
[34mℹ[39m [33m[CJS][39m [2mdist/[22mindex.d.cts.map [2m 7.93 kB[22m [2m│ gzip: 2.65 kB[22m
|
|
32
|
+
[34mℹ[39m [33m[CJS][39m [2mdist/[22m[32m[1mindex.d.cts[22m[39m [2m15.74 kB[22m [2m│ gzip: 4.28 kB[22m
|
|
33
|
+
[34mℹ[39m [33m[CJS][39m 2 files, total: 23.67 kB
|
|
34
|
+
[33m[PLUGIN_TIMINGS] Warning:[0m Your build spent significant time in plugins. Here is a breakdown:
|
|
35
|
+
- rolldown-plugin-dts:resolver (57%)
|
|
36
|
+
- rolldown-plugin-dts:generate (25%)
|
|
37
|
+
See https://rolldown.rs/options/checks#plugintimings for more details.
|
|
38
|
+
|
|
39
|
+
[32m✔[39m Build complete in [32m5275ms[39m
|
package/README.md
ADDED
|
@@ -0,0 +1,228 @@
|
|
|
1
|
+
# @auriclabs/events
|
|
2
|
+
|
|
3
|
+
Event sourcing runtime utilities for DynamoDB-backed event stores. Provides event dispatching, listeners, context management, and DynamoDB stream handling.
|
|
4
|
+
|
|
5
|
+
## Setup
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
pnpm add @auriclabs/events
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
### Peer dependencies
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
pnpm add @auriclabs/api-core @auriclabs/logger @auriclabs/pagination
|
|
15
|
+
pnpm add @aws-sdk/client-dynamodb @aws-sdk/lib-dynamodb @aws-sdk/client-eventbridge @aws-sdk/client-sqs @aws-sdk/util-dynamodb
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
## Initialization
|
|
19
|
+
|
|
20
|
+
Call `initEvents()` once per Lambda cold start before using any event functions:
|
|
21
|
+
|
|
22
|
+
```typescript
|
|
23
|
+
import { initEvents } from '@auriclabs/events';
|
|
24
|
+
import { Resource } from 'sst';
|
|
25
|
+
|
|
26
|
+
initEvents({ tableName: Resource.EventStoreTable.name });
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
## API Reference
|
|
30
|
+
|
|
31
|
+
### `initEvents(config: { tableName: string })`
|
|
32
|
+
|
|
33
|
+
Initializes the module-level event service with the DynamoDB table name. Must be called before any other function.
|
|
34
|
+
|
|
35
|
+
### `getEventService(): EventService`
|
|
36
|
+
|
|
37
|
+
Returns the initialized `EventService` instance. Throws if `initEvents()` hasn't been called.
|
|
38
|
+
|
|
39
|
+
### `EventService` interface
|
|
40
|
+
|
|
41
|
+
```typescript
|
|
42
|
+
interface EventService {
|
|
43
|
+
appendEvent<P>(args: AppendArgs<P>): Promise<AppendEventResult>;
|
|
44
|
+
getHead(aggregateType: string, aggregateId: string): Promise<AggregateHead | undefined>;
|
|
45
|
+
getEvent(aggregateType: string, aggregateId: string, version: number): Promise<EventRecord | undefined>;
|
|
46
|
+
listEvents(params: {
|
|
47
|
+
aggregateType: string;
|
|
48
|
+
aggregateId: string;
|
|
49
|
+
fromVersionExclusive?: number;
|
|
50
|
+
toVersionInclusive?: number;
|
|
51
|
+
limit?: number;
|
|
52
|
+
}): Promise<PaginationResponse<EventRecord>>;
|
|
53
|
+
}
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
Low-level DynamoDB operations with optimistic concurrency control and idempotent writes. Most consumers should use `createDispatch()` instead.
|
|
57
|
+
|
|
58
|
+
### `createDispatch(record, options?)`
|
|
59
|
+
|
|
60
|
+
Type-safe factory for creating domain-specific dispatch functions. This is the primary way to dispatch events.
|
|
61
|
+
|
|
62
|
+
```typescript
|
|
63
|
+
import { createDispatch } from '@auriclabs/events';
|
|
64
|
+
|
|
65
|
+
const dispatch = createDispatch(
|
|
66
|
+
{
|
|
67
|
+
funded: (walletId: string, amount: number) => ({
|
|
68
|
+
aggregateId: walletId,
|
|
69
|
+
eventType: 'Wallet.Funded',
|
|
70
|
+
payload: { amount },
|
|
71
|
+
}),
|
|
72
|
+
withdrawn: (walletId: string, amount: number) => ({
|
|
73
|
+
aggregateId: walletId,
|
|
74
|
+
eventType: 'Wallet.Withdrawn',
|
|
75
|
+
payload: { amount },
|
|
76
|
+
}),
|
|
77
|
+
},
|
|
78
|
+
{ aggregateType: 'wallet', source: 'billing' },
|
|
79
|
+
);
|
|
80
|
+
|
|
81
|
+
// Usage:
|
|
82
|
+
await dispatch.funded('wal_123', 5000);
|
|
83
|
+
await dispatch.withdrawn('wal_123', 1000);
|
|
84
|
+
```
|
|
85
|
+
|
|
86
|
+
Each dispatch function automatically:
|
|
87
|
+
- Generates a ULID-based `eventId`
|
|
88
|
+
- Reads the current aggregate HEAD for version tracking
|
|
89
|
+
- Retries on OCC conflicts
|
|
90
|
+
- Merges event context (correlation/causation/actor IDs)
|
|
91
|
+
|
|
92
|
+
### `dispatchEvent(event: DispatchEventArgs)`
|
|
93
|
+
|
|
94
|
+
Low-level single event dispatch. Auto-generates `eventId`, reads HEAD version, retries on OCC.
|
|
95
|
+
|
|
96
|
+
```typescript
|
|
97
|
+
import { dispatchEvent } from '@auriclabs/events';
|
|
98
|
+
|
|
99
|
+
await dispatchEvent({
|
|
100
|
+
aggregateType: 'wallet',
|
|
101
|
+
aggregateId: 'wal_123',
|
|
102
|
+
source: 'billing',
|
|
103
|
+
eventType: 'Wallet.Funded',
|
|
104
|
+
payload: { amount: 5000 },
|
|
105
|
+
});
|
|
106
|
+
```
|
|
107
|
+
|
|
108
|
+
### `dispatchEvents(events, options?)`
|
|
109
|
+
|
|
110
|
+
Dispatches multiple events. By default dispatches in parallel. Set `{ inOrder: true }` for sequential dispatch (required when events target the same aggregate).
|
|
111
|
+
|
|
112
|
+
```typescript
|
|
113
|
+
import { dispatchEvents } from '@auriclabs/events';
|
|
114
|
+
|
|
115
|
+
await dispatchEvents([event1, event2, event3], { inOrder: true });
|
|
116
|
+
```
|
|
117
|
+
|
|
118
|
+
### `createEventListener(handlers, options?)`
|
|
119
|
+
|
|
120
|
+
Creates an SQS batch handler for processing events. Returns a Lambda handler function.
|
|
121
|
+
|
|
122
|
+
```typescript
|
|
123
|
+
import { createEventListener } from '@auriclabs/events';
|
|
124
|
+
|
|
125
|
+
export const handler = createEventListener({
|
|
126
|
+
'Wallet.Funded': async (event) => {
|
|
127
|
+
// Process the event
|
|
128
|
+
console.log(event.payload, event.aggregateId);
|
|
129
|
+
},
|
|
130
|
+
// String values create aliases:
|
|
131
|
+
'Wallet.Credited': 'Wallet.Funded',
|
|
132
|
+
}, { debug: true });
|
|
133
|
+
```
|
|
134
|
+
|
|
135
|
+
The listener:
|
|
136
|
+
- Parses `EventRecord` from SQS message body
|
|
137
|
+
- Resolves string aliases to handler functions
|
|
138
|
+
- Sets event context (causation/correlation/actor IDs) before calling the handler
|
|
139
|
+
- Returns `SQSBatchResponse` with failures for partial batch retry
|
|
140
|
+
|
|
141
|
+
### `createStreamHandler(config)`
|
|
142
|
+
|
|
143
|
+
Creates a DynamoDB Streams handler that fans out events to SQS queues and EventBridge.
|
|
144
|
+
|
|
145
|
+
```typescript
|
|
146
|
+
import { createStreamHandler } from '@auriclabs/events';
|
|
147
|
+
import { Resource } from 'sst';
|
|
148
|
+
|
|
149
|
+
export const handler = createStreamHandler({
|
|
150
|
+
busName: Resource.EventBus.name,
|
|
151
|
+
queueUrls: JSON.parse(process.env.QUEUE_URL_LIST ?? '[]'),
|
|
152
|
+
});
|
|
153
|
+
```
|
|
154
|
+
|
|
155
|
+
### Context Management
|
|
156
|
+
|
|
157
|
+
```typescript
|
|
158
|
+
import { setEventContext, getEventContext, resetEventContext, appendEventContext } from '@auriclabs/events';
|
|
159
|
+
|
|
160
|
+
// Set context for current request
|
|
161
|
+
setEventContext({ correlationId: 'corr-123', actorId: 'user-456' });
|
|
162
|
+
|
|
163
|
+
// Read current context
|
|
164
|
+
const ctx = getEventContext();
|
|
165
|
+
|
|
166
|
+
// Append to existing context
|
|
167
|
+
appendEventContext({ causationId: 'evt-789' });
|
|
168
|
+
|
|
169
|
+
// Reset between requests
|
|
170
|
+
resetEventContext();
|
|
171
|
+
```
|
|
172
|
+
|
|
173
|
+
Context is automatically merged into dispatched events. `createEventListener` sets causation/correlation context from the incoming event.
|
|
174
|
+
|
|
175
|
+
## Types
|
|
176
|
+
|
|
177
|
+
### `EventRecord<P>`
|
|
178
|
+
|
|
179
|
+
```typescript
|
|
180
|
+
interface EventRecord<P = unknown> {
|
|
181
|
+
pk: AggregatePK; // "AGG#{type}#{id}"
|
|
182
|
+
sk: EventSK; // "EVT#000000042"
|
|
183
|
+
itemType: 'event';
|
|
184
|
+
source: Source;
|
|
185
|
+
aggregateId: AggregateId;
|
|
186
|
+
aggregateType: AggregateType;
|
|
187
|
+
version: number;
|
|
188
|
+
eventId: EventId;
|
|
189
|
+
eventType: string;
|
|
190
|
+
schemaVersion?: number;
|
|
191
|
+
occurredAt: string; // ISO timestamp
|
|
192
|
+
correlationId?: string;
|
|
193
|
+
causationId?: string;
|
|
194
|
+
actorId?: string;
|
|
195
|
+
payload: Readonly<P>;
|
|
196
|
+
}
|
|
197
|
+
```
|
|
198
|
+
|
|
199
|
+
### `AggregateHead`
|
|
200
|
+
|
|
201
|
+
```typescript
|
|
202
|
+
interface AggregateHead {
|
|
203
|
+
pk: AggregatePK;
|
|
204
|
+
sk: 'HEAD';
|
|
205
|
+
itemType: 'head';
|
|
206
|
+
aggregateId: AggregateId;
|
|
207
|
+
aggregateType: AggregateType;
|
|
208
|
+
currentVersion: number;
|
|
209
|
+
lastEventId?: EventId;
|
|
210
|
+
lastIdemKey?: string;
|
|
211
|
+
updatedAt: string;
|
|
212
|
+
}
|
|
213
|
+
```
|
|
214
|
+
|
|
215
|
+
### Branded types
|
|
216
|
+
|
|
217
|
+
`Source`, `AggregateId`, `AggregateType`, `EventId` — branded string types for type safety. Cast with `as Source`, etc.
|
|
218
|
+
|
|
219
|
+
## DynamoDB Table Schema
|
|
220
|
+
|
|
221
|
+
The event store uses a single-table design:
|
|
222
|
+
|
|
223
|
+
| Key | Format | Description |
|
|
224
|
+
|-----|--------|-------------|
|
|
225
|
+
| `pk` | `AGG#{aggregateType}#{aggregateId}` | Partition key |
|
|
226
|
+
| `sk` | `EVT#{version}` or `HEAD` | Sort key |
|
|
227
|
+
|
|
228
|
+
Events are immutable INSERT records. HEAD is updated atomically with each event via DynamoDB transactions.
|
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,314 @@
|
|
|
1
|
+
Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
|
|
2
|
+
let _auriclabs_pagination = require("@auriclabs/pagination");
|
|
3
|
+
let _aws_sdk_client_dynamodb = require("@aws-sdk/client-dynamodb");
|
|
4
|
+
let _aws_sdk_lib_dynamodb = require("@aws-sdk/lib-dynamodb");
|
|
5
|
+
let _auriclabs_api_core = require("@auriclabs/api-core");
|
|
6
|
+
let _auriclabs_logger = require("@auriclabs/logger");
|
|
7
|
+
let ulid = require("ulid");
|
|
8
|
+
let _aws_sdk_client_eventbridge = require("@aws-sdk/client-eventbridge");
|
|
9
|
+
let _aws_sdk_client_sqs = require("@aws-sdk/client-sqs");
|
|
10
|
+
let _aws_sdk_util_dynamodb = require("@aws-sdk/util-dynamodb");
|
|
11
|
+
let lodash_es = require("lodash-es");
|
|
12
|
+
//#region src/event-service.ts
|
|
13
|
+
// Shared DynamoDB Document client. removeUndefinedValues strips undefined
// attributes (e.g. optional correlationId/causationId/actorId) before
// marshalling so the Put never fails on undefined values.
const ddb = _aws_sdk_lib_dynamodb.DynamoDBDocumentClient.from(new _aws_sdk_client_dynamodb.DynamoDBClient(), { marshallOptions: { removeUndefinedValues: true } });
// Zero-pads a version number to a fixed width (default 9) so event sort keys
// ("EVT#000000042") order lexicographically the same as numerically.
const pad = (n, w = 9) => String(n).padStart(w, "0");
// Partition key for an aggregate: "AGG#{aggregateType}#{aggregateId}".
const pkFor = (aggregateType, aggregateId) => `AGG#${aggregateType}#${aggregateId}`;
/**
 * Creates the low-level event-store service bound to one DynamoDB table.
 * Provides append with optimistic concurrency control (OCC), HEAD/event
 * point reads, and version-ranged event listing.
 */
function createEventService(tableName) {
	const TABLE = tableName;
	return {
		/**
		 * Appends a single event at expectedVersion + 1.
		 *
		 * Executes one transaction with two items:
		 *  1. Update of the HEAD row — bumps currentVersion and records the
		 *     last event id / idempotency key. The condition allows:
		 *     first-ever write (no currentVersion yet and expected == 0),
		 *     an exact version match (OCC), or a replay carrying the same
		 *     idempotency key as the last write.
		 *  2. Put of the immutable event row — conditioned on the row not
		 *     existing yet, or already holding the same eventId (idempotent
		 *     retry of the same append).
		 *
		 * Throws a descriptive Error when the transaction's conditional
		 * check fails (OCC conflict); rethrows anything else unchanged.
		 */
		async appendEvent(args) {
			const { aggregateType, aggregateId, expectedVersion, idempotencyKey, eventId, eventType, occurredAt, source, payload, schemaVersion, correlationId, causationId, actorId } = args;
			const pk = pkFor(aggregateType, aggregateId);
			const nextVersion = expectedVersion + 1;
			const sk = `EVT#${pad(nextVersion)}`;
			const nowIso = (/* @__PURE__ */ new Date()).toISOString();
			// Caller-provided occurredAt wins; otherwise stamp "now".
			const eventOccurredAt = occurredAt ?? nowIso;
			try {
				await ddb.send(new _aws_sdk_lib_dynamodb.TransactWriteCommand({ TransactItems: [{ Update: {
					TableName: TABLE,
					Key: {
						pk,
						sk: "HEAD"
					},
					// if_not_exists keeps aggregateId/aggregateType stable once set.
					UpdateExpression: "SET currentVersion = :next, lastEventId = :eid, lastIdemKey = :idem, updatedAt = :now, aggregateId = if_not_exists(aggregateId, :aid), aggregateType = if_not_exists(aggregateType, :atype)",
					ConditionExpression: "(attribute_not_exists(currentVersion) AND :expected = :zero) OR currentVersion = :expected OR lastIdemKey = :idem",
					ExpressionAttributeValues: {
						":zero": 0,
						":expected": expectedVersion,
						":next": nextVersion,
						":eid": eventId,
						":idem": idempotencyKey,
						":now": nowIso,
						":aid": aggregateId,
						":atype": aggregateType
					}
				} }, { Put: {
					TableName: TABLE,
					Item: {
						pk,
						sk,
						itemType: "event",
						source,
						aggregateId,
						aggregateType,
						version: nextVersion,
						eventId,
						eventType,
						schemaVersion: schemaVersion ?? 1,
						occurredAt: eventOccurredAt,
						correlationId,
						causationId,
						actorId,
						payload
					},
					// Condition is evaluated against the item at this pk+sk:
					// allow a brand-new row, or an idempotent rewrite of the
					// same event (same eventId).
					ConditionExpression: "attribute_not_exists(pk) OR eventId = :eid",
					ExpressionAttributeValues: { ":eid": eventId }
				} }] }));
			} catch (err) {
				// NOTE(review): a TransactWriteCommand conflict typically surfaces as
				// TransactionCanceledException; this only maps the plain
				// ConditionalCheckFailedException — other failures bubble up as-is.
				if (err instanceof _aws_sdk_client_dynamodb.ConditionalCheckFailedException) throw new Error(`OCC failed for aggregate ${aggregateType}/${aggregateId}: expectedVersion=${expectedVersion}`);
				throw err;
			}
			return {
				pk,
				sk,
				version: nextVersion
			};
		},
		/** Point-read of the aggregate's HEAD row; undefined when absent. */
		async getHead(aggregateType, aggregateId) {
			const pk = pkFor(aggregateType, aggregateId);
			return (await ddb.send(new _aws_sdk_lib_dynamodb.GetCommand({
				TableName: TABLE,
				Key: {
					pk,
					sk: "HEAD"
				}
			}))).Item;
		},
		/** Point-read of one event row by version; undefined when absent. */
		async getEvent(aggregateType, aggregateId, version) {
			const pk = pkFor(aggregateType, aggregateId);
			const sk = `EVT#${pad(version)}`;
			return (await ddb.send(new _aws_sdk_lib_dynamodb.GetCommand({
				TableName: TABLE,
				Key: {
					pk,
					sk
				}
			}))).Item;
		},
		/**
		 * Lists events for an aggregate in ascending version order.
		 * fromVersionExclusive/toVersionInclusive bound the sk BETWEEN range;
		 * defaults cover versions 1..999999999 (the zero-padded key space).
		 * Returns a normalized pagination response whose cursor is the sk of
		 * DynamoDB's LastEvaluatedKey, if any.
		 */
		async listEvents(params) {
			const pk = pkFor(params.aggregateType, params.aggregateId);
			// BETWEEN is inclusive, so the exclusive lower bound is version + 1.
			const fromSk = params.fromVersionExclusive != null ? `EVT#${pad(params.fromVersionExclusive + 1)}` : "EVT#000000000";
			const toSk = params.toVersionInclusive != null ? `EVT#${pad(params.toVersionInclusive)}` : "EVT#999999999";
			const res = await ddb.send(new _aws_sdk_lib_dynamodb.QueryCommand({
				TableName: TABLE,
				KeyConditionExpression: "pk = :pk AND sk BETWEEN :from AND :to",
				ExpressionAttributeValues: {
					":pk": pk,
					":from": fromSk,
					":to": toSk
				},
				ScanIndexForward: true,
				Limit: params.limit
			}));
			return (0, _auriclabs_pagination.normalizePaginationResponse)({
				data: res.Items ?? [],
				cursor: res.LastEvaluatedKey && res.LastEvaluatedKey.sk
			});
		}
	};
}
|
|
120
|
+
//#endregion
|
|
121
|
+
//#region src/init.ts
|
|
122
|
+
// Module-level singleton holding the lazily-initialised event service.
let _eventService;

/**
 * Binds the module-level event service to the given DynamoDB table.
 * Call once per Lambda cold start, before any dispatch/listen helper runs.
 */
function initEvents(config) {
	const { tableName } = config;
	_eventService = createEventService(tableName);
}

/**
 * Returns the singleton event service.
 * Throws when initEvents() has not been called yet.
 */
function getEventService() {
	if (_eventService) {
		return _eventService;
	}
	throw new Error("Call initEvents() before using events");
}
|
|
130
|
+
//#endregion
|
|
131
|
+
//#region src/context.ts
|
|
132
|
+
// Mutable module-level store for the current event context
// (correlation/causation/actor ids merged into dispatched events).
let context = {};

/** Replaces the current context with a shallow copy of newContext. */
const setEventContext = (newContext) => {
	context = Object.assign({}, newContext);
};

/** Returns the current context object. */
const getEventContext = () => {
	return context;
};

/** Clears the context (call between requests to avoid cross-request leakage). */
const resetEventContext = () => {
	context = {};
};

/** Shallow-merges the given fields on top of the existing context. */
const appendEventContext = (event) => {
	context = Object.assign({}, context, event);
};
|
|
146
|
+
//#endregion
|
|
147
|
+
//#region src/dispatch-event.ts
|
|
148
|
+
/**
 * Dispatches a single event to the event store.
 *
 * Generates a ULID-based eventId when the caller did not supply one,
 * stamps occurredAt, defaults idempotencyKey to the eventId, then reads
 * the aggregate HEAD and appends at HEAD's currentVersion. The whole
 * read-then-append is wrapped in retry() so OCC conflicts are retried.
 * The ambient event context (correlation/causation/actor ids) is merged
 * underneath the explicit event fields.
 */
const dispatchEvent = async (event) => {
	const eventService = getEventService();
	const eventId = event.eventId ?? `evt-${(0, ulid.ulid)()}`;
	const occurredAt = (/* @__PURE__ */ new Date()).toISOString();
	const idempotencyKey = event.idempotencyKey ?? eventId;
	const attemptAppend = async () => {
		// Re-read HEAD on every attempt so a retry picks up the new version.
		const head = await eventService.getHead(event.aggregateType, event.aggregateId);
		_auriclabs_logger.logger.debug({ event }, "Dispatching event");
		const expectedVersion = head?.currentVersion ?? 0;
		return eventService.appendEvent({
			...getEventContext(),
			...event,
			eventId,
			expectedVersion,
			schemaVersion: 1,
			occurredAt,
			idempotencyKey
		});
	};
	return (0, _auriclabs_api_core.retry)(attemptAppend);
};
|
|
167
|
+
//#endregion
|
|
168
|
+
//#region src/dispatch-events.ts
|
|
169
|
+
const dispatchEvents = async (events, { inOrder = false } = {}) => {
|
|
170
|
+
if (inOrder) for (const event of events) await dispatchEvent(event);
|
|
171
|
+
else await Promise.all(events.map((event) => dispatchEvent(event)));
|
|
172
|
+
};
|
|
173
|
+
//#endregion
|
|
174
|
+
//#region src/create-dispatch.ts
|
|
175
|
+
function createDispatch(record, optionsOrFactory) {
|
|
176
|
+
return Object.fromEntries(Object.entries(record).map(([key, value]) => [key, (...args) => {
|
|
177
|
+
const eventId = `evt-${(0, ulid.ulid)()}`;
|
|
178
|
+
const result = value(...args);
|
|
179
|
+
const context = {
|
|
180
|
+
eventId,
|
|
181
|
+
...getEventContext()
|
|
182
|
+
};
|
|
183
|
+
const executeValueFn = (value) => typeof value === "function" ? value(context) : value;
|
|
184
|
+
const parseResponse = (result) => Object.fromEntries(Object.entries(result).map(([key, value]) => [key, executeValueFn(value)]));
|
|
185
|
+
Object.assign(context, typeof result === "function" ? result(context) : parseResponse(result));
|
|
186
|
+
Object.assign(context, typeof optionsOrFactory === "function" ? optionsOrFactory(context) : parseResponse(optionsOrFactory));
|
|
187
|
+
return dispatchEvent(context);
|
|
188
|
+
}]));
|
|
189
|
+
}
|
|
190
|
+
//#endregion
|
|
191
|
+
//#region src/create-event-listener.ts
|
|
192
|
+
const createEventListener = (eventHandlers, { debug = false } = {}) => async (sqsEvent) => {
|
|
193
|
+
const response = { batchItemFailures: [] };
|
|
194
|
+
let hasFailed = false;
|
|
195
|
+
for (const record of sqsEvent.Records) {
|
|
196
|
+
if (hasFailed) {
|
|
197
|
+
response.batchItemFailures.push({ itemIdentifier: record.messageId });
|
|
198
|
+
continue;
|
|
199
|
+
}
|
|
200
|
+
let event;
|
|
201
|
+
try {
|
|
202
|
+
event = JSON.parse(record.body);
|
|
203
|
+
if (debug) _auriclabs_logger.logger.debug({ event }, "Processing event");
|
|
204
|
+
let handler = eventHandlers[event.eventType];
|
|
205
|
+
while (typeof handler === "string") handler = eventHandlers[handler];
|
|
206
|
+
if (typeof handler === "function") {
|
|
207
|
+
setEventContext({
|
|
208
|
+
causationId: event.eventId,
|
|
209
|
+
correlationId: event.correlationId,
|
|
210
|
+
actorId: event.actorId
|
|
211
|
+
});
|
|
212
|
+
await handler(event);
|
|
213
|
+
}
|
|
214
|
+
} catch (error) {
|
|
215
|
+
hasFailed = true;
|
|
216
|
+
_auriclabs_logger.logger.error({
|
|
217
|
+
error,
|
|
218
|
+
event,
|
|
219
|
+
body: record.body
|
|
220
|
+
}, "Error processing event");
|
|
221
|
+
response.batchItemFailures.push({ itemIdentifier: record.messageId });
|
|
222
|
+
}
|
|
223
|
+
}
|
|
224
|
+
return response;
|
|
225
|
+
};
|
|
226
|
+
//#endregion
|
|
227
|
+
//#region src/stream-handler.ts
|
|
228
|
+
// SQS SendMessageBatch and EventBridge PutEvents both cap a request at 10 entries.
const BATCH_SIZE = 10;
/**
 * Creates a Lambda handler for DynamoDB stream events.
 * Processes INSERT events from the event store table and forwards them to SQS queues and EventBridge.
 */
function createStreamHandler(config) {
	// Clients are created once at factory time and reused across invocations.
	const sqsClient = new _aws_sdk_client_sqs.SQSClient();
	const eventBridge = new _aws_sdk_client_eventbridge.EventBridgeClient({});
	// Splits an array into consecutive chunks of at most chunkSize items.
	function chunkArray(array, chunkSize) {
		const chunks = [];
		for (let i = 0; i < array.length; i += chunkSize) chunks.push(array.slice(i, i + chunkSize));
		return chunks;
	}
	// Fans the same records out to every configured queue in parallel.
	async function sendToQueuesBatch(eventRecords) {
		await Promise.all(config.queueUrls.map((queue) => sendToQueueBatch(eventRecords, queue)));
	}
	// Sends records to one queue in batches; logs and rethrows on failure so
	// the stream batch is retried by Lambda.
	async function sendToQueueBatch(eventRecords, queue) {
		const batches = chunkArray(eventRecords, BATCH_SIZE);
		for (const batch of batches) try {
			const entries = batch.map((eventRecord, index) => ({
				Id: `${eventRecord.eventId}-${index}`,
				MessageBody: JSON.stringify(eventRecord),
				// MessageGroupId/MessageDeduplicationId imply the target queues are
				// FIFO — NOTE(review): confirm; standard queues reject these fields.
				MessageGroupId: eventRecord.aggregateId,
				MessageDeduplicationId: eventRecord.eventId
			}));
			await sqsClient.send(new _aws_sdk_client_sqs.SendMessageBatchCommand({
				QueueUrl: queue,
				Entries: entries
			}));
		} catch (error) {
			_auriclabs_logger.logger.error({
				error,
				batch,
				queue
			}, "Error sending batch to queue");
			throw error;
		}
	}
	// Publishes records to the EventBridge bus in batches; logs and rethrows.
	async function sendToBusBatch(eventRecords) {
		const batches = chunkArray(eventRecords, BATCH_SIZE);
		for (const batch of batches) try {
			const entries = batch.map((eventRecord) => {
				return {
					// Fall back to a kebab-cased prefix of the aggregate type when the
					// record carries no explicit source.
					Source: eventRecord.source ?? (0, lodash_es.kebabCase)(eventRecord.aggregateType.split(".")[0]),
					DetailType: eventRecord.eventType,
					Detail: JSON.stringify(eventRecord),
					EventBusName: config.busName
				};
			});
			await eventBridge.send(new _aws_sdk_client_eventbridge.PutEventsCommand({ Entries: entries }));
		} catch (error) {
			_auriclabs_logger.logger.error({
				error,
				batch
			}, "Error sending batch to bus");
			throw error;
		}
	}
	// The actual stream handler: keep only INSERTs (events are immutable),
	// unmarshall the DynamoDB image, drop non-event rows (e.g. HEAD updates),
	// then fan out to the bus and queues concurrently.
	return async (event) => {
		const eventRecords = event.Records.filter((record) => record.eventName === "INSERT").map((record) => {
			try {
				const data = record.dynamodb?.NewImage;
				return (0, _aws_sdk_util_dynamodb.unmarshall)(data);
			} catch (error) {
				// Best-effort: an unparseable record is logged and dropped rather
				// than failing the whole stream batch.
				_auriclabs_logger.logger.error({
					error,
					record
				}, "Error unmarshalling event record");
				return;
			}
		}).filter((eventRecord) => eventRecord?.itemType === "event");
		if (eventRecords.length > 0) await Promise.all([sendToBusBatch(eventRecords), sendToQueuesBatch(eventRecords)]);
	};
}
|
|
302
|
+
//#endregion
|
|
303
|
+
// Public CommonJS API surface (mirrors the re-exports in src/index.ts).
exports.appendEventContext = appendEventContext;
exports.createDispatch = createDispatch;
exports.createEventListener = createEventListener;
exports.createEventService = createEventService;
exports.createStreamHandler = createStreamHandler;
exports.dispatchEvent = dispatchEvent;
exports.dispatchEvents = dispatchEvents;
exports.getEventContext = getEventContext;
exports.getEventService = getEventService;
exports.initEvents = initEvents;
exports.resetEventContext = resetEventContext;
exports.setEventContext = setEventContext;
|