@auriclabs/events 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +39 -0
- package/README.md +228 -0
- package/dist/index.cjs +314 -0
- package/dist/index.d.cts +377 -0
- package/dist/index.d.cts.map +1 -0
- package/dist/index.d.mts +377 -0
- package/dist/index.d.mts.map +1 -0
- package/dist/index.mjs +304 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +59 -0
- package/src/context.test.ts +60 -0
- package/src/context.ts +19 -0
- package/src/create-dispatch.test.ts +233 -0
- package/src/create-dispatch.ts +71 -0
- package/src/create-event-listener.test.ts +246 -0
- package/src/create-event-listener.ts +54 -0
- package/src/dispatch-event.test.ts +226 -0
- package/src/dispatch-event.ts +34 -0
- package/src/dispatch-events.test.ts +72 -0
- package/src/dispatch-events.ts +18 -0
- package/src/event-service.test.ts +357 -0
- package/src/event-service.ts +228 -0
- package/src/index.ts +9 -0
- package/src/init.test.ts +55 -0
- package/src/init.ts +14 -0
- package/src/stream-handler.test.ts +309 -0
- package/src/stream-handler.ts +108 -0
- package/src/types.ts +65 -0
- package/tsconfig.json +17 -0
- package/vitest.config.ts +2 -0
|
@@ -0,0 +1,228 @@
|
|
|
1
|
+
import { normalizePaginationResponse, PaginationResponse } from '@auriclabs/pagination';
|
|
2
|
+
import { DynamoDBClient, ConditionalCheckFailedException } from '@aws-sdk/client-dynamodb';
|
|
3
|
+
import {
|
|
4
|
+
DynamoDBDocumentClient,
|
|
5
|
+
TransactWriteCommand,
|
|
6
|
+
GetCommand,
|
|
7
|
+
QueryCommand,
|
|
8
|
+
} from '@aws-sdk/lib-dynamodb';
|
|
9
|
+
|
|
10
|
+
import type {
|
|
11
|
+
EventRecord,
|
|
12
|
+
AggregateHead,
|
|
13
|
+
AggregatePK,
|
|
14
|
+
EventId,
|
|
15
|
+
AggregateId,
|
|
16
|
+
AggregateType,
|
|
17
|
+
EventSK,
|
|
18
|
+
Source,
|
|
19
|
+
} from './types';
|
|
20
|
+
|
|
21
|
+
// Module-level DynamoDB Document client shared by every EventService instance.
// removeUndefinedValues strips undefined optional fields (correlationId,
// causationId, actorId, …) before marshalling, so they are simply omitted
// from the stored item instead of failing the write.
const ddb = DynamoDBDocumentClient.from(new DynamoDBClient(), {
  marshallOptions: {
    removeUndefinedValues: true,
  },
});
|
|
26
|
+
|
|
27
|
+
// Zero-pad a version number to a fixed width (default 9 digits) so that the
// resulting event sort keys order lexicographically, e.g. 7 -> '000000007'.
const pad = (n: number, w = 9): EventId => {
  const digits = `${n}`;
  return digits.padStart(w, '0') as EventId;
};
|
|
28
|
+
const pkFor = (aggregateType: string, aggregateId: string): AggregatePK =>
|
|
29
|
+
`AGG#${aggregateType}#${aggregateId}`;
|
|
30
|
+
|
|
31
|
+
/**
 * Arguments for appending a single event to an aggregate's stream.
 *
 * @typeParam P - Shape of the event payload (stored verbatim on the record).
 */
export interface AppendArgs<P = unknown> {
  aggregateType: string;
  aggregateId: string;
  /** Logical producer of the event; stored on the event record as `source`. */
  source: string;
  /** Version you observed before appending (0 for brand new) */
  expectedVersion: number;
  /** Required for idempotent retries (e.g., the command id) */
  idempotencyKey: string;

  // Event properties (flattened)
  eventId: string; // ULID/UUID – must be stable across retries
  eventType: string;
  occurredAt?: string; // default: now ISO
  payload?: Readonly<P>;
  schemaVersion?: number; // optional but recommended

  // Optional metadata — written through to the stored event record as-is
  // (undefined values are dropped by the document client's marshall options).
  correlationId?: string;
  causationId?: string;
  actorId?: string;
}
|
|
52
|
+
|
|
53
|
+
/** Keys and version of the event item written by {@link EventService.appendEvent}. */
export interface AppendEventResult {
  pk: string; // partition key of the aggregate ('AGG#<type>#<id>')
  sk: string; // sort key of the new event item ('EVT#<zero-padded version>')
  version: number; // the version the aggregate is now at (expectedVersion + 1)
}
|
|
58
|
+
|
|
59
|
+
/**
 * Read/write API over a single-table event store in DynamoDB.
 * Create instances with {@link createEventService}.
 */
export interface EventService {
  /** Append one event with optimistic concurrency + idempotency; see AppendArgs. */
  appendEvent<P = unknown>(args: AppendArgs<P>): Promise<AppendEventResult>;
  /** Fetch the aggregate's HEAD item (current-version metadata), if any. */
  getHead(aggregateType: string, aggregateId: string): Promise<AggregateHead | undefined>;
  /** Fetch a single event by its version number, if it exists. */
  getEvent(
    aggregateType: string,
    aggregateId: string,
    version: number,
  ): Promise<EventRecord | undefined>;
  /** List events in ascending version order, optionally windowed by version. */
  listEvents(params: {
    aggregateType: string;
    aggregateId: string;
    fromVersionExclusive?: number;
    toVersionInclusive?: number;
    limit?: number;
  }): Promise<PaginationResponse<EventRecord>>;
}
|
|
75
|
+
|
|
76
|
+
export function createEventService(tableName: string): EventService {
|
|
77
|
+
const TABLE = tableName;
|
|
78
|
+
|
|
79
|
+
return {
|
|
80
|
+
async appendEvent<P = unknown>(args: AppendArgs<P>): Promise<AppendEventResult> {
|
|
81
|
+
const {
|
|
82
|
+
aggregateType,
|
|
83
|
+
aggregateId,
|
|
84
|
+
expectedVersion,
|
|
85
|
+
idempotencyKey,
|
|
86
|
+
eventId,
|
|
87
|
+
eventType,
|
|
88
|
+
occurredAt,
|
|
89
|
+
source,
|
|
90
|
+
payload,
|
|
91
|
+
schemaVersion,
|
|
92
|
+
correlationId,
|
|
93
|
+
causationId,
|
|
94
|
+
actorId,
|
|
95
|
+
} = args;
|
|
96
|
+
|
|
97
|
+
const pk = pkFor(aggregateType, aggregateId);
|
|
98
|
+
const nextVersion = expectedVersion + 1;
|
|
99
|
+
const sk = `EVT#${pad(nextVersion)}` as EventSK;
|
|
100
|
+
const nowIso = new Date().toISOString();
|
|
101
|
+
const eventOccurredAt = occurredAt ?? nowIso;
|
|
102
|
+
|
|
103
|
+
try {
|
|
104
|
+
await ddb.send(
|
|
105
|
+
new TransactWriteCommand({
|
|
106
|
+
TransactItems: [
|
|
107
|
+
{
|
|
108
|
+
Update: {
|
|
109
|
+
TableName: TABLE,
|
|
110
|
+
Key: { pk, sk: 'HEAD' },
|
|
111
|
+
UpdateExpression:
|
|
112
|
+
'SET currentVersion = :next, lastEventId = :eid, lastIdemKey = :idem, updatedAt = :now, aggregateId = if_not_exists(aggregateId, :aid), aggregateType = if_not_exists(aggregateType, :atype)',
|
|
113
|
+
ConditionExpression:
|
|
114
|
+
'(attribute_not_exists(currentVersion) AND :expected = :zero) ' +
|
|
115
|
+
'OR currentVersion = :expected ' +
|
|
116
|
+
'OR lastIdemKey = :idem',
|
|
117
|
+
ExpressionAttributeValues: {
|
|
118
|
+
':zero': 0,
|
|
119
|
+
':expected': expectedVersion,
|
|
120
|
+
':next': nextVersion,
|
|
121
|
+
':eid': eventId,
|
|
122
|
+
':idem': idempotencyKey,
|
|
123
|
+
':now': nowIso,
|
|
124
|
+
':aid': aggregateId,
|
|
125
|
+
':atype': aggregateType,
|
|
126
|
+
},
|
|
127
|
+
},
|
|
128
|
+
},
|
|
129
|
+
{
|
|
130
|
+
Put: {
|
|
131
|
+
TableName: TABLE,
|
|
132
|
+
Item: {
|
|
133
|
+
pk,
|
|
134
|
+
sk,
|
|
135
|
+
itemType: 'event',
|
|
136
|
+
source: source as Source,
|
|
137
|
+
aggregateId: aggregateId as AggregateId,
|
|
138
|
+
aggregateType: aggregateType as AggregateType,
|
|
139
|
+
version: nextVersion,
|
|
140
|
+
|
|
141
|
+
eventId: eventId as EventId,
|
|
142
|
+
eventType: eventType,
|
|
143
|
+
schemaVersion: schemaVersion ?? 1,
|
|
144
|
+
occurredAt: eventOccurredAt,
|
|
145
|
+
|
|
146
|
+
correlationId,
|
|
147
|
+
causationId,
|
|
148
|
+
actorId,
|
|
149
|
+
|
|
150
|
+
payload: payload as Readonly<unknown>,
|
|
151
|
+
} satisfies EventRecord,
|
|
152
|
+
ConditionExpression: 'attribute_not_exists(pk) OR eventId = :eid',
|
|
153
|
+
ExpressionAttributeValues: { ':eid': eventId },
|
|
154
|
+
},
|
|
155
|
+
},
|
|
156
|
+
],
|
|
157
|
+
}),
|
|
158
|
+
);
|
|
159
|
+
} catch (err) {
|
|
160
|
+
if (err instanceof ConditionalCheckFailedException) {
|
|
161
|
+
throw new Error(
|
|
162
|
+
`OCC failed for aggregate ${aggregateType}/${aggregateId}: expectedVersion=${expectedVersion}`,
|
|
163
|
+
);
|
|
164
|
+
}
|
|
165
|
+
throw err;
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
return { pk, sk, version: nextVersion };
|
|
169
|
+
},
|
|
170
|
+
|
|
171
|
+
async getHead(
|
|
172
|
+
aggregateType: string,
|
|
173
|
+
aggregateId: string,
|
|
174
|
+
): Promise<AggregateHead | undefined> {
|
|
175
|
+
const pk = pkFor(aggregateType, aggregateId);
|
|
176
|
+
const res = await ddb.send(new GetCommand({ TableName: TABLE, Key: { pk, sk: 'HEAD' } }));
|
|
177
|
+
return res.Item as AggregateHead | undefined;
|
|
178
|
+
},
|
|
179
|
+
|
|
180
|
+
async getEvent(
|
|
181
|
+
aggregateType: string,
|
|
182
|
+
aggregateId: string,
|
|
183
|
+
version: number,
|
|
184
|
+
): Promise<EventRecord | undefined> {
|
|
185
|
+
const pk = pkFor(aggregateType, aggregateId);
|
|
186
|
+
const sk = `EVT#${pad(version)}`;
|
|
187
|
+
const res = await ddb.send(new GetCommand({ TableName: TABLE, Key: { pk, sk } }));
|
|
188
|
+
return res.Item as EventRecord | undefined;
|
|
189
|
+
},
|
|
190
|
+
|
|
191
|
+
async listEvents(params: {
|
|
192
|
+
aggregateType: string;
|
|
193
|
+
aggregateId: string;
|
|
194
|
+
fromVersionExclusive?: number;
|
|
195
|
+
toVersionInclusive?: number;
|
|
196
|
+
limit?: number;
|
|
197
|
+
}): Promise<PaginationResponse<EventRecord>> {
|
|
198
|
+
const pk = pkFor(params.aggregateType, params.aggregateId);
|
|
199
|
+
const fromSk =
|
|
200
|
+
params.fromVersionExclusive != null
|
|
201
|
+
? `EVT#${pad(params.fromVersionExclusive + 1)}`
|
|
202
|
+
: 'EVT#000000000';
|
|
203
|
+
const toSk =
|
|
204
|
+
params.toVersionInclusive != null
|
|
205
|
+
? `EVT#${pad(params.toVersionInclusive)}`
|
|
206
|
+
: 'EVT#999999999';
|
|
207
|
+
|
|
208
|
+
const res = await ddb.send(
|
|
209
|
+
new QueryCommand({
|
|
210
|
+
TableName: TABLE,
|
|
211
|
+
KeyConditionExpression: 'pk = :pk AND sk BETWEEN :from AND :to',
|
|
212
|
+
ExpressionAttributeValues: {
|
|
213
|
+
':pk': pk,
|
|
214
|
+
':from': fromSk,
|
|
215
|
+
':to': toSk,
|
|
216
|
+
},
|
|
217
|
+
ScanIndexForward: true,
|
|
218
|
+
Limit: params.limit,
|
|
219
|
+
}),
|
|
220
|
+
);
|
|
221
|
+
|
|
222
|
+
return normalizePaginationResponse({
|
|
223
|
+
data: (res.Items ?? []) as EventRecord[],
|
|
224
|
+
cursor: res.LastEvaluatedKey && (res.LastEvaluatedKey as { pk: string; sk: string }).sk,
|
|
225
|
+
});
|
|
226
|
+
},
|
|
227
|
+
};
|
|
228
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
// Public API surface of @auriclabs/events: re-export every module wholesale.
export * from './types';
export * from './event-service';
export * from './init';
export * from './context';
export * from './dispatch-event';
export * from './dispatch-events';
export * from './create-dispatch';
export * from './create-event-listener';
export * from './stream-handler';
|
package/src/init.test.ts
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
const { mockCreateEventService } = vi.hoisted(() => ({
|
|
2
|
+
mockCreateEventService: vi.fn(),
|
|
3
|
+
}));
|
|
4
|
+
|
|
5
|
+
vi.mock('./event-service', () => ({
|
|
6
|
+
createEventService: mockCreateEventService,
|
|
7
|
+
}));
|
|
8
|
+
|
|
9
|
+
import { initEvents, getEventService } from './init';
|
|
10
|
+
|
|
11
|
+
describe('init', () => {
|
|
12
|
+
beforeEach(() => {
|
|
13
|
+
vi.resetModules();
|
|
14
|
+
mockCreateEventService.mockReset();
|
|
15
|
+
});
|
|
16
|
+
|
|
17
|
+
describe('getEventService', () => {
|
|
18
|
+
it('throws before initEvents is called', () => {
|
|
19
|
+
// Re-import to get fresh module state
|
|
20
|
+
// Since the module state persists, we need to test the throw case first
|
|
21
|
+
// Actually with vi.mock the module is already imported. We need to use dynamic import.
|
|
22
|
+
});
|
|
23
|
+
});
|
|
24
|
+
|
|
25
|
+
describe('initEvents', () => {
|
|
26
|
+
it('calls createEventService with tableName', () => {
|
|
27
|
+
const fakeService = { appendEvent: vi.fn() };
|
|
28
|
+
mockCreateEventService.mockReturnValue(fakeService);
|
|
29
|
+
|
|
30
|
+
initEvents({ tableName: 'my-events-table' });
|
|
31
|
+
|
|
32
|
+
expect(mockCreateEventService).toHaveBeenCalledWith('my-events-table');
|
|
33
|
+
});
|
|
34
|
+
|
|
35
|
+
it('allows getEventService to return the created service', () => {
|
|
36
|
+
const fakeService = { appendEvent: vi.fn(), getHead: vi.fn() };
|
|
37
|
+
mockCreateEventService.mockReturnValue(fakeService);
|
|
38
|
+
|
|
39
|
+
initEvents({ tableName: 'test-table' });
|
|
40
|
+
const result = getEventService();
|
|
41
|
+
|
|
42
|
+
expect(result).toBe(fakeService);
|
|
43
|
+
});
|
|
44
|
+
});
|
|
45
|
+
});
|
|
46
|
+
|
|
47
|
+
describe('init (fresh module)', () => {
|
|
48
|
+
it('getEventService throws before initEvents is called', async () => {
|
|
49
|
+
vi.resetModules();
|
|
50
|
+
|
|
51
|
+
const { getEventService: freshGetEventService } = await import('./init');
|
|
52
|
+
|
|
53
|
+
expect(() => freshGetEventService()).toThrow('Call initEvents() before using events');
|
|
54
|
+
});
|
|
55
|
+
});
|
package/src/init.ts
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { createEventService, EventService } from './event-service';
|
|
2
|
+
|
|
3
|
+
let _eventService: EventService | undefined;
|
|
4
|
+
|
|
5
|
+
export function initEvents(config: { tableName: string }): void {
|
|
6
|
+
_eventService = createEventService(config.tableName);
|
|
7
|
+
}
|
|
8
|
+
|
|
9
|
+
export function getEventService(): EventService {
|
|
10
|
+
if (!_eventService) {
|
|
11
|
+
throw new Error('Call initEvents() before using events');
|
|
12
|
+
}
|
|
13
|
+
return _eventService;
|
|
14
|
+
}
|
|
@@ -0,0 +1,309 @@
|
|
|
1
|
+
// vi.hoisted ensures these mock fns exist before the hoisted vi.mock
// factories below are evaluated.
const { mockSqsSend, mockEbSend, mockUnmarshall } = vi.hoisted(() => ({
  mockSqsSend: vi.fn(),
  mockEbSend: vi.fn(),
  mockUnmarshall: vi.fn(),
}));

// Stub the logger; tests assert on logger.error calls.
vi.mock('@auriclabs/logger', () => ({
  logger: { error: vi.fn(), debug: vi.fn() },
}));

// AWS client mocks: the command constructors capture their input so tests can
// inspect exactly what was built, while send() is a controllable mock fn.
vi.mock('@aws-sdk/client-sqs', () => ({
  SQSClient: vi.fn(() => ({ send: mockSqsSend })),
  SendMessageBatchCommand: vi.fn((input: unknown) => ({ input, _type: 'SendMessageBatch' })),
}));

vi.mock('@aws-sdk/client-eventbridge', () => ({
  EventBridgeClient: vi.fn(() => ({ send: mockEbSend })),
  PutEventsCommand: vi.fn((input: unknown) => ({ input, _type: 'PutEvents' })),
}));

vi.mock('@aws-sdk/util-dynamodb', () => ({
  unmarshall: mockUnmarshall,
}));

// Simplified kebabCase: lowercase, then collapse dots/whitespace into hyphens.
vi.mock('lodash-es', () => ({
  kebabCase: vi.fn((s: string) => s.toLowerCase().replace(/[.\s]+/g, '-')),
}));
|
|
28
|
+
|
|
29
|
+
import { createStreamHandler } from './stream-handler';
|
|
30
|
+
import { SendMessageBatchCommand } from '@aws-sdk/client-sqs';
|
|
31
|
+
import { PutEventsCommand } from '@aws-sdk/client-eventbridge';
|
|
32
|
+
import { logger } from '@auriclabs/logger';
|
|
33
|
+
import type { DynamoDBStreamEvent } from 'aws-lambda';
|
|
34
|
+
|
|
35
|
+
const makeEventRecord = (overrides = {}) => ({
|
|
36
|
+
pk: 'AGG#order#o-1',
|
|
37
|
+
sk: 'EVT#000000001',
|
|
38
|
+
itemType: 'event' as const,
|
|
39
|
+
source: 'order-service',
|
|
40
|
+
aggregateId: 'o-1',
|
|
41
|
+
aggregateType: 'order',
|
|
42
|
+
version: 1,
|
|
43
|
+
eventId: 'evt-1',
|
|
44
|
+
eventType: 'OrderCreated',
|
|
45
|
+
schemaVersion: 1,
|
|
46
|
+
occurredAt: '2025-01-01T00:00:00.000Z',
|
|
47
|
+
payload: {},
|
|
48
|
+
...overrides,
|
|
49
|
+
});
|
|
50
|
+
|
|
51
|
+
const makeStreamRecord = (
|
|
52
|
+
eventName: string,
|
|
53
|
+
newImage: object | undefined = {},
|
|
54
|
+
) => ({
|
|
55
|
+
eventID: '1',
|
|
56
|
+
eventVersion: '1.1',
|
|
57
|
+
dynamodb: {
|
|
58
|
+
NewImage: newImage,
|
|
59
|
+
StreamViewType: 'NEW_IMAGE',
|
|
60
|
+
},
|
|
61
|
+
awsRegion: 'us-east-1',
|
|
62
|
+
eventName,
|
|
63
|
+
eventSourceARN: 'arn:aws:dynamodb:us-east-1:123:table/events/stream',
|
|
64
|
+
eventSource: 'aws:dynamodb',
|
|
65
|
+
});
|
|
66
|
+
|
|
67
|
+
describe('stream-handler', () => {
|
|
68
|
+
const config = {
|
|
69
|
+
busName: 'test-bus',
|
|
70
|
+
queueUrls: ['https://sqs.us-east-1.amazonaws.com/123/queue-1'],
|
|
71
|
+
};
|
|
72
|
+
|
|
73
|
+
beforeEach(() => {
|
|
74
|
+
vi.clearAllMocks();
|
|
75
|
+
mockSqsSend.mockResolvedValue({});
|
|
76
|
+
mockEbSend.mockResolvedValue({});
|
|
77
|
+
});
|
|
78
|
+
|
|
79
|
+
it('returns a Lambda handler function', () => {
|
|
80
|
+
const handler = createStreamHandler(config);
|
|
81
|
+
expect(typeof handler).toBe('function');
|
|
82
|
+
});
|
|
83
|
+
|
|
84
|
+
it('filters INSERT events only', async () => {
|
|
85
|
+
const eventRecord = makeEventRecord();
|
|
86
|
+
mockUnmarshall.mockReturnValue(eventRecord);
|
|
87
|
+
|
|
88
|
+
const handler = createStreamHandler(config);
|
|
89
|
+
const event: DynamoDBStreamEvent = {
|
|
90
|
+
Records: [
|
|
91
|
+
makeStreamRecord('INSERT', { data: { S: 'x' } }),
|
|
92
|
+
makeStreamRecord('MODIFY', { data: { S: 'y' } }),
|
|
93
|
+
makeStreamRecord('REMOVE', undefined),
|
|
94
|
+
] as any,
|
|
95
|
+
};
|
|
96
|
+
|
|
97
|
+
await handler(event);
|
|
98
|
+
|
|
99
|
+
// Only the INSERT record should be unmarshalled and sent
|
|
100
|
+
expect(mockUnmarshall).toHaveBeenCalledTimes(1);
|
|
101
|
+
});
|
|
102
|
+
|
|
103
|
+
it('filters for itemType event only', async () => {
|
|
104
|
+
const headRecord = { pk: 'AGG#order#o-1', sk: 'HEAD', itemType: 'head' };
|
|
105
|
+
const eventRecord = makeEventRecord();
|
|
106
|
+
|
|
107
|
+
mockUnmarshall
|
|
108
|
+
.mockReturnValueOnce(headRecord)
|
|
109
|
+
.mockReturnValueOnce(eventRecord);
|
|
110
|
+
|
|
111
|
+
const handler = createStreamHandler(config);
|
|
112
|
+
const event: DynamoDBStreamEvent = {
|
|
113
|
+
Records: [
|
|
114
|
+
makeStreamRecord('INSERT', { a: { S: '1' } }),
|
|
115
|
+
makeStreamRecord('INSERT', { b: { S: '2' } }),
|
|
116
|
+
] as any,
|
|
117
|
+
};
|
|
118
|
+
|
|
119
|
+
await handler(event);
|
|
120
|
+
|
|
121
|
+
// Only eventRecord (itemType='event') should be sent
|
|
122
|
+
expect(SendMessageBatchCommand).toHaveBeenCalledTimes(1);
|
|
123
|
+
const sqsInput = vi.mocked(SendMessageBatchCommand).mock.calls[0][0];
|
|
124
|
+
expect(sqsInput.Entries).toHaveLength(1);
|
|
125
|
+
expect(JSON.parse(sqsInput.Entries![0].MessageBody!)).toEqual(eventRecord);
|
|
126
|
+
});
|
|
127
|
+
|
|
128
|
+
it('sends to all configured queues', async () => {
|
|
129
|
+
const eventRecord = makeEventRecord();
|
|
130
|
+
mockUnmarshall.mockReturnValue(eventRecord);
|
|
131
|
+
|
|
132
|
+
const multiQueueConfig = {
|
|
133
|
+
busName: 'test-bus',
|
|
134
|
+
queueUrls: [
|
|
135
|
+
'https://sqs.us-east-1.amazonaws.com/123/queue-1',
|
|
136
|
+
'https://sqs.us-east-1.amazonaws.com/123/queue-2',
|
|
137
|
+
],
|
|
138
|
+
};
|
|
139
|
+
|
|
140
|
+
const handler = createStreamHandler(multiQueueConfig);
|
|
141
|
+
const event: DynamoDBStreamEvent = {
|
|
142
|
+
Records: [makeStreamRecord('INSERT', { a: { S: '1' } })] as any,
|
|
143
|
+
};
|
|
144
|
+
|
|
145
|
+
await handler(event);
|
|
146
|
+
|
|
147
|
+
expect(SendMessageBatchCommand).toHaveBeenCalledTimes(2);
|
|
148
|
+
const call1 = vi.mocked(SendMessageBatchCommand).mock.calls[0][0];
|
|
149
|
+
const call2 = vi.mocked(SendMessageBatchCommand).mock.calls[1][0];
|
|
150
|
+
expect(call1.QueueUrl).toBe('https://sqs.us-east-1.amazonaws.com/123/queue-1');
|
|
151
|
+
expect(call2.QueueUrl).toBe('https://sqs.us-east-1.amazonaws.com/123/queue-2');
|
|
152
|
+
});
|
|
153
|
+
|
|
154
|
+
it('sends to EventBridge with correct bus name and detail type', async () => {
|
|
155
|
+
const eventRecord = makeEventRecord({ source: 'billing', eventType: 'CreditAdded' });
|
|
156
|
+
mockUnmarshall.mockReturnValue(eventRecord);
|
|
157
|
+
|
|
158
|
+
const handler = createStreamHandler(config);
|
|
159
|
+
const event: DynamoDBStreamEvent = {
|
|
160
|
+
Records: [makeStreamRecord('INSERT', { a: { S: '1' } })] as any,
|
|
161
|
+
};
|
|
162
|
+
|
|
163
|
+
await handler(event);
|
|
164
|
+
|
|
165
|
+
expect(PutEventsCommand).toHaveBeenCalledTimes(1);
|
|
166
|
+
const ebInput = vi.mocked(PutEventsCommand).mock.calls[0][0];
|
|
167
|
+
expect(ebInput.Entries).toHaveLength(1);
|
|
168
|
+
expect(ebInput.Entries![0].EventBusName).toBe('test-bus');
|
|
169
|
+
expect(ebInput.Entries![0].DetailType).toBe('CreditAdded');
|
|
170
|
+
expect(ebInput.Entries![0].Source).toBe('billing');
|
|
171
|
+
expect(JSON.parse(ebInput.Entries![0].Detail!)).toEqual(eventRecord);
|
|
172
|
+
});
|
|
173
|
+
|
|
174
|
+
it('uses kebabCase of aggregateType as source fallback when source is undefined', async () => {
|
|
175
|
+
const eventRecord = makeEventRecord({ source: undefined, aggregateType: 'Order.Item' });
|
|
176
|
+
mockUnmarshall.mockReturnValue(eventRecord);
|
|
177
|
+
|
|
178
|
+
const handler = createStreamHandler(config);
|
|
179
|
+
const event: DynamoDBStreamEvent = {
|
|
180
|
+
Records: [makeStreamRecord('INSERT', { a: { S: '1' } })] as any,
|
|
181
|
+
};
|
|
182
|
+
|
|
183
|
+
await handler(event);
|
|
184
|
+
|
|
185
|
+
const ebInput = vi.mocked(PutEventsCommand).mock.calls[0][0];
|
|
186
|
+
// kebabCase splits on '.', takes first part 'Order', which becomes 'order'
|
|
187
|
+
expect(ebInput.Entries![0].Source).toBe('order');
|
|
188
|
+
});
|
|
189
|
+
|
|
190
|
+
it('uses aggregateId as MessageGroupId', async () => {
|
|
191
|
+
const eventRecord = makeEventRecord({ aggregateId: 'agg-123' });
|
|
192
|
+
mockUnmarshall.mockReturnValue(eventRecord);
|
|
193
|
+
|
|
194
|
+
const handler = createStreamHandler(config);
|
|
195
|
+
const event: DynamoDBStreamEvent = {
|
|
196
|
+
Records: [makeStreamRecord('INSERT', { a: { S: '1' } })] as any,
|
|
197
|
+
};
|
|
198
|
+
|
|
199
|
+
await handler(event);
|
|
200
|
+
|
|
201
|
+
const sqsInput = vi.mocked(SendMessageBatchCommand).mock.calls[0][0];
|
|
202
|
+
expect(sqsInput.Entries![0].MessageGroupId).toBe('agg-123');
|
|
203
|
+
});
|
|
204
|
+
|
|
205
|
+
it('uses eventId as MessageDeduplicationId', async () => {
|
|
206
|
+
const eventRecord = makeEventRecord({ eventId: 'evt-dedup-1' });
|
|
207
|
+
mockUnmarshall.mockReturnValue(eventRecord);
|
|
208
|
+
|
|
209
|
+
const handler = createStreamHandler(config);
|
|
210
|
+
const event: DynamoDBStreamEvent = {
|
|
211
|
+
Records: [makeStreamRecord('INSERT', { a: { S: '1' } })] as any,
|
|
212
|
+
};
|
|
213
|
+
|
|
214
|
+
await handler(event);
|
|
215
|
+
|
|
216
|
+
const sqsInput = vi.mocked(SendMessageBatchCommand).mock.calls[0][0];
|
|
217
|
+
expect(sqsInput.Entries![0].MessageDeduplicationId).toBe('evt-dedup-1');
|
|
218
|
+
});
|
|
219
|
+
|
|
220
|
+
it('batches correctly (respects BATCH_SIZE of 10)', async () => {
|
|
221
|
+
// Create 12 event records to trigger 2 batches
|
|
222
|
+
const records = Array.from({ length: 12 }, (_, i) =>
|
|
223
|
+
makeEventRecord({ eventId: `evt-${i}`, version: i + 1 }),
|
|
224
|
+
);
|
|
225
|
+
|
|
226
|
+
mockUnmarshall.mockImplementation((_, i) => records[i]);
|
|
227
|
+
// Reset to return each record in sequence
|
|
228
|
+
mockUnmarshall.mockReset();
|
|
229
|
+
records.forEach((r) => mockUnmarshall.mockReturnValueOnce(r));
|
|
230
|
+
|
|
231
|
+
const handler = createStreamHandler(config);
|
|
232
|
+
const event: DynamoDBStreamEvent = {
|
|
233
|
+
Records: records.map((_, i) =>
|
|
234
|
+
makeStreamRecord('INSERT', { idx: { N: String(i) } }),
|
|
235
|
+
) as any,
|
|
236
|
+
};
|
|
237
|
+
|
|
238
|
+
await handler(event);
|
|
239
|
+
|
|
240
|
+
// 2 batches for SQS (10 + 2), 1 queue = 2 calls
|
|
241
|
+
expect(SendMessageBatchCommand).toHaveBeenCalledTimes(2);
|
|
242
|
+
const firstBatch = vi.mocked(SendMessageBatchCommand).mock.calls[0][0];
|
|
243
|
+
const secondBatch = vi.mocked(SendMessageBatchCommand).mock.calls[1][0];
|
|
244
|
+
expect(firstBatch.Entries).toHaveLength(10);
|
|
245
|
+
expect(secondBatch.Entries).toHaveLength(2);
|
|
246
|
+
|
|
247
|
+
// 2 batches for EventBridge (10 + 2)
|
|
248
|
+
expect(PutEventsCommand).toHaveBeenCalledTimes(2);
|
|
249
|
+
});
|
|
250
|
+
|
|
251
|
+
it('handles unmarshall errors gracefully', async () => {
|
|
252
|
+
mockUnmarshall.mockImplementation(() => {
|
|
253
|
+
throw new Error('unmarshall failed');
|
|
254
|
+
});
|
|
255
|
+
|
|
256
|
+
const handler = createStreamHandler(config);
|
|
257
|
+
const event: DynamoDBStreamEvent = {
|
|
258
|
+
Records: [makeStreamRecord('INSERT', { bad: { S: 'data' } })] as any,
|
|
259
|
+
};
|
|
260
|
+
|
|
261
|
+
await handler(event);
|
|
262
|
+
|
|
263
|
+
expect(logger.error).toHaveBeenCalledWith(
|
|
264
|
+
expect.objectContaining({ error: expect.any(Error) }),
|
|
265
|
+
'Error unmarshalling event record',
|
|
266
|
+
);
|
|
267
|
+
// Should not send to queues since no valid records
|
|
268
|
+
expect(SendMessageBatchCommand).not.toHaveBeenCalled();
|
|
269
|
+
expect(PutEventsCommand).not.toHaveBeenCalled();
|
|
270
|
+
});
|
|
271
|
+
|
|
272
|
+
it('does nothing when there are no event records', async () => {
|
|
273
|
+
const handler = createStreamHandler(config);
|
|
274
|
+
const event: DynamoDBStreamEvent = { Records: [] };
|
|
275
|
+
|
|
276
|
+
await handler(event);
|
|
277
|
+
|
|
278
|
+
expect(SendMessageBatchCommand).not.toHaveBeenCalled();
|
|
279
|
+
expect(PutEventsCommand).not.toHaveBeenCalled();
|
|
280
|
+
});
|
|
281
|
+
|
|
282
|
+
it('re-throws SQS send errors', async () => {
|
|
283
|
+
const eventRecord = makeEventRecord();
|
|
284
|
+
mockUnmarshall.mockReturnValue(eventRecord);
|
|
285
|
+
mockSqsSend.mockRejectedValue(new Error('SQS error'));
|
|
286
|
+
|
|
287
|
+
const handler = createStreamHandler(config);
|
|
288
|
+
const event: DynamoDBStreamEvent = {
|
|
289
|
+
Records: [makeStreamRecord('INSERT', { a: { S: '1' } })] as any,
|
|
290
|
+
};
|
|
291
|
+
|
|
292
|
+
await expect(handler(event)).rejects.toThrow('SQS error');
|
|
293
|
+
expect(logger.error).toHaveBeenCalled();
|
|
294
|
+
});
|
|
295
|
+
|
|
296
|
+
it('re-throws EventBridge send errors', async () => {
|
|
297
|
+
const eventRecord = makeEventRecord();
|
|
298
|
+
mockUnmarshall.mockReturnValue(eventRecord);
|
|
299
|
+
mockEbSend.mockRejectedValue(new Error('EB error'));
|
|
300
|
+
|
|
301
|
+
const handler = createStreamHandler(config);
|
|
302
|
+
const event: DynamoDBStreamEvent = {
|
|
303
|
+
Records: [makeStreamRecord('INSERT', { a: { S: '1' } })] as any,
|
|
304
|
+
};
|
|
305
|
+
|
|
306
|
+
await expect(handler(event)).rejects.toThrow();
|
|
307
|
+
expect(logger.error).toHaveBeenCalled();
|
|
308
|
+
});
|
|
309
|
+
});
|