@squiz/dx-common-lib 1.72.1 → 1.72.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,440 @@
1
+ /*!
2
+ * @license
3
+ * Copyright Squiz Australia Pty Ltd. All Rights Reserved.
4
+ */
5
+ import { createEventStreamHandler, EventMapper } from './EventStreamHandler';
6
+ import { DynamoDBStreamEvent, DynamoDBRecord, Context } from 'aws-lambda';
7
+
8
/** Jest-mock stand-in for the Logger the handler logs through; tests inspect `.mock.calls`. */
interface MockLogger {
  info: jest.Mock;
  debug: jest.Mock;
  error: jest.Mock;
}

/** Jest-mock stand-in for the EventBusService the handler publishes through. */
interface MockEventBusService {
  setTenantId: jest.Mock;
  publishEvents: jest.Mock;
}
18
+
19
+ describe('EventStreamHandler', () => {
20
+ let mockLogger: MockLogger;
21
+ let mockEventBusService: MockEventBusService;
22
+
23
+ beforeEach(() => {
24
+ mockLogger = {
25
+ info: jest.fn(),
26
+ debug: jest.fn(),
27
+ error: jest.fn(),
28
+ };
29
+
30
+ mockEventBusService = {
31
+ setTenantId: jest.fn(),
32
+ publishEvents: jest.fn().mockResolvedValue(undefined),
33
+ };
34
+ });
35
+
36
+ const createMockRecord = (overrides: Partial<DynamoDBRecord> = {}): DynamoDBRecord => {
37
+ return {
38
+ eventID: '1',
39
+ eventVersion: '1.0',
40
+ dynamodb: {
41
+ Keys: {
42
+ pk: { S: 'cv#component-123' },
43
+ sk: { S: 'v#1.0.0' },
44
+ },
45
+ SequenceNumber: '111',
46
+ SizeBytes: 26,
47
+ StreamViewType: 'NEW_AND_OLD_IMAGES',
48
+ },
49
+ awsRegion: 'us-east-1',
50
+ eventName: 'INSERT',
51
+ eventSource: 'aws:dynamodb',
52
+ eventSourceARN:
53
+ 'arn:aws:dynamodb:us-east-1:123456789012:table/dev.cmp-service.tenant123/stream/2024-01-01T00:00:00.000',
54
+ ...overrides,
55
+ } as DynamoDBRecord;
56
+ };
57
+
58
+ const createMockContext = (): Context =>
59
+ ({
60
+ awsRequestId: 'test-request-id',
61
+ invokeid: 'test-invoke-id',
62
+ logGroupName: 'test-log-group',
63
+ logStreamName: 'test-log-stream',
64
+ functionName: 'test-function',
65
+ memoryLimitInMB: '128',
66
+ functionVersion: '1',
67
+ getRemainingTimeInMillis: jest.fn(() => 30000),
68
+ done: jest.fn(),
69
+ fail: jest.fn(),
70
+ succeed: jest.fn(),
71
+ } as unknown as Context);
72
+
73
+ describe('createEventStreamHandler', () => {
74
+ it('should successfully process records with mapped events', async () => {
75
+ const mockMapper: EventMapper = jest.fn(async () => [
76
+ { detailType: 'component.change.create', detail: { name: 'test' } },
77
+ ]);
78
+
79
+ const handle = createEventStreamHandler({
80
+ logger: mockLogger as any,
81
+ eventBusService: mockEventBusService as any,
82
+ tableServiceIdentifier: 'cmp',
83
+ entityTypePrefixMap: { 'cv#': 'componentVersion' },
84
+ mappers: { componentVersion: mockMapper },
85
+ extractTenantId: (arn: string, _identifier: string) => {
86
+ const tableName = arn.split('/')[1];
87
+ const parts = tableName.split('.');
88
+ return parts[parts.length - 1];
89
+ },
90
+ batchSize: 10,
91
+ });
92
+
93
+ const event: DynamoDBStreamEvent = {
94
+ Records: [createMockRecord()],
95
+ };
96
+
97
+ const response = await handle(event, createMockContext());
98
+
99
+ expect(mockLogger.info).toHaveBeenCalledWith('Processing DynamoDB stream batch', expect.any(Object));
100
+ expect(mockEventBusService.setTenantId).toHaveBeenCalledWith('tenant123');
101
+ expect(mockEventBusService.publishEvents).toHaveBeenCalledWith([
102
+ { detailType: 'component.change.create', detail: { name: 'test' } },
103
+ ]);
104
+ expect(response.batchItemFailures).toHaveLength(0);
105
+ });
106
+
107
+ it('should pass the stream record as the third argument to extractTenantId', async () => {
108
+ const extractTenantId = jest.fn((arn: string, _identifier: string, _record: DynamoDBRecord) => {
109
+ const tableName = arn.split('/')[1];
110
+ const parts = tableName.split('.');
111
+ return parts[parts.length - 1];
112
+ });
113
+
114
+ const mockMapper: EventMapper = jest.fn(async () => [{ detailType: 'component.change.create', detail: {} }]);
115
+
116
+ const handle = createEventStreamHandler({
117
+ logger: mockLogger as any,
118
+ eventBusService: mockEventBusService as any,
119
+ tableServiceIdentifier: 'cmp',
120
+ entityTypePrefixMap: { 'cv#': 'componentVersion' },
121
+ mappers: { componentVersion: mockMapper },
122
+ extractTenantId,
123
+ });
124
+
125
+ const streamRecord = createMockRecord();
126
+ await handle({ Records: [streamRecord] }, createMockContext());
127
+
128
+ expect(extractTenantId).toHaveBeenCalledWith(streamRecord.eventSourceARN, 'cmp', streamRecord);
129
+ });
130
+
131
+ it('should handle records with no mapper registered', async () => {
132
+ const handle = createEventStreamHandler({
133
+ logger: mockLogger as any,
134
+ eventBusService: mockEventBusService as any,
135
+ tableServiceIdentifier: 'cmp',
136
+ entityTypePrefixMap: { 'cv#': 'componentVersion' },
137
+ mappers: {},
138
+ extractTenantId: (arn: string, _identifier: string) => {
139
+ const tableName = arn.split('/')[1];
140
+ const parts = tableName.split('.');
141
+ return parts[parts.length - 1];
142
+ },
143
+ });
144
+
145
+ const event: DynamoDBStreamEvent = {
146
+ Records: [createMockRecord()],
147
+ };
148
+
149
+ const response = await handle(event, createMockContext());
150
+
151
+ expect(mockLogger.debug).toHaveBeenCalledWith('No event mapper registered for entity type: componentVersion');
152
+ expect(mockEventBusService.publishEvents).not.toHaveBeenCalled();
153
+ expect(response.batchItemFailures).toHaveLength(0);
154
+ });
155
+
156
+ it('should handle records with unknown entity types', async () => {
157
+ const handle = createEventStreamHandler({
158
+ logger: mockLogger as any,
159
+ eventBusService: mockEventBusService as any,
160
+ tableServiceIdentifier: 'cmp',
161
+ entityTypePrefixMap: { 'cs#': 'componentSet' },
162
+ mappers: {},
163
+ extractTenantId: (arn: string, _identifier: string) => {
164
+ const tableName = arn.split('/')[1];
165
+ const parts = tableName.split('.');
166
+ return parts[parts.length - 1];
167
+ },
168
+ });
169
+
170
+ const event: DynamoDBStreamEvent = {
171
+ Records: [createMockRecord()],
172
+ };
173
+
174
+ const response = await handle(event, createMockContext());
175
+
176
+ expect(mockEventBusService.publishEvents).not.toHaveBeenCalled();
177
+ expect(response.batchItemFailures).toHaveLength(0);
178
+ });
179
+
180
+ it('should handle records without eventSourceARN', async () => {
181
+ const handle = createEventStreamHandler({
182
+ logger: mockLogger as any,
183
+ eventBusService: mockEventBusService as any,
184
+ tableServiceIdentifier: 'cmp',
185
+ entityTypePrefixMap: { 'cv#': 'componentVersion' },
186
+ mappers: {},
187
+ extractTenantId: (arn: string, _identifier: string) => {
188
+ const tableName = arn.split('/')[1];
189
+ const parts = tableName.split('.');
190
+ return parts[parts.length - 1];
191
+ },
192
+ });
193
+
194
+ const event: DynamoDBStreamEvent = {
195
+ Records: [createMockRecord({ eventSourceARN: undefined })],
196
+ };
197
+
198
+ const response = await handle(event, createMockContext());
199
+
200
+ expect(mockLogger.error).toHaveBeenCalledWith('Failed to process record', expect.any(Object));
201
+ expect(response.batchItemFailures).toHaveLength(0);
202
+ });
203
+
204
+ it('should batch records according to batchSize', async () => {
205
+ const mockMapper: EventMapper = jest.fn(async () => [{ detailType: 'component.change.create', detail: {} }]);
206
+
207
+ const handle = createEventStreamHandler({
208
+ logger: mockLogger as any,
209
+ eventBusService: mockEventBusService as any,
210
+ tableServiceIdentifier: 'cmp',
211
+ entityTypePrefixMap: { 'cv#': 'componentVersion' },
212
+ mappers: { componentVersion: mockMapper },
213
+ extractTenantId: (arn: string, _identifier: string) => {
214
+ const tableName = arn.split('/')[1];
215
+ const parts = tableName.split('.');
216
+ return parts[parts.length - 1];
217
+ },
218
+ batchSize: 2,
219
+ });
220
+
221
+ const records = [
222
+ createMockRecord({ eventID: '1' }),
223
+ createMockRecord({ eventID: '2' }),
224
+ createMockRecord({ eventID: '3' }),
225
+ ];
226
+
227
+ const event: DynamoDBStreamEvent = { Records: records };
228
+ await handle(event, createMockContext());
229
+
230
+ expect(mockMapper).toHaveBeenCalledTimes(3);
231
+ });
232
+
233
+ it('should mark records as failed when event publishing fails', async () => {
234
+ const mockMapper: EventMapper = jest.fn(async () => [
235
+ { detailType: 'component.change.create', detail: { name: 'test' } },
236
+ ]);
237
+
238
+ mockEventBusService.publishEvents.mockRejectedValue(new Error('Publishing failed'));
239
+
240
+ const handle = createEventStreamHandler({
241
+ logger: mockLogger as any,
242
+ eventBusService: mockEventBusService as any,
243
+ tableServiceIdentifier: 'cmp',
244
+ entityTypePrefixMap: { 'cv#': 'componentVersion' },
245
+ mappers: { componentVersion: mockMapper },
246
+ extractTenantId: (arn: string, _identifier: string) => {
247
+ const tableName = arn.split('/')[1];
248
+ const parts = tableName.split('.');
249
+ return parts[parts.length - 1];
250
+ },
251
+ });
252
+
253
+ const event: DynamoDBStreamEvent = {
254
+ Records: [createMockRecord()],
255
+ };
256
+
257
+ const response = await handle(event, createMockContext());
258
+
259
+ expect(mockLogger.error).toHaveBeenCalledWith('Failed to publish events for tenant', expect.any(Object));
260
+ expect(response.batchItemFailures).toHaveLength(1);
261
+ expect(response.batchItemFailures[0].itemIdentifier).toBe('111');
262
+ });
263
+
264
+ it('should handle mapper returning empty events', async () => {
265
+ const mockMapper: EventMapper = jest.fn(async () => []);
266
+
267
+ const handle = createEventStreamHandler({
268
+ logger: mockLogger as any,
269
+ eventBusService: mockEventBusService as any,
270
+ tableServiceIdentifier: 'cmp',
271
+ entityTypePrefixMap: { 'cv#': 'componentVersion' },
272
+ mappers: { componentVersion: mockMapper },
273
+ extractTenantId: (arn: string, _identifier: string) => {
274
+ const tableName = arn.split('/')[1];
275
+ const parts = tableName.split('.');
276
+ return parts[parts.length - 1];
277
+ },
278
+ });
279
+
280
+ const event: DynamoDBStreamEvent = {
281
+ Records: [createMockRecord()],
282
+ };
283
+
284
+ const response = await handle(event, createMockContext());
285
+
286
+ expect(mockEventBusService.publishEvents).not.toHaveBeenCalled();
287
+ expect(response.batchItemFailures).toHaveLength(0);
288
+ });
289
+
290
+ it('should handle multiple mappers for different entity types', async () => {
291
+ const cvMapper: EventMapper = jest.fn(async () => [
292
+ { detailType: 'component.change.create', detail: { type: 'cv' } },
293
+ ]);
294
+ const csMapper: EventMapper = jest.fn(async () => [{ detailType: 'set.change.create', detail: { type: 'cs' } }]);
295
+
296
+ const handle = createEventStreamHandler({
297
+ logger: mockLogger as any,
298
+ eventBusService: mockEventBusService as any,
299
+ tableServiceIdentifier: 'cmp',
300
+ entityTypePrefixMap: { 'cv#': 'componentVersion', 'cs#': 'componentSet' },
301
+ mappers: { componentVersion: cvMapper, componentSet: csMapper },
302
+ extractTenantId: (arn: string, _identifier: string) => {
303
+ const tableName = arn.split('/')[1];
304
+ const parts = tableName.split('.');
305
+ return parts[parts.length - 1];
306
+ },
307
+ });
308
+
309
+ const event: DynamoDBStreamEvent = {
310
+ Records: [
311
+ createMockRecord({ eventID: '1' }),
312
+ createMockRecord({ eventID: '2', dynamodb: { Keys: { pk: { S: 'cs#set-123' } } } as any }),
313
+ ],
314
+ };
315
+
316
+ await handle(event, createMockContext());
317
+
318
+ expect(cvMapper).toHaveBeenCalledTimes(1);
319
+ expect(csMapper).toHaveBeenCalledTimes(1);
320
+ });
321
+
322
+ it('should handle mapper errors gracefully', async () => {
323
+ const mockMapper: EventMapper = jest.fn(async () => {
324
+ throw new Error('Mapper failed');
325
+ });
326
+
327
+ const handle = createEventStreamHandler({
328
+ logger: mockLogger as any,
329
+ eventBusService: mockEventBusService as any,
330
+ tableServiceIdentifier: 'cmp',
331
+ entityTypePrefixMap: { 'cv#': 'componentVersion' },
332
+ mappers: { componentVersion: mockMapper },
333
+ extractTenantId: (arn: string, _identifier: string) => {
334
+ const tableName = arn.split('/')[1];
335
+ const parts = tableName.split('.');
336
+ return parts[parts.length - 1];
337
+ },
338
+ });
339
+
340
+ const event: DynamoDBStreamEvent = {
341
+ Records: [createMockRecord()],
342
+ };
343
+
344
+ const response = await handle(event, createMockContext());
345
+
346
+ expect(mockLogger.error).toHaveBeenCalledWith('Failed to process record', expect.any(Object));
347
+ expect(response.batchItemFailures).toHaveLength(0);
348
+ });
349
+
350
+ it('should group events by tenant and publish separately', async () => {
351
+ const mockMapper: EventMapper = jest.fn(async () => [{ detailType: 'component.change.create', detail: {} }]);
352
+
353
+ const handle = createEventStreamHandler({
354
+ logger: mockLogger as any,
355
+ eventBusService: mockEventBusService as any,
356
+ tableServiceIdentifier: 'cmp',
357
+ entityTypePrefixMap: { 'cv#': 'componentVersion' },
358
+ mappers: { componentVersion: mockMapper },
359
+ extractTenantId: (arn: string) => {
360
+ if (arn.includes('tenant1')) return 'tenant1';
361
+ if (arn.includes('tenant2')) return 'tenant2';
362
+ return 'unknown';
363
+ },
364
+ });
365
+
366
+ const event: DynamoDBStreamEvent = {
367
+ Records: [
368
+ createMockRecord({
369
+ eventID: '1',
370
+ eventSourceARN: 'arn:aws:dynamodb:us-east-1:123456789012:table/dev.cmp-service.tenant1/stream/ts',
371
+ }),
372
+ createMockRecord({
373
+ eventID: '2',
374
+ eventSourceARN: 'arn:aws:dynamodb:us-east-1:123456789012:table/dev.cmp-service.tenant2/stream/ts',
375
+ }),
376
+ ],
377
+ };
378
+
379
+ await handle(event, createMockContext());
380
+
381
+ expect(mockEventBusService.setTenantId).toHaveBeenCalledWith('tenant1');
382
+ expect(mockEventBusService.setTenantId).toHaveBeenCalledWith('tenant2');
383
+ expect(mockEventBusService.publishEvents).toHaveBeenCalledTimes(2);
384
+ });
385
+
386
+ it('should handle records without primary key', async () => {
387
+ const handle = createEventStreamHandler({
388
+ logger: mockLogger as any,
389
+ eventBusService: mockEventBusService as any,
390
+ tableServiceIdentifier: 'cmp',
391
+ entityTypePrefixMap: { 'cv#': 'componentVersion' },
392
+ mappers: {},
393
+ extractTenantId: (arn: string, _identifier: string) => {
394
+ const tableName = arn.split('/')[1];
395
+ const parts = tableName.split('.');
396
+ return parts[parts.length - 1];
397
+ },
398
+ });
399
+
400
+ const event: DynamoDBStreamEvent = {
401
+ Records: [createMockRecord({ dynamodb: { SequenceNumber: '111', SizeBytes: 0 } as any })],
402
+ };
403
+
404
+ const response = await handle(event, createMockContext());
405
+
406
+ expect(response.batchItemFailures).toHaveLength(0);
407
+ });
408
+
409
+ it('should log batch processing summary', async () => {
410
+ const mockMapper: EventMapper = jest.fn(async () => [{ detailType: 'component.change.create', detail: {} }]);
411
+
412
+ const handle = createEventStreamHandler({
413
+ logger: mockLogger as any,
414
+ eventBusService: mockEventBusService as any,
415
+ tableServiceIdentifier: 'cmp',
416
+ entityTypePrefixMap: { 'cv#': 'componentVersion' },
417
+ mappers: { componentVersion: mockMapper },
418
+ extractTenantId: (arn: string, _identifier: string) => {
419
+ const tableName = arn.split('/')[1];
420
+ const parts = tableName.split('.');
421
+ return parts[parts.length - 1];
422
+ },
423
+ });
424
+
425
+ const event: DynamoDBStreamEvent = {
426
+ Records: [createMockRecord()],
427
+ };
428
+
429
+ await handle(event, createMockContext());
430
+
431
+ const finalLog = mockLogger.info.mock.calls.find((call) => call[0] === 'Batch processing complete');
432
+ expect(finalLog).toBeDefined();
433
+ expect(finalLog[1]).toEqual({
434
+ totalRecords: 1,
435
+ failedRecords: 0,
436
+ service: 'cmp',
437
+ });
438
+ });
439
+ });
440
+ });
@@ -0,0 +1,192 @@
1
+ /*!
2
+ * @license
3
+ * Copyright Squiz Australia Pty Ltd. All Rights Reserved.
4
+ */
5
+ import { DynamoDBStreamEvent, DynamoDBRecord, Context, DynamoDBBatchResponse } from 'aws-lambda';
6
+ import { Logger } from '@squiz/dx-logger-lib';
7
+ import { EventBusService } from '../events/EventBusService';
8
+
9
/** A single domain event to be published on the event bus. */
export interface EventPayload {
  /** Event discriminator, e.g. 'component.change.create'. */
  detailType: string;
  /** Arbitrary event body. */
  detail: object;
}

/** Maps one DynamoDB stream record to zero or more domain events. */
export type EventMapper = (record: DynamoDBRecord) => Promise<EventPayload[]>;

/** Resolves tenant id for publishing; use `record` when tenant is carried on the item rather than derivable from the table ARN. */
export type ExtractTenantId = (arn: string, tableServiceIdentifier: string, record: DynamoDBRecord) => string;

/** Internal result of mapping a single stream record. */
interface MappedRecord {
  // Position of the record within the incoming Lambda event batch.
  recordIndex: number;
  record: DynamoDBRecord;
  tenantId: string;
  events: EventPayload[];
}

/** Configuration for createEventStreamHandler; `T` enumerates the entity type names. */
interface EventStreamHandlerConfig<T extends string> {
  logger: Logger;
  eventBusService: EventBusService;
  /** Matches the table name segment in ARNs (e.g. `cmp` for `*.cmp-service.*`); also used as log field `service`. */
  tableServiceIdentifier: string;
  /** Maps partition-key prefixes (e.g. 'cv#') to entity type names. */
  entityTypePrefixMap: Record<string, T>;
  /** Per-entity-type mappers; entity types without a registered mapper are skipped. */
  mappers: Partial<Record<T, EventMapper>>;
  extractTenantId: ExtractTenantId;
  /** Number of records processed concurrently per mapping batch (handler default: 10). */
  batchSize?: number;
}
36
+
37
+ /**
38
+ * DynamoDB stream Lambda handler factory for mapping stream records to domain events and publishing via the event bus.
39
+ *
40
+ * Handles:
41
+ * - Record batching by configurable size
42
+ * - Tenant extraction from stream ARN and/or stream record (via `extractTenantId`)
43
+ * - Entity type detection via prefix matching
44
+ * - Mapper invocation for known entity types
45
+ * - Event grouping by tenant
46
+ * - Batch event publishing with retry tracking
47
+ * - Failure reporting for Lambda retry mechanism
48
+ */
49
+ export function createEventStreamHandler<T extends string>(config: EventStreamHandlerConfig<T>) {
50
+ const {
51
+ logger,
52
+ eventBusService,
53
+ tableServiceIdentifier,
54
+ entityTypePrefixMap,
55
+ mappers,
56
+ extractTenantId,
57
+ batchSize = 10,
58
+ } = config;
59
+
60
+ function extractEntityType(record: DynamoDBRecord): T | undefined {
61
+ const pk = record.dynamodb?.Keys?.pk?.S;
62
+ if (!pk) {
63
+ return undefined;
64
+ }
65
+
66
+ for (const [prefix, entityType] of Object.entries(entityTypePrefixMap)) {
67
+ if (pk.startsWith(prefix)) {
68
+ return entityType as T;
69
+ }
70
+ }
71
+
72
+ return undefined;
73
+ }
74
+
75
+ return async function handler(event: DynamoDBStreamEvent, context: Context): Promise<DynamoDBBatchResponse> {
76
+ logger.info('Processing DynamoDB stream batch', {
77
+ recordCount: event.Records.length,
78
+ requestId: context.awsRequestId,
79
+ service: tableServiceIdentifier,
80
+ });
81
+
82
+ const mappedRecords: MappedRecord[] = [];
83
+
84
+ // Process records in batches
85
+ for (let i = 0; i < event.Records.length; i += batchSize) {
86
+ const batch = event.Records.slice(i, i + batchSize);
87
+
88
+ const batchResults = await Promise.all(
89
+ batch.map(async (record, batchIndex) => {
90
+ try {
91
+ if (!record.eventSourceARN) {
92
+ throw new Error('Missing eventSourceARN on stream record');
93
+ }
94
+
95
+ const tenantId = extractTenantId(record.eventSourceARN, tableServiceIdentifier, record);
96
+ const tableName = record.eventSourceARN.split('/')[1];
97
+ const entityType = extractEntityType(record);
98
+
99
+ logger.info('Processed stream record', {
100
+ eventName: record.eventName,
101
+ tenantId,
102
+ tableName,
103
+ entityType,
104
+ pk: record.dynamodb?.Keys?.pk?.S,
105
+ sk: record.dynamodb?.Keys?.sk?.S,
106
+ });
107
+
108
+ if (entityType !== undefined) {
109
+ const mapper = mappers[entityType];
110
+ if (mapper) {
111
+ // Note: For CMP, mappers return 0 or 1 event per record
112
+ const events = await mapper(record);
113
+ return { recordIndex: i + batchIndex, record, tenantId, events };
114
+ } else {
115
+ logger.debug(`No event mapper registered for entity type: ${entityType}`);
116
+ return { recordIndex: i + batchIndex, record, tenantId, events: [] };
117
+ }
118
+ }
119
+
120
+ return { recordIndex: i + batchIndex, record, tenantId, events: [] };
121
+ } catch (error) {
122
+ logger.error('Failed to process record', {
123
+ error: error instanceof Error ? error.message : String(error),
124
+ eventSourceARN: record.eventSourceARN,
125
+ });
126
+ return { recordIndex: i + batchIndex, record, tenantId: 'unknown', events: [] };
127
+ }
128
+ }),
129
+ );
130
+
131
+ mappedRecords.push(...batchResults);
132
+ }
133
+
134
+ // Group events by tenant and publish separately for each tenant
135
+ const eventsByTenant = new Map<
136
+ string,
137
+ Array<{ recordIndex: number; record: DynamoDBRecord; events: EventPayload[] }>
138
+ >();
139
+
140
+ for (const { recordIndex, record, tenantId, events } of mappedRecords) {
141
+ if (events.length > 0) {
142
+ const existing = eventsByTenant.get(tenantId) || [];
143
+ existing.push({ recordIndex, record, events });
144
+ eventsByTenant.set(tenantId, existing);
145
+ }
146
+ }
147
+
148
+ // Track failed records for retry
149
+ const failedRecordIndices = new Set<string>();
150
+
151
+ // Publish batches for each tenant with their own context in parallel
152
+ await Promise.all(
153
+ Array.from(eventsByTenant.entries()).map(async ([tenantId, recordsWithEvents]) => {
154
+ const events = recordsWithEvents.flatMap((r) => r.events);
155
+ logger.info('Publishing batch of events for tenant', { tenantId, eventCount: events.length });
156
+
157
+ try {
158
+ eventBusService.setTenantId(tenantId);
159
+ await eventBusService.publishEvents(events);
160
+ logger.info('Successfully published all events for tenant', { tenantId });
161
+ } catch (error) {
162
+ logger.error('Failed to publish events for tenant', {
163
+ tenantId,
164
+ error: error instanceof Error ? error.message : String(error),
165
+ });
166
+
167
+ // Mark all records for this tenant as failed
168
+ for (const { record } of recordsWithEvents) {
169
+ const sequenceNumber = record.dynamodb?.SequenceNumber;
170
+ if (sequenceNumber) {
171
+ failedRecordIndices.add(sequenceNumber);
172
+ }
173
+ }
174
+ }
175
+ }),
176
+ );
177
+
178
+ const batchItemFailures: Array<{ itemIdentifier: string }> = Array.from(failedRecordIndices).map(
179
+ (sequenceNumber) => ({
180
+ itemIdentifier: sequenceNumber,
181
+ }),
182
+ );
183
+
184
+ logger.info('Batch processing complete', {
185
+ totalRecords: event.Records.length,
186
+ failedRecords: batchItemFailures.length,
187
+ service: tableServiceIdentifier,
188
+ });
189
+
190
+ return { batchItemFailures };
191
+ };
192
+ }