@auriclabs/events 0.2.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
1
+ {"version":3,"file":"stream-handler.entry.d.mts","names":[],"sources":["../src/stream-handler.entry.ts"],"mappings":";;;cAEa,OAAA,GAAO,KAAA,EAGlB,mBAAA,KAHkB,OAAA"}
@@ -0,0 +1,10 @@
1
+ import { t as createStreamHandler } from "./stream-handler.mjs";
2
//#region src/stream-handler.entry.ts
// Parse and validate QUEUE_URL_LIST up front so a misconfigured environment
// fails at cold start with a clear error instead of surfacing deep inside the
// handler on the first invocation.
const queueUrls = JSON.parse(process.env.QUEUE_URL_LIST ?? "[]");
if (!Array.isArray(queueUrls)) {
  throw new TypeError("QUEUE_URL_LIST must be a JSON array of queue URLs");
}
const handler = createStreamHandler({
  busName: process.env.EVENT_BUS_NAME,
  queueUrls
});
//#endregion
export { handler };
9
+
10
+ //# sourceMappingURL=stream-handler.entry.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"stream-handler.entry.mjs","names":[],"sources":["../src/stream-handler.entry.ts"],"sourcesContent":["import { createStreamHandler } from './stream-handler';\n\nexport const handler = createStreamHandler({\n busName: process.env.EVENT_BUS_NAME,\n queueUrls: JSON.parse(process.env.QUEUE_URL_LIST ?? '[]') as string[],\n});\n"],"mappings":";;AAEA,MAAa,UAAU,oBAAoB;CACzC,SAAS,QAAQ,IAAI;CACrB,WAAW,KAAK,MAAM,QAAQ,IAAI,kBAAkB,KAAK;CAC1D,CAAC"}
@@ -0,0 +1,88 @@
1
+ import { logger } from "@auriclabs/logger";
2
+ import { EventBridgeClient, PutEventsCommand } from "@aws-sdk/client-eventbridge";
3
+ import { SQSClient, SendMessageBatchCommand } from "@aws-sdk/client-sqs";
4
+ import { unmarshall } from "@aws-sdk/util-dynamodb";
5
+ import { kebabCase } from "lodash-es";
6
//#region src/stream-handler.ts
// Maximum entries per SQS SendMessageBatch / EventBridge PutEvents call
// (both APIs cap a request at 10 entries).
const BATCH_SIZE = 10;
/**
 * Creates a Lambda handler for DynamoDB stream events.
 * Processes INSERT events from the event store table and forwards them to
 * SQS queues and, when `config.busName` is set, an EventBridge bus.
 *
 * @param config `{ busName?: string, queueUrls: string[] }`
 * @returns an async Lambda handler for DynamoDB stream events
 */
function createStreamHandler(config) {
  const sqsClient = new SQSClient({});
  const eventBridge = new EventBridgeClient({});
  /** Split `array` into consecutive chunks of at most `chunkSize` items. */
  function chunkArray(array, chunkSize) {
    const chunks = [];
    for (let i = 0; i < array.length; i += chunkSize) {
      chunks.push(array.slice(i, i + chunkSize));
    }
    return chunks;
  }
  /** Fan the records out to every configured queue in parallel. */
  async function sendToQueuesBatch(eventRecords) {
    await Promise.all(config.queueUrls.map((queue) => sendToQueueBatch(eventRecords, queue)));
  }
  /** Send the records to a single queue in batches of BATCH_SIZE. */
  async function sendToQueueBatch(eventRecords, queue) {
    const batches = chunkArray(eventRecords, BATCH_SIZE);
    for (const batch of batches) {
      try {
        const entries = batch.map((eventRecord, index) => ({
          Id: `${eventRecord.eventId}-${index}`,
          MessageBody: JSON.stringify(eventRecord),
          // FIFO queue fields: preserve per-aggregate ordering, dedupe per event.
          MessageGroupId: eventRecord.aggregateId,
          MessageDeduplicationId: eventRecord.eventId
        }));
        const result = await sqsClient.send(new SendMessageBatchCommand({
          QueueUrl: queue,
          Entries: entries
        }));
        // SendMessageBatch reports per-entry failures in `Failed` without
        // throwing; surface them so the stream batch is retried rather than
        // silently dropping messages.
        if (result.Failed && result.Failed.length > 0) {
          throw new Error(`${result.Failed.length} message(s) failed to send to queue`);
        }
      } catch (error) {
        logger.error({
          error,
          batch,
          queue
        }, "Error sending batch to queue");
        throw error;
      }
    }
  }
  /** Publish the records to the configured EventBridge bus in batches. */
  async function sendToBusBatch(eventRecords) {
    const batches = chunkArray(eventRecords, BATCH_SIZE);
    for (const batch of batches) {
      try {
        const entries = batch.map((eventRecord) => ({
          // Fall back to the kebab-cased aggregate namespace when the record
          // carries no explicit source.
          Source: eventRecord.source ?? kebabCase(eventRecord.aggregateType.split(".")[0]),
          DetailType: eventRecord.eventType,
          Detail: JSON.stringify(eventRecord),
          EventBusName: config.busName
        }));
        const result = await eventBridge.send(new PutEventsCommand({ Entries: entries }));
        // PutEvents reports per-entry failures via FailedEntryCount without
        // throwing; surface them so the stream batch is retried.
        if (result.FailedEntryCount) {
          throw new Error(`${result.FailedEntryCount} event(s) failed to publish to bus`);
        }
      } catch (error) {
        logger.error({
          error,
          batch
        }, "Error sending batch to bus");
        throw error;
      }
    }
  }
  return async (event) => {
    // Only INSERT records are event-store appends. Unmarshall each new image
    // and keep only `itemType === "event"` items — this drops aggregate HEAD
    // rows and any record that failed to unmarshall (logged above as undefined).
    const eventRecords = event.Records.filter((record) => record.eventName === "INSERT").map((record) => {
      try {
        const data = record.dynamodb?.NewImage;
        return unmarshall(data);
      } catch (error) {
        logger.error({
          error,
          record
        }, "Error unmarshalling event record");
        return;
      }
    }).filter((eventRecord) => eventRecord?.itemType === "event");
    if (eventRecords.length > 0) {
      const tasks = [sendToQueuesBatch(eventRecords)];
      if (config.busName) {
        tasks.push(sendToBusBatch(eventRecords));
      }
      await Promise.all(tasks);
    }
  };
}
//#endregion
export { createStreamHandler as t };
87
+
88
+ //# sourceMappingURL=stream-handler.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"stream-handler.mjs","names":[],"sources":["../src/stream-handler.ts"],"sourcesContent":["import { logger } from '@auriclabs/logger';\nimport { AttributeValue } from '@aws-sdk/client-dynamodb';\nimport { EventBridgeClient, PutEventsCommand } from '@aws-sdk/client-eventbridge';\nimport { SendMessageBatchCommand, SQSClient } from '@aws-sdk/client-sqs';\nimport { unmarshall } from '@aws-sdk/util-dynamodb';\nimport { DynamoDBStreamEvent } from 'aws-lambda';\nimport { kebabCase } from 'lodash-es';\n\nimport { AggregateHead, EventRecord } from './types';\n\nconst BATCH_SIZE = 10;\n\nexport interface CreateStreamHandlerConfig {\n busName?: string;\n queueUrls: string[];\n}\n\n/**\n * Creates a Lambda handler for DynamoDB stream events.\n * Processes INSERT events from the event store table and forwards them to SQS queues and EventBridge.\n */\nexport function createStreamHandler(config: CreateStreamHandlerConfig) {\n const sqsClient = new SQSClient();\n const eventBridge = new EventBridgeClient({});\n\n function chunkArray<T>(array: T[], chunkSize: number): T[][] {\n const chunks: T[][] = [];\n for (let i = 0; i < array.length; i += chunkSize) {\n chunks.push(array.slice(i, i + chunkSize));\n }\n return chunks;\n }\n\n async function sendToQueuesBatch(eventRecords: EventRecord[]) {\n await Promise.all(config.queueUrls.map((queue) => sendToQueueBatch(eventRecords, queue)));\n }\n\n async function sendToQueueBatch(eventRecords: EventRecord[], queue: string) {\n const batches = chunkArray(eventRecords, BATCH_SIZE);\n\n for (const batch of batches) {\n try {\n const entries = batch.map((eventRecord, index) => ({\n Id: `${eventRecord.eventId}-${index}`,\n MessageBody: JSON.stringify(eventRecord),\n MessageGroupId: eventRecord.aggregateId,\n MessageDeduplicationId: eventRecord.eventId,\n }));\n\n await sqsClient.send(\n new SendMessageBatchCommand({\n QueueUrl: queue,\n Entries: entries,\n }),\n );\n } catch (error) {\n logger.error({ error, batch, queue 
}, 'Error sending batch to queue');\n throw error;\n }\n }\n }\n\n async function sendToBusBatch(eventRecords: EventRecord[]) {\n const batches = chunkArray(eventRecords, BATCH_SIZE);\n\n for (const batch of batches) {\n try {\n const entries = batch.map((eventRecord) => {\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n const source = eventRecord.source ?? kebabCase(eventRecord.aggregateType.split('.')[0]);\n return {\n Source: source,\n DetailType: eventRecord.eventType,\n Detail: JSON.stringify(eventRecord),\n EventBusName: config.busName,\n };\n });\n\n await eventBridge.send(\n new PutEventsCommand({\n Entries: entries,\n }),\n );\n } catch (error) {\n logger.error({ error, batch }, 'Error sending batch to bus');\n throw error;\n }\n }\n }\n\n return async (event: DynamoDBStreamEvent): Promise<void> => {\n const eventRecords = event.Records.filter((record) => record.eventName === 'INSERT')\n .map((record) => {\n try {\n const data = record.dynamodb?.NewImage;\n return unmarshall(data as Record<string, AttributeValue>) as EventRecord | AggregateHead;\n } catch (error) {\n logger.error({ error, record }, 'Error unmarshalling event record');\n return undefined;\n }\n })\n .filter((eventRecord): eventRecord is EventRecord => eventRecord?.itemType === 'event');\n\n if (eventRecords.length > 0) {\n const tasks: Promise<void>[] = [sendToQueuesBatch(eventRecords)];\n if (config.busName) {\n tasks.push(sendToBusBatch(eventRecords));\n }\n await Promise.all(tasks);\n }\n 
};\n}\n"],"mappings":";;;;;;AAUA,MAAM,aAAa;;;;;AAWnB,SAAgB,oBAAoB,QAAmC;CACrE,MAAM,YAAY,IAAI,WAAW;CACjC,MAAM,cAAc,IAAI,kBAAkB,EAAE,CAAC;CAE7C,SAAS,WAAc,OAAY,WAA0B;EAC3D,MAAM,SAAgB,EAAE;AACxB,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,UACrC,QAAO,KAAK,MAAM,MAAM,GAAG,IAAI,UAAU,CAAC;AAE5C,SAAO;;CAGT,eAAe,kBAAkB,cAA6B;AAC5D,QAAM,QAAQ,IAAI,OAAO,UAAU,KAAK,UAAU,iBAAiB,cAAc,MAAM,CAAC,CAAC;;CAG3F,eAAe,iBAAiB,cAA6B,OAAe;EAC1E,MAAM,UAAU,WAAW,cAAc,WAAW;AAEpD,OAAK,MAAM,SAAS,QAClB,KAAI;GACF,MAAM,UAAU,MAAM,KAAK,aAAa,WAAW;IACjD,IAAI,GAAG,YAAY,QAAQ,GAAG;IAC9B,aAAa,KAAK,UAAU,YAAY;IACxC,gBAAgB,YAAY;IAC5B,wBAAwB,YAAY;IACrC,EAAE;AAEH,SAAM,UAAU,KACd,IAAI,wBAAwB;IAC1B,UAAU;IACV,SAAS;IACV,CAAC,CACH;WACM,OAAO;AACd,UAAO,MAAM;IAAE;IAAO;IAAO;IAAO,EAAE,+BAA+B;AACrE,SAAM;;;CAKZ,eAAe,eAAe,cAA6B;EACzD,MAAM,UAAU,WAAW,cAAc,WAAW;AAEpD,OAAK,MAAM,SAAS,QAClB,KAAI;GACF,MAAM,UAAU,MAAM,KAAK,gBAAgB;AAGzC,WAAO;KACL,QAFa,YAAY,UAAU,UAAU,YAAY,cAAc,MAAM,IAAI,CAAC,GAAG;KAGrF,YAAY,YAAY;KACxB,QAAQ,KAAK,UAAU,YAAY;KACnC,cAAc,OAAO;KACtB;KACD;AAEF,SAAM,YAAY,KAChB,IAAI,iBAAiB,EACnB,SAAS,SACV,CAAC,CACH;WACM,OAAO;AACd,UAAO,MAAM;IAAE;IAAO;IAAO,EAAE,6BAA6B;AAC5D,SAAM;;;AAKZ,QAAO,OAAO,UAA8C;EAC1D,MAAM,eAAe,MAAM,QAAQ,QAAQ,WAAW,OAAO,cAAc,SAAS,CACjF,KAAK,WAAW;AACf,OAAI;IACF,MAAM,OAAO,OAAO,UAAU;AAC9B,WAAO,WAAW,KAAuC;YAClD,OAAO;AACd,WAAO,MAAM;KAAE;KAAO;KAAQ,EAAE,mCAAmC;AACnE;;IAEF,CACD,QAAQ,gBAA4C,aAAa,aAAa,QAAQ;AAEzF,MAAI,aAAa,SAAS,GAAG;GAC3B,MAAM,QAAyB,CAAC,kBAAkB,aAAa,CAAC;AAChE,OAAI,OAAO,QACT,OAAM,KAAK,eAAe,aAAa,CAAC;AAE1C,SAAM,QAAQ,IAAI,MAAM"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@auriclabs/events",
3
- "version": "0.2.0",
3
+ "version": "0.4.0",
4
4
  "description": "Event sourcing runtime utilities for DynamoDB-backed event stores",
5
5
  "prettier": "@auriclabs/prettier-config",
6
6
  "main": "dist/index.cjs",
@@ -11,6 +11,11 @@
11
11
  "types": "./dist/index.d.mts",
12
12
  "import": "./dist/index.mjs",
13
13
  "require": "./dist/index.cjs"
14
+ },
15
+ "./stream-handler": {
16
+ "types": "./dist/stream-handler.entry.d.mts",
17
+ "import": "./dist/stream-handler.entry.mjs",
18
+ "require": "./dist/stream-handler.entry.cjs"
14
19
  }
15
20
  },
16
21
  "keywords": [],
@@ -47,7 +52,7 @@
47
52
  "directory": "packages/events"
48
53
  },
49
54
  "scripts": {
50
- "build": "tsdown src/index.ts --format cjs,esm --dts --no-hash",
55
+ "build": "tsdown src/index.ts src/stream-handler.entry.ts --format cjs,esm --dts --no-hash",
51
56
  "dev": "concurrently \"pnpm build --watch\" \"pnpm:y:watch\"",
52
57
  "y:watch": "chokidar dist --initial --silent -c \"yalc publish --push\"",
53
58
  "lint": "eslint .",
@@ -14,13 +14,13 @@ import { logger } from '@auriclabs/logger';
14
14
 
15
15
  import { createEventListener } from './create-event-listener';
16
16
 
17
- import type { SQSEvent } from 'aws-lambda';
17
+ import type { SQSEvent, SQSRecordAttributes } from 'aws-lambda';
18
18
 
19
19
  const makeRecord = (body: object, messageId = 'msg-1') => ({
20
20
  messageId,
21
21
  body: JSON.stringify(body),
22
22
  receiptHandle: 'handle',
23
- attributes: {} as any,
23
+ attributes: {} as SQSRecordAttributes,
24
24
  messageAttributes: {},
25
25
  md5OfBody: '',
26
26
  eventSource: 'aws:sqs',
@@ -31,6 +31,7 @@ const makeRecord = (body: object, messageId = 'msg-1') => ({
31
31
  const makeEvent = (overrides = {}) => ({
32
32
  eventType: 'OrderCreated',
33
33
  eventId: 'evt-1',
34
+ tenantId: 'tenant-1',
34
35
  aggregateType: 'order',
35
36
  aggregateId: 'o-1',
36
37
  correlationId: 'corr-1',
@@ -186,7 +187,7 @@ describe('createEventListener', () => {
186
187
  await handler(sqsEvent);
187
188
 
188
189
  expect(logger.debug).toHaveBeenCalledWith(
189
- { event: expect.objectContaining({ eventType: 'OrderCreated' }) },
190
+ { event: expect.objectContaining({ eventType: 'OrderCreated' }) as unknown },
190
191
  'Processing event',
191
192
  );
192
193
  });
@@ -219,7 +220,7 @@ describe('createEventListener', () => {
219
220
  await handler(sqsEvent);
220
221
 
221
222
  expect(logger.error).toHaveBeenCalledWith(
222
- expect.objectContaining({ error, event: expect.any(Object) }),
223
+ expect.objectContaining({ error, event: expect.any(Object) as unknown }),
223
224
  'Error processing event',
224
225
  );
225
226
  });
@@ -50,6 +50,7 @@ describe('dispatchEvent', () => {
50
50
  aggregateId: 'o-1',
51
51
  source: 'test',
52
52
  eventType: 'OrderCreated',
53
+ tenantId: 'tenant-1',
53
54
  });
54
55
 
55
56
  expect(mockAppendEvent).toHaveBeenCalledWith(
@@ -65,6 +66,7 @@ describe('dispatchEvent', () => {
65
66
  aggregateId: 'o-1',
66
67
  source: 'test',
67
68
  eventType: 'OrderCreated',
69
+ tenantId: 'tenant-1',
68
70
  eventId: 'custom-event-id',
69
71
  });
70
72
 
@@ -81,6 +83,7 @@ describe('dispatchEvent', () => {
81
83
  aggregateId: 'o-1',
82
84
  source: 'test',
83
85
  eventType: 'OrderCreated',
86
+ tenantId: 'tenant-1',
84
87
  idempotencyKey: 'my-idem-key',
85
88
  });
86
89
 
@@ -97,6 +100,7 @@ describe('dispatchEvent', () => {
97
100
  aggregateId: 'o-1',
98
101
  source: 'test',
99
102
  eventType: 'OrderCreated',
103
+ tenantId: 'tenant-1',
100
104
  });
101
105
 
102
106
  expect(mockAppendEvent).toHaveBeenCalledWith(
@@ -114,6 +118,7 @@ describe('dispatchEvent', () => {
114
118
  aggregateId: 'o-1',
115
119
  source: 'test',
116
120
  eventType: 'OrderUpdated',
121
+ tenantId: 'tenant-1',
117
122
  });
118
123
 
119
124
  expect(mockGetHead).toHaveBeenCalledWith('order', 'o-1');
@@ -132,6 +137,7 @@ describe('dispatchEvent', () => {
132
137
  aggregateId: 'o-1',
133
138
  source: 'test',
134
139
  eventType: 'OrderCreated',
140
+ tenantId: 'tenant-1',
135
141
  });
136
142
 
137
143
  expect(mockAppendEvent).toHaveBeenCalledWith(
@@ -152,6 +158,7 @@ describe('dispatchEvent', () => {
152
158
  aggregateId: 'o-1',
153
159
  source: 'test',
154
160
  eventType: 'OrderCreated',
161
+ tenantId: 'tenant-1',
155
162
  });
156
163
 
157
164
  expect(mockAppendEvent).toHaveBeenCalledWith(
@@ -173,6 +180,7 @@ describe('dispatchEvent', () => {
173
180
  aggregateId: 'o-1',
174
181
  source: 'test',
175
182
  eventType: 'OrderCreated',
183
+ tenantId: 'tenant-1',
176
184
  correlationId: 'event-corr',
177
185
  });
178
186
 
@@ -190,6 +198,7 @@ describe('dispatchEvent', () => {
190
198
  aggregateId: 'o-1',
191
199
  source: 'test',
192
200
  eventType: 'OrderCreated',
201
+ tenantId: 'tenant-1',
193
202
  });
194
203
 
195
204
  expect(retry).toHaveBeenCalledTimes(1);
@@ -202,6 +211,7 @@ describe('dispatchEvent', () => {
202
211
  aggregateId: 'o-1',
203
212
  source: 'test',
204
213
  eventType: 'OrderCreated',
214
+ tenantId: 'tenant-1',
205
215
  });
206
216
 
207
217
  expect(mockAppendEvent).toHaveBeenCalledWith(
@@ -220,6 +230,7 @@ describe('dispatchEvent', () => {
220
230
  aggregateId: 'o-1',
221
231
  source: 'test',
222
232
  eventType: 'OrderCreated',
233
+ tenantId: 'tenant-1',
223
234
  });
224
235
 
225
236
  expect(result).toEqual(expected);
@@ -30,10 +30,10 @@ describe('dispatchEvents', () => {
30
30
  });
31
31
 
32
32
  it('dispatches events sequentially with inOrder: true', async () => {
33
- const callOrder: number[] = [];
34
- mockDispatchEvent.mockImplementation(async (event) => {
33
+ const callOrder: string[] = [];
34
+ mockDispatchEvent.mockImplementation((event: { aggregateId: string }) => {
35
35
  callOrder.push(event.aggregateId);
36
- return { pk: 'pk', sk: 'sk', version: 1 };
36
+ return Promise.resolve({ pk: 'pk', sk: 'sk', version: 1 });
37
37
  });
38
38
 
39
39
  const events = [
@@ -27,6 +27,41 @@ import { TransactWriteCommand, GetCommand, QueryCommand } from '@aws-sdk/lib-dyn
27
27
 
28
28
  import { createEventService } from './event-service';
29
29
 
30
+ interface TransactItem {
31
+ TableName?: string;
32
+ Key?: Record<string, string>;
33
+ Item?: {
34
+ pk?: string;
35
+ sk?: string;
36
+ itemType?: string;
37
+ eventType?: string;
38
+ payload?: unknown;
39
+ version?: number;
40
+ source?: string;
41
+ schemaVersion?: number;
42
+ occurredAt?: string;
43
+ correlationId?: string;
44
+ causationId?: string;
45
+ actorId?: string;
46
+ [key: string]: unknown;
47
+ };
48
+ [key: string]: unknown;
49
+ }
50
+
51
+ interface TransactWriteInput {
52
+ TransactItems?: {
53
+ Update?: TransactItem;
54
+ Put?: TransactItem;
55
+ [key: string]: unknown;
56
+ }[];
57
+ }
58
+
59
+ interface QueryInput {
60
+ ExpressionAttributeValues?: Record<string, unknown>;
61
+ Limit?: number;
62
+ [key: string]: unknown;
63
+ }
64
+
30
65
  describe('event-service', () => {
31
66
  const TABLE_NAME = 'test-events';
32
67
 
@@ -53,6 +88,7 @@ describe('event-service', () => {
53
88
  mockSend.mockResolvedValue({});
54
89
 
55
90
  await service.appendEvent({
91
+ tenantId: 'tenant-1',
56
92
  aggregateType: 'order',
57
93
  aggregateId: 'order-123',
58
94
  source: 'order-service',
@@ -64,24 +100,24 @@ describe('event-service', () => {
64
100
  });
65
101
 
66
102
  expect(TransactWriteCommand).toHaveBeenCalledTimes(1);
67
- const cmdInput = vi.mocked(TransactWriteCommand).mock.calls[0][0];
103
+ const cmdInput = vi.mocked(TransactWriteCommand).mock.calls[0][0] as TransactWriteInput;
68
104
 
69
105
  // Check Update item (HEAD)
70
- const updateItem = cmdInput.TransactItems![0].Update!;
71
- expect(updateItem.TableName).toBe(TABLE_NAME);
72
- expect(updateItem.Key).toEqual({ pk: 'AGG#order#order-123', sk: 'HEAD' });
106
+ const updateItem = cmdInput.TransactItems?.[0]?.Update;
107
+ expect(updateItem?.TableName).toBe(TABLE_NAME);
108
+ expect(updateItem?.Key).toEqual({ pk: 'AGG#order#order-123', sk: 'HEAD' });
73
109
 
74
110
  // Check Put item (event)
75
- const putItem = cmdInput.TransactItems![1].Put!;
76
- expect(putItem.TableName).toBe(TABLE_NAME);
77
- expect(putItem.Item!.pk).toBe('AGG#order#order-123');
78
- expect(putItem.Item!.sk).toBe('EVT#000000001');
79
- expect(putItem.Item!.itemType).toBe('event');
80
- expect(putItem.Item!.eventType).toBe('OrderCreated');
81
- expect(putItem.Item!.payload).toEqual({ total: 100 });
82
- expect(putItem.Item!.version).toBe(1);
83
- expect(putItem.Item!.source).toBe('order-service');
84
- expect(putItem.Item!.schemaVersion).toBe(1);
111
+ const putItem = cmdInput.TransactItems?.[1]?.Put;
112
+ expect(putItem?.TableName).toBe(TABLE_NAME);
113
+ expect(putItem?.Item?.pk).toBe('AGG#order#order-123');
114
+ expect(putItem?.Item?.sk).toBe('EVT#000000001');
115
+ expect(putItem?.Item?.itemType).toBe('event');
116
+ expect(putItem?.Item?.eventType).toBe('OrderCreated');
117
+ expect(putItem?.Item?.payload).toEqual({ total: 100 });
118
+ expect(putItem?.Item?.version).toBe(1);
119
+ expect(putItem?.Item?.source).toBe('order-service');
120
+ expect(putItem?.Item?.schemaVersion).toBe(1);
85
121
  });
86
122
 
87
123
  it('pads version number to 9 digits', async () => {
@@ -89,6 +125,7 @@ describe('event-service', () => {
89
125
  mockSend.mockResolvedValue({});
90
126
 
91
127
  await service.appendEvent({
128
+ tenantId: 'tenant-1',
92
129
  aggregateType: 'order',
93
130
  aggregateId: '1',
94
131
  source: 'test',
@@ -98,10 +135,10 @@ describe('event-service', () => {
98
135
  eventType: 'T',
99
136
  });
100
137
 
101
- const cmdInput = vi.mocked(TransactWriteCommand).mock.calls[0][0];
102
- const putItem = cmdInput.TransactItems![1].Put!;
103
- expect(putItem.Item!.sk).toBe('EVT#000000042');
104
- expect(putItem.Item!.version).toBe(42);
138
+ const cmdInput = vi.mocked(TransactWriteCommand).mock.calls[0][0] as TransactWriteInput;
139
+ const putItem = cmdInput.TransactItems?.[1]?.Put;
140
+ expect(putItem?.Item?.sk).toBe('EVT#000000042');
141
+ expect(putItem?.Item?.version).toBe(42);
105
142
  });
106
143
 
107
144
  it('returns pk, sk, and version', async () => {
@@ -109,6 +146,7 @@ describe('event-service', () => {
109
146
  mockSend.mockResolvedValue({});
110
147
 
111
148
  const result = await service.appendEvent({
149
+ tenantId: 'tenant-1',
112
150
  aggregateType: 'wallet',
113
151
  aggregateId: 'w-1',
114
152
  source: 'billing',
@@ -130,6 +168,7 @@ describe('event-service', () => {
130
168
  mockSend.mockResolvedValue({});
131
169
 
132
170
  await service.appendEvent({
171
+ tenantId: 'tenant-1',
133
172
  aggregateType: 'order',
134
173
  aggregateId: '1',
135
174
  source: 'test',
@@ -140,9 +179,9 @@ describe('event-service', () => {
140
179
  occurredAt: '2025-01-01T00:00:00.000Z',
141
180
  });
142
181
 
143
- const cmdInput = vi.mocked(TransactWriteCommand).mock.calls[0][0];
144
- const putItem = cmdInput.TransactItems![1].Put!;
145
- expect(putItem.Item!.occurredAt).toBe('2025-01-01T00:00:00.000Z');
182
+ const cmdInput = vi.mocked(TransactWriteCommand).mock.calls[0][0] as TransactWriteInput;
183
+ const putItem = cmdInput.TransactItems?.[1]?.Put;
184
+ expect(putItem?.Item?.occurredAt).toBe('2025-01-01T00:00:00.000Z');
146
185
  });
147
186
 
148
187
  it('includes optional metadata fields', async () => {
@@ -150,6 +189,7 @@ describe('event-service', () => {
150
189
  mockSend.mockResolvedValue({});
151
190
 
152
191
  await service.appendEvent({
192
+ tenantId: 'tenant-1',
153
193
  aggregateType: 'order',
154
194
  aggregateId: '1',
155
195
  source: 'test',
@@ -163,12 +203,12 @@ describe('event-service', () => {
163
203
  schemaVersion: 2,
164
204
  });
165
205
 
166
- const cmdInput = vi.mocked(TransactWriteCommand).mock.calls[0][0];
167
- const putItem = cmdInput.TransactItems![1].Put!;
168
- expect(putItem.Item!.correlationId).toBe('corr-1');
169
- expect(putItem.Item!.causationId).toBe('cause-1');
170
- expect(putItem.Item!.actorId).toBe('user-1');
171
- expect(putItem.Item!.schemaVersion).toBe(2);
206
+ const cmdInput = vi.mocked(TransactWriteCommand).mock.calls[0][0] as TransactWriteInput;
207
+ const putItem = cmdInput.TransactItems?.[1]?.Put;
208
+ expect(putItem?.Item?.correlationId).toBe('corr-1');
209
+ expect(putItem?.Item?.causationId).toBe('cause-1');
210
+ expect(putItem?.Item?.actorId).toBe('user-1');
211
+ expect(putItem?.Item?.schemaVersion).toBe(2);
172
212
  });
173
213
 
174
214
  it('throws OCC error on ConditionalCheckFailedException', async () => {
@@ -178,6 +218,7 @@ describe('event-service', () => {
178
218
 
179
219
  await expect(
180
220
  service.appendEvent({
221
+ tenantId: 'tenant-1',
181
222
  aggregateType: 'order',
182
223
  aggregateId: 'o-1',
183
224
  source: 'test',
@@ -195,6 +236,7 @@ describe('event-service', () => {
195
236
 
196
237
  await expect(
197
238
  service.appendEvent({
239
+ tenantId: 'tenant-1',
198
240
  aggregateType: 'order',
199
241
  aggregateId: 'o-1',
200
242
  source: 'test',
@@ -285,9 +327,9 @@ describe('event-service', () => {
285
327
  fromVersionExclusive: 5,
286
328
  });
287
329
 
288
- const cmdInput = vi.mocked(QueryCommand).mock.calls[0][0];
330
+ const cmdInput = vi.mocked(QueryCommand).mock.calls[0][0] as QueryInput;
289
331
  // fromVersionExclusive=5 means start from version 6
290
- expect(cmdInput.ExpressionAttributeValues![':from']).toBe('EVT#000000006');
332
+ expect(cmdInput.ExpressionAttributeValues?.[':from']).toBe('EVT#000000006');
291
333
  });
292
334
 
293
335
  it('respects toVersionInclusive', async () => {
@@ -300,8 +342,8 @@ describe('event-service', () => {
300
342
  toVersionInclusive: 10,
301
343
  });
302
344
 
303
- const cmdInput = vi.mocked(QueryCommand).mock.calls[0][0];
304
- expect(cmdInput.ExpressionAttributeValues![':to']).toBe('EVT#000000010');
345
+ const cmdInput = vi.mocked(QueryCommand).mock.calls[0][0] as QueryInput;
346
+ expect(cmdInput.ExpressionAttributeValues?.[':to']).toBe('EVT#000000010');
305
347
  });
306
348
 
307
349
  it('respects limit parameter', async () => {
@@ -314,7 +356,7 @@ describe('event-service', () => {
314
356
  limit: 25,
315
357
  });
316
358
 
317
- const cmdInput = vi.mocked(QueryCommand).mock.calls[0][0];
359
+ const cmdInput = vi.mocked(QueryCommand).mock.calls[0][0] as QueryInput;
318
360
  expect(cmdInput.Limit).toBe(25);
319
361
  });
320
362
 
@@ -29,6 +29,7 @@ const pkFor = (aggregateType: string, aggregateId: string): AggregatePK =>
29
29
  `AGG#${aggregateType}#${aggregateId}`;
30
30
 
31
31
  export interface AppendArgs<P = unknown> {
32
+ tenantId: string;
32
33
  aggregateType: string;
33
34
  aggregateId: string;
34
35
  source: string;
@@ -79,6 +80,7 @@ export function createEventService(tableName: string): EventService {
79
80
  return {
80
81
  async appendEvent<P = unknown>(args: AppendArgs<P>): Promise<AppendEventResult> {
81
82
  const {
83
+ tenantId,
82
84
  aggregateType,
83
85
  aggregateId,
84
86
  expectedVersion,
@@ -138,6 +140,8 @@ export function createEventService(tableName: string): EventService {
138
140
  aggregateType: aggregateType as AggregateType,
139
141
  version: nextVersion,
140
142
 
143
+ tenantId,
144
+
141
145
  eventId: eventId as EventId,
142
146
  eventType: eventType,
143
147
  schemaVersion: schemaVersion ?? 1,
package/src/init.test.ts CHANGED
@@ -19,6 +19,8 @@ describe('init', () => {
19
19
  // Re-import to get fresh module state
20
20
  // Since the module state persists, we need to test the throw case first
21
21
  // Actually with vi.mock the module is already imported. We need to use dynamic import.
22
+ // See 'init (fresh module)' describe block below for the actual test
23
+ expect(true).toBe(true);
22
24
  });
23
25
  });
24
26
 
@@ -0,0 +1,6 @@
1
+ import { createStreamHandler } from './stream-handler';
2
+
3
+ export const handler = createStreamHandler({
4
+ busName: process.env.EVENT_BUS_NAME,
5
+ queueUrls: JSON.parse(process.env.QUEUE_URL_LIST ?? '[]') as string[],
6
+ });