omgkit 2.0.7 → 2.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. package/package.json +2 -2
  2. package/plugin/skills/backend/api-architecture/SKILL.md +857 -0
  3. package/plugin/skills/backend/caching-strategies/SKILL.md +755 -0
  4. package/plugin/skills/backend/event-driven-architecture/SKILL.md +753 -0
  5. package/plugin/skills/backend/real-time-systems/SKILL.md +635 -0
  6. package/plugin/skills/databases/database-optimization/SKILL.md +571 -0
  7. package/plugin/skills/databases/postgresql/SKILL.md +494 -18
  8. package/plugin/skills/devops/docker/SKILL.md +466 -18
  9. package/plugin/skills/devops/monorepo-management/SKILL.md +595 -0
  10. package/plugin/skills/devops/observability/SKILL.md +622 -0
  11. package/plugin/skills/devops/performance-profiling/SKILL.md +905 -0
  12. package/plugin/skills/frameworks/nextjs/SKILL.md +407 -44
  13. package/plugin/skills/frameworks/react/SKILL.md +1006 -32
  14. package/plugin/skills/frontend/advanced-ui-design/SKILL.md +426 -0
  15. package/plugin/skills/integrations/ai-integration/SKILL.md +730 -0
  16. package/plugin/skills/integrations/payment-integration/SKILL.md +735 -0
  17. package/plugin/skills/languages/python/SKILL.md +489 -25
  18. package/plugin/skills/languages/typescript/SKILL.md +379 -30
  19. package/plugin/skills/methodology/problem-solving/SKILL.md +355 -0
  20. package/plugin/skills/methodology/research-validation/SKILL.md +668 -0
  21. package/plugin/skills/methodology/sequential-thinking/SKILL.md +260 -0
  22. package/plugin/skills/mobile/mobile-development/SKILL.md +756 -0
  23. package/plugin/skills/security/security-hardening/SKILL.md +633 -0
  24. package/plugin/skills/tools/document-processing/SKILL.md +916 -0
  25. package/plugin/skills/tools/image-processing/SKILL.md +748 -0
  26. package/plugin/skills/tools/mcp-development/SKILL.md +883 -0
  27. package/plugin/skills/tools/media-processing/SKILL.md +831 -0
@@ -0,0 +1,753 @@
---
name: event-driven-architecture
description: Event sourcing, CQRS, and message queue patterns with RabbitMQ and Kafka for distributed systems
category: backend
triggers:
- event driven
- event sourcing
- cqrs
- message queue
- rabbitmq
- kafka
- saga pattern
---

# Event-Driven Architecture

Implement **event-driven systems** with event sourcing, CQRS, and message queues. This skill covers distributed patterns for scalable, resilient applications.

## Purpose

Build loosely coupled, scalable systems:

- Implement event sourcing for audit trails
- Apply CQRS for read/write optimization
- Use message queues for async processing
- Handle distributed transactions with sagas
- Ensure eventual consistency
- Build replay and recovery capabilities

## Features

### 1. Event Sourcing

```typescript
// Assumes a Prisma-style client (db), an event bus (eventBus), and a uuid() helper in scope

// Event definitions
interface DomainEvent {
  eventId: string;
  eventType: string;
  aggregateId: string;
  aggregateType: string;
  timestamp: Date;
  version: number;
  data: Record<string, any>;
  metadata: {
    userId?: string;
    correlationId?: string;
    causationId?: string;
  };
}

// Order aggregate events
type OrderEvent =
  | { type: 'OrderCreated'; data: { customerId: string; items: OrderItem[] } }
  | { type: 'OrderItemAdded'; data: { item: OrderItem } }
  | { type: 'OrderItemRemoved'; data: { itemId: string } }
  | { type: 'OrderSubmitted'; data: { submittedAt: Date } }
  | { type: 'PaymentReceived'; data: { paymentId: string; amount: number } }
  | { type: 'OrderShipped'; data: { trackingNumber: string; carrier: string } }
  | { type: 'OrderDelivered'; data: { deliveredAt: Date } }
  | { type: 'OrderCancelled'; data: { reason: string } };

// Event store
class EventStore {
  async append(
    aggregateId: string,
    events: DomainEvent[],
    expectedVersion: number
  ): Promise<void> {
    // Optimistic concurrency check
    const currentVersion = await this.getVersion(aggregateId);

    if (currentVersion !== expectedVersion) {
      throw new ConcurrencyError(
        `Expected version ${expectedVersion}, but found ${currentVersion}`
      );
    }

    // Append events atomically
    await db.$transaction(async (tx) => {
      for (let i = 0; i < events.length; i++) {
        await tx.event.create({
          data: {
            ...events[i],
            version: expectedVersion + i + 1,
          },
        });
      }
    });

    // Publish to event bus
    for (const event of events) {
      await eventBus.publish(event);
    }
  }

  async getEvents(
    aggregateId: string,
    fromVersion?: number
  ): Promise<DomainEvent[]> {
    return db.event.findMany({
      where: {
        aggregateId,
        version: fromVersion ? { gt: fromVersion } : undefined,
      },
      orderBy: { version: 'asc' },
    });
  }

  async getVersion(aggregateId: string): Promise<number> {
    const lastEvent = await db.event.findFirst({
      where: { aggregateId },
      orderBy: { version: 'desc' },
    });

    return lastEvent?.version ?? 0;
  }
}

// Aggregate with event sourcing
class OrderAggregate {
  private id: string;
  private state: OrderState;
  private version: number = 0;
  private uncommittedEvents: OrderEvent[] = [];

  constructor(id: string) {
    this.id = id;
  }

  static async load(eventStore: EventStore, id: string): Promise<OrderAggregate> {
    const aggregate = new OrderAggregate(id);
    const events = await eventStore.getEvents(id);

    for (const event of events) {
      // Stored events carry eventType/data; map them back to the domain event shape
      aggregate.apply({ type: event.eventType, data: event.data } as OrderEvent, false);
    }

    return aggregate;
  }

  // Command handlers
  create(customerId: string, items: OrderItem[]): void {
    if (this.state) {
      throw new Error('Order already exists');
    }

    this.applyChange({
      type: 'OrderCreated',
      data: { customerId, items },
    });
  }

  addItem(item: OrderItem): void {
    this.ensureState(['draft']);

    this.applyChange({
      type: 'OrderItemAdded',
      data: { item },
    });
  }

  submit(): void {
    this.ensureState(['draft']);

    if (this.state.items.length === 0) {
      throw new Error('Cannot submit empty order');
    }

    this.applyChange({
      type: 'OrderSubmitted',
      data: { submittedAt: new Date() },
    });
  }

  // Guard: the command is only valid in one of the allowed states
  private ensureState(allowed: string[]): void {
    if (!this.state || !allowed.includes(this.state.status)) {
      throw new Error(`Operation not allowed in state: ${this.state?.status ?? 'none'}`);
    }
  }

  // Event application
  private apply(event: OrderEvent, isNew: boolean): void {
    switch (event.type) {
      case 'OrderCreated':
        this.state = {
          status: 'draft',
          customerId: event.data.customerId,
          items: event.data.items,
          total: this.calculateTotal(event.data.items),
        };
        break;

      case 'OrderItemAdded':
        this.state.items.push(event.data.item);
        this.state.total = this.calculateTotal(this.state.items);
        break;

      case 'OrderSubmitted':
        this.state.status = 'submitted';
        this.state.submittedAt = event.data.submittedAt;
        break;

      // ... other event handlers
    }

    this.version++;

    if (isNew) {
      this.uncommittedEvents.push(event);
    }
  }

  private applyChange(event: OrderEvent): void {
    this.apply(event, true);
  }

  async save(eventStore: EventStore): Promise<void> {
    const domainEvents = this.uncommittedEvents.map((e, i) => ({
      eventId: uuid(),
      eventType: e.type,
      aggregateId: this.id,
      aggregateType: 'Order',
      timestamp: new Date(),
      version: this.version - this.uncommittedEvents.length + i + 1,
      data: e.data,
      metadata: {},
    }));

    await eventStore.append(
      this.id,
      domainEvents,
      this.version - this.uncommittedEvents.length
    );

    this.uncommittedEvents = [];
  }
}
```

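A typical command flow with the pieces above, as a sketch (`orderId` is a placeholder): load the aggregate from its event stream, invoke a command method, and persist the new events.

```typescript
// Load, decide, persist: the standard event-sourced write path
const eventStore = new EventStore();

const order = await OrderAggregate.load(eventStore, orderId);
order.submit();               // raises OrderSubmitted if the order is in 'draft'
await order.save(eventStore); // appends with optimistic concurrency, then publishes
```

If another writer saved in between, `append` throws a `ConcurrencyError` and the command can be retried against the reloaded stream.
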
### 2. CQRS Pattern

```typescript
// Command side (writes)
interface Command {
  type: string;
  payload: any;
  metadata: {
    userId: string;
    timestamp: Date;
    correlationId: string;
  };
}

class CommandBus {
  private handlers = new Map<string, CommandHandler>();

  register(commandType: string, handler: CommandHandler): void {
    this.handlers.set(commandType, handler);
  }

  async dispatch(command: Command): Promise<void> {
    const handler = this.handlers.get(command.type);

    if (!handler) {
      throw new Error(`No handler for command: ${command.type}`);
    }

    await handler.handle(command);
  }
}

// Command handler
class CreateOrderHandler implements CommandHandler {
  constructor(
    private eventStore: EventStore,
    private orderRepository: OrderRepository
  ) {}

  async handle(command: CreateOrderCommand): Promise<void> {
    const order = new OrderAggregate(uuid());
    order.create(command.payload.customerId, command.payload.items);
    await order.save(this.eventStore);
  }
}

// Query side (reads)
interface Query {
  type: string;
  params: any;
}

class QueryBus {
  private handlers = new Map<string, QueryHandler>();

  register(queryType: string, handler: QueryHandler): void {
    this.handlers.set(queryType, handler);
  }

  async execute<T>(query: Query): Promise<T> {
    const handler = this.handlers.get(query.type);

    if (!handler) {
      throw new Error(`No handler for query: ${query.type}`);
    }

    return handler.handle(query);
  }
}

// Read model projection
class OrderReadModel {
  async project(event: DomainEvent): Promise<void> {
    switch (event.eventType) {
      case 'OrderCreated':
        await db.orderView.create({
          data: {
            id: event.aggregateId,
            customerId: event.data.customerId,
            status: 'draft',
            itemCount: event.data.items.length,
            total: event.data.total, // assumes the event payload carries a precomputed total
            createdAt: event.timestamp,
          },
        });
        break;

      case 'OrderSubmitted':
        await db.orderView.update({
          where: { id: event.aggregateId },
          data: {
            status: 'submitted',
            submittedAt: event.data.submittedAt,
          },
        });
        break;

      case 'OrderShipped':
        await db.orderView.update({
          where: { id: event.aggregateId },
          data: {
            status: 'shipped',
            trackingNumber: event.data.trackingNumber,
          },
        });
        break;
    }
  }

  // Rebuild projection from events
  async rebuild(): Promise<void> {
    // Clear existing read model
    await db.orderView.deleteMany();

    // Replay all events (assumes an EventStore.getAllEvents() helper returning events in global order)
    const events = await eventStore.getAllEvents();

    for (const event of events) {
      await this.project(event);
    }
  }
}
```

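How the buses might be wired at startup, as a sketch; the `'CreateOrder'` command name and the `eventStore`, `orderRepository`, and `cartItems` instances are illustrative:

```typescript
// Register command handlers once at startup, then dispatch commands by type
const commandBus = new CommandBus();
commandBus.register('CreateOrder', new CreateOrderHandler(eventStore, orderRepository));

await commandBus.dispatch({
  type: 'CreateOrder',
  payload: { customerId: 'cust-123', items: cartItems },
  metadata: { userId: 'user-1', timestamp: new Date(), correlationId: uuid() },
});
```

The read side is fed asynchronously by `OrderReadModel.project`, so queries may briefly lag behind writes (eventual consistency).
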
### 3. Message Queues with RabbitMQ

```typescript
import amqp from 'amqplib';

class RabbitMQBroker {
  private connection: amqp.Connection;
  private channel: amqp.Channel;

  async connect(): Promise<void> {
    this.connection = await amqp.connect(process.env.RABBITMQ_URL!);
    this.channel = await this.connection.createChannel();

    // Setup exchanges
    await this.channel.assertExchange('events', 'topic', { durable: true });
    await this.channel.assertExchange('commands', 'direct', { durable: true });
    await this.channel.assertExchange('dlx', 'fanout', { durable: true });
  }

  async publish(exchange: string, routingKey: string, message: any): Promise<void> {
    const content = Buffer.from(JSON.stringify(message));

    this.channel.publish(exchange, routingKey, content, {
      persistent: true,
      contentType: 'application/json',
      messageId: uuid(),
      timestamp: Date.now(),
    });
  }

  async subscribe(
    queue: string,
    exchange: string,
    routingKey: string,
    handler: (message: any) => Promise<void>
  ): Promise<void> {
    // Setup queue with dead letter exchange
    await this.channel.assertQueue(queue, {
      durable: true,
      deadLetterExchange: 'dlx',
      deadLetterRoutingKey: `${queue}.dlq`,
    });

    await this.channel.bindQueue(queue, exchange, routingKey);

    // Consume messages
    await this.channel.consume(queue, async (msg) => {
      if (!msg) return;

      try {
        const content = JSON.parse(msg.content.toString());
        await handler(content);
        this.channel.ack(msg);
      } catch (error) {
        console.error('Message processing failed:', error);

        // Retry or dead-letter
        const retryCount = (msg.properties.headers?.['x-retry-count'] || 0) + 1;

        if (retryCount < 3) {
          // Retry with exponential backoff
          setTimeout(() => {
            this.channel.publish(exchange, routingKey, msg.content, {
              ...msg.properties,
              headers: {
                ...msg.properties.headers,
                'x-retry-count': retryCount,
              },
            });
            this.channel.ack(msg);
          }, Math.pow(2, retryCount) * 1000);
        } else {
          // Send to dead letter queue
          this.channel.reject(msg, false);
        }
      }
    });
  }
}

// Event publishing
class EventPublisher {
  constructor(private broker: RabbitMQBroker) {}

  async publish(event: DomainEvent): Promise<void> {
    const routingKey = `${event.aggregateType}.${event.eventType}`;
    await this.broker.publish('events', routingKey, event);
  }
}

// Event consumer
class OrderEventConsumer {
  constructor(
    private broker: RabbitMQBroker,
    private readModel: OrderReadModel
  ) {}

  async start(): Promise<void> {
    await this.broker.subscribe(
      'order-projector',
      'events',
      'Order.*',
      async (event) => {
        await this.readModel.project(event);
      }
    );
  }
}
```

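Putting the broker, projector, and publisher together, as a sketch; `domainEvent` stands in for an event produced by the event store, and error handling is omitted:

```typescript
// Connect once, start projecting Order.* events, and publish domain events as they are appended
const broker = new RabbitMQBroker();
await broker.connect();

const consumer = new OrderEventConsumer(broker, new OrderReadModel());
await consumer.start();

const publisher = new EventPublisher(broker);
await publisher.publish(domainEvent); // routed as 'Order.OrderCreated', 'Order.OrderShipped', ...
```
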
### 4. Saga Pattern for Distributed Transactions

```typescript
// Loosely-typed context shared across saga steps
type SagaContext = Record<string, any>;

// Saga orchestrator
interface SagaStep {
  name: string;
  execute: (context: SagaContext) => Promise<void>;
  compensate: (context: SagaContext) => Promise<void>;
}

class SagaOrchestrator {
  private steps: SagaStep[] = [];
  private executedSteps: SagaStep[] = [];

  addStep(step: SagaStep): this {
    this.steps.push(step);
    return this;
  }

  async execute(context: SagaContext): Promise<void> {
    try {
      for (const step of this.steps) {
        console.log(`Executing step: ${step.name}`);
        await step.execute(context);
        this.executedSteps.push(step);
      }
    } catch (error) {
      console.error('Saga failed, compensating...', error);
      await this.compensate(context);
      throw error;
    }
  }

  private async compensate(context: SagaContext): Promise<void> {
    // Execute compensation in reverse order (copy first to avoid mutating executedSteps)
    for (const step of [...this.executedSteps].reverse()) {
      try {
        console.log(`Compensating step: ${step.name}`);
        await step.compensate(context);
      } catch (error) {
        console.error(`Compensation failed for ${step.name}:`, error);
        // Log for manual intervention
        await this.logCompensationFailure(step, context, error);
      }
    }
  }

  private async logCompensationFailure(
    step: SagaStep,
    context: SagaContext,
    error: unknown
  ): Promise<void> {
    // Persist the failure somewhere durable so an operator can follow up
    console.error('Compensation failure recorded:', { step: step.name, context, error });
  }
}

// Order saga example
const createOrderSaga = new SagaOrchestrator()
  .addStep({
    name: 'Reserve Inventory',
    execute: async (ctx) => {
      const reservation = await inventoryService.reserve(ctx.items);
      ctx.reservationId = reservation.id;
    },
    compensate: async (ctx) => {
      if (ctx.reservationId) {
        await inventoryService.releaseReservation(ctx.reservationId);
      }
    },
  })
  .addStep({
    name: 'Process Payment',
    execute: async (ctx) => {
      const payment = await paymentService.charge(ctx.customerId, ctx.total);
      ctx.paymentId = payment.id;
    },
    compensate: async (ctx) => {
      if (ctx.paymentId) {
        await paymentService.refund(ctx.paymentId);
      }
    },
  })
  .addStep({
    name: 'Create Order',
    execute: async (ctx) => {
      const order = await orderService.create({
        customerId: ctx.customerId,
        items: ctx.items,
        paymentId: ctx.paymentId,
        reservationId: ctx.reservationId,
      });
      ctx.orderId = order.id;
    },
    compensate: async (ctx) => {
      if (ctx.orderId) {
        await orderService.cancel(ctx.orderId);
      }
    },
  })
  .addStep({
    name: 'Send Confirmation',
    execute: async (ctx) => {
      await notificationService.sendOrderConfirmation(ctx.orderId);
    },
    compensate: async (ctx) => {
      // No compensation needed for notifications
    },
  });

// Execute saga
async function handleCreateOrder(command: CreateOrderCommand): Promise<void> {
  const context: SagaContext = {
    customerId: command.customerId,
    items: command.items,
    total: calculateTotal(command.items),
  };

  await createOrderSaga.execute(context);
}
```

### 5. Kafka Streaming

```typescript
import { Kafka, Producer, Consumer, EachMessagePayload } from 'kafkajs';

// Application-level outbound message shape (not kafkajs's consumed-message type)
interface KafkaMessage {
  key?: string;
  value: unknown;
  headers?: Record<string, string>;
  partition?: number;
}

class KafkaService {
  private kafka: Kafka;
  private producer: Producer;
  private consumers: Map<string, Consumer> = new Map();

  constructor() {
    this.kafka = new Kafka({
      clientId: process.env.SERVICE_NAME,
      brokers: (process.env.KAFKA_BROKERS || '').split(','),
    });
  }

  async connect(): Promise<void> {
    this.producer = this.kafka.producer({
      idempotent: true,
      maxInFlightRequests: 1, // keep a single in-flight request so idempotent delivery preserves ordering
    });

    await this.producer.connect();
  }

  async publish(topic: string, messages: KafkaMessage[]): Promise<void> {
    await this.producer.send({
      topic,
      messages: messages.map(m => ({
        key: m.key,
        value: JSON.stringify(m.value),
        headers: m.headers,
        partition: m.partition,
      })),
    });
  }

  async subscribe(
    groupId: string,
    topics: string[],
    handler: (payload: EachMessagePayload) => Promise<void>
  ): Promise<void> {
    const consumer = this.kafka.consumer({
      groupId,
      sessionTimeout: 30000,
      heartbeatInterval: 3000,
    });

    await consumer.connect();
    await consumer.subscribe({ topics, fromBeginning: false });

    await consumer.run({
      eachMessage: async (payload) => {
        try {
          await handler(payload);
        } catch (error) {
          console.error('Message processing failed:', error);
          // Implement retry/DLQ logic
        }
      },
    });

    this.consumers.set(groupId, consumer);
  }

  async disconnect(): Promise<void> {
    await this.producer.disconnect();
    for (const consumer of this.consumers.values()) {
      await consumer.disconnect();
    }
  }
}

// Stream processing
class OrderStreamProcessor {
  constructor(private kafka: KafkaService) {}

  async start(): Promise<void> {
    await this.kafka.subscribe(
      'order-processor',
      ['order-events'],
      async ({ topic, partition, message }) => {
        const event = JSON.parse(message.value?.toString() || '{}');

        switch (event.type) {
          case 'OrderCreated':
            await this.handleOrderCreated(event);
            break;
          case 'OrderCompleted':
            await this.handleOrderCompleted(event);
            break;
        }
      }
    );
  }

  private async handleOrderCreated(event: any): Promise<void> {
    // Update analytics
    await analyticsService.recordOrder(event.data);

    // Trigger downstream processes
    await this.kafka.publish('inventory-commands', [{
      key: event.aggregateId,
      value: {
        type: 'ReserveInventory',
        orderId: event.aggregateId,
        items: event.data.items,
      },
    }]);
  }

  private async handleOrderCompleted(event: any): Promise<void> {
    // Elided: downstream handling for completed orders
  }
}
```

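The consumer above only logs failures where the comment says "Implement retry/DLQ logic". A minimal dead-letter sketch, assuming a `<topic>.dlq` naming convention and reusing the service's producer:

```typescript
import { Producer, EachMessagePayload } from 'kafkajs';

// On handler failure, forward the raw message to a per-topic dead-letter topic
async function sendToDlq(
  producer: Producer,
  { topic, message }: EachMessagePayload,
  error: Error
): Promise<void> {
  await producer.send({
    topic: `${topic}.dlq`,
    messages: [{
      key: message.key,
      value: message.value,
      headers: {
        ...message.headers,
        'x-error': error.message,
        'x-origin-topic': topic,
      },
    }],
  });
}
```

The catch block in `subscribe` would call this and then continue or rethrow, depending on the delivery guarantee you want for the failed message.
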
## Use Cases

### 1. Order Processing System

```typescript
// Complete order workflow
async function processOrder(orderId: string): Promise<void> {
  const saga = new SagaOrchestrator()
    .addStep(reserveInventoryStep)
    .addStep(processPaymentStep)
    .addStep(createShipmentStep)
    .addStep(sendNotificationStep);

  await saga.execute({ orderId });
}
```

### 2. Real-time Analytics

```typescript
// Stream aggregation (handler receives an EachMessagePayload, per KafkaService.subscribe above)
await kafka.subscribe(
  'analytics-aggregator',
  ['order-events'],
  async ({ message }) => {
    const event = JSON.parse(message.value?.toString() || '{}');
    await updateDailySales(event.data.total);
    await updateProductMetrics(event.data.items);
  }
);
```

## Best Practices

### Do's

- **Design events as facts** - Immutable, past-tense naming
- **Implement idempotent handlers** - Handle duplicates gracefully (see the sketch after this list)
- **Plan for event versioning** - Schema evolution
- **Use dead letter queues** - Handle failures
- **Monitor queue depths** - Alert on backlogs
- **Test with chaos** - Simulate failures

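A minimal idempotency sketch for the projection handler above, assuming a hypothetical `processedEvent` table keyed by `eventId` (any deduplication store would do):

```typescript
// Skip events that were already applied; record the eventId once the projection succeeds
async function projectIdempotently(event: DomainEvent, readModel: OrderReadModel): Promise<void> {
  // db.processedEvent is an assumed dedup table, not part of the schema shown above
  const seen = await db.processedEvent.findUnique({ where: { eventId: event.eventId } });
  if (seen) {
    return; // duplicate delivery, already projected
  }

  await readModel.project(event);
  await db.processedEvent.create({ data: { eventId: event.eventId } });
}
```

In production the dedup insert and the projection write would share one transaction so a crash between them cannot drop or double-apply an event.
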
### Don'ts

- Don't couple services through shared databases
- Don't ignore message ordering requirements (see the partition-key sketch after this list)
- Don't skip compensation logic
- Don't forget about exactly-once semantics
- Don't over-engineer for simple use cases
- Don't ignore backpressure

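Kafka only guarantees ordering within a partition, so one way to preserve per-aggregate ordering with the `KafkaService` above is to key every message by its aggregate ID (the `kafkaService` instance and topic name are illustrative):

```typescript
// All events for the same order share a key, so they land on the same partition in order
await kafkaService.publish('order-events', [{
  key: event.aggregateId,
  value: event,
}]);
```
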
## Related Skills

- **redis** - Pub/sub and caching
- **real-time-systems** - WebSocket integration
- **backend-development** - Service architecture

## Reference Resources

- [Event Sourcing Pattern](https://martinfowler.com/eaaDev/EventSourcing.html)
- [CQRS Pattern](https://martinfowler.com/bliki/CQRS.html)
- [RabbitMQ Documentation](https://www.rabbitmq.com/documentation.html)
- [Apache Kafka Documentation](https://kafka.apache.org/documentation/)