specweave 0.30.12 → 0.30.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (127)
  1. package/.claude-plugin/marketplace.json +0 -11
  2. package/CLAUDE.md +1 -1
  3. package/bin/fix-marketplace-errors.sh +1 -1
  4. package/dist/src/cli/commands/init.d.ts.map +1 -1
  5. package/dist/src/cli/commands/init.js +13 -0
  6. package/dist/src/cli/commands/init.js.map +1 -1
  7. package/dist/src/cli/helpers/ado-area-selector.d.ts.map +1 -1
  8. package/dist/src/cli/helpers/ado-area-selector.js +13 -0
  9. package/dist/src/cli/helpers/ado-area-selector.js.map +1 -1
  10. package/dist/src/cli/helpers/issue-tracker/index.d.ts.map +1 -1
  11. package/dist/src/cli/helpers/issue-tracker/index.js +7 -2
  12. package/dist/src/cli/helpers/issue-tracker/index.js.map +1 -1
  13. package/dist/src/cli/helpers/issue-tracker/sync-config-writer.d.ts +7 -0
  14. package/dist/src/cli/helpers/issue-tracker/sync-config-writer.d.ts.map +1 -1
  15. package/dist/src/cli/helpers/issue-tracker/sync-config-writer.js +33 -2
  16. package/dist/src/cli/helpers/issue-tracker/sync-config-writer.js.map +1 -1
  17. package/dist/src/cli/workers/clone-worker.js +19 -3
  18. package/dist/src/cli/workers/clone-worker.js.map +1 -1
  19. package/dist/src/core/living-docs/board-matcher.d.ts +120 -0
  20. package/dist/src/core/living-docs/board-matcher.d.ts.map +1 -0
  21. package/dist/src/core/living-docs/board-matcher.js +466 -0
  22. package/dist/src/core/living-docs/board-matcher.js.map +1 -0
  23. package/dist/src/core/living-docs/foundation-builder.js +1 -1
  24. package/dist/src/core/living-docs/foundation-builder.js.map +1 -1
  25. package/dist/src/core/living-docs/living-docs-sync.d.ts +19 -8
  26. package/dist/src/core/living-docs/living-docs-sync.d.ts.map +1 -1
  27. package/dist/src/core/living-docs/living-docs-sync.js +148 -52
  28. package/dist/src/core/living-docs/living-docs-sync.js.map +1 -1
  29. package/dist/src/core/living-docs/suggestions-generator.js +1 -1
  30. package/dist/src/core/living-docs/suggestions-generator.js.map +1 -1
  31. package/dist/src/core/living-docs/umbrella-detector.d.ts +4 -0
  32. package/dist/src/core/living-docs/umbrella-detector.d.ts.map +1 -1
  33. package/dist/src/core/living-docs/umbrella-detector.js +20 -1
  34. package/dist/src/core/living-docs/umbrella-detector.js.map +1 -1
  35. package/dist/src/core/living-docs/workitem-matcher.js +5 -5
  36. package/dist/src/core/living-docs/workitem-matcher.js.map +1 -1
  37. package/dist/src/importers/item-converter.d.ts +4 -0
  38. package/dist/src/importers/item-converter.d.ts.map +1 -1
  39. package/dist/src/importers/item-converter.js +4 -0
  40. package/dist/src/importers/item-converter.js.map +1 -1
  41. package/dist/src/init/repo/types.d.ts +1 -1
  42. package/dist/src/living-docs/enterprise-analyzer.d.ts.map +1 -1
  43. package/dist/src/living-docs/enterprise-analyzer.js +70 -19
  44. package/dist/src/living-docs/enterprise-analyzer.js.map +1 -1
  45. package/dist/src/living-docs/epic-id-allocator.d.ts +4 -0
  46. package/dist/src/living-docs/epic-id-allocator.d.ts.map +1 -1
  47. package/dist/src/living-docs/epic-id-allocator.js +4 -0
  48. package/dist/src/living-docs/epic-id-allocator.js.map +1 -1
  49. package/dist/src/living-docs/fs-id-allocator.d.ts +4 -0
  50. package/dist/src/living-docs/fs-id-allocator.d.ts.map +1 -1
  51. package/dist/src/living-docs/fs-id-allocator.js +4 -0
  52. package/dist/src/living-docs/fs-id-allocator.js.map +1 -1
  53. package/dist/src/living-docs/smart-doc-organizer.d.ts +114 -0
  54. package/dist/src/living-docs/smart-doc-organizer.d.ts.map +1 -0
  55. package/dist/src/living-docs/smart-doc-organizer.js +535 -0
  56. package/dist/src/living-docs/smart-doc-organizer.js.map +1 -0
  57. package/package.json +13 -13
  58. package/plugins/PLUGINS-INDEX.md +2 -3
  59. package/plugins/specweave/commands/specweave-judge.md +265 -0
  60. package/plugins/specweave/commands/specweave-organize-docs.md +185 -0
  61. package/plugins/specweave/hooks/hooks.json +3 -3
  62. package/plugins/specweave/hooks/universal/hook-wrapper.cmd +26 -0
  63. package/plugins/specweave/hooks/universal/hook-wrapper.sh +67 -0
  64. package/plugins/specweave-ado/commands/{specweave-ado-close-workitem.md → close.md} +9 -5
  65. package/plugins/specweave-ado/commands/{specweave-ado-create-workitem.md → create.md} +9 -5
  66. package/plugins/specweave-ado/commands/pull.md +459 -0
  67. package/plugins/specweave-ado/commands/push.md +361 -0
  68. package/plugins/specweave-ado/commands/{specweave-ado-status.md → status.md} +12 -0
  69. package/plugins/specweave-ado/commands/{specweave-ado-sync.md → sync.md} +64 -3
  70. package/plugins/specweave-ado/hooks/README.md +1 -1
  71. package/plugins/specweave-docs/commands/build.md +158 -0
  72. package/plugins/specweave-docs/commands/{docs-generate.md → generate.md} +10 -5
  73. package/plugins/specweave-docs/commands/health.md +268 -0
  74. package/plugins/specweave-docs/commands/{docs-init.md → init.md} +11 -6
  75. package/plugins/specweave-docs/commands/organize.md +184 -0
  76. package/plugins/specweave-docs/commands/preview.md +138 -0
  77. package/plugins/specweave-docs/skills/preview/SKILL.md +105 -0
  78. package/plugins/specweave-github/agents/user-story-updater/AGENT.md +1 -1
  79. package/plugins/specweave-github/commands/{specweave-github-close-issue.md → close.md} +2 -2
  80. package/plugins/specweave-github/commands/{specweave-github-create-issue.md → create.md} +2 -2
  81. package/plugins/specweave-github/commands/pull.md +142 -0
  82. package/plugins/specweave-github/commands/push.md +154 -0
  83. package/plugins/specweave-github/commands/{specweave-github-sync.md → sync.md} +19 -5
  84. package/plugins/specweave-github/commands/{specweave-github-update-user-story.md → update-user-story.md} +1 -1
  85. package/plugins/specweave-github/hooks/README.md +1 -1
  86. package/plugins/specweave-jira/commands/pull.md +164 -0
  87. package/plugins/specweave-jira/commands/push.md +170 -0
  88. package/plugins/specweave-jira/commands/{specweave-jira-sync.md → sync.md} +18 -3
  89. package/plugins/specweave-jira/hooks/README.md +1 -1
  90. package/plugins/specweave-kafka/README.md +20 -0
  91. package/plugins/specweave-kafka/benchmarks/kafka-throughput.benchmark.ts +551 -0
  92. package/plugins/specweave-kafka/examples/README.md +191 -0
  93. package/plugins/specweave-kafka/examples/avro-schema-registry/.env.example +8 -0
  94. package/plugins/specweave-kafka/examples/avro-schema-registry/README.md +69 -0
  95. package/plugins/specweave-kafka/examples/avro-schema-registry/consumer.js +37 -0
  96. package/plugins/specweave-kafka/examples/avro-schema-registry/package.json +14 -0
  97. package/plugins/specweave-kafka/examples/avro-schema-registry/producer.js +57 -0
  98. package/plugins/specweave-kafka/examples/exactly-once-semantics/.env.example +5 -0
  99. package/plugins/specweave-kafka/examples/exactly-once-semantics/README.md +30 -0
  100. package/plugins/specweave-kafka/examples/exactly-once-semantics/eos-pipeline.js +79 -0
  101. package/plugins/specweave-kafka/examples/exactly-once-semantics/package.json +11 -0
  102. package/plugins/specweave-kafka/examples/kafka-streams-app/.env.example +4 -0
  103. package/plugins/specweave-kafka/examples/kafka-streams-app/README.md +30 -0
  104. package/plugins/specweave-kafka/examples/kafka-streams-app/package.json +11 -0
  105. package/plugins/specweave-kafka/examples/kafka-streams-app/windowed-aggregation.js +66 -0
  106. package/plugins/specweave-kafka/examples/n8n-workflow/README.md +54 -0
  107. package/plugins/specweave-kafka/examples/n8n-workflow/docker-compose.yml +19 -0
  108. package/plugins/specweave-kafka/examples/n8n-workflow/kafka-to-slack.json +50 -0
  109. package/plugins/specweave-kafka/examples/simple-producer-consumer/.env.example +15 -0
  110. package/plugins/specweave-kafka/examples/simple-producer-consumer/README.md +183 -0
  111. package/plugins/specweave-kafka/examples/simple-producer-consumer/consumer.js +60 -0
  112. package/plugins/specweave-kafka/examples/simple-producer-consumer/docker-compose.yml +30 -0
  113. package/plugins/specweave-kafka/examples/simple-producer-consumer/package.json +18 -0
  114. package/plugins/specweave-kafka/examples/simple-producer-consumer/producer.js +52 -0
  115. package/plugins/specweave-release/commands/specweave-release-npm.md +26 -239
  116. package/plugins/specweave-docs-preview/.claude-plugin/plugin.json +0 -21
  117. package/plugins/specweave-docs-preview/commands/build.md +0 -489
  118. package/plugins/specweave-docs-preview/commands/preview.md +0 -355
  119. package/plugins/specweave-docs-preview/skills/docs-preview/SKILL.md +0 -386
  120. /package/plugins/specweave-ado/commands/{specweave-ado-clone-repos.md → clone.md} +0 -0
  121. /package/plugins/specweave-ado/commands/{specweave-ado-import-areas.md → import-areas.md} +0 -0
  122. /package/plugins/specweave-ado/commands/{specweave-ado-import-projects.md → import-projects.md} +0 -0
  123. /package/plugins/specweave-github/commands/{specweave-github-cleanup-duplicates.md → cleanup-duplicates.md} +0 -0
  124. /package/plugins/specweave-github/commands/{specweave-github-reconcile.md → reconcile.md} +0 -0
  125. /package/plugins/specweave-github/commands/{specweave-github-status.md → status.md} +0 -0
  126. /package/plugins/specweave-jira/commands/{specweave-jira-import-boards.md → import-boards.md} +0 -0
  127. /package/plugins/specweave-jira/commands/{specweave-jira-import-projects.md → import-projects-full.md} +0 -0
@@ -0,0 +1,191 @@
+ # Kafka Examples
+
+ **Working code examples for SpecWeave Kafka plugins**
+
+ This directory contains 5 complete, runnable examples demonstrating Kafka best practices.
+
+ ## Quick Start
+
+ Each example is self-contained with its own README, dependencies, and Docker Compose file.
+
+ ```bash
+ cd simple-producer-consumer
+ npm install
+ npm start
+ ```
+
+ ## Examples
+
+ ### 1. [Simple Producer-Consumer](./simple-producer-consumer/)
+
+ **Basic Kafka operations**
+
+ - ✅ Connect to Kafka
+ - ✅ Produce JSON messages
+ - ✅ Consume with consumer groups
+ - ✅ Local Kafka cluster (Docker Compose)
+
+ **Difficulty**: Beginner
+ **Time**: 15 minutes
+
+ ### 2. [Avro Schema Registry](./avro-schema-registry/)
+
+ **Schema-based serialization**
+
+ - ✅ Register Avro schemas
+ - ✅ Schema evolution (backward compatible)
+ - ✅ Binary serialization (smaller than JSON)
+ - ✅ Message validation
+
+ **Difficulty**: Intermediate
+ **Time**: 20 minutes
+
+ ### 3. [Exactly-Once Semantics](./exactly-once-semantics/)
+
+ **Zero message duplication or loss**
+
+ - ✅ Transactional producers
+ - ✅ Read-committed consumers
+ - ✅ Atomic offset commits
+ - ✅ End-to-end exactly-once pipeline
+
+ **Difficulty**: Advanced
+ **Time**: 30 minutes
+
+ ### 4. [Kafka Streams Application](./kafka-streams-app/)
+
+ **Real-time stream processing**
+
+ - ✅ Windowed aggregations (tumbling windows)
+ - ✅ Stateful processing
+ - ✅ Event-time processing
+ - ✅ RocksDB state store
+
+ **Difficulty**: Advanced
+ **Time**: 45 minutes
+
+ ### 5. [n8n Workflow Integration](./n8n-workflow/)
+
+ **No-code event-driven automation**
+
+ - ✅ Kafka trigger workflows
+ - ✅ Event filtering and routing
+ - ✅ Multi-sink integration (Slack, DB, email)
+ - ✅ Visual workflow builder
+
+ **Difficulty**: Beginner
+ **Time**: 20 minutes
+
+ ## Learning Path
+
+ ### Beginner
+ 1. Start with **simple-producer-consumer** to understand basics
+ 2. Try **n8n-workflow** for no-code integration
+
+ ### Intermediate
+ 3. Learn schema management with **avro-schema-registry**
+ 4. Explore windowed aggregations in **kafka-streams-app**
+
+ ### Advanced
+ 5. Master reliability with **exactly-once-semantics**
+
+ ## Prerequisites
+
+ All examples require:
+ - **Node.js 18+** (`node --version`)
+ - **Docker Desktop** (for local Kafka cluster)
+ - **npm** (`npm --version`)
+
+ Optional (for specific examples):
+ - **Schema Registry** (avro-schema-registry example)
+ - **n8n** (n8n-workflow example)
+
+ ## Quick Start: Local Kafka
+
+ Each example includes a `docker-compose.yml` for local Kafka:
+
+ ```bash
+ # Start Kafka
+ docker-compose up -d
+
+ # Wait for cluster (~30 seconds)
+ docker logs kafka-broker -f
+
+ # Stop Kafka
+ docker-compose down
+ ```
+
+ ## Common Issues
+
+ ### Port 9092 Already in Use
+
+ ```bash
+ # Find process using port 9092
+ lsof -i :9092
+
+ # Kill the process
+ kill -9 <PID>
+ ```
+
+ ### Kafka Container Won't Start
+
+ ```bash
+ # Check Docker resources
+ docker system df
+
+ # Increase Docker memory to 8GB
+ # Docker Desktop → Preferences → Resources → Memory
+ ```
+
+ ### Consumer Not Receiving Messages
+
+ ```bash
+ # Check topic exists
+ kafka-topics --bootstrap-server localhost:9092 --list
+
+ # Reset consumer group offsets
+ kafka-consumer-groups --bootstrap-server localhost:9092 \
+   --group my-group \
+   --topic my-topic \
+   --reset-offsets --to-earliest \
+   --execute
+ ```
+
+ ## Running Tests
+
+ Some examples include tests:
+
+ ```bash
+ cd simple-producer-consumer
+ npm test
+ ```
+
+ ## Production Deployment
+
+ These examples are for learning. For production:
+
+ 1. **Security**: Enable SASL/SSL authentication (see the sketch after this list)
+ 2. **Monitoring**: Add Prometheus + Grafana
+ 3. **Reliability**: Use 3+ brokers with replication
+ 4. **Performance**: Tune batching, compression, partitions
+ 5. **Deployment**: Use Terraform modules (see `../terraform/`)
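The examples connect without authentication, so the security item is usually the first production change. A minimal sketch of a TLS + SASL connection in kafkajs, assuming SASL/PLAIN credentials; the broker address, environment variable names, and mechanism are placeholders rather than anything shipped in the package:

```javascript
// Hypothetical secured client config (not part of the shipped examples).
const { Kafka } = require('kafkajs');

const kafka = new Kafka({
  clientId: 'specweave-example',
  brokers: (process.env.KAFKA_BROKERS || 'broker-1.internal:9093').split(','),
  ssl: true, // TLS for encryption in transit
  sasl: {
    mechanism: 'plain', // or 'scram-sha-256' / 'scram-sha-512', depending on the cluster
    username: process.env.KAFKA_USERNAME,
    password: process.env.KAFKA_PASSWORD
  }
});
```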
+
+ ## Documentation
+
+ - **Getting Started**: [kafka-getting-started.md](../../../.specweave/docs/public/guides/kafka-getting-started.md)
+ - **Advanced Usage**: [kafka-advanced-usage.md](../../../.specweave/docs/public/guides/kafka-advanced-usage.md)
+ - **Terraform Guide**: [kafka-terraform.md](../../../.specweave/docs/public/guides/kafka-terraform.md)
+ - **Troubleshooting**: [kafka-troubleshooting.md](../../../.specweave/docs/public/guides/kafka-troubleshooting.md)
+
+ ## Contributing
+
+ Found a bug or want to add an example? See [CONTRIBUTING.md](../../../CONTRIBUTING.md).
+
+ ## Support
+
+ - **GitHub Issues**: [Report a problem](https://github.com/anton-abyzov/specweave/issues)
+ - **Discussions**: [Ask questions](https://github.com/anton-abyzov/specweave/discussions)
+
+ ---
+
+ **Last Updated**: 2025-11-15
@@ -0,0 +1,8 @@
+ KAFKA_BROKERS=localhost:9092
+ SCHEMA_REGISTRY_URL=http://localhost:8081
+
+ # For Confluent Cloud:
+ # KAFKA_BROKERS=pkc-xxxxx.us-east-1.aws.confluent.cloud:9092
+ # SCHEMA_REGISTRY_URL=https://psrc-xxxxx.us-east-1.aws.confluent.cloud
+ # SCHEMA_REGISTRY_API_KEY=your-api-key
+ # SCHEMA_REGISTRY_API_SECRET=your-secret
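The Confluent Cloud variables are commented out and the example's producer and consumer only pass `host` to the Schema Registry client. Wiring the API key and secret in would look roughly like this; a sketch assuming the client's basic-auth `auth` option, not code shipped with the example:

```javascript
// Hypothetical Confluent Cloud wiring for the Schema Registry client.
require('dotenv').config();
const { SchemaRegistry } = require('@kafkajs/confluent-schema-registry');

const registry = new SchemaRegistry({
  host: process.env.SCHEMA_REGISTRY_URL,
  auth: {
    username: process.env.SCHEMA_REGISTRY_API_KEY,
    password: process.env.SCHEMA_REGISTRY_API_SECRET
  }
});
```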
@@ -0,0 +1,69 @@
+ # Avro Schema Registry Example
+
+ **Schema-based serialization with Confluent Schema Registry**
+
+ Demonstrates Avro schema registration, evolution, and message validation.
+
+ ## Prerequisites
+
+ - Node.js 18+
+ - Kafka cluster with Schema Registry (schemas can evolve while maintaining backward compatibility)
+
+ ## Setup
+
+ ```bash
+ npm install
+ cp .env.example .env
+ # Configure KAFKA_BROKERS and SCHEMA_REGISTRY_URL
+ ```
+
+ ## Run
+
+ ```bash
+ # Register schema and produce
+ npm run producer
+
+ # Consume with schema validation
+ npm run consumer
+ ```
+
+ ## Features
+
+ - **Schema Registration** - Auto-register Avro schemas
+ - **Schema Evolution** - Add optional fields safely
+ - **Validation** - Messages must match schema
+ - **Serialization** - Binary Avro encoding (smaller than JSON)
+
+ ## Schema Evolution Example
+
+ ```javascript
+ // V1 - Initial schema
+ const userSchemaV1 = {
+   type: 'record',
+   name: 'User',
+   fields: [
+     { name: 'id', type: 'long' },
+     { name: 'name', type: 'string' }
+   ]
+ };
+
+ // V2 - BACKWARD compatible (add optional field)
+ const userSchemaV2 = {
+   type: 'record',
+   name: 'User',
+   fields: [
+     { name: 'id', type: 'long' },
+     { name: 'name', type: 'string' },
+     { name: 'email', type: ['null', 'string'], default: null } // NEW
+   ]
+ };
+
+ // Old consumers can still read V2 messages (ignore email)
+ // New producers can send V2 messages with email field
+ ```
+
+ ## Documentation
+
+ - [Avro Specification](https://avro.apache.org/docs/current/spec.html)
+ - [Confluent Schema Registry](https://docs.confluent.io/platform/current/schema-registry/index.html)
@@ -0,0 +1,37 @@
+ require('dotenv').config();
+ const { Kafka } = require('kafkajs');
+ const { SchemaRegistry } = require('@kafkajs/confluent-schema-registry');
+
+ const kafka = new Kafka({
+   brokers: (process.env.KAFKA_BROKERS || 'localhost:9092').split(',')
+ });
+
+ const registry = new SchemaRegistry({
+   host: process.env.SCHEMA_REGISTRY_URL || 'http://localhost:8081'
+ });
+
+ async function run() {
+   const consumer = kafka.consumer({ groupId: 'avro-consumer-group' });
+
+   try {
+     await consumer.connect();
+     await consumer.subscribe({ topic: 'users', fromBeginning: true });
+     console.log('✓ Consumer ready');
+
+     await consumer.run({
+       eachMessage: async ({ message }) => {
+         // Decode automatically (schema ID in message)
+         const user = await registry.decode(message.value);
+         console.log('📥 Received user:', user);
+         // Output: { id: 1, name: 'Alice', email: 'alice@example.com' }
+       }
+     });
+
+   } catch (error) {
+     console.error('❌ Error:', error);
+   }
+ }
+
+ process.on('SIGINT', async () => process.exit(0));
+
+ run().catch(console.error);
@@ -0,0 +1,14 @@
+ {
+   "name": "kafka-avro-schema-registry",
+   "version": "1.0.0",
+   "description": "Avro Schema Registry integration example",
+   "scripts": {
+     "producer": "node producer.js",
+     "consumer": "node consumer.js"
+   },
+   "dependencies": {
+     "kafkajs": "^2.2.4",
+     "@kafkajs/confluent-schema-registry": "^3.3.0",
+     "dotenv": "^16.3.1"
+   }
+ }
@@ -0,0 +1,57 @@
+ require('dotenv').config();
+ const { Kafka } = require('kafkajs');
+ const { SchemaRegistry, SchemaType } = require('@kafkajs/confluent-schema-registry');
+
+ const kafka = new Kafka({
+   brokers: (process.env.KAFKA_BROKERS || 'localhost:9092').split(',')
+ });
+
+ const registry = new SchemaRegistry({
+   host: process.env.SCHEMA_REGISTRY_URL || 'http://localhost:8081'
+ });
+
+ // Avro schema definition
+ const schema = {
+   type: 'record',
+   name: 'User',
+   fields: [
+     { name: 'id', type: 'long' },
+     { name: 'name', type: 'string' },
+     { name: 'email', type: ['null', 'string'], default: null }
+   ]
+ };
+
+ async function run() {
+   const producer = kafka.producer();
+
+   try {
+     await producer.connect();
+     console.log('✓ Connected to Kafka');
+
+     // Register schema
+     const { id } = await registry.register({
+       type: SchemaType.AVRO,
+       schema: JSON.stringify(schema)
+     });
+     console.log(`✓ Schema registered with ID: ${id}`);
+
+     // Encode message with schema
+     const message = { id: 1, name: 'Alice', email: 'alice@example.com' };
+     const encodedValue = await registry.encode(id, message);
+
+     // Send message
+     await producer.send({
+       topic: 'users',
+       messages: [{ key: '1', value: encodedValue }]
+     });
+
+     console.log('✅ Message sent with schema validation');
+
+   } catch (error) {
+     console.error('❌ Error:', error);
+   } finally {
+     await producer.disconnect();
+   }
+ }
+
+ run().catch(console.error);
@@ -0,0 +1,5 @@
+ KAFKA_BROKERS=localhost:9092
+
+ # Important: Ensure broker configuration supports transactions:
+ # transaction.state.log.replication.factor=3
+ # transaction.state.log.min.isr=2
@@ -0,0 +1,30 @@
+ # Exactly-Once Semantics (EOS) Example
+
+ **Zero message duplication or loss with transactional producers**
+
+ Demonstrates end-to-end exactly-once processing for financial transactions.
+
+ ## Features
+
+ - **Transactional Producer** - Atomic writes to multiple topics
+ - **Read-Committed Consumer** - Only reads committed messages
+ - **Offset Management** - Commits offsets within transactions
+ - **Guaranteed Delivery** - Zero duplicates, zero message loss
+
+ ## Run
+
+ ```bash
+ npm install
+ npm run eos-pipeline
+ ```
+
+ ## Use Cases
+
+ - Financial transactions
+ - Order processing
+ - Billing systems
+ - Any scenario where duplicates are unacceptable
+
+ ## Documentation
+
+ See [Advanced Usage Guide](../../.specweave/docs/public/guides/kafka-advanced-usage.md#exactly-once-semantics-eos)
@@ -0,0 +1,79 @@
+ require('dotenv').config();
+ const { Kafka } = require('kafkajs');
+
+ const kafka = new Kafka({
+   brokers: (process.env.KAFKA_BROKERS || 'localhost:9092').split(',')
+ });
+
+ const consumer = kafka.consumer({
+   groupId: 'eos-pipeline',
+   readUncommitted: false // read_committed isolation: only read committed transactions
+ });
+
+ const producer = kafka.producer({
+   transactionalId: 'eos-pipeline-tx-001', // Unique per instance
+   idempotent: true, // Prevent duplicates (implies acks=all)
+   maxInFlightRequests: 1
+ });
+
+ async function run() {
+   await consumer.connect();
+   await producer.connect();
+
+   await consumer.subscribe({ topic: 'input-events', fromBeginning: false });
+
+   console.log('✓ EOS Pipeline started');
+   console.log('⏳ Processing messages with exactly-once semantics...\n');
+
+   await consumer.run({
+     autoCommit: false, // offsets are committed inside the transaction instead
+     eachMessage: async ({ topic, partition, message }) => {
+       const transaction = await producer.transaction();
+
+       try {
+         // Parse input
+         const input = JSON.parse(message.value.toString());
+         console.log('📥 Input:', input);
+
+         // Transform
+         const output = {
+           ...input,
+           processed: true,
+           timestamp: new Date().toISOString()
+         };
+
+         // Send to output topic
+         await transaction.send({
+           topic: 'output-events',
+           messages: [{ key: message.key, value: JSON.stringify(output) }]
+         });
+
+         // Commit offset within transaction (atomic!)
+         await transaction.sendOffsets({
+           consumerGroupId: 'eos-pipeline',
+           topics: [{
+             topic,
+             partitions: [{ partition, offset: (parseInt(message.offset) + 1).toString() }]
+           }]
+         });
+
+         // Commit transaction (both output + offset committed atomically)
+         await transaction.commit();
+         console.log('✅ Processed:', output.id || 'message');
+
+       } catch (error) {
+         console.error('❌ Error, aborting transaction:', error.message);
+         await transaction.abort();
+       }
+     }
+   });
+ }
+
+ process.on('SIGINT', async () => {
+   await consumer.disconnect();
+   await producer.disconnect();
+   process.exit(0);
+ });
+
+ run().catch(console.error);
@@ -0,0 +1,11 @@
+ {
+   "name": "kafka-exactly-once-semantics",
+   "version": "1.0.0",
+   "scripts": {
+     "eos-pipeline": "node eos-pipeline.js"
+   },
+   "dependencies": {
+     "kafkajs": "^2.2.4",
+     "dotenv": "^16.3.1"
+   }
+ }
@@ -0,0 +1,4 @@
+ KAFKA_BROKERS=localhost:9092
+
+ # For production, use RocksDB for state store:
+ # STATE_STORE_DIR=./state-store
@@ -0,0 +1,30 @@
+ # Kafka Streams Application Example
+
+ **Real-time stream processing with windowing and aggregations**
+
+ Demonstrates Kafka Streams DSL for stateful processing.
+
+ ## Features
+
+ - **Windowed Aggregations** - Count events per user in 1-minute windows
+ - **Stream-Table Joins** - Enrich events with user data
+ - **Stateful Processing** - Maintain aggregated state
+ - **RocksDB State Store** - Persistent local state
+
+ ## Run
+
+ ```bash
+ npm install
+ npm run streams
+ ```
+
+ ## Use Cases
+
+ - Real-time analytics
+ - User behavior tracking
+ - Fraud detection
+ - IoT data aggregation
+
+ ## Documentation
+
+ See [Advanced Usage Guide](../../.specweave/docs/public/guides/kafka-advanced-usage.md#kafka-streams-applications)
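The README above lists stream-table joins, but the bundled `windowed-aggregation.js` (later in this diff) only covers windowed counts. A rough sketch of the join idea with kafkajs, materializing a compacted `users` topic into an in-memory map and enriching each event; the topic names and event shape are assumptions, not part of the package:

```javascript
// Hypothetical stream-table join: enrich user-events with records from a compacted users topic.
require('dotenv').config();
const { Kafka } = require('kafkajs');

const kafka = new Kafka({
  brokers: (process.env.KAFKA_BROKERS || 'localhost:9092').split(',')
});

const userTable = new Map(); // latest user record per key (the "table" side)

async function run() {
  const tableConsumer = kafka.consumer({ groupId: 'join-table-loader' });
  const streamConsumer = kafka.consumer({ groupId: 'join-stream-processor' });
  const producer = kafka.producer();

  await Promise.all([tableConsumer.connect(), streamConsumer.connect(), producer.connect()]);

  // Table side: materialize the compacted `users` topic into memory.
  await tableConsumer.subscribe({ topic: 'users', fromBeginning: true });
  await tableConsumer.run({
    eachMessage: async ({ message }) => {
      // Compacted topics require keys, so message.key is expected to be set.
      userTable.set(message.key.toString(), JSON.parse(message.value.toString()));
    }
  });

  // Stream side: enrich each event with the latest known user record.
  await streamConsumer.subscribe({ topic: 'user-events', fromBeginning: false });
  await streamConsumer.run({
    eachMessage: async ({ message }) => {
      const event = JSON.parse(message.value.toString());
      const user = userTable.get(String(event.userId)) || null; // null if not loaded yet
      await producer.send({
        topic: 'user-events-enriched',
        messages: [{ key: String(event.userId), value: JSON.stringify({ ...event, user }) }]
      });
    }
  });
}

run().catch(console.error);
```

In a real deployment the table side would typically live in a persistent store (the README mentions RocksDB) rather than a process-local Map.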
@@ -0,0 +1,11 @@
+ {
+   "name": "kafka-streams-windowing",
+   "version": "1.0.0",
+   "scripts": {
+     "streams": "node windowed-aggregation.js"
+   },
+   "dependencies": {
+     "kafkajs": "^2.2.4",
+     "dotenv": "^16.3.1"
+   }
+ }
@@ -0,0 +1,66 @@
+ require('dotenv').config();
+ const { Kafka } = require('kafkajs');
+
+ const kafka = new Kafka({
+   brokers: (process.env.KAFKA_BROKERS || 'localhost:9092').split(',')
+ });
+
+ const WINDOW_SIZE_MS = 60000; // 1 minute tumbling windows
+ const windows = new Map(); // In-memory state (use RocksDB for production)
+
+ async function run() {
+   const consumer = kafka.consumer({ groupId: 'streams-aggregator' });
+   const producer = kafka.producer();
+
+   await consumer.connect();
+   await producer.connect();
+
+   await consumer.subscribe({ topic: 'user-events', fromBeginning: false });
+
+   console.log('✓ Kafka Streams app started');
+   console.log('⏳ Aggregating events in 1-minute windows...\n');
+
+   await consumer.run({
+     eachMessage: async ({ message }) => {
+       const event = JSON.parse(message.value.toString());
+
+       // Calculate window start
+       const timestamp = event.timestamp || Date.now();
+       const windowStart = Math.floor(timestamp / WINDOW_SIZE_MS) * WINDOW_SIZE_MS;
+       const windowEnd = windowStart + WINDOW_SIZE_MS;
+
+       // Aggregate by userId + window
+       const windowKey = `${event.userId}:${windowStart}`;
+
+       if (!windows.has(event.userId)) {
+         windows.set(event.userId, new Map());
+       }
+
+       const userWindows = windows.get(event.userId);
+       const count = (userWindows.get(windowStart) || 0) + 1;
+       userWindows.set(windowStart, count);
+
+       // Emit windowed count
+       const result = {
+         userId: event.userId,
+         count,
+         windowStart: new Date(windowStart).toISOString(),
+         windowEnd: new Date(windowEnd).toISOString()
+       };
+
+       await producer.send({
+         topic: 'user-event-counts-1min',
+         messages: [{
+           key: windowKey,
+           value: JSON.stringify(result)
+         }]
+       });
+
+       console.log(`📊 Window [${result.windowStart}]: User ${event.userId} → ${count} events`);
+     }
+   });
+ }
+
+ process.on('SIGINT', async () => process.exit(0));
+
+ run().catch(console.error);
@@ -0,0 +1,54 @@
+ # n8n Kafka Workflow Example
+
+ **No-code Kafka workflows with n8n automation**
+
+ Demonstrates integrating Kafka with n8n for event-driven workflows.
+
+ ## Features
+
+ - **Kafka Trigger** - Start workflows from Kafka messages
+ - **Enrichment** - Call external APIs to enrich data
+ - **Filtering** - Route messages based on conditions
+ - **Multi-Sink** - Send to database, Slack, email, etc.
+
+ ## Setup
+
+ ```bash
+ # Start n8n
+ docker-compose up -d
+
+ # Access n8n UI
+ open http://localhost:5678
+ ```
+
+ ## Import Workflow
+
+ 1. Open n8n at http://localhost:5678
+ 2. Click "Import from File"
+ 3. Select `kafka-to-slack.json`
+ 4. Configure Kafka credentials
+ 5. Activate workflow
+
+ ## Example Workflow
+
+ ```
+ Kafka Consumer (user-events)
+          ↓
+ Filter (event_type === 'purchase')
+          ↓
+ HTTP Request (fetch user details)
+          ↓
+ Slack (send notification)
+ ```
+
+ ## Use Cases
+
+ - Alerts and notifications
+ - Data pipeline orchestration
+ - Event-driven automation
+ - Integration with 3rd party services
+
+ ## Documentation
+
+ - [n8n Documentation](https://docs.n8n.io/)
+ - [n8n Kafka Node](https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-base.kafka/)
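To exercise the imported workflow end to end, something has to publish a matching message to `user-events`. A minimal kafkajs producer sketch; the event shape (`event_type: 'purchase'`, `userId`) is inferred from the workflow outline above and is an assumption, and the broker address assumes the local cluster used by the other examples:

```javascript
// Hypothetical test producer for the kafka-to-slack workflow (not shipped with the example).
const { Kafka } = require('kafkajs');

const kafka = new Kafka({ brokers: ['localhost:9092'] });

async function run() {
  const producer = kafka.producer();
  await producer.connect();

  // Publish one purchase event so the Filter node passes it through to Slack.
  await producer.send({
    topic: 'user-events',
    messages: [{
      key: '42',
      value: JSON.stringify({ event_type: 'purchase', userId: 42, amount: 19.99 })
    }]
  });

  console.log('✓ Test purchase event sent');
  await producer.disconnect();
}

run().catch(console.error);
```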
@@ -0,0 +1,19 @@
+ version: '3.8'
+
+ services:
+   n8n:
+     image: n8nio/n8n:latest
+     container_name: n8n
+     ports:
+       - "5678:5678"
+     environment:
+       - N8N_BASIC_AUTH_ACTIVE=true
+       - N8N_BASIC_AUTH_USER=admin
+       - N8N_BASIC_AUTH_PASSWORD=admin
+     volumes:
+       - n8n-data:/home/node/.n8n
+       - ./workflows:/home/node/workflows
+
+ volumes:
+   n8n-data:
+     driver: local