@veridot/kafka 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,61 @@
1
+ # @veridot/kafka
2
+
3
+ Kafka implementation for Veridot metadata broker.
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ npm install @veridot/kafka @veridot/core
9
+ ```
10
+
11
+ ## Usage
12
+
13
+ ```typescript
14
+ import { KafkaMetadataBroker } from '@veridot/kafka';
15
+ import { GenericSignerVerifier, BasicConfigurer, TokenMode } from '@veridot/core';
16
+
17
+ // Create Kafka broker
18
+ const broker = await KafkaMetadataBroker.of({
19
+ clientId: 'my-service',
20
+ brokers: ['localhost:9092'],
21
+ topic: 'veridot_keys',
22
+ dbPath: './data/veridot-keys',
23
+ });
24
+
25
+ // Create signer/verifier
26
+ const signer = new GenericSignerVerifier(broker, process.env.APP_SALT!);
27
+
28
+ // Sign a token
29
+ const config = new BasicConfigurer()
30
+ .mode(TokenMode.JWT)
31
+ .validity(30, 'minutes');
32
+
33
+ const token = await signer.sign({ userId: 123, role: 'admin' }, config);
34
+
35
+ // Verify token (in another service)
36
+ const payload = await signer.verify(token, (data) => JSON.parse(data));
37
+ console.log('User:', payload.userId); // 123
38
+
39
+ // Cleanup
40
+ await signer.shutdown();
41
+ await broker.disconnect();
42
+ ```
43
+
44
+ ## Configuration
45
+
46
+ Environment variables:
47
+
48
+ - `VDOT_KAFKA_BOOTSTRAP_SERVERS`: Kafka broker addresses (default: `localhost:9092`)
49
+ - `VDOT_TOKEN_VERIFIER_TOPIC`: Kafka topic name (default: `veridot_keys`)
50
+ - `VDOT_EMBEDDED_DATABASE_PATH`: LMDB database path (default: `./veridot-keys`)
51
+
52
+ ## Features
53
+
54
+ - **Distributed**: Public keys propagated via Kafka to all services
55
+ - **Cached**: Keys stored locally in LMDB for fast retrieval
56
+ - **Persistent**: LMDB ensures keys survive service restarts
57
+ - **Scalable**: Kafka handles high-throughput key distribution
58
+
59
+ ## License
60
+
61
+ MIT © Darlin TEUMA
@@ -0,0 +1,212 @@
1
+ import { MetadataBroker } from '@veridot/core';
2
/**
 * Configuration options for {@link KafkaMetadataBroker}.
 *
 * @public
 */
export interface KafkaMetadataBrokerOptions {
    /**
     * Kafka client ID for this service instance.
     * Also used to derive the default consumer group ID (`clientId + '-consumer'`).
     */
    clientId: string;
    /**
     * Kafka broker addresses, either as an array or as a single
     * comma-separated string (entries are trimmed).
     *
     * @example ['localhost:9092', 'kafka2:9092']
     */
    brokers: string | string[];
    /**
     * Kafka topic name for key distribution.
     *
     * @default 'veridot_keys' (from Config.TOKEN_VERIFIER_TOPIC)
     */
    topic?: string;
    /**
     * Path to the embedded LMDB database used for local caching of keys.
     *
     * @default './veridot-keys' (from Config.EMBEDDED_DATABASE_PATH)
     */
    dbPath?: string;
    /**
     * Consumer group ID for Kafka.
     *
     * @default clientId + '-consumer'
     */
    groupId?: string;
}
37
/**
 * Kafka-based implementation of {@link MetadataBroker}.
 *
 * This broker uses:
 * - **Kafka**: For distributed message propagation between services
 * - **LMDB**: For local persistent caching of public keys
 *
 * ## Architecture
 *
 * ```
 * ┌──────────────┐      ┌──────────────┐      ┌──────────────┐
 * │  Service A   │      │    Kafka     │      │  Service B   │
 * │              │─────>│    Topic     │─────>│              │
 * │ [Publisher]  │      │ veridot_keys │      │ [Consumer]   │
 * └──────────────┘      └──────────────┘      └──────────────┘
 *        │                                           │
 *        v                                           v
 * ┌──────────────┐                           ┌──────────────┐
 * │ LMDB (local) │                           │ LMDB (local) │
 * │ Key: keyId   │                           │ Key: keyId   │
 * │ Val: pubKey  │                           │ Val: pubKey  │
 * └──────────────┘                           └──────────────┘
 * ```
 *
 * ## Key Features
 *
 * - **Distributed**: Public keys are propagated via Kafka to all services
 * - **Cached**: Keys are stored locally in LMDB for fast retrieval
 * - **Persistent**: LMDB ensures keys survive service restarts
 * - **Scalable**: Kafka handles high-throughput key distribution
 *
 * ## Usage Example
 *
 * ```typescript
 * import { KafkaMetadataBroker } from '@veridot/kafka';
 * import { GenericSignerVerifier } from '@veridot/core';
 *
 * // Create broker
 * const broker = await KafkaMetadataBroker.of({
 *   clientId: 'my-service',
 *   brokers: ['localhost:9092'],
 *   topic: 'veridot_keys',
 *   dbPath: './data/veridot-keys',
 * });
 *
 * // Use with GenericSignerVerifier
 * const signer = new GenericSignerVerifier(broker, process.env.APP_SALT!);
 *
 * // Sign and verify tokens
 * const token = await signer.sign({ userId: 123 }, config);
 * const payload = await signer.verify(token, JSON.parse);
 *
 * // Cleanup
 * await broker.disconnect();
 * ```
 *
 * ## Environment Variables
 *
 * - `VDOT_KAFKA_BOOTSTRAP_SERVERS`: Kafka broker addresses
 * - `VDOT_TOKEN_VERIFIER_TOPIC`: Kafka topic name
 * - `VDOT_EMBEDDED_DATABASE_PATH`: LMDB database path
 *
 * @public
 * @since 2.2.0
 * @author Darlin TEUMA
 */
export declare class KafkaMetadataBroker implements MetadataBroker {
    private readonly producer;
    private readonly consumer;
    private readonly db;
    private readonly topic;
    /** Connection state flag; set by connect() and cleared by disconnect(). */
    private isConnected;
    /**
     * Private constructor. Use {@link KafkaMetadataBroker.of} to create instances.
     *
     * @internal
     */
    private constructor();
    /**
     * Factory method to create and initialize a KafkaMetadataBroker.
     *
     * This method:
     * 1. Creates Kafka producer and consumer
     * 2. Opens LMDB database
     * 3. Connects to Kafka
     * 4. Subscribes to the topic
     * 5. Starts consuming messages
     *
     * @param options - Configuration options
     * @returns A fully initialized KafkaMetadataBroker
     *
     * @throws {Error} If initialization fails
     *
     * @example
     * ```typescript
     * const broker = await KafkaMetadataBroker.of({
     *   clientId: 'my-service',
     *   brokers: ['localhost:9092'],
     * });
     * ```
     */
    static of(options: KafkaMetadataBrokerOptions): Promise<KafkaMetadataBroker>;
    /**
     * Publishes a metadata message associated with a given key.
     *
     * The message is sent to Kafka and also stored locally in LMDB.
     *
     * @param key - The key (typically keyId or tracking ID)
     * @param message - The metadata message
     *
     * @throws {BrokerTransportException} If the broker is not connected or sending fails
     *
     * @example
     * ```typescript
     * await broker.send('abc123xyz', 'jwt:MIIBIjAN...:1731331200000');
     * ```
     */
    send(key: string, message: string): Promise<void>;
    /**
     * Retrieves the metadata message associated with the given key.
     *
     * Only the local LMDB cache is consulted (keys are expected to have
     * arrived via the Kafka consumer); no network lookup is performed here.
     *
     * @param key - The key to retrieve
     * @returns The metadata message
     *
     * @throws {BrokerExtractionException} If the broker is not connected or the key is not found
     *
     * @example
     * ```typescript
     * const metadata = await broker.get('abc123xyz');
     * console.log(metadata); // 'jwt:MIIBIjAN...:1731331200000'
     * ```
     */
    get(key: string): Promise<string>;
    /**
     * Connects to Kafka and starts consuming messages.
     *
     * @internal
     */
    private connect;
    /**
     * Handles incoming Kafka messages by storing them in LMDB.
     *
     * @internal
     */
    private handleMessage;
    /**
     * Disconnects from Kafka and closes the LMDB database.
     *
     * Call this method when shutting down the application. It is a no-op if
     * the broker is not currently connected.
     *
     * @example
     * ```typescript
     * process.on('SIGTERM', async () => {
     *   await broker.disconnect();
     *   process.exit(0);
     * });
     * ```
     */
    disconnect(): Promise<void>;
    /**
     * Checks if the broker is connected.
     *
     * @returns True if connected
     */
    isReady(): boolean;
    /**
     * Gets the topic name.
     *
     * @returns The Kafka topic name
     */
    getTopic(): string;
}
212
+ //# sourceMappingURL=KafkaMetadataBroker.d.ts.map
@@ -0,0 +1,325 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.KafkaMetadataBroker = void 0;
4
+ const kafkajs_1 = require("kafkajs");
5
+ const lmdb_1 = require("lmdb");
6
+ const core_1 = require("@veridot/core");
7
+ const core_2 = require("@veridot/core");
8
/**
 * Kafka-based implementation of {@link MetadataBroker}.
 *
 * This broker uses:
 * - **Kafka**: For distributed message propagation between services
 * - **LMDB**: For local persistent caching of public keys
 *
 * ## Architecture
 *
 * ```
 * ┌──────────────┐      ┌──────────────┐      ┌──────────────┐
 * │  Service A   │      │    Kafka     │      │  Service B   │
 * │              │─────>│    Topic     │─────>│              │
 * │ [Publisher]  │      │ veridot_keys │      │ [Consumer]   │
 * └──────────────┘      └──────────────┘      └──────────────┘
 *        │                                           │
 *        v                                           v
 * ┌──────────────┐                           ┌──────────────┐
 * │ LMDB (local) │                           │ LMDB (local) │
 * │ Key: keyId   │                           │ Key: keyId   │
 * │ Val: pubKey  │                           │ Val: pubKey  │
 * └──────────────┘                           └──────────────┘
 * ```
 *
 * ## Environment Variables (used as defaults via @veridot/core Config)
 *
 * - `VDOT_KAFKA_BOOTSTRAP_SERVERS`: Kafka broker addresses
 * - `VDOT_TOKEN_VERIFIER_TOPIC`: Kafka topic name
 * - `VDOT_EMBEDDED_DATABASE_PATH`: LMDB database path
 *
 * @public
 * @since 2.2.0
 * @author Darlin TEUMA
 */
class KafkaMetadataBroker {
    /**
     * Private by convention: use {@link KafkaMetadataBroker.of} to obtain a
     * fully-initialized instance.
     *
     * @internal
     */
    constructor(producer, consumer, db, topic) {
        this.isConnected = false;
        this.producer = producer;
        this.consumer = consumer;
        this.db = db;
        this.topic = topic;
    }
    /**
     * Factory method to create and initialize a KafkaMetadataBroker.
     *
     * Creates the Kafka producer/consumer, opens the LMDB database, connects,
     * subscribes to the topic and starts consuming messages.
     *
     * @param options - Configuration options
     * @returns A fully initialized KafkaMetadataBroker
     * @throws {Error} If initialization fails
     */
    static async of(options) {
        console.log('[KafkaMetadataBroker] Initializing...');
        // Accept either an array or a single comma-separated string of brokers.
        const brokers = Array.isArray(options.brokers)
            ? options.brokers
            : options.brokers.split(',').map((b) => b.trim());
        // Fall back to environment-driven defaults from @veridot/core Config.
        const topic = options.topic || core_2.Config.TOKEN_VERIFIER_TOPIC;
        const dbPath = options.dbPath || core_2.Config.EMBEDDED_DATABASE_PATH;
        const groupId = options.groupId || `${options.clientId}-consumer`;
        console.log(`[KafkaMetadataBroker] Config: brokers=${brokers.join(',')}, topic=${topic}, dbPath=${dbPath}`);
        const kafka = new kafkajs_1.Kafka({
            clientId: options.clientId,
            brokers,
        });
        const producer = kafka.producer();
        const consumer = kafka.consumer({ groupId });
        const db = (0, lmdb_1.open)({
            path: dbPath,
            compression: true,
        });
        const broker = new KafkaMetadataBroker(producer, consumer, db, topic);
        await broker.connect();
        console.log('[KafkaMetadataBroker] Initialized successfully');
        return broker;
    }
    /**
     * Publishes a metadata message associated with a given key.
     *
     * The message is sent to Kafka and also cached locally in LMDB.
     *
     * @param key - The key (typically keyId or tracking ID)
     * @param message - The metadata message
     * @throws {BrokerTransportException} If the broker is not connected or sending fails
     */
    async send(key, message) {
        if (!this.isConnected) {
            throw new core_1.BrokerTransportException('Broker is not connected');
        }
        try {
            // Publish to Kafka FIRST: if the send fails, the local cache is not
            // left holding a key that peer services never received.
            await this.producer.send({
                topic: this.topic,
                messages: [{ key, value: message }],
            });
            // Local fast-path cache; the consumer will also (re)store this entry
            // when the message round-trips through the topic.
            await this.db.put(key, message);
            console.log(`[KafkaMetadataBroker] Sent metadata for key: ${key}`);
        }
        catch (error) {
            throw new core_1.BrokerTransportException(`Failed to send metadata for key ${key}: ${error.message}`, error);
        }
    }
    /**
     * Retrieves the metadata message associated with the given key from the
     * local LMDB cache (keys are expected to have arrived via the consumer;
     * no network lookup happens here).
     *
     * @param key - The key to retrieve
     * @returns The metadata message
     * @throws {BrokerExtractionException} If the broker is not connected or the key is not found
     */
    async get(key) {
        if (!this.isConnected) {
            throw new core_1.BrokerExtractionException('Broker is not connected');
        }
        try {
            const message = await this.db.get(key);
            // LMDB returns undefined for missing keys; compare explicitly so a
            // legitimately-empty stored value is not mistaken for "not found".
            if (message === undefined || message === null) {
                throw new core_1.BrokerExtractionException(`Metadata not found for key: ${key}. ` +
                    'The key may have expired, been revoked, or never existed.');
            }
            return message;
        }
        catch (error) {
            if (error instanceof core_1.BrokerExtractionException) {
                throw error;
            }
            throw new core_1.BrokerExtractionException(`Failed to retrieve metadata for key ${key}: ${error.message}`);
        }
    }
    /**
     * Connects producer and consumer, subscribes to the topic and starts the
     * message-consumption loop.
     *
     * @internal
     */
    async connect() {
        try {
            console.log('[KafkaMetadataBroker] Connecting to Kafka...');
            await this.producer.connect();
            console.log('[KafkaMetadataBroker] Producer connected');
            await this.consumer.connect();
            console.log('[KafkaMetadataBroker] Consumer connected');
            // fromBeginning: replay all previously published keys into the local
            // cache when a fresh consumer group starts up.
            await this.consumer.subscribe({ topic: this.topic, fromBeginning: true });
            console.log(`[KafkaMetadataBroker] Subscribed to topic: ${this.topic}`);
            await this.consumer.run({
                eachMessage: async (payload) => {
                    await this.handleMessage(payload);
                },
            });
            this.isConnected = true;
            console.log('[KafkaMetadataBroker] Connected successfully');
        }
        catch (error) {
            throw new Error(`Failed to connect to Kafka: ${error.message}`);
        }
    }
    /**
     * Handles an incoming Kafka message by storing its key/value pair in LMDB.
     * Messages without a key or value are logged and skipped.
     *
     * @internal
     */
    async handleMessage(payload) {
        const { message } = payload;
        if (!message.key || !message.value) {
            console.warn('[KafkaMetadataBroker] Received message without key or value');
            return;
        }
        const key = message.key.toString();
        const value = message.value.toString();
        try {
            await this.db.put(key, value);
            console.log(`[KafkaMetadataBroker] Stored metadata for key: ${key}`);
        }
        catch (error) {
            // Best-effort: a failed cache write must not crash the consumer loop.
            console.error(`[KafkaMetadataBroker] Failed to store metadata for key ${key}:`, error);
        }
    }
    /**
     * Disconnects from Kafka and closes the LMDB database.
     *
     * No-op when not connected. Every cleanup step is attempted even if an
     * earlier one fails, so a consumer error cannot leak the producer
     * connection or the database handle; the first error encountered is
     * rethrown after all steps have run.
     */
    async disconnect() {
        if (!this.isConnected) {
            return;
        }
        console.log('[KafkaMetadataBroker] Disconnecting...');
        let firstError = null;
        try {
            await this.consumer.disconnect();
            console.log('[KafkaMetadataBroker] Consumer disconnected');
        }
        catch (error) {
            console.error('[KafkaMetadataBroker] Error during disconnect:', error);
            firstError = error;
        }
        try {
            await this.producer.disconnect();
            console.log('[KafkaMetadataBroker] Producer disconnected');
        }
        catch (error) {
            console.error('[KafkaMetadataBroker] Error during disconnect:', error);
            if (firstError === null) {
                firstError = error;
            }
        }
        try {
            this.db.close();
            console.log('[KafkaMetadataBroker] Database closed');
        }
        catch (error) {
            console.error('[KafkaMetadataBroker] Error during disconnect:', error);
            if (firstError === null) {
                firstError = error;
            }
        }
        // Mark as disconnected even on partial failure; recovery is done by
        // creating a fresh broker via of().
        this.isConnected = false;
        if (firstError !== null) {
            throw firstError;
        }
        console.log('[KafkaMetadataBroker] Disconnected successfully');
    }
    /**
     * Checks if the broker is connected.
     *
     * @returns True if connected
     */
    isReady() {
        return this.isConnected;
    }
    /**
     * Gets the topic name.
     *
     * @returns The Kafka topic name
     */
    getTopic() {
        return this.topic;
    }
}
324
+ exports.KafkaMetadataBroker = KafkaMetadataBroker;
325
+ //# sourceMappingURL=KafkaMetadataBroker.js.map
@@ -0,0 +1,14 @@
1
/**
 * @veridot/kafka - Kafka implementation for Veridot metadata broker.
 *
 * This module provides a Kafka-based implementation of the MetadataBroker
 * interface, using Kafka for distributed message propagation and LMDB for
 * local persistent caching.
 *
 * @remarks
 * KafkaMetadataBroker is the runtime export; KafkaMetadataBrokerOptions is a
 * type-only export describing its factory configuration.
 *
 * @packageDocumentation
 * @module @veridot/kafka
 * @author Darlin TEUMA
 * @since 2.2.0
 */
export { KafkaMetadataBroker, KafkaMetadataBrokerOptions } from './KafkaMetadataBroker';
14
+ //# sourceMappingURL=index.d.ts.map
package/dist/index.js ADDED
@@ -0,0 +1,18 @@
1
"use strict";
/**
 * @veridot/kafka - Kafka implementation for Veridot metadata broker.
 *
 * Re-exports the Kafka-based MetadataBroker implementation, which propagates
 * key metadata between services via Kafka and caches it locally in LMDB.
 *
 * @packageDocumentation
 * @module @veridot/kafka
 * @author Darlin TEUMA
 * @since 2.2.0
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.KafkaMetadataBroker = void 0;
const kafkaBrokerModule = require("./KafkaMetadataBroker");
Object.defineProperty(exports, "KafkaMetadataBroker", {
    enumerable: true,
    get: () => kafkaBrokerModule.KafkaMetadataBroker,
});
18
+ //# sourceMappingURL=index.js.map
package/package.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "name": "@veridot/kafka",
3
+ "version": "2.2.0",
4
+ "description": "Kafka implementation module for Veridot - Provides Kafka-based metadata broker",
5
+ "main": "dist/index.js",
6
+ "types": "dist/index.d.ts",
7
+ "scripts": {
8
+ "build": "tsc",
9
+ "clean": "rm -rf dist",
10
+ "prepublishOnly": "pnpm run clean && pnpm run build"
11
+ },
12
+ "keywords": [
13
+ "veridot",
14
+ "kafka",
15
+ "metadata",
16
+ "broker",
17
+ "distributed"
18
+ ],
19
+ "author": "Darlin TEUMA <darlinteuma2@gmail.com>",
20
+ "contributors": [
21
+ "Frank KOSSI <frank.kossi@kunrin.com> (Original Veridot creator and architect)"
22
+ ],
23
+ "license": "MIT",
24
+ "dependencies": {
25
+ "@veridot/core": "^2.2.0",
26
+ "kafkajs": "^2.2.4",
27
+ "lmdb": "^3.2.6"
28
+ },
29
+ "devDependencies": {
30
+ "@types/node": "^20.0.0",
31
+ "typescript": "^5.3.0"
32
+ }
33
+ }
34
+