@car-parts/common 1.0.2 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/events/base-listener.d.ts +21 -0
- package/build/events/base-listener.js +78 -0
- package/build/events/base-publisher.d.ts +16 -0
- package/build/events/base-publisher.js +91 -0
- package/build/events/subjects.d.ts +3 -0
- package/build/events/subjects.js +8 -0
- package/build/events/user-created-event.d.ts +9 -0
- package/build/events/user-created-event.js +2 -0
- package/build/index.d.ts +4 -0
- package/build/index.js +4 -0
- package/package.json +2 -1
package/build/events/base-listener.d.ts
ADDED
@@ -0,0 +1,21 @@
+import { Consumer, Kafka, ConsumerConfig, KafkaMessage, TopicPartitionOffsetAndMetadata } from 'kafkajs';
+import { Subjects } from './subjects';
+interface Event {
+    subject: Subjects;
+    data: any;
+}
+export declare abstract class KafkaListener<T extends Event> {
+    abstract subject: T['subject'];
+    abstract groupId: string;
+    abstract onMessage(data: T['data'], message: KafkaMessage): Promise<void>;
+    protected kafka: Kafka;
+    protected consumer: Consumer;
+    protected sessionTimeout: number;
+    protected consumerConfig?: Partial<ConsumerConfig>;
+    constructor(kafka: Kafka, consumerConfig?: Partial<ConsumerConfig>);
+    listen(): Promise<void>;
+    parseMessage(message: KafkaMessage): any;
+    disconnect(): Promise<void>;
+    commitOffsets(offsets: Array<TopicPartitionOffsetAndMetadata>): Promise<void>;
+}
+export {};
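The declaration above ports the NATS-style base listener to kafkajs: a subclass supplies subject, groupId, and onMessage, and the base class handles connect, subscribe, and run. A minimal sketch of such a subclass follows; the UserCreatedEvent payload shape and the consumer group name are illustrative assumptions, not taken from this diff.

import { KafkaMessage } from 'kafkajs';
import { KafkaListener, Subjects } from '@car-parts/common';

// Illustrative payload only -- the real UserCreatedEvent interface lives in
// ./events/user-created-event, whose contents are not displayed in this diff.
interface UserCreatedEvent {
    subject: Subjects.UserCreated;
    data: { id: string; email: string };
}

class UserCreatedListener extends KafkaListener<UserCreatedEvent> {
    subject: Subjects.UserCreated = Subjects.UserCreated;
    groupId = 'user-service'; // assumed consumer group name
    async onMessage(data: UserCreatedEvent['data'], message: KafkaMessage): Promise<void> {
        console.log('User created:', data.id);
        // business logic goes here
    }
}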
package/build/events/base-listener.js
ADDED
@@ -0,0 +1,78 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.KafkaListener = void 0;
+class KafkaListener {
+    constructor(kafka, consumerConfig) {
+        this.sessionTimeout = 30000; // 30 seconds
+        this.kafka = kafka;
+        this.consumerConfig = consumerConfig;
+    }
+    listen() {
+        return __awaiter(this, void 0, void 0, function* () {
+            // Initialize consumer here where groupId is available
+            this.consumer = this.kafka.consumer(Object.assign({ groupId: this.groupId, sessionTimeout: this.sessionTimeout }, this.consumerConfig));
+            yield this.consumer.connect();
+            console.log(`Kafka consumer connected: ${this.groupId}`);
+            yield this.consumer.subscribe({
+                topic: this.subject,
+                fromBeginning: true, // Similar to setDeliverAllAvailable in NATS
+            });
+            yield this.consumer.run({
+                eachMessage: (_a) => __awaiter(this, [_a], void 0, function* ({ topic, partition, message, }) {
+                    console.log(`Message received: ${topic} / ${this.groupId} / Partition: ${partition}`);
+                    try {
+                        const parsedData = this.parseMessage(message);
+                        yield this.onMessage(parsedData, message);
+                        // Kafka automatically commits offsets by default (autoCommit: true)
+                        // If you want manual commit, set autoCommit to false in run() options
+                    }
+                    catch (err) {
+                        console.error('Error processing message:', err);
+                        // Implement your error handling strategy here
+                        // You might want to:
+                        // 1. Skip the message and continue
+                        // 2. Send to dead letter queue
+                        // 3. Retry with backoff
+                        throw err; // This will pause the consumer
+                    }
+                }),
+            });
+            // Handle errors
+            this.consumer.on('consumer.crash', (event) => {
+                console.error('Consumer crashed:', event.payload.error);
+            });
+            this.consumer.on('consumer.disconnect', () => {
+                console.log('Consumer disconnected');
+            });
+        });
+    }
+    parseMessage(message) {
+        if (!message.value) {
+            throw new Error('Message value is null');
+        }
+        const data = message.value.toString('utf8');
+        return JSON.parse(data);
+    }
+    disconnect() {
+        return __awaiter(this, void 0, void 0, function* () {
+            yield this.consumer.disconnect();
+            console.log('Kafka consumer disconnected');
+        });
+    }
+    // Manual commit if needed (set autoCommit: false in consumer.run())
+    commitOffsets(offsets) {
+        return __awaiter(this, void 0, void 0, function* () {
+            yield this.consumer.commitOffsets(offsets);
+        });
+    }
+}
+exports.KafkaListener = KafkaListener;
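For context, here is how the compiled listener might be wired up at service start-up. This is a sketch only: the client id and broker address are placeholders, and UserCreatedListener is the assumed subclass from the sketch after the base-listener.d.ts diff above.

import { Kafka } from 'kafkajs';
// UserCreatedListener: see the subclass sketch after the base-listener.d.ts diff.

const kafka = new Kafka({
    clientId: 'user-service',   // placeholder client id
    brokers: ['kafka:9092'],    // placeholder broker address
});

const listener = new UserCreatedListener(kafka);

(async () => {
    await listener.listen();    // connects, subscribes to 'user:created', starts consumer.run()
    process.on('SIGTERM', async () => {
        await listener.disconnect();  // graceful consumer shutdown
        process.exit(0);
    });
})();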
package/build/events/base-publisher.d.ts
ADDED
@@ -0,0 +1,16 @@
+import { Producer, RecordMetadata } from 'kafkajs';
+import { Subjects } from './subjects';
+interface Event {
+    subject: Subjects;
+    data: any;
+}
+export declare abstract class Publisher<T extends Event> {
+    abstract subject: T['subject'];
+    protected producer: Producer;
+    constructor(producer: Producer);
+    publish(data: T['data']): Promise<RecordMetadata[]>;
+    publishBatch(dataArray: T['data'][]): Promise<RecordMetadata[]>;
+    publishWithKey(data: T['data'], key: string): Promise<RecordMetadata[]>;
+    disconnect(): Promise<void>;
+}
+export {};
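A publisher subclass only needs to pin the subject; the typed publish methods come from the base class. A minimal sketch, reusing the same illustrative UserCreatedEvent shape assumed in the listener sketch above:

import { Publisher, Subjects } from '@car-parts/common';

// Illustrative event shape (the real one is in ./events/user-created-event).
interface UserCreatedEvent {
    subject: Subjects.UserCreated;
    data: { id: string; email: string };
}

class UserCreatedPublisher extends Publisher<UserCreatedEvent> {
    subject: Subjects.UserCreated = Subjects.UserCreated;
}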
package/build/events/base-publisher.js
ADDED
@@ -0,0 +1,91 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Publisher = void 0;
+class Publisher {
+    constructor(producer) {
+        this.producer = producer;
+    }
+    publish(data) {
+        return __awaiter(this, void 0, void 0, function* () {
+            try {
+                const result = yield this.producer.send({
+                    topic: this.subject,
+                    messages: [
+                        {
+                            value: JSON.stringify(data),
+                            // Optional: add key for partitioning
+                            // key: data.id,
+                            // Optional: add headers
+                            // headers: {
+                            //     'correlation-id': 'some-id',
+                            // },
+                        },
+                    ],
+                });
+                console.log('Event published to topic:', this.subject);
+                return result;
+            }
+            catch (err) {
+                console.error('Error publishing event:', err);
+                throw err;
+            }
+        });
+    }
+    // Publish multiple messages at once (batch)
+    publishBatch(dataArray) {
+        return __awaiter(this, void 0, void 0, function* () {
+            try {
+                const result = yield this.producer.send({
+                    topic: this.subject,
+                    messages: dataArray.map((data) => ({
+                        value: JSON.stringify(data),
+                    })),
+                });
+                console.log(`${dataArray.length} events published to topic:`, this.subject);
+                return result;
+            }
+            catch (err) {
+                console.error('Error publishing batch events:', err);
+                throw err;
+            }
+        });
+    }
+    // Publish with custom partition key
+    publishWithKey(data, key) {
+        return __awaiter(this, void 0, void 0, function* () {
+            try {
+                const result = yield this.producer.send({
+                    topic: this.subject,
+                    messages: [
+                        {
+                            key: key,
+                            value: JSON.stringify(data),
+                        },
+                    ],
+                });
+                console.log('Event published to topic:', this.subject, 'with key:', key);
+                return result;
+            }
+            catch (err) {
+                console.error('Error publishing event:', err);
+                throw err;
+            }
+        });
+    }
+    disconnect() {
+        return __awaiter(this, void 0, void 0, function* () {
+            yield this.producer.disconnect();
+            console.log('Kafka producer disconnected');
+        });
+    }
+}
+exports.Publisher = Publisher;
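Note that the class expects an already-connected Producer: it calls producer.send() and producer.disconnect() but never producer.connect(). A usage sketch building on the UserCreatedPublisher sketch above, with placeholder client id and broker address:

import { Kafka } from 'kafkajs';
// UserCreatedPublisher: see the subclass sketch after the base-publisher.d.ts diff.

const kafka = new Kafka({ clientId: 'auth-service', brokers: ['kafka:9092'] }); // placeholders
const producer = kafka.producer();

(async () => {
    await producer.connect();   // connect before handing the producer to the Publisher
    const publisher = new UserCreatedPublisher(producer);
    await publisher.publish({ id: 'abc', email: 'a@b.c' });
    // Keyed publish keeps all events for one user on the same partition:
    await publisher.publishWithKey({ id: 'abc', email: 'a@b.c' }, 'abc');
    await publisher.disconnect();   // calls producer.disconnect()
})();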
package/build/events/subjects.js
ADDED
@@ -0,0 +1,8 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Subjects = void 0;
+var Subjects;
+(function (Subjects) {
+    Subjects["UserCreated"] = "user:created";
+})(Subjects || (exports.Subjects = Subjects = {}));
+// interface to make sure event names are consistent
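The Subjects enum ties topic names to event types; user-created-event (added in this version but not displayed in this diff) presumably pairs Subjects.UserCreated with a concrete payload. A hypothetical sketch of that pattern, with an assumed payload shape:

import { Subjects } from './subjects';

// Hypothetical -- the actual declaration lives in
// package/build/events/user-created-event.d.ts, which this diff does not show.
export interface UserCreatedEvent {
    subject: Subjects.UserCreated;
    data: {
        id: string;
        email: string;
    };
}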
package/build/index.d.ts
CHANGED
@@ -10,3 +10,7 @@ export * from './middlewares/error-handler';
 export * from './middlewares/require-auth';
 export * from './middlewares/validate-request';
 export * from './middlewares/restrinct-to';
+export * from './events/base-listener';
+export * from './events/base-publisher';
+export * from './events/subjects';
+export * from './events/user-created-event';
package/build/index.js
CHANGED
@@ -26,3 +26,7 @@ __exportStar(require("./middlewares/error-handler"), exports);
 __exportStar(require("./middlewares/require-auth"), exports);
 __exportStar(require("./middlewares/validate-request"), exports);
 __exportStar(require("./middlewares/restrinct-to"), exports);
+__exportStar(require("./events/base-listener"), exports);
+__exportStar(require("./events/base-publisher"), exports);
+__exportStar(require("./events/subjects"), exports);
+__exportStar(require("./events/user-created-event"), exports);
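With the four new barrel exports in place, consuming services can import the event tooling straight from the package root:

import { KafkaListener, Publisher, Subjects } from '@car-parts/common';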
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@car-parts/common",
-  "version": "1.0.2",
+  "version": "1.0.4",
   "main": "./build/index.js",
   "types": "./build/index.d.ts",
   "files": [
@@ -28,6 +28,7 @@
     "express": "^5.1.0",
     "express-validator": "^7.2.1",
     "jsonwebtoken": "^9.0.2",
+    "kafkajs": "^2.2.4",
     "node-nats-streaming": "^0.3.2"
   }
 }
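Because kafkajs is now a regular dependency (alongside the existing node-nats-streaming), upgrading a consuming service with `npm install @car-parts/common@1.0.4` should pull kafkajs in automatically, with no separate install step.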