@lucaapp/service-utils 1.4.0 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +41 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +1 -0
- package/dist/lib/kafka/events/wsEvent.d.ts +5 -0
- package/dist/lib/kafka/events/wsEvent.js +2 -0
- package/dist/lib/kafka/events.d.ts +16 -6
- package/dist/lib/kafka/events.js +9 -3
- package/dist/lib/kafka/kafkaClient.d.ts +6 -5
- package/dist/lib/kafka/kafkaClient.js +29 -11
- package/dist/lib/kafka/types.d.ts +3 -5
- package/dist/lib/serviceIdentity/service.d.ts +2 -1
- package/dist/lib/serviceIdentity/service.js +1 -0
- package/dist/lib/wsEvent/index.d.ts +4 -0
- package/dist/lib/wsEvent/index.js +24 -0
- package/package.json +4 -2
package/README.md
ADDED

@@ -0,0 +1,41 @@
+## service-utils
+
+`service-utils` aims at extracting common functionality shared by most/all backend services in order to reduce code duplication.
+Future use-cases should include logging, inter-service communication, metrics, etc.
+
+#### Development
+
+Development of the `service-utils` package takes place on your local machine, not within the Docker context.
+Therefore, some adaptations and preparations are necessary.
+Follow these steps for the initial setup:
+
+* Navigate to `packages/service-utils`
+* Run `yarn link`
+* Navigate to `services/backend`
+* Run `yarn link @lucaapp/service-utils`
+
+This will connect your local sources for `@lucaapp/service-utils`.
+When making changes within the package, make sure to run the `build` target within `packages/service-utils/package.json` to make the changes available to the `backend` service.
+
+After these preparations, run `package-development` from `services/backend/package.json`.
+At least the following environment variables should be in place when running the target, so that your local `backend` setup uses the proper endpoints:
+
+```shell
+DB_HOSTNAME=localhost
+KAFKA_BROKER=localhost:9094
+REDIS_HOSTNAME=localhost
+```
+
+This will utilize the linked package and run `backend` locally.
+Make sure your Docker stack has at least `database`, `kafka` and `redis`, but *not* `backend`, up and running.
+
+You should now be able to develop changes for `@lucaapp/service-utils`.
+
+#### Packaging
+
+A new package of `@lucaapp/service-utils` is published automatically when the CI pipeline runs on `dev`.
+See `./gitlab-ci/publish[.template].yml` for reference.
+
+Make sure to follow [semantic-release](https://www.npmjs.com/package/semantic-release) conventions when writing commit messages.
+
+Depending on the semantic-release indicators used, a new version of `@lucaapp/service-utils` will be made available and tagged within the luca-web repository.
package/dist/index.d.ts
CHANGED
package/dist/index.js
CHANGED

@@ -17,3 +17,4 @@ Object.defineProperty(exports, "__esModule", { value: true });
 __exportStar(require("./lib/kafka"), exports);
 __exportStar(require("./lib/serviceIdentity"), exports);
 __exportStar(require("./lib/metrics"), exports);
+__exportStar(require("./lib/wsEvent"), exports);

package/dist/lib/kafka/events.d.ts
CHANGED

@@ -1,21 +1,31 @@
 import type { Payment } from './events/payment';
 import type { Consumer } from './events/consumer';
 import type { Operator } from './events/operator';
+import type { WsEvent } from './events/wsEvent';
 import { Service } from '../serviceIdentity';
 declare enum KafkaTopic {
-    PAYMENTS = "
-    CONSUMERS = "
-    OPERATORS = "
+    PAYMENTS = "payments",
+    CONSUMERS = "consumers",
+    OPERATORS = "operators",
+    WS_EVENT_backend = "wsevent_backend",
+    WS_EVENT_backend_pay = "wsevent_backend-pay",
+    WS_EVENT_backend_pos = "wsevent_backend-pos"
 }
 declare type MessageFormats = {
     [KafkaTopic.PAYMENTS]: Payment;
     [KafkaTopic.CONSUMERS]: Consumer;
     [KafkaTopic.OPERATORS]: Operator;
+    [KafkaTopic.WS_EVENT_backend]: WsEvent;
+    [KafkaTopic.WS_EVENT_backend_pay]: WsEvent;
+    [KafkaTopic.WS_EVENT_backend_pos]: WsEvent;
 };
 declare const MessageIssuer: {
-
-
-
+    payments: Service;
+    consumers: Service;
+    operators: Service;
+    wsevent_backend: Service;
+    "wsevent_backend-pay": Service;
+    "wsevent_backend-pos": Service;
 };
 export { KafkaTopic, MessageIssuer };
 export type { MessageFormats };
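
The new `MessageFormats` entries tie each `wsevent_*` topic to the `WsEvent` payload. Below is a minimal sketch of how this indexed mapping surfaces at compile time; it is not package code, and the `WsEvent` shape is a stand-in assumed from the `emitWebsocketEvent` source further down (`./events/wsEvent.d.ts` is not expanded in this diff):

```typescript
// Stand-in types mirroring the declarations above (assumed shapes).
type WsEvent = { subId: string; data: unknown };

enum KafkaTopic {
  WS_EVENT_backend = 'wsevent_backend',
  WS_EVENT_backend_pay = 'wsevent_backend-pay',
}

type MessageFormats = {
  [KafkaTopic.WS_EVENT_backend]: WsEvent;
  [KafkaTopic.WS_EVENT_backend_pay]: WsEvent;
};

// Indexed access resolves the payload type from the chosen topic:
function handleEntity<T extends KafkaTopic>(topic: T, entity: MessageFormats[T]): void {
  console.log(topic, entity.subId); // both mapped topics carry WsEvent
}

handleEntity(KafkaTopic.WS_EVENT_backend, { subId: 'abc', data: { ok: true } });
```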
package/dist/lib/kafka/events.js
CHANGED

@@ -4,14 +4,20 @@ exports.MessageIssuer = exports.KafkaTopic = void 0;
 const serviceIdentity_1 = require("../serviceIdentity");
 var KafkaTopic;
 (function (KafkaTopic) {
-    KafkaTopic["PAYMENTS"] = "
-    KafkaTopic["CONSUMERS"] = "
-    KafkaTopic["OPERATORS"] = "
+    KafkaTopic["PAYMENTS"] = "payments";
+    KafkaTopic["CONSUMERS"] = "consumers";
+    KafkaTopic["OPERATORS"] = "operators";
+    KafkaTopic["WS_EVENT_backend"] = "wsevent_backend";
+    KafkaTopic["WS_EVENT_backend_pay"] = "wsevent_backend-pay";
+    KafkaTopic["WS_EVENT_backend_pos"] = "wsevent_backend-pos";
 })(KafkaTopic || (KafkaTopic = {}));
 exports.KafkaTopic = KafkaTopic;
 const MessageIssuer = {
     [KafkaTopic.PAYMENTS]: serviceIdentity_1.Service.BACKEND_PAY,
     [KafkaTopic.CONSUMERS]: serviceIdentity_1.Service.BACKEND,
     [KafkaTopic.OPERATORS]: serviceIdentity_1.Service.BACKEND,
+    [KafkaTopic.WS_EVENT_backend]: serviceIdentity_1.Service.PUBSUB,
+    [KafkaTopic.WS_EVENT_backend_pay]: serviceIdentity_1.Service.PUBSUB,
+    [KafkaTopic.WS_EVENT_backend_pos]: serviceIdentity_1.Service.PUBSUB,
 };
 exports.MessageIssuer = MessageIssuer;
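
The compiled `MessageIssuer` table pins `Service.PUBSUB` as the issuer of all three `wsevent_*` topics. Purely as an illustration (the package's actual check lives in the private `verifySignature`, whose body this diff does not show), such a table lets a consumer reject records from an unexpected producer:

```typescript
// Illustrative stand-ins; the real Service and KafkaTopic enums come from
// ../serviceIdentity and ./events respectively.
enum Service {
  BACKEND = 'backend',
  PUBSUB = 'pubsub',
}

enum KafkaTopic {
  CONSUMERS = 'consumers',
  WS_EVENT_backend = 'wsevent_backend',
}

const MessageIssuer: Record<KafkaTopic, Service> = {
  [KafkaTopic.CONSUMERS]: Service.BACKEND,
  [KafkaTopic.WS_EVENT_backend]: Service.PUBSUB,
};

// Reject records that claim an issuer other than the one pinned per topic:
function assertExpectedIssuer(topic: KafkaTopic, claimedIssuer: Service): void {
  if (MessageIssuer[topic] !== claimedIssuer) {
    throw new Error(`unexpected issuer "${claimedIssuer}" for topic "${topic}"`);
  }
}
```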

package/dist/lib/kafka/kafkaClient.d.ts
CHANGED

@@ -1,7 +1,8 @@
 import { Consumer } from 'kafkajs';
 import { Logger } from 'pino';
 import { ServiceIdentity } from '../serviceIdentity';
-import type { EventPayloadHandler, KafkaConfiguration, KafkaEvent
+import type { EventPayloadHandler, KafkaConfiguration, KafkaEvent } from './types';
+import { KafkaTopic } from './events';
 declare class KafkaClient {
     private readonly environment;
     private readonly kafkaClient;

@@ -9,8 +10,8 @@ declare class KafkaClient {
     private readonly topicSecrets;
     private readonly admin;
     private readonly producer;
-
-    constructor(parentLogger: Logger, kafkaConfig: KafkaConfiguration, topicSecrets: Record<
+    readonly serviceIdentity: ServiceIdentity;
+    constructor(parentLogger: Logger, kafkaConfig: KafkaConfiguration, topicSecrets: Record<KafkaTopic, string | null>, serviceIdentity: ServiceIdentity);
     connect: () => Promise<void>;
     private getTopic;
     private getTopicSecret;

@@ -20,7 +21,7 @@ declare class KafkaClient {
     private verifySignature;
     private parseValue;
     private ensureTopics;
-    consume: <T extends
-    produce: <T extends
+    consume: <T extends KafkaTopic>(groupId: string, kafkaTopic: T, handler: EventPayloadHandler<T>, fromBeginning?: boolean) => Promise<Consumer>;
+    produce: <T extends KafkaTopic>(kafkaTopic: T, key: string, value: KafkaEvent<T>) => Promise<void>;
 }
 export { KafkaClient };
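
`consume` and `produce` are now generic over `KafkaTopic`, so the topic passed to `consume` narrows the handler's `message.value` to the matching `KafkaEvent<T>`. A hypothetical usage sketch follows; the root import path, group id, and handler body are assumptions, not package documentation:

```typescript
import type { Consumer } from 'kafkajs';
// Assumes the package root re-exports ./lib/kafka, as dist/index.js suggests:
import { KafkaClient, KafkaTopic } from '@lucaapp/service-utils';

async function subscribeToWsEvents(kafkaClient: KafkaClient): Promise<Consumer> {
  // message.value is typed as KafkaEvent<KafkaTopic.WS_EVENT_backend>,
  // so its entity resolves to WsEvent for this topic.
  return kafkaClient.consume('ws-relay', KafkaTopic.WS_EVENT_backend, async (message) => {
    const { id, type, entity } = message.value;
    console.log(id, type, entity);
  });
}
```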

package/dist/lib/kafka/kafkaClient.js
CHANGED

@@ -38,11 +38,21 @@ const messageProducedSizeCounter = new metrics_1.metricsClient.Histogram({
     labelNames: ['topic'],
     buckets: [64, 128, 256, 512, 1024, 2048, 4096, 8182],
 });
+const messageProduceError = new metrics_1.metricsClient.Counter({
+    name: 'kafka_message_produce_error_count',
+    help: 'Total number of errors during message produce',
+    labelNames: ['topic'],
+});
 const messageConsumedCounter = new metrics_1.metricsClient.Counter({
     name: 'kafka_message_consume_count',
     help: 'Total number of messages consumed',
     labelNames: ['topic', 'groupId'],
 });
+const messageConsumedErrorCounter = new metrics_1.metricsClient.Counter({
+    name: 'kafka_message_consume_error_count',
+    help: 'Total number of errors during message consume',
+    labelNames: ['topic', 'groupId'],
+});
 const messageAcknowledgedCounter = new metrics_1.metricsClient.Counter({
     name: 'kafka_message_consume_ack_count',
     help: 'Total number of messages acknowledged',

@@ -153,17 +163,24 @@ class KafkaClient {
         await consumer.run({
             autoCommit: true,
             eachMessage: async ({ message }) => {
-
-
-
-
-
-
-
-                    .
-
-
-
+                try {
+                    messageConsumedCounter
+                        .labels({ topic: kafkaTopic.valueOf(), groupId })
+                        .inc();
+                    const decryptedValue = await this.decryptValue(kafkaTopic, message.value);
+                    await this.verifySignature(kafkaTopic, decryptedValue, message.headers);
+                    const value = this.parseValue(decryptedValue);
+                    this.logger.debug({ key: message.key, value, timestamp: message.timestamp }, 'Record received');
+                    await handler({ ...message, value });
+                    messageAcknowledgedCounter
+                        .labels({ topic: kafkaTopic.valueOf(), groupId })
+                        .inc();
+                }
+                catch (error) {
+                    messageConsumedErrorCounter
+                        .labels({ topic: kafkaTopic.valueOf(), groupId })
+                        .inc();
+                }
             },
         });
         return consumer;

@@ -195,6 +212,7 @@ class KafkaClient {
                 .observe(Buffer.byteLength(encryptedValue));
         }
         catch (error) {
+            messageProduceError.labels({ topic }).inc();
             throw (0, utils_1.logAndGetError)(this.logger, `Could not create producer for topic=${topic}`, error);
         }
     };
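
Both new counters follow the standard prom-client pattern: a `Counter` with fixed `labelNames`, incremented via the object form of `.labels()`. A standalone sketch of that pattern is below (metric name and labels copied from the diff; the wrapper function is illustrative). Worth noting: in the diff's `eachMessage` handler the error is counted but not rethrown, so with `autoCommit: true` a failing message is still committed rather than redelivered.

```typescript
import { Counter } from 'prom-client';

const messageConsumedErrorCounter = new Counter({
  name: 'kafka_message_consume_error_count',
  help: 'Total number of errors during message consume',
  labelNames: ['topic', 'groupId'],
});

// Illustrative wrapper mirroring the diff's try/catch shape:
async function countConsumeErrors(topic: string, groupId: string, work: () => Promise<void>): Promise<void> {
  try {
    await work();
  } catch (error) {
    // Count the failure; like the diff, swallow the error so the
    // surrounding eachMessage callback still resolves.
    messageConsumedErrorCounter.labels({ topic, groupId }).inc();
  }
}
```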

package/dist/lib/kafka/types.d.ts
CHANGED

@@ -1,9 +1,7 @@
 import { KafkaTopic, MessageFormats } from './events';
 import { Environment } from '../serviceIdentity';
 import { KafkaMessage } from 'kafkajs';
-declare type
-declare type KafkaTopicType = typeof KafkaTopic[KafkaTopics];
-declare type KafkaEvent<T extends KafkaTopicType> = {
+declare type KafkaEvent<T extends KafkaTopic> = {
     id: string;
     type: 'create' | 'update' | 'destroy';
     entity: MessageFormats[T];

@@ -16,7 +14,7 @@ declare type KafkaConfiguration = {
     password?: string;
     ssl?: boolean;
 };
-declare type EventPayloadHandler<T extends
+declare type EventPayloadHandler<T extends KafkaTopic> = (message: Omit<KafkaMessage, 'value'> & {
     value: KafkaEvent<T>;
 }) => Promise<void>;
-export type {
+export type { KafkaEvent, KafkaConfiguration, EventPayloadHandler, };
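
`KafkaEvent` and `EventPayloadHandler` are now parameterized by `KafkaTopic` directly, replacing the removed `KafkaTopicType` indirection. Here is a sketch of a conforming handler, with the generics simplified to take the entity type directly and a stand-in `Payment` shape (`./events/payment` is not expanded in this diff):

```typescript
import type { KafkaMessage } from 'kafkajs';

type Payment = { amount: number }; // stand-in shape, assumed
type KafkaEvent<E> = { id: string; type: 'create' | 'update' | 'destroy'; entity: E };
type EventPayloadHandler<E> = (
  message: Omit<KafkaMessage, 'value'> & { value: KafkaEvent<E> }
) => Promise<void>;

// The handler receives the kafkajs message with `value` already parsed:
const onPayment: EventPayloadHandler<Payment> = async (message) => {
  console.log(message.key?.toString(), message.value.entity.amount);
};
```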

package/dist/lib/wsEvent/index.js
ADDED

@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.emitWebsocketEvent = exports.generateSubscriptionId = void 0;
+const uuid_1 = require("uuid");
+const crypto_1 = require("crypto");
+const generateSubscriptionId = () => {
+    return (0, crypto_1.randomBytes)(32).toString('base64');
+};
+exports.generateSubscriptionId = generateSubscriptionId;
+const emitWebsocketEvent = async (kafkaClient, subId, data) => {
+    const issuer = kafkaClient.serviceIdentity.identityName;
+    // using WS_EVENT_backend since all WS_EVENTs use same schema
+    const topic = `wsevent_${issuer}`;
+    const message = {
+        id: (0, uuid_1.v4)(),
+        type: 'create',
+        entity: {
+            subId,
+            data,
+        },
+    };
+    await kafkaClient.produce(topic, subId, message);
+};
+exports.emitWebsocketEvent = emitWebsocketEvent;
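
Tying the new module together: `generateSubscriptionId` returns 32 random bytes as base64, and `emitWebsocketEvent` wraps arbitrary data in a `create` event keyed by that id, produced on a topic derived from the calling service's `identityName`. A hypothetical caller (the payload shape is illustrative; both helpers are re-exported from the package root via the new `./lib/wsEvent` export):

```typescript
import { emitWebsocketEvent, generateSubscriptionId } from '@lucaapp/service-utils';
import type { KafkaClient } from '@lucaapp/service-utils';

async function notifySubscriber(kafkaClient: KafkaClient): Promise<string> {
  const subId = generateSubscriptionId(); // 32 random bytes, base64-encoded
  // Emits { id: uuid, type: 'create', entity: { subId, data } } on wsevent_<identityName>:
  await emitWebsocketEvent(kafkaClient, subId, { status: 'paymentConfirmed' });
  return subId;
}
```

Since the topic is `wsevent_${identityName}`, only services whose identity maps to one of the registered topics can emit this way; the three topics added above suggest `backend`, `backend-pay` and `backend-pos`.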
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@lucaapp/service-utils",
-  "version": "1.4.0",
+  "version": "1.5.0",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "files": [

@@ -21,6 +21,7 @@
     "@types/express": "4.17.13",
     "@types/jest": "^28.1.5",
     "@types/pino": "^7.0.5",
+    "@types/uuid": "^8.3.4",
     "@typescript-eslint/eslint-plugin": "^5.30.6",
     "@typescript-eslint/parser": "^5.30.6",
     "conventional-changelog-conventionalcommits": "^5.0.0",

@@ -40,6 +41,7 @@
     "jose": "4.9.2",
     "kafkajs": "2.1.0",
     "moment": "^2.29.4",
-    "prom-client": "14.1.0"
+    "prom-client": "14.1.0",
+    "uuid": "^9.0.0"
   }
 }