@rsdk/kafka.transport 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +63 -0
- package/dist/commit-offset.d.ts +9 -0
- package/dist/commit-offset.js +32 -0
- package/dist/commit-offset.js.map +1 -0
- package/dist/constants.d.ts +1 -0
- package/dist/constants.js +5 -0
- package/dist/constants.js.map +1 -0
- package/dist/decorators/consume.decorator.d.ts +2 -0
- package/dist/decorators/consume.decorator.js +27 -0
- package/dist/decorators/consume.decorator.js.map +1 -0
- package/dist/decorators/index.d.ts +2 -0
- package/dist/decorators/index.js +19 -0
- package/dist/decorators/index.js.map +1 -0
- package/dist/decorators/metadata.decorator.d.ts +10 -0
- package/dist/decorators/metadata.decorator.js +29 -0
- package/dist/decorators/metadata.decorator.js.map +1 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +11 -0
- package/dist/index.js.map +1 -0
- package/dist/kafka-errors.formatter.d.ts +5 -0
- package/dist/kafka-errors.formatter.js +14 -0
- package/dist/kafka-errors.formatter.js.map +1 -0
- package/dist/kafka.transport.d.ts +18 -0
- package/dist/kafka.transport.js +73 -0
- package/dist/kafka.transport.js.map +1 -0
- package/dist/module/kafka-consumed-health.indicator.d.ts +9 -0
- package/dist/module/kafka-consumed-health.indicator.js +40 -0
- package/dist/module/kafka-consumed-health.indicator.js.map +1 -0
- package/dist/module/kafka-transport.module.d.ts +2 -0
- package/dist/module/kafka-transport.module.js +22 -0
- package/dist/module/kafka-transport.module.js.map +1 -0
- package/dist/patched-server-kafka/events.deserializer.d.ts +21 -0
- package/dist/patched-server-kafka/events.deserializer.js +51 -0
- package/dist/patched-server-kafka/events.deserializer.js.map +1 -0
- package/dist/patched-server-kafka/patched-server-kafka.d.ts +27 -0
- package/dist/patched-server-kafka/patched-server-kafka.js +98 -0
- package/dist/patched-server-kafka/patched-server-kafka.js.map +1 -0
- package/package.json +27 -0
- package/src/commit-offset.ts +35 -0
- package/src/constants.ts +1 -0
- package/src/decorators/consume.decorator.ts +36 -0
- package/src/decorators/index.ts +2 -0
- package/src/decorators/metadata.decorator.ts +21 -0
- package/src/index.ts +3 -0
- package/src/kafka-errors.formatter.ts +12 -0
- package/src/kafka.transport.ts +93 -0
- package/src/module/kafka-consumed-health.indicator.ts +26 -0
- package/src/module/kafka-transport.module.ts +10 -0
- package/src/patched-server-kafka/events.deserializer.ts +57 -0
- package/src/patched-server-kafka/patched-server-kafka.ts +144 -0
- package/tsconfig.json +9 -0
package/CHANGELOG.md
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
# Change Log
|
|
2
|
+
|
|
3
|
+
All notable changes to this project will be documented in this file.
|
|
4
|
+
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
|
|
5
|
+
|
|
6
|
+
# [2.0.0](https://github.com/R-Vision/rsdk/compare/v1.0.12...v2.0.0) (2023-06-06)
|
|
7
|
+
|
|
8
|
+
### Bug Fixes
|
|
9
|
+
|
|
10
|
+
* cycle deps, remove outDir from base.json in tsconfig, rework interact with AutodocMetadata and RsdkMetadata, fix unprovided imports ([cc6434b](https://github.com/R-Vision/rsdk/commit/cc6434b9d165e570bdbb2baad76b00acd14577b2))
|
|
11
|
+
|
|
12
|
+
### Draft
|
|
13
|
+
|
|
14
|
+
* Pfm 303 static to meta migration ([#45](https://github.com/R-Vision/rsdk/issues/45)) ([d98e303](https://github.com/R-Vision/rsdk/commit/d98e3032f9b10446b478427d841b9209ed68fa2d))
|
|
15
|
+
|
|
16
|
+
### BREAKING CHANGES
|
|
17
|
+
|
|
18
|
+
* GrpcClientModule removed
|
|
19
|
+
new way for declaration with `GrpcServers.define()` and `@InjectGrpcClient`
|
|
20
|
+
|
|
21
|
+
* fix: rework interaction with metadata and generating modules from metadata
|
|
22
|
+
|
|
23
|
+
* refactor: destructring added
|
|
24
|
+
|
|
25
|
+
* docs: added docs for autodoc pre-commit hook
|
|
26
|
+
|
|
27
|
+
* chore: spell fix
|
|
28
|
+
|
|
29
|
+
* chore: some improves declaration
|
|
30
|
+
|
|
31
|
+
* chore: laconic naming for constants
|
|
32
|
+
|
|
33
|
+
* chore: remove useless
|
|
34
|
+
|
|
35
|
+
* chore: move file to suitable place
|
|
36
|
+
|
|
37
|
+
* chore: remove useless imports
|
|
38
|
+
|
|
39
|
+
* chore!: remove boostrap tasks from Makefile
|
|
40
|
+
|
|
41
|
+
* chore: rework test tasks
|
|
42
|
+
|
|
43
|
+
* feat: grpc client plugin useless
|
|
44
|
+
|
|
45
|
+
* chore: verbosed naming
|
|
46
|
+
|
|
47
|
+
* docs: improve verbosity
|
|
48
|
+
|
|
49
|
+
## 1.0.12 (2023-05-23)
|
|
50
|
+
|
|
51
|
+
### Bug Fixes
|
|
52
|
+
|
|
53
|
+
* remove private dependencies from `peerDependencies` section ([0bce6ff](https://github.com/R-Vision/rsdk/commit/0bce6ffb5b699e2ed1dc9aca77cbdbf085267ff7))
|
|
54
|
+
|
|
55
|
+
## 1.0.11 (2023-05-23)
|
|
56
|
+
|
|
57
|
+
**Note:** Version bump only for package @rsdk/kafka.transport
|
|
58
|
+
|
|
59
|
+
## 1.0.10 (2023-05-22)
|
|
60
|
+
|
|
61
|
+
### Bug Fixes
|
|
62
|
+
|
|
63
|
+
* add "access: public" to new packages ([04d5983](https://github.com/R-Vision/rsdk/commit/04d5983cfa3e97c5d8219e07208a0499a8c6bc83))
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import type { KafkaContext } from '@nestjs/microservices';
|
|
2
|
+
/**
|
|
3
|
+
* Helper that allowes to commiting offsets with one line
|
|
4
|
+
* of code.
|
|
5
|
+
*
|
|
6
|
+
* @param context Kafka context from @Ctx
|
|
7
|
+
* @param offset New offset. If unset, next offset will be +1 of message offset.
|
|
8
|
+
*/
|
|
9
|
+
export declare const commitOffset: (context: KafkaContext, offset?: string) => Promise<void>;
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.commitOffset = void 0;
|
|
4
|
+
/**
|
|
5
|
+
* Calculate next Kafka offset
|
|
6
|
+
* @param context Kafka context from @Ctx
|
|
7
|
+
* @returns Offset +1
|
|
8
|
+
*/
|
|
9
|
+
const getNextOffset = (context) => {
|
|
10
|
+
const { offset } = context.getMessage();
|
|
11
|
+
const currentOffset = BigInt(offset);
|
|
12
|
+
const nextOffset = currentOffset + 1n;
|
|
13
|
+
return nextOffset.toString();
|
|
14
|
+
};
|
|
15
|
+
/**
|
|
16
|
+
* Helper that allowes to commiting offsets with one line
|
|
17
|
+
* of code.
|
|
18
|
+
*
|
|
19
|
+
* @param context Kafka context from @Ctx
|
|
20
|
+
* @param offset New offset. If unset, next offset will be +1 of message offset.
|
|
21
|
+
*/
|
|
22
|
+
const commitOffset = async (context, offset) => {
|
|
23
|
+
const partition = context.getPartition();
|
|
24
|
+
const topic = context.getTopic();
|
|
25
|
+
await context
|
|
26
|
+
.getConsumer()
|
|
27
|
+
.commitOffsets([
|
|
28
|
+
{ topic, partition, offset: offset || getNextOffset(context) },
|
|
29
|
+
]);
|
|
30
|
+
};
|
|
31
|
+
exports.commitOffset = commitOffset;
|
|
32
|
+
//# sourceMappingURL=commit-offset.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"commit-offset.js","sourceRoot":"","sources":["../src/commit-offset.ts"],"names":[],"mappings":";;;AAEA;;;;GAIG;AACH,MAAM,aAAa,GAAG,CAAC,OAAqB,EAAU,EAAE;IACtD,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC;IACxC,MAAM,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC;IACrC,MAAM,UAAU,GAAG,aAAa,GAAG,EAAE,CAAC;IAEtC,OAAO,UAAU,CAAC,QAAQ,EAAE,CAAC;AAC/B,CAAC,CAAC;AAEF;;;;;;GAMG;AACI,MAAM,YAAY,GAAG,KAAK,EAC/B,OAAqB,EACrB,MAAe,EACA,EAAE;IACjB,MAAM,SAAS,GAAG,OAAO,CAAC,YAAY,EAAE,CAAC;IACzC,MAAM,KAAK,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;IAEjC,MAAM,OAAO;SACV,WAAW,EAAE;SACb,aAAa,CAAC;QACb,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,IAAI,aAAa,CAAC,OAAO,CAAC,EAAE;KAC/D,CAAC,CAAC;AACP,CAAC,CAAC;AAZW,QAAA,YAAY,gBAYvB"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare const KAFKA_CONSUME_RSDK_METADATA_SCOPE = "kafka-consume";
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../src/constants.ts"],"names":[],"mappings":";;;AAAa,QAAA,iCAAiC,GAAG,eAAe,CAAC"}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.Consume = void 0;
|
|
4
|
+
const common_1 = require("@nestjs/common");
|
|
5
|
+
const microservices_1 = require("@nestjs/microservices");
|
|
6
|
+
const kafka_common_1 = require("@rsdk/kafka.common");
|
|
7
|
+
const metadata_1 = require("@rsdk/metadata");
|
|
8
|
+
const lodash_1 = require("lodash");
|
|
9
|
+
const constants_1 = require("../constants");
|
|
10
|
+
const KafkaConsumeMetadata = (eventType) => (target) => {
|
|
11
|
+
const rsdkMetadata = new metadata_1.RsdkMetadata(target.constructor, constants_1.KAFKA_CONSUME_RSDK_METADATA_SCOPE);
|
|
12
|
+
rsdkMetadata.add({
|
|
13
|
+
partitionKeyField: eventType.$partitionKeyField,
|
|
14
|
+
topicName: (0, kafka_common_1.getTopicName)(eventType),
|
|
15
|
+
group: eventType.$group,
|
|
16
|
+
type: kafka_common_1.KafkaTopicType.Consume,
|
|
17
|
+
eventType: (0, lodash_1.omit)(eventType, 'toJSON', 'encode', 'decode'),
|
|
18
|
+
});
|
|
19
|
+
};
|
|
20
|
+
const Consume = (eventType) => {
|
|
21
|
+
return (target, propertyKey, descriptor) => {
|
|
22
|
+
(0, microservices_1.EventPattern)(eventType)(target, propertyKey, descriptor);
|
|
23
|
+
(0, common_1.applyDecorators)(KafkaConsumeMetadata(eventType))(target, propertyKey, descriptor);
|
|
24
|
+
};
|
|
25
|
+
};
|
|
26
|
+
exports.Consume = Consume;
|
|
27
|
+
//# sourceMappingURL=consume.decorator.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"consume.decorator.js","sourceRoot":"","sources":["../../src/decorators/consume.decorator.ts"],"names":[],"mappings":";;;AAAA,2CAAiD;AACjD,yDAAqD;AAErD,qDAAkE;AAClE,6CAA8C;AAC9C,mCAA8B;AAE9B,4CAAiE;AAEjE,MAAM,oBAAoB,GACxB,CAAI,SAAuB,EAAmB,EAAE,CAChD,CAAC,MAAM,EAAE,EAAE;IACT,MAAM,YAAY,GAAG,IAAI,uBAAY,CACnC,MAAM,CAAC,WAAW,EAClB,6CAAiC,CAClC,CAAC;IAEF,YAAY,CAAC,GAAG,CAAC;QACf,iBAAiB,EAAE,SAAS,CAAC,kBAAkB;QAC/C,SAAS,EAAE,IAAA,2BAAY,EAAC,SAAS,CAAC;QAClC,KAAK,EAAE,SAAS,CAAC,MAAM;QACvB,IAAI,EAAE,6BAAc,CAAC,OAAO;QAC5B,SAAS,EAAE,IAAA,aAAI,EAAC,SAAS,EAAE,QAAQ,EAAE,QAAQ,EAAE,QAAQ,CAAC;KACzD,CAAC,CAAC;AACL,CAAC,CAAC;AAEG,MAAM,OAAO,GAAG,CAAI,SAAuB,EAAmB,EAAE;IACrE,OAAO,CAAC,MAAM,EAAE,WAAW,EAAE,UAAU,EAAE,EAAE;QACzC,IAAA,4BAAY,EAAC,SAAS,CAAC,CAAC,MAAM,EAAE,WAAW,EAAE,UAAU,CAAC,CAAC;QACzD,IAAA,wBAAe,EAAC,oBAAoB,CAAC,SAAS,CAAC,CAAC,CAC9C,MAAM,EACN,WAAW,EACX,UAAU,CACX,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC,CAAC;AATW,QAAA,OAAO,WASlB"}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
|
14
|
+
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
|
15
|
+
};
|
|
16
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
17
|
+
__exportStar(require("./consume.decorator"), exports);
|
|
18
|
+
__exportStar(require("./metadata.decorator"), exports);
|
|
19
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/decorators/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,sDAAoC;AACpC,uDAAqC"}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import type { PipeTransform } from '@nestjs/common';
|
|
2
|
+
import type { KafkaContext } from '@nestjs/microservices';
|
|
3
|
+
export declare class MetadataPipe implements PipeTransform {
|
|
4
|
+
transform(value: KafkaContext): Record<string, any> | null;
|
|
5
|
+
}
|
|
6
|
+
/**
|
|
7
|
+
* Extract metadata from event
|
|
8
|
+
* @returns Record<string, any> or null
|
|
9
|
+
*/
|
|
10
|
+
export declare const Metadata: () => ParameterDecorator;
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
3
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
4
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
5
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
6
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
7
|
+
};
|
|
8
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
9
|
+
exports.Metadata = exports.MetadataPipe = void 0;
|
|
10
|
+
const common_1 = require("@nestjs/common");
|
|
11
|
+
const rpc_paramtype_enum_1 = require("@nestjs/microservices/enums/rpc-paramtype.enum");
|
|
12
|
+
const param_utils_1 = require("@nestjs/microservices/utils/param.utils");
|
|
13
|
+
let MetadataPipe = class MetadataPipe {
|
|
14
|
+
transform(value) {
|
|
15
|
+
const { headers } = value.getMessage();
|
|
16
|
+
return headers?.metadata || null;
|
|
17
|
+
}
|
|
18
|
+
};
|
|
19
|
+
MetadataPipe = __decorate([
|
|
20
|
+
(0, common_1.Injectable)()
|
|
21
|
+
], MetadataPipe);
|
|
22
|
+
exports.MetadataPipe = MetadataPipe;
|
|
23
|
+
/**
|
|
24
|
+
* Extract metadata from event
|
|
25
|
+
* @returns Record<string, any> or null
|
|
26
|
+
*/
|
|
27
|
+
const Metadata = () => (0, param_utils_1.createRpcParamDecorator)(rpc_paramtype_enum_1.RpcParamtype.CONTEXT)(MetadataPipe);
|
|
28
|
+
exports.Metadata = Metadata;
|
|
29
|
+
//# sourceMappingURL=metadata.decorator.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"metadata.decorator.js","sourceRoot":"","sources":["../../src/decorators/metadata.decorator.ts"],"names":[],"mappings":";;;;;;;;;AACA,2CAA4C;AAE5C,uFAA8E;AAC9E,yEAAkF;AAGlF,IAAa,YAAY,GAAzB,MAAa,YAAY;IACvB,SAAS,CAAC,KAAmB;QAC3B,MAAM,EAAE,OAAO,EAAE,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC;QAEvC,OAAQ,OAAO,EAAE,QAAgB,IAAI,IAAI,CAAC;IAC5C,CAAC;CACF,CAAA;AANY,YAAY;IADxB,IAAA,mBAAU,GAAE;GACA,YAAY,CAMxB;AANY,oCAAY;AAQzB;;;GAGG;AACI,MAAM,QAAQ,GAAG,GAAuB,EAAE,CAC/C,IAAA,qCAAuB,EAAC,iCAAY,CAAC,OAAO,CAAC,CAAC,YAAY,CAAC,CAAC;AADjD,QAAA,QAAQ,YACyC"}
|
package/dist/index.d.ts
ADDED
package/dist/index.js
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.commitOffset = exports.KafkaTransport = exports.Metadata = exports.Consume = void 0;
|
|
4
|
+
var decorators_1 = require("./decorators");
|
|
5
|
+
Object.defineProperty(exports, "Consume", { enumerable: true, get: function () { return decorators_1.Consume; } });
|
|
6
|
+
Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return decorators_1.Metadata; } });
|
|
7
|
+
var kafka_transport_1 = require("./kafka.transport");
|
|
8
|
+
Object.defineProperty(exports, "KafkaTransport", { enumerable: true, get: function () { return kafka_transport_1.KafkaTransport; } });
|
|
9
|
+
var commit_offset_1 = require("./commit-offset");
|
|
10
|
+
Object.defineProperty(exports, "commitOffset", { enumerable: true, get: function () { return commit_offset_1.commitOffset; } });
|
|
11
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,2CAAiD;AAAxC,qGAAA,OAAO,OAAA;AAAE,sGAAA,QAAQ,OAAA;AAC1B,qDAAmD;AAA1C,iHAAA,cAAc,OAAA;AACvB,iDAA+C;AAAtC,6GAAA,YAAY,OAAA"}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.KafkaErrorsFormatter = void 0;
|
|
4
|
+
const common_1 = require("@nestjs/common");
|
|
5
|
+
class KafkaErrorsFormatter {
|
|
6
|
+
match() {
|
|
7
|
+
return false;
|
|
8
|
+
}
|
|
9
|
+
format() {
|
|
10
|
+
throw new common_1.NotImplementedException();
|
|
11
|
+
}
|
|
12
|
+
}
|
|
13
|
+
exports.KafkaErrorsFormatter = KafkaErrorsFormatter;
|
|
14
|
+
//# sourceMappingURL=kafka-errors.formatter.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafka-errors.formatter.js","sourceRoot":"","sources":["../src/kafka-errors.formatter.ts"],"names":[],"mappings":";;;AAAA,2CAAyD;AAGzD,MAAa,oBAAoB;IAC/B,KAAK;QACH,OAAO,KAAK,CAAC;IACf,CAAC;IAED,MAAM;QACJ,MAAM,IAAI,gCAAuB,EAAE,CAAC;IACtC,CAAC;CACF;AARD,oDAQC"}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import type { MicroserviceOptions } from '@nestjs/microservices';
|
|
2
|
+
import type { ConfigContext, IErrorsFormatter, IErrorsTransformer, IMicroserviceTransport, NestModuleDefinitions } from '@rsdk/core';
|
|
3
|
+
export interface KafkaTransportOptions {
|
|
4
|
+
autoCommit?: boolean;
|
|
5
|
+
}
|
|
6
|
+
export declare class KafkaTransport implements IMicroserviceTransport {
|
|
7
|
+
private readonly options?;
|
|
8
|
+
private logger;
|
|
9
|
+
private brokers;
|
|
10
|
+
constructor(options?: KafkaTransportOptions | undefined);
|
|
11
|
+
modules(): NestModuleDefinitions;
|
|
12
|
+
createMicroserviceOptions(): MicroserviceOptions;
|
|
13
|
+
init(configContext: ConfigContext): void;
|
|
14
|
+
onStart(): void;
|
|
15
|
+
getProtocol(): string;
|
|
16
|
+
getErrorsFormatter(): IErrorsFormatter;
|
|
17
|
+
getErrorTransformers(): IErrorsTransformer[];
|
|
18
|
+
}
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.KafkaTransport = void 0;
|
|
4
|
+
const kafka_interface_1 = require("@nestjs/microservices/external/kafka.interface");
|
|
5
|
+
const manifest_1 = require("@rsdk/core/dist/manifest/manifest");
|
|
6
|
+
const kafka_common_1 = require("@rsdk/kafka.common");
|
|
7
|
+
const logging_1 = require("@rsdk/logging");
|
|
8
|
+
const kafka_transport_module_1 = require("./module/kafka-transport.module");
|
|
9
|
+
const patched_server_kafka_1 = require("./patched-server-kafka/patched-server-kafka");
|
|
10
|
+
const kafka_errors_formatter_1 = require("./kafka-errors.formatter");
|
|
11
|
+
// TODO: JsDoc
|
|
12
|
+
class KafkaTransport {
|
|
13
|
+
options;
|
|
14
|
+
logger = logging_1.LoggerFactory.create(KafkaTransport);
|
|
15
|
+
brokers;
|
|
16
|
+
constructor(options) {
|
|
17
|
+
this.options = options;
|
|
18
|
+
}
|
|
19
|
+
modules() {
|
|
20
|
+
return [kafka_transport_module_1.KafkaTransportModule];
|
|
21
|
+
}
|
|
22
|
+
createMicroserviceOptions() {
|
|
23
|
+
// TODO: нужен иной способ получения имени приложения
|
|
24
|
+
const { name } = manifest_1.Manifest.getData();
|
|
25
|
+
return {
|
|
26
|
+
strategy: new patched_server_kafka_1.PatchedServerKafka({
|
|
27
|
+
client: {
|
|
28
|
+
brokers: this.brokers,
|
|
29
|
+
clientId: `${name}-${process.pid}`,
|
|
30
|
+
logCreator: (0, kafka_common_1.createLoggerAdapter)(logging_1.LoggerFactory.create('NativeKafkaProducer')),
|
|
31
|
+
logLevel: kafka_interface_1.logLevel.DEBUG,
|
|
32
|
+
},
|
|
33
|
+
consumer: {
|
|
34
|
+
allowAutoTopicCreation: false,
|
|
35
|
+
groupId: name,
|
|
36
|
+
retry: {
|
|
37
|
+
retries: Infinity,
|
|
38
|
+
},
|
|
39
|
+
},
|
|
40
|
+
parser: {
|
|
41
|
+
keepBinary: true,
|
|
42
|
+
},
|
|
43
|
+
producer: {
|
|
44
|
+
allowAutoTopicCreation: false,
|
|
45
|
+
retry: {
|
|
46
|
+
retries: Infinity,
|
|
47
|
+
},
|
|
48
|
+
},
|
|
49
|
+
run: {
|
|
50
|
+
autoCommit: this.options?.autoCommit ?? true,
|
|
51
|
+
},
|
|
52
|
+
}),
|
|
53
|
+
};
|
|
54
|
+
}
|
|
55
|
+
init(configContext) {
|
|
56
|
+
const { brokers } = configContext.resolve(kafka_common_1.KafkaConfig);
|
|
57
|
+
this.brokers = brokers;
|
|
58
|
+
}
|
|
59
|
+
onStart() {
|
|
60
|
+
this.logger.info(`Kafka brokers: ${this.brokers.join(',')}`);
|
|
61
|
+
}
|
|
62
|
+
getProtocol() {
|
|
63
|
+
return 'kafka';
|
|
64
|
+
}
|
|
65
|
+
getErrorsFormatter() {
|
|
66
|
+
return new kafka_errors_formatter_1.KafkaErrorsFormatter();
|
|
67
|
+
}
|
|
68
|
+
getErrorTransformers() {
|
|
69
|
+
return [];
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
exports.KafkaTransport = KafkaTransport;
|
|
73
|
+
//# sourceMappingURL=kafka.transport.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafka.transport.js","sourceRoot":"","sources":["../src/kafka.transport.ts"],"names":[],"mappings":";;;AACA,oFAA0E;AAQ1E,gEAA6D;AAC7D,qDAAsE;AACtE,2CAA8C;AAE9C,4EAAuE;AACvE,sFAAiF;AACjF,qEAAgE;AAQhE,cAAc;AACd,MAAa,cAAc;IAII;IAHrB,MAAM,GAAG,uBAAa,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;IAC9C,OAAO,CAAW;IAE1B,YAA6B,OAA+B;QAA/B,YAAO,GAAP,OAAO,CAAwB;IAAG,CAAC;IAEhE,OAAO;QACL,OAAO,CAAC,6CAAoB,CAAC,CAAC;IAChC,CAAC;IAED,yBAAyB;QACvB,qDAAqD;QACrD,MAAM,EAAE,IAAI,EAAE,GAAG,mBAAQ,CAAC,OAAO,EAAE,CAAC;QAEpC,OAAO;YACL,QAAQ,EAAE,IAAI,yCAAkB,CAAC;gBAC/B,MAAM,EAAE;oBACN,OAAO,EAAE,IAAI,CAAC,OAAO;oBACrB,QAAQ,EAAE,GAAG,IAAI,IAAI,OAAO,CAAC,GAAG,EAAE;oBAClC,UAAU,EAAE,IAAA,kCAAmB,EAC7B,uBAAa,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAC5C;oBACD,QAAQ,EAAE,0BAAQ,CAAC,KAAK;iBACzB;gBACD,QAAQ,EAAE;oBACR,sBAAsB,EAAE,KAAK;oBAC7B,OAAO,EAAE,IAAI;oBACb,KAAK,EAAE;wBACL,OAAO,EAAE,QAAQ;qBAClB;iBACF;gBACD,MAAM,EAAE;oBACN,UAAU,EAAE,IAAI;iBACjB;gBACD,QAAQ,EAAE;oBACR,sBAAsB,EAAE,KAAK;oBAC7B,KAAK,EAAE;wBACL,OAAO,EAAE,QAAQ;qBAClB;iBACF;gBACD,GAAG,EAAE;oBACH,UAAU,EAAE,IAAI,CAAC,OAAO,EAAE,UAAU,IAAI,IAAI;iBAC7C;aACF,CAAC;SACH,CAAC;IACJ,CAAC;IAED,IAAI,CAAC,aAA4B;QAC/B,MAAM,EAAE,OAAO,EAAE,GAAG,aAAa,CAAC,OAAO,CAAC,0BAAW,CAAC,CAAC;QAEvD,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;IACzB,CAAC;IAED,OAAO;QACL,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,kBAAkB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;IAC/D,CAAC;IAED,WAAW;QACT,OAAO,OAAO,CAAC;IACjB,CAAC;IAED,kBAAkB;QAChB,OAAO,IAAI,6CAAoB,EAAE,CAAC;IACpC,CAAC;IAED,oBAAoB;QAClB,OAAO,EAAE,CAAC;IACZ,CAAC;CACF;AApED,wCAoEC"}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import type { CheckResult, HealthIndicator } from '@rsdk/core';
|
|
2
|
+
import { KafkaMetadataProvider } from '@rsdk/kafka.common';
|
|
3
|
+
import { Kafka } from 'kafkajs';
|
|
4
|
+
export declare class KafkaConsumedHealthIndicator implements HealthIndicator {
|
|
5
|
+
private readonly kafkaClient;
|
|
6
|
+
private consumedTopicsProvider;
|
|
7
|
+
constructor(kafkaClient: Kafka, consumedTopicsProvider: KafkaMetadataProvider);
|
|
8
|
+
check(): Promise<CheckResult>;
|
|
9
|
+
}
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
3
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
4
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
5
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
6
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
7
|
+
};
|
|
8
|
+
var __metadata = (this && this.__metadata) || function (k, v) {
|
|
9
|
+
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
|
|
10
|
+
};
|
|
11
|
+
var __param = (this && this.__param) || function (paramIndex, decorator) {
|
|
12
|
+
return function (target, key) { decorator(target, key, paramIndex); }
|
|
13
|
+
};
|
|
14
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
|
+
exports.KafkaConsumedHealthIndicator = void 0;
|
|
16
|
+
const common_1 = require("@nestjs/common");
|
|
17
|
+
const core_1 = require("@rsdk/core");
|
|
18
|
+
const kafka_common_1 = require("@rsdk/kafka.common");
|
|
19
|
+
const kafkajs_1 = require("kafkajs");
|
|
20
|
+
let KafkaConsumedHealthIndicator = class KafkaConsumedHealthIndicator {
|
|
21
|
+
kafkaClient;
|
|
22
|
+
consumedTopicsProvider;
|
|
23
|
+
constructor(kafkaClient, consumedTopicsProvider) {
|
|
24
|
+
this.kafkaClient = kafkaClient;
|
|
25
|
+
this.consumedTopicsProvider = consumedTopicsProvider;
|
|
26
|
+
}
|
|
27
|
+
async check() {
|
|
28
|
+
return new kafka_common_1.KafkaHealthcheck(this.kafkaClient, this.consumedTopicsProvider.getConsumedTopics()).check();
|
|
29
|
+
}
|
|
30
|
+
};
|
|
31
|
+
KafkaConsumedHealthIndicator = __decorate([
|
|
32
|
+
(0, core_1.Indicator)('kafka-consume', {
|
|
33
|
+
description: 'Check kafka connection and hosted topic-partition',
|
|
34
|
+
}),
|
|
35
|
+
__param(0, (0, common_1.Inject)(kafka_common_1.KAFKA_CLIENT_INJECTION_TOKEN)),
|
|
36
|
+
__metadata("design:paramtypes", [kafkajs_1.Kafka,
|
|
37
|
+
kafka_common_1.KafkaMetadataProvider])
|
|
38
|
+
], KafkaConsumedHealthIndicator);
|
|
39
|
+
exports.KafkaConsumedHealthIndicator = KafkaConsumedHealthIndicator;
|
|
40
|
+
//# sourceMappingURL=kafka-consumed-health.indicator.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafka-consumed-health.indicator.js","sourceRoot":"","sources":["../../src/module/kafka-consumed-health.indicator.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,2CAAwC;AAExC,qCAAuC;AACvC,qDAI4B;AAC5B,qCAAgC;AAEhC,IAGa,4BAA4B,GAHzC,MAGa,4BAA4B;IAEkB;IAC/C;IAFV,YACyD,WAAkB,EACjE,sBAA6C;QADE,gBAAW,GAAX,WAAW,CAAO;QACjE,2BAAsB,GAAtB,sBAAsB,CAAuB;IACpD,CAAC;IAEJ,KAAK,CAAC,KAAK;QACT,OAAO,IAAI,+BAAgB,CACzB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,sBAAsB,CAAC,iBAAiB,EAAE,CAChD,CAAC,KAAK,EAAE,CAAC;IACZ,CAAC;CACF,CAAA;AAZY,4BAA4B;IAHxC,IAAA,gBAAS,EAAC,eAAe,EAAE;QAC1B,WAAW,EAAE,mDAAmD;KACjE,CAAC;IAGG,WAAA,IAAA,eAAM,EAAC,2CAA4B,CAAC,CAAA;qCAA+B,eAAK;QACzC,oCAAqB;GAH5C,4BAA4B,CAYxC;AAZY,oEAA4B"}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
3
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
4
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
5
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
6
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
7
|
+
};
|
|
8
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
9
|
+
exports.KafkaTransportModule = void 0;
|
|
10
|
+
const common_1 = require("@nestjs/common");
|
|
11
|
+
const kafka_common_1 = require("@rsdk/kafka.common");
|
|
12
|
+
const kafka_consumed_health_indicator_1 = require("./kafka-consumed-health.indicator");
|
|
13
|
+
let KafkaTransportModule = class KafkaTransportModule {
|
|
14
|
+
};
|
|
15
|
+
KafkaTransportModule = __decorate([
|
|
16
|
+
(0, common_1.Module)({
|
|
17
|
+
imports: [kafka_common_1.KafkaClientModule],
|
|
18
|
+
providers: [kafka_consumed_health_indicator_1.KafkaConsumedHealthIndicator],
|
|
19
|
+
})
|
|
20
|
+
], KafkaTransportModule);
|
|
21
|
+
exports.KafkaTransportModule = KafkaTransportModule;
|
|
22
|
+
//# sourceMappingURL=kafka-transport.module.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"kafka-transport.module.js","sourceRoot":"","sources":["../../src/module/kafka-transport.module.ts"],"names":[],"mappings":";;;;;;;;;AAAA,2CAAwC;AACxC,qDAAuD;AAEvD,uFAAiF;AAMjF,IAAa,oBAAoB,GAAjC,MAAa,oBAAoB;CAAG,CAAA;AAAvB,oBAAoB;IAJhC,IAAA,eAAM,EAAC;QACN,OAAO,EAAE,CAAC,gCAAiB,CAAC;QAC5B,SAAS,EAAE,CAAC,8DAA4B,CAAC;KAC1C,CAAC;GACW,oBAAoB,CAAG;AAAvB,oDAAoB"}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import type { IncomingEvent, IncomingRequest } from '@nestjs/microservices';
|
|
2
|
+
import { IncomingRequestDeserializer } from '@nestjs/microservices/deserializers';
|
|
3
|
+
import type { KafkaRequest } from '@nestjs/microservices/serializers/kafka-request.serializer';
|
|
4
|
+
import type { EventType } from '@rsdk/kafka.common';
|
|
5
|
+
/**
|
|
6
|
+
* This is a custom deserializer for a kafka request.
|
|
7
|
+
* From kafka comes a buffer, which can be either a proto-buffer or just a string.
|
|
8
|
+
* The string can be a JSON value.
|
|
9
|
+
* It is necessary to take into account all the options and parse the incoming data,
|
|
10
|
+
* or return everything to the user as it is without throwing an error
|
|
11
|
+
*/
|
|
12
|
+
export declare class EventsDeserializer extends IncomingRequestDeserializer {
|
|
13
|
+
events: Map<string, EventType>;
|
|
14
|
+
mapToSchema(data: KafkaRequest, options?: Record<string, any>): IncomingRequest | IncomingEvent;
|
|
15
|
+
/**
|
|
16
|
+
* Try to parse the incoming value. If an error, return the value as is.
|
|
17
|
+
* @param data kafka request
|
|
18
|
+
* @returns parsed or original value
|
|
19
|
+
*/
|
|
20
|
+
private tryParseValue;
|
|
21
|
+
}
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.EventsDeserializer = void 0;
|
|
4
|
+
const deserializers_1 = require("@nestjs/microservices/deserializers");
|
|
5
|
+
/**
|
|
6
|
+
* This is a custom deserializer for a kafka request.
|
|
7
|
+
* From kafka comes a buffer, which can be either a proto-buffer or just a string.
|
|
8
|
+
* The string can be a JSON value.
|
|
9
|
+
* It is necessary to take into account all the options and parse the incoming data,
|
|
10
|
+
* or return everything to the user as it is without throwing an error
|
|
11
|
+
*/
|
|
12
|
+
class EventsDeserializer extends deserializers_1.IncomingRequestDeserializer {
|
|
13
|
+
events = new Map();
|
|
14
|
+
mapToSchema(data, options) {
|
|
15
|
+
if (!options) {
|
|
16
|
+
return {
|
|
17
|
+
data: undefined,
|
|
18
|
+
pattern: undefined,
|
|
19
|
+
};
|
|
20
|
+
}
|
|
21
|
+
return {
|
|
22
|
+
data: this.tryParseValue(data),
|
|
23
|
+
pattern: options.channel,
|
|
24
|
+
};
|
|
25
|
+
}
|
|
26
|
+
/**
|
|
27
|
+
* Try to parse the incoming value. If an error, return the value as is.
|
|
28
|
+
* @param data kafka request
|
|
29
|
+
* @returns parsed or original value
|
|
30
|
+
*/
|
|
31
|
+
tryParseValue(data) {
|
|
32
|
+
let value = data?.value ?? data;
|
|
33
|
+
try {
|
|
34
|
+
if (data?.headers.type) {
|
|
35
|
+
const type = this.events.get(data.headers.type);
|
|
36
|
+
if (type) {
|
|
37
|
+
value = type.decode(value);
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
else {
|
|
41
|
+
value = JSON.parse(value.toString());
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
finally {
|
|
45
|
+
// if error, return value as is
|
|
46
|
+
}
|
|
47
|
+
return value;
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
exports.EventsDeserializer = EventsDeserializer;
|
|
51
|
+
//# sourceMappingURL=events.deserializer.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"events.deserializer.js","sourceRoot":"","sources":["../../src/patched-server-kafka/events.deserializer.ts"],"names":[],"mappings":";;;AACA,uEAAkF;AAIlF;;;;;;GAMG;AACH,MAAa,kBAAmB,SAAQ,2CAA2B;IACjE,MAAM,GAA2B,IAAI,GAAG,EAAE,CAAC;IAE3C,WAAW,CACT,IAAkB,EAClB,OAA6B;QAE7B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO;gBACL,IAAI,EAAE,SAAS;gBACf,OAAO,EAAE,SAAS;aACnB,CAAC;SACH;QAED,OAAO;YACL,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC;YAC9B,OAAO,EAAE,OAAO,CAAC,OAAO;SACzB,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACK,aAAa,CAAC,IAAkB;QACtC,IAAI,KAAK,GAAG,IAAI,EAAE,KAAK,IAAI,IAAI,CAAC;QAEhC,IAAI;YACF,IAAI,IAAI,EAAE,OAAO,CAAC,IAAI,EAAE;gBACtB,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;gBAEhD,IAAI,IAAI,EAAE;oBACR,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;iBAC5B;aACF;iBAAM;gBACL,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;aACtC;SACF;gBAAS;YACR,+BAA+B;SAChC;QAED,OAAO,KAAK,CAAC;IACf,CAAC;CACF;AA5CD,gDA4CC"}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import type { ConsumerDeserializer, KafkaContext, MessageHandler, MsPattern, ReadPacket } from '@nestjs/microservices';
|
|
2
|
+
import { ServerKafka } from '@nestjs/microservices';
|
|
3
|
+
import type { EventType } from '@rsdk/kafka.common';
|
|
4
|
+
import type { Consumer } from 'kafkajs';
|
|
5
|
+
/**
 * Patches default ServerKafka routing and deserialization logic.
 */
export declare class PatchedServerKafka extends ServerKafka {
    protected deserializer: ConsumerDeserializer & {
        events: Map<string, EventType>;
    };
    private topics;
    addHandler(pattern: any, callback: MessageHandler, isEventHandler?: boolean, extras?: Record<string, any>): void;
    bindEvents(consumer: Consumer): Promise<void>;
    handleEvent(pattern: string, packet: ReadPacket, context: KafkaContext): Promise<any>;
    /**
     * Normalizes a routing pattern: event types map to their `$type`,
     * anything else falls through to the base implementation.
     * @param pattern pattern to normalize
     * @returns the string form of the pattern
     */
    protected normalizePattern(pattern: MsPattern): string;
    protected initializeDeserializer(): void;
    /**
     * Returns a subscribe function that throws custom errors, which simplify
     * monitoring and development by carrying more information.
     */
    private createSafetySubscribeToPattern;
}
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.PatchedServerKafka = void 0;
|
|
4
|
+
const microservices_1 = require("@nestjs/microservices");
|
|
5
|
+
const kafka_common_1 = require("@rsdk/kafka.common");
|
|
6
|
+
const exception_1 = require("@rsdk/kafka.common/dist/exception");
|
|
7
|
+
const rxjs_1 = require("rxjs");
|
|
8
|
+
const events_deserializer_1 = require("./events.deserializer");
|
|
9
|
+
/**
 * Patches default ServerKafka routing and deserialization logic.
 */
class PatchedServerKafka extends microservices_1.ServerKafka {
    // Topics this server must subscribe to, collected from registered handlers.
    topics = new Set();
    /**
     * Registers a handler and records its topic: event types subscribe to
     * `events.<group>` and are indexed for decoding; string patterns are
     * used as topic names directly.
     */
    addHandler(pattern, callback, isEventHandler = false, extras = {}) {
        if ((0, kafka_common_1.isEventType)(pattern)) {
            this.topics.add(`events.${pattern.$group}`);
            this.deserializer.events.set(pattern.$type, pattern);
        }
        else if (typeof pattern === 'string') {
            this.topics.add(pattern);
        }
        return super.addHandler(pattern, callback, isEventHandler, extras);
    }
    /**
     * Verifies all required topics exist on the broker, subscribes to them
     * and starts the consumer. Throws MissingTopics when a topic is absent.
     */
    async bindEvents(consumer) {
        const admin = this.client.admin();
        try {
            await admin.connect();
            const topics = await admin.listTopics();
            const notHostTopics = [...this.topics.values()].filter((neededTopic) => !topics.includes(neededTopic));
            const consumerSubscribeOptions = this.options?.subscribe || {};
            if (notHostTopics.length > 0) {
                throw new exception_1.MissingTopics(notHostTopics, {
                    details: consumerSubscribeOptions,
                });
            }
            const subscribeToPattern = this.createSafetySubscribeToPattern(consumer, consumerSubscribeOptions);
            await Promise.all([...this.topics].map(subscribeToPattern));
            // BUGFIX: was Object.assign(this.options?.run || {}, …), which
            // mutated the caller-supplied run options; copy into a fresh object.
            const consumerRunOptions = Object.assign({}, this.options?.run, {
                eachMessage: this.getMessageHandler(),
            });
            await consumer.run(consumerRunOptions);
        }
        finally {
            // Always release the admin connection, success or failure.
            // (If only JS had Go's `defer`...)
            await admin.disconnect();
        }
    }
    /**
     * Routes an event to its handler; prefers the `type` message header
     * over the topic pattern when present.
     */
    async handleEvent(pattern, packet, context) {
        const { headers } = context.getMessage();
        const handler = this.getHandlerByPattern(headers?.type ? headers.type.toString() : pattern);
        if (handler) {
            const resultOrStream = await handler(packet.data, context);
            if ((0, rxjs_1.isObservable)(resultOrStream)) {
                await (0, rxjs_1.lastValueFrom)(resultOrStream);
            }
        }
    }
    /**
     * Normalizes a routing pattern: event types map to their `$type`,
     * anything else falls through to the base implementation.
     */
    normalizePattern(pattern) {
        if ((0, kafka_common_1.isEventType)(pattern)) {
            return pattern.$type;
        }
        return super.normalizePattern(pattern);
    }
    // Installs the EventsDeserializer used for typed event decoding.
    initializeDeserializer() {
        this.deserializer = new events_deserializer_1.EventsDeserializer();
    }
    /**
     * Returns a subscribe function that throws custom errors, which simplify
     * monitoring and development by carrying more information.
     */
    createSafetySubscribeToPattern(consumer, consumerSubscribeOptions) {
        return async (pattern) => {
            this.logger.debug(`Subscribe to the topic: "${pattern}"`);
            try {
                return await consumer.subscribe({
                    topic: pattern,
                    ...consumerSubscribeOptions,
                });
            }
            catch (error) {
                const exceptionFactory = new kafka_common_1.ServerKafkaExceptionFactory(pattern, {
                    details: consumerSubscribeOptions,
                    cause: error,
                });
                exceptionFactory.wrapAndThrow(error);
            }
        };
    }
}
|
|
97
|
+
exports.PatchedServerKafka = PatchedServerKafka;
|
|
98
|
+
//# sourceMappingURL=patched-server-kafka.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"patched-server-kafka.js","sourceRoot":"","sources":["../../src/patched-server-kafka/patched-server-kafka.ts"],"names":[],"mappings":";;;AAOA,yDAAoD;AAGpD,qDAA8E;AAC9E,iEAAkE;AAElE,+BAAmD;AAEnD,+DAA2D;AAE3D;;GAEG;AACH,MAAa,kBAAmB,SAAQ,2BAAW;IAKzC,MAAM,GAAgB,IAAI,GAAG,EAAE,CAAC;IAExC,aAAa;IACG,UAAU,CACxB,OAAY,EACZ,QAAwB,EACxB,cAAc,GAAG,KAAK,EACtB,SAA8B,EAAE;QAEhC,IAAI,IAAA,0BAAW,EAAC,OAAO,CAAC,EAAE;YACxB,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,UAAU,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC;YAC5C,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;SACtD;aAAM,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YACtC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;SAC1B;QAED,OAAO,KAAK,CAAC,UAAU,CAAC,OAAO,EAAE,QAAQ,EAAE,cAAc,EAAE,MAAM,CAAC,CAAC;IACrE,CAAC;IAED,aAAa;IACG,KAAK,CAAC,UAAU,CAAC,QAAkB;QACjD,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;QAElC,IAAI;YACF,MAAM,KAAK,CAAC,OAAO,EAAE,CAAC;YAEtB,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,UAAU,EAAE,CAAC;YAExC,MAAM,aAAa,GAAG,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,CACpD,CAAC,WAAW,EAAE,EAAE,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,CAAC,CAC/C,CAAC;YACF,MAAM,wBAAwB,GAAG,IAAI,CAAC,OAAO,EAAE,SAAS,IAAI,EAAE,CAAC;YAE/D,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC5B,MAAM,IAAI,yBAAa,CAAC,aAAa,EAAE;oBACrC,OAAO,EAAE,wBAAwB;iBAClC,CAAC,CAAC;aACJ;YAED,MAAM,kBAAkB,GAAG,IAAI,CAAC,8BAA8B,CAC5D,QAAQ,EACR,wBAAwB,CACzB,CAAC;YAEF,MAAM,OAAO,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAE5D,MAAM,kBAAkB,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,IAAI,EAAE,EAAE;gBAChE,WAAW,EAAE,IAAI,CAAC,iBAAiB,EAAE;aACtC,CAAC,CAAC;YAEH,MAAM,QAAQ,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC;SACxC;gBAAS;YACR,gDAAgD;YAChD,MAAM,KAAK,CAAC,UAAU,EAAE,CAAC;SAC1B;IACH,CAAC;IAED,aAAa;IACG,KAAK,CAAC,WAAW,CAC/B,OAAe,EACf,MAAkB,EAClB,OAAqB;QAErB,MAAM,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC;QAEzC,MAAM,OAAO,GAAG,IAAI,CAAC,mBAAmB,CACtC,OAAO,EAAE,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CAClD,CAAC;QAEF,IAAI,OAAO,EAAE;YAC
X,MAAM,cAAc,GAAG,MAAM,OAAO,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YAC3D,IAAI,IAAA,mBAAY,EAAC,cAAc,CAAC,EAAE;gBAChC,MAAM,IAAA,oBAAa,EAAC,cAAc,CAAC,CAAC;aACrC;SACF;IACH,CAAC;IAED;;;;OAIG;IACgB,gBAAgB,CAAC,OAAkB;QACpD,IAAI,IAAA,0BAAW,EAAC,OAAO,CAAC,EAAE;YACxB,OAAO,OAAO,CAAC,KAAK,CAAC;SACtB;QAED,OAAO,KAAK,CAAC,gBAAgB,CAAC,OAAO,CAAC,CAAC;IACzC,CAAC;IAEkB,sBAAsB;QACvC,IAAI,CAAC,YAAY,GAAG,IAAI,wCAAkB,EAAE,CAAC;IAC/C,CAAC;IAED;;OAEG;IACK,8BAA8B,CACpC,QAAkB,EAClB,wBAA+D;QAE/D,OAAO,KAAK,EAAE,OAAe,EAAiB,EAAE;YAC9C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,4BAA4B,OAAO,GAAG,CAAC,CAAC;YAC1D,IAAI;gBACF,OAAO,MAAM,QAAQ,CAAC,SAAS,CAAC;oBAC9B,KAAK,EAAE,OAAO;oBACd,GAAG,wBAAwB;iBAC5B,CAAC,CAAC;aACJ;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,gBAAgB,GAAG,IAAI,0CAA2B,CAAC,OAAO,EAAE;oBAChE,OAAO,EAAE,wBAAwB;oBACjC,KAAK,EAAE,KAAK;iBACb,CAAC,CAAC;gBAEH,gBAAgB,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;aACtC;QACH,CAAC,CAAC;IACJ,CAAC;CACF;AA3HD,gDA2HC"}
|
package/package.json
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@rsdk/kafka.transport",
|
|
3
|
+
"version": "2.0.0",
|
|
4
|
+
"description": "Transport that consumes messages from kafka",
|
|
5
|
+
"license": "Apache-2.0",
|
|
6
|
+
"publishConfig": {
|
|
7
|
+
"access": "public"
|
|
8
|
+
},
|
|
9
|
+
"repository": {
|
|
10
|
+
"url": "https://github.com/R-Vision/rsdk"
|
|
11
|
+
},
|
|
12
|
+
"main": "dist/index.js",
|
|
13
|
+
"peerDependencies": {
|
|
14
|
+
"@nestjs/common": "^9.0.0",
|
|
15
|
+
"@nestjs/microservices": "^9.0.0",
|
|
16
|
+
"@rsdk/core": "^2.0.0",
|
|
17
|
+
"@rsdk/kafka.common": "^2.0.0",
|
|
18
|
+
"@rsdk/logging": "^2.0.0",
|
|
19
|
+
"@rsdk/metadata": "^2.0.0",
|
|
20
|
+
"kafkajs": "^2.2.4",
|
|
21
|
+
"rxjs": "^7.0.0"
|
|
22
|
+
},
|
|
23
|
+
"dependencies": {
|
|
24
|
+
"lodash": "^4.17.21"
|
|
25
|
+
},
|
|
26
|
+
"gitHead": "9fe1395b8e38e1c7b9578dd5eed12e0c57a9087f"
|
|
27
|
+
}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import type { KafkaContext } from '@nestjs/microservices';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Calculate next Kafka offset
|
|
5
|
+
* @param context Kafka context from @Ctx
|
|
6
|
+
* @returns Offset +1
|
|
7
|
+
*/
|
|
8
|
+
const getNextOffset = (context: KafkaContext): string => {
|
|
9
|
+
const { offset } = context.getMessage();
|
|
10
|
+
const currentOffset = BigInt(offset);
|
|
11
|
+
const nextOffset = currentOffset + 1n;
|
|
12
|
+
|
|
13
|
+
return nextOffset.toString();
|
|
14
|
+
};
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* Helper that allowes to commiting offsets with one line
|
|
18
|
+
* of code.
|
|
19
|
+
*
|
|
20
|
+
* @param context Kafka context from @Ctx
|
|
21
|
+
* @param offset New offset. If unset, next offset will be +1 of message offset.
|
|
22
|
+
*/
|
|
23
|
+
export const commitOffset = async (
|
|
24
|
+
context: KafkaContext,
|
|
25
|
+
offset?: string,
|
|
26
|
+
): Promise<void> => {
|
|
27
|
+
const partition = context.getPartition();
|
|
28
|
+
const topic = context.getTopic();
|
|
29
|
+
|
|
30
|
+
await context
|
|
31
|
+
.getConsumer()
|
|
32
|
+
.commitOffsets([
|
|
33
|
+
{ topic, partition, offset: offset || getNextOffset(context) },
|
|
34
|
+
]);
|
|
35
|
+
};
|
package/src/constants.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/** RsdkMetadata scope under which @Consume registers Kafka topic metadata. */
export const KAFKA_CONSUME_RSDK_METADATA_SCOPE = 'kafka-consume';
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { applyDecorators } from '@nestjs/common';
|
|
2
|
+
import { EventPattern } from '@nestjs/microservices';
|
|
3
|
+
import type { EventType, KafkaTopicMetadata } from '@rsdk/kafka.common';
|
|
4
|
+
import { getTopicName, KafkaTopicType } from '@rsdk/kafka.common';
|
|
5
|
+
import { RsdkMetadata } from '@rsdk/metadata';
|
|
6
|
+
import { omit } from 'lodash';
|
|
7
|
+
|
|
8
|
+
import { KAFKA_CONSUME_RSDK_METADATA_SCOPE } from '../constants';
|
|
9
|
+
|
|
10
|
+
const KafkaConsumeMetadata =
|
|
11
|
+
<T>(eventType: EventType<T>): MethodDecorator =>
|
|
12
|
+
(target) => {
|
|
13
|
+
const rsdkMetadata = new RsdkMetadata<KafkaTopicMetadata>(
|
|
14
|
+
target.constructor,
|
|
15
|
+
KAFKA_CONSUME_RSDK_METADATA_SCOPE,
|
|
16
|
+
);
|
|
17
|
+
|
|
18
|
+
rsdkMetadata.add({
|
|
19
|
+
partitionKeyField: eventType.$partitionKeyField,
|
|
20
|
+
topicName: getTopicName(eventType),
|
|
21
|
+
group: eventType.$group,
|
|
22
|
+
type: KafkaTopicType.Consume,
|
|
23
|
+
eventType: omit(eventType, 'toJSON', 'encode', 'decode'),
|
|
24
|
+
});
|
|
25
|
+
};
|
|
26
|
+
|
|
27
|
+
export const Consume = <T>(eventType: EventType<T>): MethodDecorator => {
|
|
28
|
+
return (target, propertyKey, descriptor) => {
|
|
29
|
+
EventPattern(eventType)(target, propertyKey, descriptor);
|
|
30
|
+
applyDecorators(KafkaConsumeMetadata(eventType))(
|
|
31
|
+
target,
|
|
32
|
+
propertyKey,
|
|
33
|
+
descriptor,
|
|
34
|
+
);
|
|
35
|
+
};
|
|
36
|
+
};
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import type { PipeTransform } from '@nestjs/common';
|
|
2
|
+
import { Injectable } from '@nestjs/common';
|
|
3
|
+
import type { KafkaContext } from '@nestjs/microservices';
|
|
4
|
+
import { RpcParamtype } from '@nestjs/microservices/enums/rpc-paramtype.enum';
|
|
5
|
+
import { createRpcParamDecorator } from '@nestjs/microservices/utils/param.utils';
|
|
6
|
+
|
|
7
|
+
@Injectable()
|
|
8
|
+
export class MetadataPipe implements PipeTransform {
|
|
9
|
+
transform(value: KafkaContext): Record<string, any> | null {
|
|
10
|
+
const { headers } = value.getMessage();
|
|
11
|
+
|
|
12
|
+
return (headers?.metadata as any) || null;
|
|
13
|
+
}
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
/**
 * Parameter decorator that extracts the `metadata` message header from the
 * incoming Kafka event (MetadataPipe applied to the RPC context).
 * @returns the metadata record, or null when the header is absent
 */
export const Metadata = (): ParameterDecorator =>
  createRpcParamDecorator(RpcParamtype.CONTEXT)(MetadataPipe);
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import { NotImplementedException } from '@nestjs/common';
|
|
2
|
+
import type { IErrorsFormatter } from '@rsdk/core';
|
|
3
|
+
|
|
4
|
+
export class KafkaErrorsFormatter implements IErrorsFormatter {
|
|
5
|
+
match(): boolean {
|
|
6
|
+
return false;
|
|
7
|
+
}
|
|
8
|
+
|
|
9
|
+
format(): unknown {
|
|
10
|
+
throw new NotImplementedException();
|
|
11
|
+
}
|
|
12
|
+
}
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
import type { MicroserviceOptions } from '@nestjs/microservices';
|
|
2
|
+
import { logLevel } from '@nestjs/microservices/external/kafka.interface';
|
|
3
|
+
import type {
|
|
4
|
+
ConfigContext,
|
|
5
|
+
IErrorsFormatter,
|
|
6
|
+
IErrorsTransformer,
|
|
7
|
+
IMicroserviceTransport,
|
|
8
|
+
NestModuleDefinitions,
|
|
9
|
+
} from '@rsdk/core';
|
|
10
|
+
import { Manifest } from '@rsdk/core/dist/manifest/manifest';
|
|
11
|
+
import { createLoggerAdapter, KafkaConfig } from '@rsdk/kafka.common';
|
|
12
|
+
import { LoggerFactory } from '@rsdk/logging';
|
|
13
|
+
|
|
14
|
+
import { KafkaTransportModule } from './module/kafka-transport.module';
|
|
15
|
+
import { PatchedServerKafka } from './patched-server-kafka/patched-server-kafka';
|
|
16
|
+
import { KafkaErrorsFormatter } from './kafka-errors.formatter';
|
|
17
|
+
|
|
18
|
+
/** Options for the Kafka microservice transport. */
export interface KafkaTransportOptions {
  /**
   * Whether the consumer should auto-commit offsets.
   * Defaults to `true` (see createMicroserviceOptions).
   */
  autoCommit?: boolean;
}
|
|
23
|
+
|
|
24
|
+
/**
 * Microservice transport that consumes messages from Kafka via the
 * PatchedServerKafka strategy. Broker addresses are resolved from
 * KafkaConfig during init().
 */
export class KafkaTransport implements IMicroserviceTransport {
  private logger = LoggerFactory.create(KafkaTransport);
  // Populated in init(); read by createMicroserviceOptions() and onStart().
  private brokers: string[];

  constructor(private readonly options?: KafkaTransportOptions) {}

  /** Nest modules this transport contributes (health indicator wiring). */
  modules(): NestModuleDefinitions {
    return [KafkaTransportModule];
  }

  /** Builds the Nest microservice options backed by PatchedServerKafka. */
  createMicroserviceOptions(): MicroserviceOptions {
    // TODO: a different way of obtaining the application name is needed
    const { name } = Manifest.getData();

    return {
      strategy: new PatchedServerKafka({
        client: {
          brokers: this.brokers,
          // Unique per process so parallel instances are distinguishable.
          clientId: `${name}-${process.pid}`,
          logCreator: createLoggerAdapter(
            LoggerFactory.create('NativeKafkaProducer'),
          ),
          logLevel: logLevel.DEBUG,
        },
        consumer: {
          allowAutoTopicCreation: false,
          groupId: name,
          retry: {
            retries: Infinity,
          },
        },
        parser: {
          // Keep raw buffers; EventsDeserializer decides how to decode.
          keepBinary: true,
        },
        producer: {
          allowAutoTopicCreation: false,
          retry: {
            retries: Infinity,
          },
        },
        run: {
          autoCommit: this.options?.autoCommit ?? true,
        },
      }),
    };
  }

  /** Resolves broker addresses from configuration. */
  init(configContext: ConfigContext): void {
    const { brokers } = configContext.resolve(KafkaConfig);

    this.brokers = brokers;
  }

  onStart(): void {
    this.logger.info(`Kafka brokers: ${this.brokers.join(',')}`);
  }

  getProtocol(): string {
    return 'kafka';
  }

  /** Returns the (stub) errors formatter for this transport. */
  getErrorsFormatter(): IErrorsFormatter {
    return new KafkaErrorsFormatter();
  }

  /** No error transformers are registered for Kafka. */
  getErrorTransformers(): IErrorsTransformer[] {
    return [];
  }
}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import { Inject } from '@nestjs/common';
|
|
2
|
+
import type { CheckResult, HealthIndicator } from '@rsdk/core';
|
|
3
|
+
import { Indicator } from '@rsdk/core';
|
|
4
|
+
import {
|
|
5
|
+
KAFKA_CLIENT_INJECTION_TOKEN,
|
|
6
|
+
KafkaHealthcheck,
|
|
7
|
+
KafkaMetadataProvider,
|
|
8
|
+
} from '@rsdk/kafka.common';
|
|
9
|
+
import { Kafka } from 'kafkajs';
|
|
10
|
+
|
|
11
|
+
@Indicator('kafka-consume', {
|
|
12
|
+
description: 'Check kafka connection and hosted topic-partition',
|
|
13
|
+
})
|
|
14
|
+
export class KafkaConsumedHealthIndicator implements HealthIndicator {
|
|
15
|
+
constructor(
|
|
16
|
+
@Inject(KAFKA_CLIENT_INJECTION_TOKEN) private readonly kafkaClient: Kafka,
|
|
17
|
+
private consumedTopicsProvider: KafkaMetadataProvider,
|
|
18
|
+
) {}
|
|
19
|
+
|
|
20
|
+
async check(): Promise<CheckResult> {
|
|
21
|
+
return new KafkaHealthcheck(
|
|
22
|
+
this.kafkaClient,
|
|
23
|
+
this.consumedTopicsProvider.getConsumedTopics(),
|
|
24
|
+
).check();
|
|
25
|
+
}
|
|
26
|
+
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { Module } from '@nestjs/common';
|
|
2
|
+
import { KafkaClientModule } from '@rsdk/kafka.common';
|
|
3
|
+
|
|
4
|
+
import { KafkaConsumedHealthIndicator } from './kafka-consumed-health.indicator';
|
|
5
|
+
|
|
6
|
+
/**
 * Nest module wiring for the Kafka transport: imports the shared Kafka
 * client and provides the consume health indicator.
 */
@Module({
  imports: [KafkaClientModule],
  providers: [KafkaConsumedHealthIndicator],
})
export class KafkaTransportModule {}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import type { IncomingEvent, IncomingRequest } from '@nestjs/microservices';
|
|
2
|
+
import { IncomingRequestDeserializer } from '@nestjs/microservices/deserializers';
|
|
3
|
+
import type { KafkaRequest } from '@nestjs/microservices/serializers/kafka-request.serializer';
|
|
4
|
+
import type { EventType } from '@rsdk/kafka.common';
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* This is a custom deserializer for a kafka request.
|
|
8
|
+
* From kafka comes a buffer, which can be either a proto-buffer or just a string.
|
|
9
|
+
* The string can be a JSON value.
|
|
10
|
+
* It is necessary to take into account all the options and parse the incoming data,
|
|
11
|
+
* or return everything to the user as it is without throwing an error
|
|
12
|
+
*/
|
|
13
|
+
export class EventsDeserializer extends IncomingRequestDeserializer {
|
|
14
|
+
events: Map<string, EventType> = new Map();
|
|
15
|
+
|
|
16
|
+
mapToSchema(
|
|
17
|
+
data: KafkaRequest,
|
|
18
|
+
options?: Record<string, any>,
|
|
19
|
+
): IncomingRequest | IncomingEvent {
|
|
20
|
+
if (!options) {
|
|
21
|
+
return {
|
|
22
|
+
data: undefined,
|
|
23
|
+
pattern: undefined,
|
|
24
|
+
};
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
return {
|
|
28
|
+
data: this.tryParseValue(data),
|
|
29
|
+
pattern: options.channel,
|
|
30
|
+
};
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
/**
|
|
34
|
+
* Try to parse the incoming value. If an error, return the value as is.
|
|
35
|
+
* @param data kafka request
|
|
36
|
+
* @returns parsed or original value
|
|
37
|
+
*/
|
|
38
|
+
private tryParseValue(data: KafkaRequest): unknown {
|
|
39
|
+
let value = data?.value ?? data;
|
|
40
|
+
|
|
41
|
+
try {
|
|
42
|
+
if (data?.headers.type) {
|
|
43
|
+
const type = this.events.get(data.headers.type);
|
|
44
|
+
|
|
45
|
+
if (type) {
|
|
46
|
+
value = type.decode(value);
|
|
47
|
+
}
|
|
48
|
+
} else {
|
|
49
|
+
value = JSON.parse(value.toString());
|
|
50
|
+
}
|
|
51
|
+
} finally {
|
|
52
|
+
// if error, return value as is
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
return value;
|
|
56
|
+
}
|
|
57
|
+
}
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
import type {
|
|
2
|
+
ConsumerDeserializer,
|
|
3
|
+
KafkaContext,
|
|
4
|
+
MessageHandler,
|
|
5
|
+
MsPattern,
|
|
6
|
+
ReadPacket,
|
|
7
|
+
} from '@nestjs/microservices';
|
|
8
|
+
import { ServerKafka } from '@nestjs/microservices';
|
|
9
|
+
import type { ConsumerSubscribeTopic } from '@nestjs/microservices/external/kafka.interface';
|
|
10
|
+
import type { EventType } from '@rsdk/kafka.common';
|
|
11
|
+
import { isEventType, ServerKafkaExceptionFactory } from '@rsdk/kafka.common';
|
|
12
|
+
import { MissingTopics } from '@rsdk/kafka.common/dist/exception';
|
|
13
|
+
import type { Consumer } from 'kafkajs';
|
|
14
|
+
import { isObservable, lastValueFrom } from 'rxjs';
|
|
15
|
+
|
|
16
|
+
import { EventsDeserializer } from './events.deserializer';
|
|
17
|
+
|
|
18
|
+
/**
|
|
19
|
+
* Patches default ServerKafka routing and deserialization logic.
|
|
20
|
+
*/
|
|
21
|
+
export class PatchedServerKafka extends ServerKafka {
|
|
22
|
+
protected declare deserializer: ConsumerDeserializer & {
|
|
23
|
+
events: Map<string, EventType>;
|
|
24
|
+
};
|
|
25
|
+
|
|
26
|
+
private topics: Set<string> = new Set();
|
|
27
|
+
|
|
28
|
+
// TODO JSDOC
|
|
29
|
+
public override addHandler(
|
|
30
|
+
pattern: any,
|
|
31
|
+
callback: MessageHandler,
|
|
32
|
+
isEventHandler = false,
|
|
33
|
+
extras: Record<string, any> = {},
|
|
34
|
+
): void {
|
|
35
|
+
if (isEventType(pattern)) {
|
|
36
|
+
this.topics.add(`events.${pattern.$group}`);
|
|
37
|
+
this.deserializer.events.set(pattern.$type, pattern);
|
|
38
|
+
} else if (typeof pattern === 'string') {
|
|
39
|
+
this.topics.add(pattern);
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
return super.addHandler(pattern, callback, isEventHandler, extras);
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
// TODO JSDOC
|
|
46
|
+
public override async bindEvents(consumer: Consumer): Promise<void> {
|
|
47
|
+
const admin = this.client.admin();
|
|
48
|
+
|
|
49
|
+
try {
|
|
50
|
+
await admin.connect();
|
|
51
|
+
|
|
52
|
+
const topics = await admin.listTopics();
|
|
53
|
+
|
|
54
|
+
const notHostTopics = [...this.topics.values()].filter(
|
|
55
|
+
(neededTopic) => !topics.includes(neededTopic),
|
|
56
|
+
);
|
|
57
|
+
const consumerSubscribeOptions = this.options?.subscribe || {};
|
|
58
|
+
|
|
59
|
+
if (notHostTopics.length > 0) {
|
|
60
|
+
throw new MissingTopics(notHostTopics, {
|
|
61
|
+
details: consumerSubscribeOptions,
|
|
62
|
+
});
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
const subscribeToPattern = this.createSafetySubscribeToPattern(
|
|
66
|
+
consumer,
|
|
67
|
+
consumerSubscribeOptions,
|
|
68
|
+
);
|
|
69
|
+
|
|
70
|
+
await Promise.all([...this.topics].map(subscribeToPattern));
|
|
71
|
+
|
|
72
|
+
const consumerRunOptions = Object.assign(this.options?.run || {}, {
|
|
73
|
+
eachMessage: this.getMessageHandler(),
|
|
74
|
+
});
|
|
75
|
+
|
|
76
|
+
await consumer.run(consumerRunOptions);
|
|
77
|
+
} finally {
|
|
78
|
+
// Ну почему в голанге есть defer, а у js нет...
|
|
79
|
+
await admin.disconnect();
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
// TODO JSDOC
|
|
84
|
+
public override async handleEvent(
|
|
85
|
+
pattern: string,
|
|
86
|
+
packet: ReadPacket,
|
|
87
|
+
context: KafkaContext,
|
|
88
|
+
): Promise<any> {
|
|
89
|
+
const { headers } = context.getMessage();
|
|
90
|
+
|
|
91
|
+
const handler = this.getHandlerByPattern(
|
|
92
|
+
headers?.type ? headers.type.toString() : pattern,
|
|
93
|
+
);
|
|
94
|
+
|
|
95
|
+
if (handler) {
|
|
96
|
+
const resultOrStream = await handler(packet.data, context);
|
|
97
|
+
if (isObservable(resultOrStream)) {
|
|
98
|
+
await lastValueFrom(resultOrStream);
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
/**
|
|
104
|
+
*
|
|
105
|
+
* @param pattern
|
|
106
|
+
* @returns
|
|
107
|
+
*/
|
|
108
|
+
protected override normalizePattern(pattern: MsPattern): string {
|
|
109
|
+
if (isEventType(pattern)) {
|
|
110
|
+
return pattern.$type;
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
return super.normalizePattern(pattern);
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
protected override initializeDeserializer(): void {
|
|
117
|
+
this.deserializer = new EventsDeserializer();
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
/**
|
|
121
|
+
* Возвращает функцию для подписки, которая выбрасывает кастомные ошибки упрощающие мониторинг и разработку засчёт большего количества информации в них
|
|
122
|
+
*/
|
|
123
|
+
private createSafetySubscribeToPattern(
|
|
124
|
+
consumer: Consumer,
|
|
125
|
+
consumerSubscribeOptions: Omit<ConsumerSubscribeTopic, 'topic'>,
|
|
126
|
+
) {
|
|
127
|
+
return async (pattern: string): Promise<void> => {
|
|
128
|
+
this.logger.debug(`Subscribe to the topic: "${pattern}"`);
|
|
129
|
+
try {
|
|
130
|
+
return await consumer.subscribe({
|
|
131
|
+
topic: pattern,
|
|
132
|
+
...consumerSubscribeOptions,
|
|
133
|
+
});
|
|
134
|
+
} catch (error) {
|
|
135
|
+
const exceptionFactory = new ServerKafkaExceptionFactory(pattern, {
|
|
136
|
+
details: consumerSubscribeOptions,
|
|
137
|
+
cause: error,
|
|
138
|
+
});
|
|
139
|
+
|
|
140
|
+
exceptionFactory.wrapAndThrow(error);
|
|
141
|
+
}
|
|
142
|
+
};
|
|
143
|
+
}
|
|
144
|
+
}
|