@hazeljs/data 0.2.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +192 -0
- package/README.md +308 -0
- package/dist/connectors/connector.interface.d.ts +29 -0
- package/dist/connectors/connector.interface.d.ts.map +1 -0
- package/dist/connectors/connector.interface.js +6 -0
- package/dist/connectors/csv.connector.d.ts +63 -0
- package/dist/connectors/csv.connector.d.ts.map +1 -0
- package/dist/connectors/csv.connector.js +147 -0
- package/dist/connectors/http.connector.d.ts +68 -0
- package/dist/connectors/http.connector.d.ts.map +1 -0
- package/dist/connectors/http.connector.js +131 -0
- package/dist/connectors/index.d.ts +7 -0
- package/dist/connectors/index.d.ts.map +1 -0
- package/dist/connectors/index.js +12 -0
- package/dist/connectors/memory.connector.d.ts +38 -0
- package/dist/connectors/memory.connector.d.ts.map +1 -0
- package/dist/connectors/memory.connector.js +56 -0
- package/dist/connectors/memory.connector.test.d.ts +2 -0
- package/dist/connectors/memory.connector.test.d.ts.map +1 -0
- package/dist/connectors/memory.connector.test.js +43 -0
- package/dist/data.module.d.ts +30 -0
- package/dist/data.module.d.ts.map +1 -0
- package/dist/data.module.js +120 -0
- package/dist/data.module.test.d.ts +2 -0
- package/dist/data.module.test.d.ts.map +1 -0
- package/dist/data.module.test.js +28 -0
- package/dist/data.types.d.ts +67 -0
- package/dist/data.types.d.ts.map +1 -0
- package/dist/data.types.js +5 -0
- package/dist/decorators/index.d.ts +6 -0
- package/dist/decorators/index.d.ts.map +1 -0
- package/dist/decorators/index.js +24 -0
- package/dist/decorators/pii.decorator.d.ts +59 -0
- package/dist/decorators/pii.decorator.d.ts.map +1 -0
- package/dist/decorators/pii.decorator.js +197 -0
- package/dist/decorators/pii.decorator.test.d.ts +2 -0
- package/dist/decorators/pii.decorator.test.d.ts.map +1 -0
- package/dist/decorators/pii.decorator.test.js +150 -0
- package/dist/decorators/pipeline.decorator.d.ts +22 -0
- package/dist/decorators/pipeline.decorator.d.ts.map +1 -0
- package/dist/decorators/pipeline.decorator.js +42 -0
- package/dist/decorators/pipeline.decorator.test.d.ts +2 -0
- package/dist/decorators/pipeline.decorator.test.d.ts.map +1 -0
- package/dist/decorators/pipeline.decorator.test.js +104 -0
- package/dist/decorators/stream.decorator.d.ts +31 -0
- package/dist/decorators/stream.decorator.d.ts.map +1 -0
- package/dist/decorators/stream.decorator.js +48 -0
- package/dist/decorators/transform.decorator.d.ts +29 -0
- package/dist/decorators/transform.decorator.d.ts.map +1 -0
- package/dist/decorators/transform.decorator.js +41 -0
- package/dist/decorators/validate.decorator.d.ts +34 -0
- package/dist/decorators/validate.decorator.d.ts.map +1 -0
- package/dist/decorators/validate.decorator.js +49 -0
- package/dist/flink.service.d.ts +80 -0
- package/dist/flink.service.d.ts.map +1 -0
- package/dist/flink.service.js +134 -0
- package/dist/flink.service.test.d.ts +2 -0
- package/dist/flink.service.test.d.ts.map +1 -0
- package/dist/flink.service.test.js +60 -0
- package/dist/index.d.ts +32 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +96 -0
- package/dist/pipelines/etl.service.d.ts +59 -0
- package/dist/pipelines/etl.service.d.ts.map +1 -0
- package/dist/pipelines/etl.service.js +223 -0
- package/dist/pipelines/etl.service.test.d.ts +2 -0
- package/dist/pipelines/etl.service.test.d.ts.map +1 -0
- package/dist/pipelines/etl.service.test.js +319 -0
- package/dist/pipelines/pipeline.base.d.ts +24 -0
- package/dist/pipelines/pipeline.base.d.ts.map +1 -0
- package/dist/pipelines/pipeline.base.js +29 -0
- package/dist/pipelines/pipeline.base.test.d.ts +2 -0
- package/dist/pipelines/pipeline.base.test.d.ts.map +1 -0
- package/dist/pipelines/pipeline.base.test.js +38 -0
- package/dist/pipelines/pipeline.builder.d.ts +95 -0
- package/dist/pipelines/pipeline.builder.d.ts.map +1 -0
- package/dist/pipelines/pipeline.builder.js +212 -0
- package/dist/pipelines/pipeline.builder.test.d.ts +2 -0
- package/dist/pipelines/pipeline.builder.test.d.ts.map +1 -0
- package/dist/pipelines/pipeline.builder.test.js +185 -0
- package/dist/pipelines/stream.service.d.ts +12 -0
- package/dist/pipelines/stream.service.d.ts.map +1 -0
- package/dist/pipelines/stream.service.js +58 -0
- package/dist/pipelines/stream.service.test.d.ts +2 -0
- package/dist/pipelines/stream.service.test.d.ts.map +1 -0
- package/dist/pipelines/stream.service.test.js +103 -0
- package/dist/quality/quality.service.d.ts +87 -0
- package/dist/quality/quality.service.d.ts.map +1 -0
- package/dist/quality/quality.service.js +326 -0
- package/dist/quality/quality.service.test.d.ts +2 -0
- package/dist/quality/quality.service.test.d.ts.map +1 -0
- package/dist/quality/quality.service.test.js +128 -0
- package/dist/schema/schema.d.ts +127 -0
- package/dist/schema/schema.d.ts.map +1 -0
- package/dist/schema/schema.js +487 -0
- package/dist/schema/schema.test.d.ts +2 -0
- package/dist/schema/schema.test.d.ts.map +1 -0
- package/dist/schema/schema.test.js +411 -0
- package/dist/streaming/flink/flink.client.d.ts +96 -0
- package/dist/streaming/flink/flink.client.d.ts.map +1 -0
- package/dist/streaming/flink/flink.client.js +267 -0
- package/dist/streaming/flink/flink.client.test.d.ts +2 -0
- package/dist/streaming/flink/flink.client.test.d.ts.map +1 -0
- package/dist/streaming/flink/flink.client.test.js +59 -0
- package/dist/streaming/flink/flink.job.d.ts +29 -0
- package/dist/streaming/flink/flink.job.d.ts.map +1 -0
- package/dist/streaming/flink/flink.job.js +27 -0
- package/dist/streaming/flink/flink.job.test.d.ts +2 -0
- package/dist/streaming/flink/flink.job.test.d.ts.map +1 -0
- package/dist/streaming/flink/flink.job.test.js +37 -0
- package/dist/streaming/flink/flink.operators.d.ts +35 -0
- package/dist/streaming/flink/flink.operators.d.ts.map +1 -0
- package/dist/streaming/flink/flink.operators.js +43 -0
- package/dist/streaming/flink/flink.operators.test.d.ts +2 -0
- package/dist/streaming/flink/flink.operators.test.d.ts.map +1 -0
- package/dist/streaming/flink/flink.operators.test.js +38 -0
- package/dist/streaming/stream.builder.d.ts +22 -0
- package/dist/streaming/stream.builder.d.ts.map +1 -0
- package/dist/streaming/stream.builder.js +50 -0
- package/dist/streaming/stream.builder.test.d.ts +2 -0
- package/dist/streaming/stream.builder.test.d.ts.map +1 -0
- package/dist/streaming/stream.builder.test.js +59 -0
- package/dist/streaming/stream.processor.d.ts +66 -0
- package/dist/streaming/stream.processor.d.ts.map +1 -0
- package/dist/streaming/stream.processor.js +178 -0
- package/dist/streaming/stream.processor.test.d.ts +2 -0
- package/dist/streaming/stream.processor.test.d.ts.map +1 -0
- package/dist/streaming/stream.processor.test.js +151 -0
- package/dist/streaming/stream.processor.windowing.test.d.ts +2 -0
- package/dist/streaming/stream.processor.windowing.test.d.ts.map +1 -0
- package/dist/streaming/stream.processor.windowing.test.js +69 -0
- package/dist/telemetry/telemetry.d.ts +124 -0
- package/dist/telemetry/telemetry.d.ts.map +1 -0
- package/dist/telemetry/telemetry.js +259 -0
- package/dist/telemetry/telemetry.test.d.ts +2 -0
- package/dist/telemetry/telemetry.test.d.ts.map +1 -0
- package/dist/telemetry/telemetry.test.js +51 -0
- package/dist/testing/index.d.ts +12 -0
- package/dist/testing/index.d.ts.map +1 -0
- package/dist/testing/index.js +18 -0
- package/dist/testing/pipeline-test-harness.d.ts +40 -0
- package/dist/testing/pipeline-test-harness.d.ts.map +1 -0
- package/dist/testing/pipeline-test-harness.js +55 -0
- package/dist/testing/pipeline-test-harness.test.d.ts +2 -0
- package/dist/testing/pipeline-test-harness.test.d.ts.map +1 -0
- package/dist/testing/pipeline-test-harness.test.js +102 -0
- package/dist/testing/schema-faker.d.ts +32 -0
- package/dist/testing/schema-faker.d.ts.map +1 -0
- package/dist/testing/schema-faker.js +91 -0
- package/dist/testing/schema-faker.test.d.ts +2 -0
- package/dist/testing/schema-faker.test.d.ts.map +1 -0
- package/dist/testing/schema-faker.test.js +66 -0
- package/dist/transformers/built-in.transformers.d.ts +12 -0
- package/dist/transformers/built-in.transformers.d.ts.map +1 -0
- package/dist/transformers/built-in.transformers.js +75 -0
- package/dist/transformers/built-in.transformers.test.d.ts +2 -0
- package/dist/transformers/built-in.transformers.test.d.ts.map +1 -0
- package/dist/transformers/built-in.transformers.test.js +85 -0
- package/dist/transformers/transformer.service.d.ts +14 -0
- package/dist/transformers/transformer.service.d.ts.map +1 -0
- package/dist/transformers/transformer.service.js +65 -0
- package/dist/transformers/transformer.service.test.d.ts +2 -0
- package/dist/transformers/transformer.service.test.d.ts.map +1 -0
- package/dist/transformers/transformer.service.test.js +42 -0
- package/dist/validators/schema.validator.d.ts +21 -0
- package/dist/validators/schema.validator.d.ts.map +1 -0
- package/dist/validators/schema.validator.js +40 -0
- package/dist/validators/schema.validator.test.d.ts +2 -0
- package/dist/validators/schema.validator.test.d.ts.map +1 -0
- package/dist/validators/schema.validator.test.js +42 -0
- package/package.json +53 -0
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
3
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
4
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
5
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
6
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
7
|
+
};
|
|
8
|
+
var __metadata = (this && this.__metadata) || function (k, v) {
|
|
9
|
+
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
|
|
10
|
+
};
|
|
11
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
|
+
require("reflect-metadata");
|
|
13
|
+
const pipeline_decorator_1 = require("./pipeline.decorator");
|
|
14
|
+
const transform_decorator_1 = require("./transform.decorator");
|
|
15
|
+
const validate_decorator_1 = require("./validate.decorator");
|
|
16
|
+
const stream_decorator_1 = require("./stream.decorator");
|
|
17
|
+
const schema_1 = require("../schema/schema");
|
|
18
|
+
describe('Pipeline decorator', () => {
|
|
19
|
+
it('applies metadata with string name', () => {
|
|
20
|
+
let TestPipeline = class TestPipeline {
|
|
21
|
+
};
|
|
22
|
+
TestPipeline = __decorate([
|
|
23
|
+
(0, pipeline_decorator_1.Pipeline)('my-pipeline')
|
|
24
|
+
], TestPipeline);
|
|
25
|
+
const meta = (0, pipeline_decorator_1.getPipelineMetadata)(TestPipeline);
|
|
26
|
+
expect(meta?.name).toBe('my-pipeline');
|
|
27
|
+
expect((0, pipeline_decorator_1.hasPipelineMetadata)(TestPipeline)).toBe(true);
|
|
28
|
+
});
|
|
29
|
+
it('applies metadata with options object', () => {
|
|
30
|
+
let CustomPipeline = class CustomPipeline {
|
|
31
|
+
};
|
|
32
|
+
CustomPipeline = __decorate([
|
|
33
|
+
(0, pipeline_decorator_1.Pipeline)({ name: 'custom' })
|
|
34
|
+
], CustomPipeline);
|
|
35
|
+
expect((0, pipeline_decorator_1.getPipelineMetadata)(CustomPipeline)?.name).toBe('custom');
|
|
36
|
+
});
|
|
37
|
+
it('applies metadata with no args (uses class name)', () => {
|
|
38
|
+
let DefaultPipeline = class DefaultPipeline {
|
|
39
|
+
};
|
|
40
|
+
DefaultPipeline = __decorate([
|
|
41
|
+
(0, pipeline_decorator_1.Pipeline)()
|
|
42
|
+
], DefaultPipeline);
|
|
43
|
+
expect((0, pipeline_decorator_1.getPipelineMetadata)(DefaultPipeline)?.name).toBe('DefaultPipeline');
|
|
44
|
+
});
|
|
45
|
+
});
|
|
46
|
+
describe('Transform decorator', () => {
|
|
47
|
+
it('applies step metadata', () => {
|
|
48
|
+
class TestClass {
|
|
49
|
+
normalize() { }
|
|
50
|
+
}
|
|
51
|
+
__decorate([
|
|
52
|
+
(0, transform_decorator_1.Transform)({ step: 1, name: 'normalize' }),
|
|
53
|
+
__metadata("design:type", Function),
|
|
54
|
+
__metadata("design:paramtypes", []),
|
|
55
|
+
__metadata("design:returntype", void 0)
|
|
56
|
+
], TestClass.prototype, "normalize", null);
|
|
57
|
+
const meta = (0, transform_decorator_1.getTransformMetadata)(TestClass.prototype, 'normalize');
|
|
58
|
+
expect(meta?.step).toBe(1);
|
|
59
|
+
expect(meta?.name).toBe('normalize');
|
|
60
|
+
expect(meta?.type).toBe('transform');
|
|
61
|
+
});
|
|
62
|
+
});
|
|
63
|
+
describe('Validate decorator', () => {
|
|
64
|
+
it('applies schema metadata', () => {
|
|
65
|
+
const schema = schema_1.Schema.object({ email: schema_1.Schema.string().email() });
|
|
66
|
+
class TestClass {
|
|
67
|
+
validate() { }
|
|
68
|
+
}
|
|
69
|
+
__decorate([
|
|
70
|
+
(0, validate_decorator_1.Validate)({ step: 2, name: 'validate', schema }),
|
|
71
|
+
__metadata("design:type", Function),
|
|
72
|
+
__metadata("design:paramtypes", []),
|
|
73
|
+
__metadata("design:returntype", void 0)
|
|
74
|
+
], TestClass.prototype, "validate", null);
|
|
75
|
+
const meta = (0, validate_decorator_1.getValidateMetadata)(TestClass.prototype, 'validate');
|
|
76
|
+
expect(meta?.step).toBe(2);
|
|
77
|
+
expect(meta?.schema).toBe(schema);
|
|
78
|
+
});
|
|
79
|
+
});
|
|
80
|
+
describe('Stream decorator', () => {
|
|
81
|
+
it('applies stream metadata', () => {
|
|
82
|
+
let StreamPipeline = class StreamPipeline {
|
|
83
|
+
};
|
|
84
|
+
StreamPipeline = __decorate([
|
|
85
|
+
(0, stream_decorator_1.Stream)({
|
|
86
|
+
name: 'events',
|
|
87
|
+
source: 'kafka://topic',
|
|
88
|
+
sink: 'kafka://out',
|
|
89
|
+
parallelism: 8,
|
|
90
|
+
})
|
|
91
|
+
], StreamPipeline);
|
|
92
|
+
const meta = (0, stream_decorator_1.getStreamMetadata)(StreamPipeline);
|
|
93
|
+
expect(meta?.name).toBe('events');
|
|
94
|
+
expect(meta?.source).toBe('kafka://topic');
|
|
95
|
+
expect(meta?.sink).toBe('kafka://out');
|
|
96
|
+
expect(meta?.parallelism).toBe(8);
|
|
97
|
+
expect((0, stream_decorator_1.hasStreamMetadata)(StreamPipeline)).toBe(true);
|
|
98
|
+
});
|
|
99
|
+
it('hasStreamMetadata false for undecorated', () => {
|
|
100
|
+
class Plain {
|
|
101
|
+
}
|
|
102
|
+
expect((0, stream_decorator_1.hasStreamMetadata)(Plain)).toBe(false);
|
|
103
|
+
});
|
|
104
|
+
});
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import 'reflect-metadata';
|
|
2
|
+
import type { StreamMetadata } from '../data.types';
|
|
3
|
+
export interface StreamOptions {
|
|
4
|
+
name: string;
|
|
5
|
+
source: string;
|
|
6
|
+
sink: string;
|
|
7
|
+
parallelism?: number;
|
|
8
|
+
}
|
|
9
|
+
/**
|
|
10
|
+
* @Stream decorator - Streaming pipeline with Flink
|
|
11
|
+
* For real-time stream processing
|
|
12
|
+
*
|
|
13
|
+
* @example
|
|
14
|
+
* ```typescript
|
|
15
|
+
* @Stream({
|
|
16
|
+
* name: 'user-events-stream',
|
|
17
|
+
* source: 'kafka://user-events',
|
|
18
|
+
* sink: 'kafka://processed-events',
|
|
19
|
+
* parallelism: 4,
|
|
20
|
+
* })
|
|
21
|
+
* @Injectable()
|
|
22
|
+
* export class UserEventsStreamPipeline {
|
|
23
|
+
* @Transform({ step: 1, name: 'parse' }) async parseEvent(event) { ... }
|
|
24
|
+
* @Transform({ step: 2, name: 'enrich' }) async enrich(event) { ... }
|
|
25
|
+
* }
|
|
26
|
+
* ```
|
|
27
|
+
*/
|
|
28
|
+
export declare function Stream(options: StreamOptions): ClassDecorator;
|
|
29
|
+
export declare function getStreamMetadata(target: object): StreamMetadata | undefined;
|
|
30
|
+
export declare function hasStreamMetadata(target: object): boolean;
|
|
31
|
+
//# sourceMappingURL=stream.decorator.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"stream.decorator.d.ts","sourceRoot":"","sources":["../../src/decorators/stream.decorator.ts"],"names":[],"mappings":"AAAA,OAAO,kBAAkB,CAAC;AAE1B,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,eAAe,CAAC;AAIpD,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED;;;;;;;;;;;;;;;;;;GAkBG;AACH,wBAAgB,MAAM,CAAC,OAAO,EAAE,aAAa,GAAG,cAAc,CAW7D;AAED,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,MAAM,GAAG,cAAc,GAAG,SAAS,CAE5E;AAED,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAEzD"}
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.Stream = Stream;
|
|
7
|
+
exports.getStreamMetadata = getStreamMetadata;
|
|
8
|
+
exports.hasStreamMetadata = hasStreamMetadata;
|
|
9
|
+
require("reflect-metadata");
|
|
10
|
+
const core_1 = __importDefault(require("@hazeljs/core"));
|
|
11
|
+
const STREAM_METADATA_KEY = 'hazel:data:stream';
|
|
12
|
+
/**
|
|
13
|
+
* @Stream decorator - Streaming pipeline with Flink
|
|
14
|
+
* For real-time stream processing
|
|
15
|
+
*
|
|
16
|
+
* @example
|
|
17
|
+
* ```typescript
|
|
18
|
+
* @Stream({
|
|
19
|
+
* name: 'user-events-stream',
|
|
20
|
+
* source: 'kafka://user-events',
|
|
21
|
+
* sink: 'kafka://processed-events',
|
|
22
|
+
* parallelism: 4,
|
|
23
|
+
* })
|
|
24
|
+
* @Injectable()
|
|
25
|
+
* export class UserEventsStreamPipeline {
|
|
26
|
+
* @Transform({ step: 1, name: 'parse' }) async parseEvent(event) { ... }
|
|
27
|
+
* @Transform({ step: 2, name: 'enrich' }) async enrich(event) { ... }
|
|
28
|
+
* }
|
|
29
|
+
* ```
|
|
30
|
+
*/
|
|
31
|
+
function Stream(options) {
|
|
32
|
+
return (target) => {
|
|
33
|
+
const metadata = {
|
|
34
|
+
name: options.name,
|
|
35
|
+
source: options.source,
|
|
36
|
+
sink: options.sink,
|
|
37
|
+
parallelism: options.parallelism ?? 4,
|
|
38
|
+
};
|
|
39
|
+
Reflect.defineMetadata(STREAM_METADATA_KEY, metadata, target);
|
|
40
|
+
core_1.default.debug(`Stream decorator applied: ${metadata.name}`);
|
|
41
|
+
};
|
|
42
|
+
}
|
|
43
|
+
function getStreamMetadata(target) {
|
|
44
|
+
return Reflect.getMetadata(STREAM_METADATA_KEY, target);
|
|
45
|
+
}
|
|
46
|
+
function hasStreamMetadata(target) {
|
|
47
|
+
return Reflect.hasMetadata(STREAM_METADATA_KEY, target);
|
|
48
|
+
}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import 'reflect-metadata';
|
|
2
|
+
import type { PipelineStepMetadata, RetryConfig, DLQConfig } from '../data.types';
|
|
3
|
+
export interface TransformOptions {
|
|
4
|
+
step: number;
|
|
5
|
+
name: string;
|
|
6
|
+
/** Execute step only when this predicate returns true */
|
|
7
|
+
when?: (data: unknown) => boolean;
|
|
8
|
+
/** Retry failed step with backoff */
|
|
9
|
+
retry?: RetryConfig;
|
|
10
|
+
/** Per-step execution timeout in milliseconds */
|
|
11
|
+
timeoutMs?: number;
|
|
12
|
+
/** Dead letter queue — called on failure instead of throwing */
|
|
13
|
+
dlq?: DLQConfig;
|
|
14
|
+
}
|
|
15
|
+
/**
|
|
16
|
+
* @Transform decorator - Data transformation with step ordering
|
|
17
|
+
* Output from step N feeds as input to step N+1
|
|
18
|
+
*
|
|
19
|
+
* @example
|
|
20
|
+
* ```typescript
|
|
21
|
+
* @Transform({ step: 1, name: 'normalize' })
|
|
22
|
+
* async normalize(data: any) {
|
|
23
|
+
* return { ...data, email: data.email.toLowerCase() };
|
|
24
|
+
* }
|
|
25
|
+
* ```
|
|
26
|
+
*/
|
|
27
|
+
export declare function Transform(options: TransformOptions): MethodDecorator;
|
|
28
|
+
export declare function getTransformMetadata(target: object, propertyKey: string | symbol): PipelineStepMetadata | undefined;
|
|
29
|
+
//# sourceMappingURL=transform.decorator.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"transform.decorator.d.ts","sourceRoot":"","sources":["../../src/decorators/transform.decorator.ts"],"names":[],"mappings":"AAAA,OAAO,kBAAkB,CAAC;AAE1B,OAAO,KAAK,EAAE,oBAAoB,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,eAAe,CAAC;AAIlF,MAAM,WAAW,gBAAgB;IAC/B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,yDAAyD;IACzD,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,OAAO,CAAC;IAClC,qCAAqC;IACrC,KAAK,CAAC,EAAE,WAAW,CAAC;IACpB,iDAAiD;IACjD,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,gEAAgE;IAChE,GAAG,CAAC,EAAE,SAAS,CAAC;CACjB;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,SAAS,CAAC,OAAO,EAAE,gBAAgB,GAAG,eAAe,CAiBpE;AAED,wBAAgB,oBAAoB,CAClC,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,MAAM,GAAG,MAAM,GAC3B,oBAAoB,GAAG,SAAS,CAElC"}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.Transform = Transform;
|
|
7
|
+
exports.getTransformMetadata = getTransformMetadata;
|
|
8
|
+
require("reflect-metadata");
|
|
9
|
+
const core_1 = __importDefault(require("@hazeljs/core"));
|
|
10
|
+
const TRANSFORM_METADATA_KEY = 'hazel:data:transform';
|
|
11
|
+
/**
|
|
12
|
+
* @Transform decorator - Data transformation with step ordering
|
|
13
|
+
* Output from step N feeds as input to step N+1
|
|
14
|
+
*
|
|
15
|
+
* @example
|
|
16
|
+
* ```typescript
|
|
17
|
+
* @Transform({ step: 1, name: 'normalize' })
|
|
18
|
+
* async normalize(data: any) {
|
|
19
|
+
* return { ...data, email: data.email.toLowerCase() };
|
|
20
|
+
* }
|
|
21
|
+
* ```
|
|
22
|
+
*/
|
|
23
|
+
function Transform(options) {
|
|
24
|
+
return (target, propertyKey, descriptor) => {
|
|
25
|
+
const metadata = {
|
|
26
|
+
step: options.step,
|
|
27
|
+
name: options.name,
|
|
28
|
+
type: 'transform',
|
|
29
|
+
when: options.when,
|
|
30
|
+
retry: options.retry,
|
|
31
|
+
timeoutMs: options.timeoutMs,
|
|
32
|
+
dlq: options.dlq,
|
|
33
|
+
};
|
|
34
|
+
Reflect.defineMetadata(TRANSFORM_METADATA_KEY, metadata, target, propertyKey);
|
|
35
|
+
core_1.default.debug(`Transform decorator applied: ${target.constructor.name}.${String(propertyKey)} step=${options.step}`);
|
|
36
|
+
return descriptor;
|
|
37
|
+
};
|
|
38
|
+
}
|
|
39
|
+
function getTransformMetadata(target, propertyKey) {
|
|
40
|
+
return Reflect.getMetadata(TRANSFORM_METADATA_KEY, target, propertyKey);
|
|
41
|
+
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import 'reflect-metadata';
|
|
2
|
+
import type { PipelineStepMetadata, RetryConfig, DLQConfig } from '../data.types';
|
|
3
|
+
import type { BaseSchema } from '../schema/schema';
|
|
4
|
+
export interface ValidateOptions {
|
|
5
|
+
step: number;
|
|
6
|
+
name: string;
|
|
7
|
+
schema: BaseSchema;
|
|
8
|
+
when?: (data: unknown) => boolean;
|
|
9
|
+
retry?: RetryConfig;
|
|
10
|
+
timeoutMs?: number;
|
|
11
|
+
dlq?: DLQConfig;
|
|
12
|
+
}
|
|
13
|
+
/**
|
|
14
|
+
* @Validate decorator - Schema validation with step ordering
|
|
15
|
+
* Validates data before passing to next step
|
|
16
|
+
*
|
|
17
|
+
* @example
|
|
18
|
+
* ```typescript
|
|
19
|
+
* @Validate({
|
|
20
|
+
* step: 2,
|
|
21
|
+
* name: 'validate',
|
|
22
|
+
* schema: Schema.object({
|
|
23
|
+
* email: Schema.string().email(),
|
|
24
|
+
* age: Schema.number().min(0).max(120),
|
|
25
|
+
* })
|
|
26
|
+
* })
|
|
27
|
+
* async validate(data: any) {
|
|
28
|
+
* return data; // Validation happens automatically
|
|
29
|
+
* }
|
|
30
|
+
* ```
|
|
31
|
+
*/
|
|
32
|
+
export declare function Validate(options: ValidateOptions): MethodDecorator;
|
|
33
|
+
export declare function getValidateMetadata(target: object, propertyKey: string | symbol): PipelineStepMetadata | undefined;
|
|
34
|
+
//# sourceMappingURL=validate.decorator.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"validate.decorator.d.ts","sourceRoot":"","sources":["../../src/decorators/validate.decorator.ts"],"names":[],"mappings":"AAAA,OAAO,kBAAkB,CAAC;AAE1B,OAAO,KAAK,EAAE,oBAAoB,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,eAAe,CAAC;AAClF,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAInD,MAAM,WAAW,eAAe;IAC9B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,UAAU,CAAC;IACnB,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,OAAO,CAAC;IAClC,KAAK,CAAC,EAAE,WAAW,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,GAAG,CAAC,EAAE,SAAS,CAAC;CACjB;AAED;;;;;;;;;;;;;;;;;;GAkBG;AACH,wBAAgB,QAAQ,CAAC,OAAO,EAAE,eAAe,GAAG,eAAe,CAkBlE;AAED,wBAAgB,mBAAmB,CACjC,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,MAAM,GAAG,MAAM,GAC3B,oBAAoB,GAAG,SAAS,CAElC"}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.Validate = Validate;
|
|
7
|
+
exports.getValidateMetadata = getValidateMetadata;
|
|
8
|
+
require("reflect-metadata");
|
|
9
|
+
const core_1 = __importDefault(require("@hazeljs/core"));
|
|
10
|
+
const VALIDATE_METADATA_KEY = 'hazel:data:validate';
|
|
11
|
+
/**
|
|
12
|
+
* @Validate decorator - Schema validation with step ordering
|
|
13
|
+
* Validates data before passing to next step
|
|
14
|
+
*
|
|
15
|
+
* @example
|
|
16
|
+
* ```typescript
|
|
17
|
+
* @Validate({
|
|
18
|
+
* step: 2,
|
|
19
|
+
* name: 'validate',
|
|
20
|
+
* schema: Schema.object({
|
|
21
|
+
* email: Schema.string().email(),
|
|
22
|
+
* age: Schema.number().min(0).max(120),
|
|
23
|
+
* })
|
|
24
|
+
* })
|
|
25
|
+
* async validate(data: any) {
|
|
26
|
+
* return data; // Validation happens automatically
|
|
27
|
+
* }
|
|
28
|
+
* ```
|
|
29
|
+
*/
|
|
30
|
+
function Validate(options) {
|
|
31
|
+
return (target, propertyKey, descriptor) => {
|
|
32
|
+
const metadata = {
|
|
33
|
+
step: options.step,
|
|
34
|
+
name: options.name,
|
|
35
|
+
type: 'validate',
|
|
36
|
+
schema: options.schema,
|
|
37
|
+
when: options.when,
|
|
38
|
+
retry: options.retry,
|
|
39
|
+
timeoutMs: options.timeoutMs,
|
|
40
|
+
dlq: options.dlq,
|
|
41
|
+
};
|
|
42
|
+
Reflect.defineMetadata(VALIDATE_METADATA_KEY, metadata, target, propertyKey);
|
|
43
|
+
core_1.default.debug(`Validate decorator applied: ${target.constructor.name}.${String(propertyKey)} step=${options.step}`);
|
|
44
|
+
return descriptor;
|
|
45
|
+
};
|
|
46
|
+
}
|
|
47
|
+
function getValidateMetadata(target, propertyKey) {
|
|
48
|
+
return Reflect.getMetadata(VALIDATE_METADATA_KEY, target, propertyKey);
|
|
49
|
+
}
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { FlinkClient } from './streaming/flink/flink.client';
|
|
2
|
+
import { StreamBuilder } from './streaming/stream.builder';
|
|
3
|
+
import { ETLService } from './pipelines/etl.service';
|
|
4
|
+
import type { FlinkJobConfig } from './data.types';
|
|
5
|
+
import type { FlinkClientConfig } from './streaming/flink/flink.client';
|
|
6
|
+
export interface DeployStreamResult {
|
|
7
|
+
jobId?: string;
|
|
8
|
+
status: string;
|
|
9
|
+
webUI?: string;
|
|
10
|
+
jobConfig: FlinkJobConfig;
|
|
11
|
+
jobGraph: unknown;
|
|
12
|
+
}
|
|
13
|
+
/**
|
|
14
|
+
* Flink Service - Deploy stream pipelines to Flink cluster
|
|
15
|
+
* Wraps FlinkClient and StreamBuilder for pipeline deployment
|
|
16
|
+
*/
|
|
17
|
+
export declare class FlinkService {
|
|
18
|
+
private readonly etlService;
|
|
19
|
+
private readonly streamBuilder;
|
|
20
|
+
private flinkClient;
|
|
21
|
+
constructor(etlService: ETLService, streamBuilder: StreamBuilder);
|
|
22
|
+
configure(config: FlinkClientConfig): void;
|
|
23
|
+
getClient(): FlinkClient;
|
|
24
|
+
deployStream(pipeline: object, config?: Partial<FlinkJobConfig>): Promise<DeployStreamResult>;
|
|
25
|
+
getJobStatus(jobId: string): Promise<{
|
|
26
|
+
state: string;
|
|
27
|
+
startTime?: number;
|
|
28
|
+
duration?: number;
|
|
29
|
+
}>;
|
|
30
|
+
cancelJob(jobId: string): Promise<void>;
|
|
31
|
+
createSavepoint(jobId: string, savepointPath?: string): Promise<{
|
|
32
|
+
'request-id': string;
|
|
33
|
+
}>;
|
|
34
|
+
stopJob(jobId: string, savepointPath?: string): Promise<{
|
|
35
|
+
'request-id': string;
|
|
36
|
+
}>;
|
|
37
|
+
getClusterInfo(): Promise<{
|
|
38
|
+
taskmanagers?: number;
|
|
39
|
+
'slots-total'?: number;
|
|
40
|
+
}>;
|
|
41
|
+
getTaskManagers(): Promise<unknown[]>;
|
|
42
|
+
listJobs(): Promise<{
|
|
43
|
+
id: string;
|
|
44
|
+
status: string;
|
|
45
|
+
startTime?: number;
|
|
46
|
+
endTime?: number;
|
|
47
|
+
duration?: number;
|
|
48
|
+
}[]>;
|
|
49
|
+
/**
|
|
50
|
+
* Deploy a stream pipeline by uploading a JAR and running it.
|
|
51
|
+
* @param pipeline The @Stream-decorated pipeline instance
|
|
52
|
+
* @param jarFile Local path to the compiled pipeline JAR
|
|
53
|
+
* @param config Optional Flink job config overrides
|
|
54
|
+
*/
|
|
55
|
+
deployStreamWithJar(pipeline: object, jarFile: string, config?: Partial<FlinkJobConfig>): Promise<DeployStreamResult>;
|
|
56
|
+
/**
|
|
57
|
+
* Deploy a streaming pipeline using Flink SQL Gateway.
|
|
58
|
+
* @param sql The SQL DDL+DML to submit (CREATE TABLE + INSERT INTO)
|
|
59
|
+
* @param sessionId Optional existing session ID; a new session is created if omitted
|
|
60
|
+
*/
|
|
61
|
+
deployStreamWithSql(sql: string, sessionId?: string): Promise<{
|
|
62
|
+
operationId: string;
|
|
63
|
+
sessionId: string;
|
|
64
|
+
}>;
|
|
65
|
+
uploadJar(jarFile: string): Promise<string>;
|
|
66
|
+
runJar(jarId: string, options?: {
|
|
67
|
+
jobName?: string;
|
|
68
|
+
parallelism?: number;
|
|
69
|
+
entryClass?: string;
|
|
70
|
+
programArgs?: string;
|
|
71
|
+
}): Promise<string>;
|
|
72
|
+
listJars(): Promise<Array<{
|
|
73
|
+
id: string;
|
|
74
|
+
name: string;
|
|
75
|
+
uploaded: number;
|
|
76
|
+
}>>;
|
|
77
|
+
deleteJar(jarId: string): Promise<void>;
|
|
78
|
+
createSqlSession(properties?: Record<string, string>): Promise<string>;
|
|
79
|
+
}
|
|
80
|
+
//# sourceMappingURL=flink.service.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"flink.service.d.ts","sourceRoot":"","sources":["../src/flink.service.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,gCAAgC,CAAC;AAC7D,OAAO,EAAE,aAAa,EAAE,MAAM,4BAA4B,CAAC;AAC3D,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AACrD,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AACnD,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,gCAAgC,CAAC;AAExE,MAAM,WAAW,kBAAkB;IACjC,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,SAAS,EAAE,cAAc,CAAC;IAC1B,QAAQ,EAAE,OAAO,CAAC;CACnB;AAED;;;GAGG;AACH,qBACa,YAAY;IAIrB,OAAO,CAAC,QAAQ,CAAC,UAAU;IAC3B,OAAO,CAAC,QAAQ,CAAC,aAAa;IAJhC,OAAO,CAAC,WAAW,CAA4B;gBAG5B,UAAU,EAAE,UAAU,EACtB,aAAa,EAAE,aAAa;IAG/C,SAAS,CAAC,MAAM,EAAE,iBAAiB,GAAG,IAAI;IAI1C,SAAS,IAAI,WAAW;IASlB,YAAY,CAChB,QAAQ,EAAE,MAAM,EAChB,MAAM,CAAC,EAAE,OAAO,CAAC,cAAc,CAAC,GAC/B,OAAO,CAAC,kBAAkB,CAAC;IAyBxB,YAAY,CAChB,KAAK,EAAE,MAAM,GACZ,OAAO,CAAC;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC;IAI9D,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAIvC,eAAe,CAAC,KAAK,EAAE,MAAM,EAAE,aAAa,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;QAAE,YAAY,EAAE,MAAM,CAAA;KAAE,CAAC;IAIzF,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,aAAa,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;QAAE,YAAY,EAAE,MAAM,CAAA;KAAE,CAAC;IAIjF,cAAc,IAAI,OAAO,CAAC;QAAE,YAAY,CAAC,EAAE,MAAM,CAAC;QAAC,aAAa,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC;IAI5E,eAAe,IAAI,OAAO,CAAC,OAAO,EAAE,CAAC;IAIrC,QAAQ,IAAI,OAAO,CACvB;QAAE,EAAE,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,CAC1F;IAID;;;;;OAKG;IACG,mBAAmB,CACvB,QAAQ,EAAE,MAAM,EAChB,OAAO,EAAE,MAAM,EACf,MAAM,CAAC,EAAE,OAAO,CAAC,cAAc,CAAC,GAC/B,OAAO,CAAC,kBAAkB,CAAC;IAkB9B;;;;OAIG;IACG,mBAAmB,CACvB,GAAG,EAAE,MAAM,EACX,SAAS,CAAC,EAAE,MAAM,GACjB,OAAO,CAAC;QAAE,WAAW,EAAE,MAAM,CAAC;QAAC,SAAS,EAAE,MAAM,CAAA;KAAE,CAAC;IAOhD,SAAS,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAI3C,MAAM,CACV,KAAK,EAAE,MAAM,EACb,OAAO,GAAE;QACP,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,WAAW,CAAC,EAA
E,MAAM,CAAC;QACrB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,WAAW,CAAC,EAAE,MAAM,CAAC;KACjB,GACL,OAAO,CAAC,MAAM,CAAC;IAIZ,QAAQ,IAAI,OAAO,CAAC,KAAK,CAAC;QAAE,EAAE,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IAI1E,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAIvC,gBAAgB,CAAC,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC;CAG7E"}
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
3
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
4
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
5
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
6
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
7
|
+
};
|
|
8
|
+
var __metadata = (this && this.__metadata) || function (k, v) {
|
|
9
|
+
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
|
|
10
|
+
};
|
|
11
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
|
+
exports.FlinkService = void 0;
|
|
13
|
+
const core_1 = require("@hazeljs/core");
|
|
14
|
+
const flink_client_1 = require("./streaming/flink/flink.client");
|
|
15
|
+
const stream_builder_1 = require("./streaming/stream.builder");
|
|
16
|
+
const etl_service_1 = require("./pipelines/etl.service");
|
|
17
|
+
/**
 * Flink Service - Deploy stream pipelines to Flink cluster
 * Wraps FlinkClient and StreamBuilder for pipeline deployment
 */
let FlinkService = class FlinkService {
    /**
     * @param etlService   ETL pipeline service (injected, kept for DI wiring)
     * @param streamBuilder Translates pipeline instances into Flink job configs
     */
    constructor(etlService, streamBuilder) {
        this.etlService = etlService;
        this.streamBuilder = streamBuilder;
        // Lazily created by configure(); guarded by getClient().
        this.flinkClient = null;
    }
    /** Create the underlying FlinkClient from the given connection config. */
    configure(config) {
        this.flinkClient = new flink_client_1.FlinkClient(config);
    }
    /**
     * Return the configured client.
     * @throws Error when configure() has not been called yet.
     */
    getClient() {
        if (this.flinkClient) {
            return this.flinkClient;
        }
        throw new Error('FlinkService not configured. Call configure() or use DataModule.forRoot() with flink options.');
    }
    /**
     * Build and submit a stream pipeline. If submission fails (e.g. no JAR
     * is available), falls back to returning the generated config so the job
     * can be deployed manually.
     */
    async deployStream(pipeline, config) {
        const built = this.streamBuilder.buildConfig(pipeline, config);
        const client = this.getClient();
        try {
            const jobId = await client.submitJob(built.jobConfig);
            return {
                jobId,
                status: 'submitted',
                webUI: `${client.url}/#/job/${jobId}`,
                jobConfig: built.jobConfig,
                jobGraph: built.jobGraph,
            };
        }
        catch {
            // submitJob throws without jarFile - return config for manual deployment
            return {
                status: 'config_generated',
                jobConfig: built.jobConfig,
                jobGraph: built.jobGraph,
                webUI: client.url,
            };
        }
    }
    /** Look up the status of a running job. */
    async getJobStatus(jobId) {
        const client = this.getClient();
        return client.getJobStatus(jobId);
    }
    /** Cancel a running job. */
    async cancelJob(jobId) {
        const client = this.getClient();
        return client.cancelJob(jobId);
    }
    /** Trigger a savepoint for a job, optionally at a specific path. */
    async createSavepoint(jobId, savepointPath) {
        const client = this.getClient();
        return client.createSavepoint(jobId, savepointPath);
    }
    /** Stop a job gracefully, optionally draining to a savepoint path. */
    async stopJob(jobId, savepointPath) {
        const client = this.getClient();
        return client.stopJob(jobId, savepointPath);
    }
    /** Fetch cluster-level info from the JobManager. */
    async getClusterInfo() {
        const client = this.getClient();
        return client.getClusterInfo();
    }
    /** List the cluster's task managers. */
    async getTaskManagers() {
        const client = this.getClient();
        return client.getTaskManagers();
    }
    /** List all jobs known to the cluster. */
    async listJobs() {
        const client = this.getClient();
        return client.listJobs();
    }
    /**
     * Deploy a stream pipeline by uploading a JAR and running it.
     * @param pipeline The @Stream-decorated pipeline instance
     * @param jarFile Local path to the compiled pipeline JAR
     * @param config Optional Flink job config overrides
     */
    async deployStreamWithJar(pipeline, jarFile, config) {
        const built = this.streamBuilder.buildConfig(pipeline, config);
        const client = this.getClient();
        const submission = {
            jarFile,
            jobName: built.jobConfig.jobName,
            parallelism: built.jobConfig.parallelism,
        };
        const jobId = await client.submitJob(built.jobConfig, submission);
        return {
            jobId,
            status: 'submitted',
            webUI: `${client.url}/#/job/${jobId}`,
            jobConfig: built.jobConfig,
            jobGraph: built.jobGraph,
        };
    }
    /**
     * Deploy a streaming pipeline using Flink SQL Gateway.
     * @param sql The SQL DDL+DML to submit (CREATE TABLE + INSERT INTO)
     * @param sessionId Optional existing session ID; a new session is created if omitted
     */
    async deployStreamWithSql(sql, sessionId) {
        const client = this.getClient();
        // Reuse the caller's session when provided; otherwise open a new one.
        let sid = sessionId;
        if (sid == null) {
            sid = await client.createSqlSession();
        }
        const operationId = await client.submitSql(sql, sid);
        return { operationId, sessionId: sid };
    }
    /** Upload a local JAR to the cluster; resolves to its jar ID. */
    async uploadJar(jarFile) {
        const client = this.getClient();
        return client.uploadJar(jarFile);
    }
    /** Run a previously uploaded JAR with optional run options. */
    async runJar(jarId, options = {}) {
        const client = this.getClient();
        return client.runJar(jarId, options);
    }
    /** List JARs uploaded to the cluster. */
    async listJars() {
        const client = this.getClient();
        return client.listJars();
    }
    /** Delete an uploaded JAR by ID. */
    async deleteJar(jarId) {
        const client = this.getClient();
        return client.deleteJar(jarId);
    }
    /** Open a new SQL Gateway session with optional session properties. */
    async createSqlSession(properties) {
        const client = this.getClient();
        return client.createSqlSession(properties);
    }
};
|
|
129
|
+
exports.FlinkService = FlinkService;
// Apply the @Service() DI decorator plus the constructor parameter-type
// metadata emitted by tsc (used by the container to resolve ETLService and
// StreamBuilder), then rebind the export to the decorated class.
exports.FlinkService = FlinkService = __decorate([
    (0, core_1.Service)(),
    __metadata("design:paramtypes", [etl_service_1.ETLService,
        stream_builder_1.StreamBuilder])
], FlinkService);
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"flink.service.test.d.ts","sourceRoot":"","sources":["../src/flink.service.test.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
3
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
4
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
5
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
6
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
7
|
+
};
|
|
8
|
+
var __metadata = (this && this.__metadata) || function (k, v) {
|
|
9
|
+
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
|
|
10
|
+
};
|
|
11
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
12
|
+
const flink_service_1 = require("./flink.service");
|
|
13
|
+
const etl_service_1 = require("./pipelines/etl.service");
|
|
14
|
+
const stream_builder_1 = require("./streaming/stream.builder");
|
|
15
|
+
const schema_validator_1 = require("./validators/schema.validator");
|
|
16
|
+
const decorators_1 = require("./decorators");
|
|
17
|
+
// Minimal pipeline fixture: a single identity transform step, decorated as a
// Kafka-to-Kafka stream so StreamBuilder can derive a job config from it.
let TestPipeline = class TestPipeline {
    // Identity transform; registered as step 1 via @Transform below.
    a(data) {
        return data;
    }
};
__decorate([
    (0, decorators_1.Transform)({ step: 1, name: 'a' }),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [Object]),
    __metadata("design:returntype", void 0)
], TestPipeline.prototype, "a", null);
TestPipeline = __decorate([
    (0, decorators_1.Stream)({ name: 'test', source: 'kafka://in', sink: 'kafka://out', parallelism: 2 })
], TestPipeline);
|
|
31
|
+
// Unit tests for FlinkService: configuration guard, client setup, and the
// config_generated fallback when no JAR is available for submission.
describe('FlinkService', () => {
    let service;
    beforeEach(() => {
        // Wire the real dependency chain: validator -> ETL service -> builder.
        const etl = new etl_service_1.ETLService(new schema_validator_1.SchemaValidator());
        const builder = new stream_builder_1.StreamBuilder(etl);
        service = new flink_service_1.FlinkService(etl, builder);
    });
    it('getClient throws when not configured', () => {
        const attempt = () => service.getClient();
        expect(attempt).toThrow('FlinkService not configured');
    });
    it('configure sets up client', () => {
        service.configure({ url: 'http://flink:8081' });
        expect(service.getClient().url).toBe('http://flink:8081');
    });
    it('deployStream returns job config', async () => {
        service.configure({ url: 'http://localhost:8081' });
        const result = await service.deployStream(new TestPipeline());
        expect(result.status).toBe('config_generated');
        expect(result.jobConfig).toBeDefined();
        expect(result.jobConfig.jobName).toBe('TestPipeline');
        expect(result.jobGraph).toBeDefined();
    });
    it('deployStream with config override', async () => {
        service.configure({ url: 'http://localhost:8081' });
        const overridden = await service.deployStream(new TestPipeline(), { parallelism: 16 });
        expect(overridden.jobConfig.parallelism).toBe(16);
    });
});
|