@hazeljs/data 0.2.0-beta.37

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/LICENSE +192 -0
  2. package/dist/data.module.d.ts +30 -0
  3. package/dist/data.module.d.ts.map +1 -0
  4. package/dist/data.module.js +120 -0
  5. package/dist/data.types.d.ts +51 -0
  6. package/dist/data.types.d.ts.map +1 -0
  7. package/dist/data.types.js +5 -0
  8. package/dist/decorators/index.d.ts +5 -0
  9. package/dist/decorators/index.d.ts.map +1 -0
  10. package/dist/decorators/index.js +17 -0
  11. package/dist/decorators/pipeline.decorator.d.ts +22 -0
  12. package/dist/decorators/pipeline.decorator.d.ts.map +1 -0
  13. package/dist/decorators/pipeline.decorator.js +42 -0
  14. package/dist/decorators/stream.decorator.d.ts +31 -0
  15. package/dist/decorators/stream.decorator.d.ts.map +1 -0
  16. package/dist/decorators/stream.decorator.js +48 -0
  17. package/dist/decorators/transform.decorator.d.ts +21 -0
  18. package/dist/decorators/transform.decorator.d.ts.map +1 -0
  19. package/dist/decorators/transform.decorator.js +37 -0
  20. package/dist/decorators/validate.decorator.d.ts +30 -0
  21. package/dist/decorators/validate.decorator.d.ts.map +1 -0
  22. package/dist/decorators/validate.decorator.js +45 -0
  23. package/dist/flink.service.d.ts +50 -0
  24. package/dist/flink.service.d.ts.map +1 -0
  25. package/dist/flink.service.js +86 -0
  26. package/dist/index.d.ts +26 -0
  27. package/dist/index.d.ts.map +1 -0
  28. package/dist/index.js +68 -0
  29. package/dist/pipelines/etl.service.d.ts +20 -0
  30. package/dist/pipelines/etl.service.d.ts.map +1 -0
  31. package/dist/pipelines/etl.service.js +86 -0
  32. package/dist/pipelines/pipeline.base.d.ts +24 -0
  33. package/dist/pipelines/pipeline.base.d.ts.map +1 -0
  34. package/dist/pipelines/pipeline.base.js +29 -0
  35. package/dist/pipelines/pipeline.builder.d.ts +22 -0
  36. package/dist/pipelines/pipeline.builder.d.ts.map +1 -0
  37. package/dist/pipelines/pipeline.builder.js +62 -0
  38. package/dist/pipelines/stream.service.d.ts +12 -0
  39. package/dist/pipelines/stream.service.d.ts.map +1 -0
  40. package/dist/pipelines/stream.service.js +58 -0
  41. package/dist/quality/quality.service.d.ts +25 -0
  42. package/dist/quality/quality.service.d.ts.map +1 -0
  43. package/dist/quality/quality.service.js +87 -0
  44. package/dist/schema/schema.d.ts +47 -0
  45. package/dist/schema/schema.d.ts.map +1 -0
  46. package/dist/schema/schema.js +175 -0
  47. package/dist/streaming/flink/flink.client.d.ts +58 -0
  48. package/dist/streaming/flink/flink.client.d.ts.map +1 -0
  49. package/dist/streaming/flink/flink.client.js +104 -0
  50. package/dist/streaming/flink/flink.job.d.ts +28 -0
  51. package/dist/streaming/flink/flink.job.d.ts.map +1 -0
  52. package/dist/streaming/flink/flink.job.js +27 -0
  53. package/dist/streaming/flink/flink.operators.d.ts +35 -0
  54. package/dist/streaming/flink/flink.operators.d.ts.map +1 -0
  55. package/dist/streaming/flink/flink.operators.js +43 -0
  56. package/dist/streaming/stream.builder.d.ts +22 -0
  57. package/dist/streaming/stream.builder.d.ts.map +1 -0
  58. package/dist/streaming/stream.builder.js +50 -0
  59. package/dist/streaming/stream.processor.d.ts +12 -0
  60. package/dist/streaming/stream.processor.d.ts.map +1 -0
  61. package/dist/streaming/stream.processor.js +31 -0
  62. package/dist/transformers/built-in.transformers.d.ts +12 -0
  63. package/dist/transformers/built-in.transformers.d.ts.map +1 -0
  64. package/dist/transformers/built-in.transformers.js +75 -0
  65. package/dist/transformers/transformer.service.d.ts +14 -0
  66. package/dist/transformers/transformer.service.d.ts.map +1 -0
  67. package/dist/transformers/transformer.service.js +65 -0
  68. package/dist/validators/schema.validator.d.ts +21 -0
  69. package/dist/validators/schema.validator.d.ts.map +1 -0
  70. package/dist/validators/schema.validator.js +40 -0
  71. package/package.json +53 -0
@@ -0,0 +1,58 @@
1
+ "use strict";
2
+ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
3
+ var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
4
+ if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
5
+ else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
6
+ return c > 3 && r && Object.defineProperty(target, key, r), r;
7
+ };
8
+ var __metadata = (this && this.__metadata) || function (k, v) {
9
+ if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
10
+ };
11
+ var __importDefault = (this && this.__importDefault) || function (mod) {
12
+ return (mod && mod.__esModule) ? mod : { "default": mod };
13
+ };
14
+ Object.defineProperty(exports, "__esModule", { value: true });
15
+ exports.StreamService = void 0;
16
+ const core_1 = require("@hazeljs/core");
17
+ const etl_service_1 = require("./etl.service");
18
+ const decorators_1 = require("../decorators");
19
+ const core_2 = __importDefault(require("@hazeljs/core"));
20
+ /**
21
+ * Stream Service - Streaming pipeline execution
22
+ * Processes data through pipeline steps (for in-process streaming, not Flink deployment)
23
+ */
24
+ let StreamService = class StreamService {
25
+ constructor(etlService) {
26
+ this.etlService = etlService;
27
+ }
28
+ async *processStream(pipelineInstance, source) {
29
+ const metadata = (0, decorators_1.getStreamMetadata)(pipelineInstance.constructor);
30
+ if (!metadata) {
31
+ throw new Error('Pipeline is not decorated with @Stream');
32
+ }
33
+ core_2.default.debug(`Processing stream ${metadata.name}`);
34
+ for await (const item of source) {
35
+ try {
36
+ const result = await this.etlService.execute(pipelineInstance, item);
37
+ yield result;
38
+ }
39
+ catch (error) {
40
+ core_2.default.error(`Stream ${metadata.name} error processing item:`, error);
41
+ throw error;
42
+ }
43
+ }
44
+ }
45
+ async processBatch(pipelineInstance, items) {
46
+ const results = [];
47
+ for (const item of items) {
48
+ const result = await this.etlService.execute(pipelineInstance, item);
49
+ results.push(result);
50
+ }
51
+ return results;
52
+ }
53
+ };
54
+ exports.StreamService = StreamService;
55
+ exports.StreamService = StreamService = __decorate([
56
+ (0, core_1.Injectable)(),
57
+ __metadata("design:paramtypes", [etl_service_1.ETLService])
58
+ ], StreamService);
@@ -0,0 +1,25 @@
1
+ export interface QualityCheckResult {
2
+ name: string;
3
+ passed: boolean;
4
+ message?: string;
5
+ details?: Record<string, unknown>;
6
+ }
7
+ export interface DataQualityReport {
8
+ timestamp: Date;
9
+ dataset: string;
10
+ totalRows: number;
11
+ checks: QualityCheckResult[];
12
+ passed: boolean;
13
+ }
14
+ /**
15
+ * Quality Service - Data quality checks
16
+ * Validates data completeness, consistency, and integrity
17
+ */
18
+ export declare class QualityService {
19
+ private checks;
20
+ registerCheck(name: string, check: (data: unknown) => Promise<QualityCheckResult> | QualityCheckResult): void;
21
+ runChecks(dataset: string, data: unknown): Promise<DataQualityReport>;
22
+ completeness(requiredFields: string[]): (data: unknown) => QualityCheckResult;
23
+ notNull(fields: string[]): (data: unknown) => QualityCheckResult;
24
+ }
25
+ //# sourceMappingURL=quality.service.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"quality.service.d.ts","sourceRoot":"","sources":["../../src/quality/quality.service.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,kBAAkB;IACjC,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,OAAO,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CACnC;AAED,MAAM,WAAW,iBAAiB;IAChC,SAAS,EAAE,IAAI,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;IAChB,SAAS,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,kBAAkB,EAAE,CAAC;IAC7B,MAAM,EAAE,OAAO,CAAC;CACjB;AAED;;;GAGG;AACH,qBACa,cAAc;IACzB,OAAO,CAAC,MAAM,CACF;IAEZ,aAAa,CACX,IAAI,EAAE,MAAM,EACZ,KAAK,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,OAAO,CAAC,kBAAkB,CAAC,GAAG,kBAAkB,GACzE,IAAI;IAKD,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,GAAG,OAAO,CAAC,iBAAiB,CAAC;IA6B3E,YAAY,CAAC,cAAc,EAAE,MAAM,EAAE,IAC3B,MAAM,OAAO,KAAG,kBAAkB;IAe5C,OAAO,CAAC,MAAM,EAAE,MAAM,EAAE,IACd,MAAM,OAAO,KAAG,kBAAkB;CAc7C"}
@@ -0,0 +1,87 @@
1
+ "use strict";
2
+ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
3
+ var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
4
+ if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
5
+ else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
6
+ return c > 3 && r && Object.defineProperty(target, key, r), r;
7
+ };
8
+ var __importDefault = (this && this.__importDefault) || function (mod) {
9
+ return (mod && mod.__esModule) ? mod : { "default": mod };
10
+ };
11
+ Object.defineProperty(exports, "__esModule", { value: true });
12
+ exports.QualityService = void 0;
13
+ const core_1 = require("@hazeljs/core");
14
+ const core_2 = __importDefault(require("@hazeljs/core"));
15
+ /**
16
+ * Quality Service - Data quality checks
17
+ * Validates data completeness, consistency, and integrity
18
+ */
19
+ let QualityService = class QualityService {
20
+ constructor() {
21
+ this.checks = new Map();
22
+ }
23
+ registerCheck(name, check) {
24
+ this.checks.set(name, check);
25
+ core_2.default.debug(`Registered quality check: ${name}`);
26
+ }
27
+ async runChecks(dataset, data) {
28
+ const results = [];
29
+ const items = Array.isArray(data) ? data : [data];
30
+ const totalRows = items.length;
31
+ for (const [name, check] of this.checks) {
32
+ try {
33
+ const result = await Promise.resolve(check(data));
34
+ results.push({ ...result, name: result.name || name });
35
+ }
36
+ catch (error) {
37
+ results.push({
38
+ name,
39
+ passed: false,
40
+ message: error instanceof Error ? error.message : 'Check failed',
41
+ });
42
+ }
43
+ }
44
+ const passed = results.every((r) => r.passed);
45
+ return {
46
+ timestamp: new Date(),
47
+ dataset,
48
+ totalRows,
49
+ checks: results,
50
+ passed,
51
+ };
52
+ }
53
+ completeness(requiredFields) {
54
+ return (data) => {
55
+ if (data === null || typeof data !== 'object') {
56
+ return { name: 'completeness', passed: false, message: 'Data is not an object' };
57
+ }
58
+ const obj = data;
59
+ const missing = requiredFields.filter((f) => obj[f] === undefined || obj[f] === null);
60
+ return {
61
+ name: 'completeness',
62
+ passed: missing.length === 0,
63
+ message: missing.length > 0 ? `Missing fields: ${missing.join(', ')}` : undefined,
64
+ details: { missing, required: requiredFields },
65
+ };
66
+ };
67
+ }
68
+ notNull(fields) {
69
+ return (data) => {
70
+ if (data === null || typeof data !== 'object') {
71
+ return { name: 'notNull', passed: false, message: 'Data is not an object' };
72
+ }
73
+ const obj = data;
74
+ const nullFields = fields.filter((f) => obj[f] === null || obj[f] === undefined);
75
+ return {
76
+ name: 'notNull',
77
+ passed: nullFields.length === 0,
78
+ message: nullFields.length > 0 ? `Null fields: ${nullFields.join(', ')}` : undefined,
79
+ details: { nullFields },
80
+ };
81
+ };
82
+ }
83
+ };
84
+ exports.QualityService = QualityService;
85
+ exports.QualityService = QualityService = __decorate([
86
+ (0, core_1.Injectable)()
87
+ ], QualityService);
@@ -0,0 +1,47 @@
1
+ /**
2
+ * Schema builder for data validation - fluent API similar to Zod
3
+ */
4
+ export interface SchemaValidationError {
5
+ path: string;
6
+ message: string;
7
+ }
8
+ export type SchemaValidator<T = unknown> = (value: unknown) => {
9
+ success: true;
10
+ data: T;
11
+ } | {
12
+ success: false;
13
+ errors: SchemaValidationError[];
14
+ };
15
+ export interface BaseSchema<T = unknown> {
16
+ _type?: T;
17
+ validate(value: unknown): {
18
+ success: true;
19
+ data: T;
20
+ } | {
21
+ success: false;
22
+ errors: SchemaValidationError[];
23
+ };
24
+ }
25
+ export interface StringSchema extends BaseSchema<string> {
26
+ email(): StringSchema;
27
+ min(length: number): StringSchema;
28
+ max(length: number): StringSchema;
29
+ uuid(): StringSchema;
30
+ oneOf(values: string[]): StringSchema;
31
+ }
32
+ export interface NumberSchema extends BaseSchema<number> {
33
+ min(n: number): NumberSchema;
34
+ max(n: number): NumberSchema;
35
+ }
36
+ export type DateSchema = BaseSchema<Date>;
37
+ export interface ObjectSchema<T = Record<string, unknown>> extends BaseSchema<T> {
38
+ shape: Record<string, BaseSchema>;
39
+ }
40
+ export declare const Schema: {
41
+ string(): StringSchema;
42
+ number(): NumberSchema;
43
+ date(): DateSchema;
44
+ object<T extends Record<string, BaseSchema>>(shape: T): ObjectSchema;
45
+ array(itemSchema: BaseSchema): BaseSchema<unknown[]>;
46
+ };
47
+ //# sourceMappingURL=schema.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../../src/schema/schema.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,MAAM,WAAW,qBAAqB;IACpC,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,MAAM,eAAe,CAAC,CAAC,GAAG,OAAO,IAAI,CACzC,KAAK,EAAE,OAAO,KACX;IAAE,OAAO,EAAE,IAAI,CAAC;IAAC,IAAI,EAAE,CAAC,CAAA;CAAE,GAAG;IAAE,OAAO,EAAE,KAAK,CAAC;IAAC,MAAM,EAAE,qBAAqB,EAAE,CAAA;CAAE,CAAC;AAGtF,MAAM,WAAW,UAAU,CAAC,CAAC,GAAG,OAAO;IACrC,KAAK,CAAC,EAAE,CAAC,CAAC;IACV,QAAQ,CACN,KAAK,EAAE,OAAO,GACb;QAAE,OAAO,EAAE,IAAI,CAAC;QAAC,IAAI,EAAE,CAAC,CAAA;KAAE,GAAG;QAAE,OAAO,EAAE,KAAK,CAAC;QAAC,MAAM,EAAE,qBAAqB,EAAE,CAAA;KAAE,CAAC;CACrF;AAGD,MAAM,WAAW,YAAa,SAAQ,UAAU,CAAC,MAAM,CAAC;IACtD,KAAK,IAAI,YAAY,CAAC;IACtB,GAAG,CAAC,MAAM,EAAE,MAAM,GAAG,YAAY,CAAC;IAClC,GAAG,CAAC,MAAM,EAAE,MAAM,GAAG,YAAY,CAAC;IAClC,IAAI,IAAI,YAAY,CAAC;IACrB,KAAK,CAAC,MAAM,EAAE,MAAM,EAAE,GAAG,YAAY,CAAC;CACvC;AAGD,MAAM,WAAW,YAAa,SAAQ,UAAU,CAAC,MAAM,CAAC;IACtD,GAAG,CAAC,CAAC,EAAE,MAAM,GAAG,YAAY,CAAC;IAC7B,GAAG,CAAC,CAAC,EAAE,MAAM,GAAG,YAAY,CAAC;CAC9B;AAGD,MAAM,MAAM,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC;AAG1C,MAAM,WAAW,YAAY,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAE,SAAQ,UAAU,CAAC,CAAC,CAAC;IAC9E,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;CACnC;AAsJD,eAAO,MAAM,MAAM;cACP,YAAY;cAGZ,YAAY;YAGd,UAAU;WAGX,CAAC,SAAS,MAAM,CAAC,MAAM,EAAE,UAAU,CAAC,SAAS,CAAC,GAAG,YAAY;sBAGlD,UAAU,GAAG,UAAU,CAAC,OAAO,EAAE,CAAC;CA2BrD,CAAC"}
@@ -0,0 +1,175 @@
1
+ "use strict";
2
+ /**
3
+ * Schema builder for data validation - fluent API similar to Zod
4
+ */
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.Schema = void 0;
7
+ // Schema factory
8
+ function createStringSchema(constraints = []) {
9
+ const validate = (value) => {
10
+ if (typeof value !== 'string') {
11
+ return { success: false, errors: [{ path: '', message: 'Expected string' }] };
12
+ }
13
+ for (const c of constraints) {
14
+ const err = c(value);
15
+ if (err)
16
+ return { success: false, errors: [{ path: '', message: err }] };
17
+ }
18
+ return { success: true, data: value };
19
+ };
20
+ const schema = {
21
+ _type: undefined,
22
+ validate,
23
+ email() {
24
+ return createStringSchema([
25
+ ...constraints,
26
+ (v) => {
27
+ const emailRe = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
28
+ return emailRe.test(v) ? null : 'Invalid email';
29
+ },
30
+ ]);
31
+ },
32
+ min(length) {
33
+ return createStringSchema([
34
+ ...constraints,
35
+ (v) => (v.length >= length ? null : `Min length ${length}`),
36
+ ]);
37
+ },
38
+ max(length) {
39
+ return createStringSchema([
40
+ ...constraints,
41
+ (v) => (v.length <= length ? null : `Max length ${length}`),
42
+ ]);
43
+ },
44
+ uuid() {
45
+ return createStringSchema([
46
+ ...constraints,
47
+ (v) => {
48
+ const uuidRe = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
49
+ return uuidRe.test(v) ? null : 'Invalid UUID';
50
+ },
51
+ ]);
52
+ },
53
+ oneOf(values) {
54
+ return createStringSchema([
55
+ ...constraints,
56
+ (v) => values.includes(v) ? null : `Must be one of: ${values.join(', ')}`,
57
+ ]);
58
+ },
59
+ };
60
+ return schema;
61
+ }
62
+ function createNumberSchema(constraints = []) {
63
+ const validate = (value) => {
64
+ if (typeof value !== 'number' || Number.isNaN(value)) {
65
+ return { success: false, errors: [{ path: '', message: 'Expected number' }] };
66
+ }
67
+ for (const c of constraints) {
68
+ const err = c(value);
69
+ if (err)
70
+ return { success: false, errors: [{ path: '', message: err }] };
71
+ }
72
+ return { success: true, data: value };
73
+ };
74
+ return {
75
+ _type: undefined,
76
+ validate,
77
+ min(n) {
78
+ return createNumberSchema([
79
+ ...constraints,
80
+ (v) => (v >= n ? null : `Min ${n}`),
81
+ ]);
82
+ },
83
+ max(n) {
84
+ return createNumberSchema([
85
+ ...constraints,
86
+ (v) => (v <= n ? null : `Max ${n}`),
87
+ ]);
88
+ },
89
+ };
90
+ }
91
+ function createDateSchema() {
92
+ return {
93
+ _type: undefined,
94
+ validate(value) {
95
+ if (value instanceof Date && !Number.isNaN(value.getTime())) {
96
+ return { success: true, data: value };
97
+ }
98
+ if (typeof value === 'string' || typeof value === 'number') {
99
+ const d = new Date(value);
100
+ if (!Number.isNaN(d.getTime()))
101
+ return { success: true, data: d };
102
+ }
103
+ return { success: false, errors: [{ path: '', message: 'Expected date' }] };
104
+ },
105
+ };
106
+ }
107
+ function createObjectSchema(shape) {
108
+ const validate = (value) => {
109
+ if (value === null || typeof value !== 'object' || Array.isArray(value)) {
110
+ return { success: false, errors: [{ path: '', message: 'Expected object' }] };
111
+ }
112
+ const obj = value;
113
+ const data = {};
114
+ const errors = [];
115
+ for (const [key, fieldSchema] of Object.entries(shape)) {
116
+ const result = fieldSchema.validate(obj[key]);
117
+ if (result.success) {
118
+ data[key] = result.data;
119
+ }
120
+ else {
121
+ errors.push(...result.errors.map((e) => ({
122
+ path: key + (e.path ? '.' + e.path : ''),
123
+ message: e.message,
124
+ })));
125
+ }
126
+ }
127
+ if (errors.length > 0)
128
+ return { success: false, errors };
129
+ return { success: true, data };
130
+ };
131
+ return {
132
+ _type: undefined,
133
+ shape,
134
+ validate,
135
+ };
136
+ }
137
+ exports.Schema = {
138
+ string() {
139
+ return createStringSchema();
140
+ },
141
+ number() {
142
+ return createNumberSchema();
143
+ },
144
+ date() {
145
+ return createDateSchema();
146
+ },
147
+ object(shape) {
148
+ return createObjectSchema(shape);
149
+ },
150
+ array(itemSchema) {
151
+ return {
152
+ _type: undefined,
153
+ validate(value) {
154
+ if (!Array.isArray(value)) {
155
+ return { success: false, errors: [{ path: '', message: 'Expected array' }] };
156
+ }
157
+ const data = [];
158
+ const errors = [];
159
+ for (let i = 0; i < value.length; i++) {
160
+ const result = itemSchema.validate(value[i]);
161
+ if (result.success)
162
+ data.push(result.data);
163
+ else
164
+ errors.push(...result.errors.map((e) => ({
165
+ path: `[${i}]${e.path ? '.' + e.path : ''}`,
166
+ message: e.message,
167
+ })));
168
+ }
169
+ if (errors.length > 0)
170
+ return { success: false, errors };
171
+ return { success: true, data };
172
+ },
173
+ };
174
+ },
175
+ };
@@ -0,0 +1,58 @@
1
+ import type { FlinkAuthConfig, FlinkJobConfig } from '../../data.types';
2
+ export interface FlinkClientConfig {
3
+ url: string;
4
+ auth?: FlinkAuthConfig;
5
+ timeout?: number;
6
+ retries?: number;
7
+ }
8
+ export interface FlinkJobInfo {
9
+ id: string;
10
+ status: string;
11
+ startTime?: number;
12
+ endTime?: number;
13
+ duration?: number;
14
+ }
15
+ export interface FlinkJobSubmitRequest {
16
+ jobName?: string;
17
+ parallelism?: number;
18
+ savepointPath?: string;
19
+ allowNonRestoredState?: boolean;
20
+ programArgs?: string;
21
+ entryClass?: string;
22
+ jarFile?: string;
23
+ }
24
+ /**
25
+ * Flink Client - REST API client for Apache Flink clusters
26
+ * Interacts with Flink JobManager REST API
27
+ */
28
+ export declare class FlinkClient {
29
+ readonly url: string;
30
+ private readonly config;
31
+ constructor(config: FlinkClientConfig);
32
+ private request;
33
+ listJobs(): Promise<FlinkJobInfo[]>;
34
+ getJobStatus(jobId: string): Promise<{
35
+ state: string;
36
+ startTime?: number;
37
+ duration?: number;
38
+ }>;
39
+ cancelJob(jobId: string): Promise<void>;
40
+ stopJob(jobId: string, savepointPath?: string): Promise<{
41
+ 'request-id': string;
42
+ }>;
43
+ createSavepoint(jobId: string, savepointPath?: string): Promise<{
44
+ 'request-id': string;
45
+ }>;
46
+ getClusterInfo(): Promise<{
47
+ taskmanagers?: number;
48
+ 'slots-total'?: number;
49
+ }>;
50
+ getTaskManagers(): Promise<unknown[]>;
51
+ /**
52
+ * Submit a job to Flink cluster.
53
+ * Note: Actual JAR submission requires Flink's /jars endpoint.
54
+ * This provides the interface; implementation depends on deployment setup.
55
+ */
56
+ submitJob(_jobConfig: FlinkJobConfig, _jobGraph?: unknown): Promise<string>;
57
+ }
58
+ //# sourceMappingURL=flink.client.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"flink.client.d.ts","sourceRoot":"","sources":["../../../src/streaming/flink/flink.client.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,cAAc,EAAE,MAAM,kBAAkB,CAAC;AAExE,MAAM,WAAW,iBAAiB;IAChC,GAAG,EAAE,MAAM,CAAC;IACZ,IAAI,CAAC,EAAE,eAAe,CAAC;IACvB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,MAAM,WAAW,YAAY;IAC3B,EAAE,EAAE,MAAM,CAAC;IACX,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,qBAAqB;IACpC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,qBAAqB,CAAC,EAAE,OAAO,CAAC;IAChC,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED;;;GAGG;AACH,qBAAa,WAAW;IACtB,SAAgB,GAAG,EAAE,MAAM,CAAC;IAC5B,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAoB;gBAE/B,MAAM,EAAE,iBAAiB;YASvB,OAAO;IA4Cf,QAAQ,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;IA+BnC,YAAY,CAChB,KAAK,EAAE,MAAM,GACZ,OAAO,CAAC;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC;IAY9D,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAIvC,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,aAAa,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;QAAE,YAAY,EAAE,MAAM,CAAA;KAAE,CAAC;IAKjF,eAAe,CAAC,KAAK,EAAE,MAAM,EAAE,aAAa,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;QAAE,YAAY,EAAE,MAAM,CAAA;KAAE,CAAC;IAKzF,cAAc,IAAI,OAAO,CAAC;QAAE,YAAY,CAAC,EAAE,MAAM,CAAC;QAAC,aAAa,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC;IAI5E,eAAe,IAAI,OAAO,CAAC,OAAO,EAAE,CAAC;IAK3C;;;;OAIG;IACG,SAAS,CAAC,UAAU,EAAE,cAAc,EAAE,SAAS,CAAC,EAAE,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;CASlF"}
@@ -0,0 +1,104 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.FlinkClient = void 0;
4
+ /**
5
+ * Flink Client - REST API client for Apache Flink clusters
6
+ * Interacts with Flink JobManager REST API
7
+ */
8
+ class FlinkClient {
9
+ constructor(config) {
10
+ this.config = {
11
+ timeout: 30000,
12
+ retries: 3,
13
+ ...config,
14
+ };
15
+ this.url = this.config.url.replace(/\/$/, '');
16
+ }
17
+ async request(method, path, body) {
18
+ const url = `${this.url}${path}`;
19
+ const headers = {
20
+ 'Content-Type': 'application/json',
21
+ };
22
+ if (this.config.auth?.type === 'basic' && this.config.auth.username) {
23
+ const credentials = Buffer.from(`${this.config.auth.username}:${this.config.auth.password || ''}`).toString('base64');
24
+ headers['Authorization'] = `Basic ${credentials}`;
25
+ }
26
+ else if (this.config.auth?.type === 'token' && this.config.auth.token) {
27
+ headers['Authorization'] = `Bearer ${this.config.auth.token}`;
28
+ }
29
+ const controller = new AbortController();
30
+ const timeoutId = setTimeout(() => controller.abort(), this.config.timeout);
31
+ try {
32
+ const response = await fetch(url, {
33
+ method,
34
+ headers,
35
+ body: body ? JSON.stringify(body) : undefined,
36
+ signal: controller.signal,
37
+ });
38
+ clearTimeout(timeoutId);
39
+ if (!response.ok) {
40
+ const text = await response.text();
41
+ throw new Error(`Flink API error ${response.status}: ${text}`);
42
+ }
43
+ const text = await response.text();
44
+ return text ? JSON.parse(text) : {};
45
+ }
46
+ catch (error) {
47
+ clearTimeout(timeoutId);
48
+ if (error instanceof Error) {
49
+ throw new Error(`Flink request failed: ${error.message}`);
50
+ }
51
+ throw error;
52
+ }
53
+ }
54
+ async listJobs() {
55
+ const result = await this.request('GET', '/jobs/overview');
56
+ const jobs = result.jobs ?? [];
57
+ return jobs.map((j) => ({
58
+ id: j.id,
59
+ status: j.status,
60
+ startTime: j['start-time'],
61
+ endTime: j['end-time'],
62
+ duration: j.duration,
63
+ }));
64
+ }
65
+ async getJobStatus(jobId) {
66
+ const result = await this.request('GET', `/jobs/${jobId}`);
67
+ return {
68
+ state: result.state,
69
+ startTime: result['start-time'],
70
+ duration: result.duration,
71
+ };
72
+ }
73
+ async cancelJob(jobId) {
74
+ await this.request('PATCH', `/jobs/${jobId}?mode=cancel`);
75
+ }
76
+ async stopJob(jobId, savepointPath) {
77
+ const body = savepointPath ? { targetDirectory: savepointPath } : {};
78
+ return this.request('PATCH', `/jobs/${jobId}?mode=stop`, body);
79
+ }
80
+ async createSavepoint(jobId, savepointPath) {
81
+ const body = savepointPath ? { targetDirectory: savepointPath } : {};
82
+ return this.request('POST', `/jobs/${jobId}/savepoints`, body);
83
+ }
84
+ async getClusterInfo() {
85
+ return this.request('GET', '/overview');
86
+ }
87
+ async getTaskManagers() {
88
+ const result = await this.request('GET', '/taskmanagers');
89
+ return result.taskmanagers ?? [];
90
+ }
91
+ /**
92
+ * Submit a job to Flink cluster.
93
+ * Note: Actual JAR submission requires Flink's /jars endpoint.
94
+ * This provides the interface; implementation depends on deployment setup.
95
+ */
96
+ async submitJob(_jobConfig, _jobGraph) {
97
+ // Flink REST API submits jobs via JAR upload and run
98
+ // For now return a placeholder - full implementation would:
99
+ // 1. Upload JAR to /jars
100
+ // 2. Create job from JAR via /jars/:jarid/run
101
+ throw new Error('submitJob: Full Flink deployment requires JAR submission. Use FlinkService.deployStream() for pipeline-to-job conversion.');
102
+ }
103
+ }
104
+ exports.FlinkClient = FlinkClient;
@@ -0,0 +1,28 @@
1
+ import { FlinkClient } from './flink.client';
2
+ import type { FlinkJobConfig } from '../../data.types';
3
+ export interface FlinkJobResult {
4
+ jobId: string;
5
+ status: string;
6
+ webUI?: string;
7
+ }
8
+ /**
9
+ * Flink Job - Job management operations
10
+ */
11
+ export declare class FlinkJob {
12
+ private readonly client;
13
+ constructor(client: FlinkClient);
14
+ getStatus(jobId: string): Promise<{
15
+ state: string;
16
+ startTime?: number;
17
+ duration?: number;
18
+ }>;
19
+ cancel(jobId: string): Promise<void>;
20
+ createSavepoint(jobId: string, savepointPath?: string): Promise<{
21
+ 'request-id': string;
22
+ }>;
23
+ stop(jobId: string, savepointPath?: string): Promise<{
24
+ 'request-id': string;
25
+ }>;
26
+ submit(config: FlinkJobConfig, jobGraph?: unknown): Promise<string>;
27
+ }
28
+ //# sourceMappingURL=flink.job.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"flink.job.d.ts","sourceRoot":"","sources":["../../../src/streaming/flink/flink.job.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,kBAAkB,CAAC;AAEvD,MAAM,WAAW,cAAc;IAC7B,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,qBAAa,QAAQ;IACP,OAAO,CAAC,QAAQ,CAAC,MAAM;gBAAN,MAAM,EAAE,WAAW;IAE1C,SAAS,CACb,KAAK,EAAE,MAAM,GACZ,OAAO,CAAC;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC;IAI9D,MAAM,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAIpC,eAAe,CAAC,KAAK,EAAE,MAAM,EAAE,aAAa,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;QAAE,YAAY,EAAE,MAAM,CAAA;KAAE,CAAC;IAIzF,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,aAAa,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC;QAAE,YAAY,EAAE,MAAM,CAAA;KAAE,CAAC;IAI9E,MAAM,CAAC,MAAM,EAAE,cAAc,EAAE,QAAQ,CAAC,EAAE,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;CAG1E"}
@@ -0,0 +1,27 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.FlinkJob = void 0;
4
+ /**
5
+ * Flink Job - Job management operations
6
+ */
7
+ class FlinkJob {
8
+ constructor(client) {
9
+ this.client = client;
10
+ }
11
+ async getStatus(jobId) {
12
+ return this.client.getJobStatus(jobId);
13
+ }
14
+ async cancel(jobId) {
15
+ return this.client.cancelJob(jobId);
16
+ }
17
+ async createSavepoint(jobId, savepointPath) {
18
+ return this.client.createSavepoint(jobId, savepointPath);
19
+ }
20
+ async stop(jobId, savepointPath) {
21
+ return this.client.stopJob(jobId, savepointPath);
22
+ }
23
+ async submit(config, jobGraph) {
24
+ return this.client.submitJob(config, jobGraph);
25
+ }
26
+ }
27
+ exports.FlinkJob = FlinkJob;
@@ -0,0 +1,35 @@
1
+ import type { PipelineStep } from '../../pipelines/etl.service';
2
+ export type FlinkOperatorType = 'map' | 'filter' | 'flatMap' | 'window' | 'keyBy';
3
+ export interface FlinkOperator {
4
+ type: FlinkOperatorType;
5
+ step: number;
6
+ name: string;
7
+ function?: (value: unknown) => unknown | Promise<unknown>;
8
+ predicate?: (value: unknown) => boolean;
9
+ windowType?: 'tumbling' | 'sliding' | 'session';
10
+ windowSize?: string;
11
+ aggregator?: (value: unknown) => unknown;
12
+ }
13
+ /**
14
+ * Maps HazelJS pipeline steps to Flink operators
15
+ * Note: Actual function/predicate execution happens in deployed Flink job
16
+ */
17
+ export declare function mapToFlinkOperator(step: PipelineStep): FlinkOperator;
18
+ export declare function createFlinkJobGraph(steps: PipelineStep[], sourceConfig: {
19
+ type: string;
20
+ topic?: string;
21
+ properties?: Record<string, string>;
22
+ }, sinkConfig: {
23
+ type: string;
24
+ topic?: string;
25
+ properties?: Record<string, string>;
26
+ }): {
27
+ source: typeof sourceConfig;
28
+ transformations: Array<{
29
+ step: number;
30
+ name: string;
31
+ operator: FlinkOperator;
32
+ }>;
33
+ sink: typeof sinkConfig;
34
+ };
35
+ //# sourceMappingURL=flink.operators.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"flink.operators.d.ts","sourceRoot":"","sources":["../../../src/streaming/flink/flink.operators.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAEhE,MAAM,MAAM,iBAAiB,GAAG,KAAK,GAAG,QAAQ,GAAG,SAAS,GAAG,QAAQ,GAAG,OAAO,CAAC;AAElF,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,iBAAiB,CAAC;IACxB,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,KAAK,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC;IAC1D,SAAS,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,KAAK,OAAO,CAAC;IACxC,UAAU,CAAC,EAAE,UAAU,GAAG,SAAS,GAAG,SAAS,CAAC;IAChD,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,UAAU,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,KAAK,OAAO,CAAC;CAC1C;AAED;;;GAGG;AACH,wBAAgB,kBAAkB,CAAC,IAAI,EAAE,YAAY,GAAG,aAAa,CAuBpE;AAED,wBAAgB,mBAAmB,CACjC,KAAK,EAAE,YAAY,EAAE,EACrB,YAAY,EAAE;IAAE,IAAI,EAAE,MAAM,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAAC,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CAAE,EACnF,UAAU,EAAE;IAAE,IAAI,EAAE,MAAM,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAAC,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CAAE,GAChF;IACD,MAAM,EAAE,OAAO,YAAY,CAAC;IAC5B,eAAe,EAAE,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,aAAa,CAAA;KAAE,CAAC,CAAC;IAChF,IAAI,EAAE,OAAO,UAAU,CAAC;CACzB,CAUA"}