@hazeljs/data 0.2.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +192 -0
- package/README.md +308 -0
- package/dist/connectors/connector.interface.d.ts +29 -0
- package/dist/connectors/connector.interface.d.ts.map +1 -0
- package/dist/connectors/connector.interface.js +6 -0
- package/dist/connectors/csv.connector.d.ts +63 -0
- package/dist/connectors/csv.connector.d.ts.map +1 -0
- package/dist/connectors/csv.connector.js +147 -0
- package/dist/connectors/http.connector.d.ts +68 -0
- package/dist/connectors/http.connector.d.ts.map +1 -0
- package/dist/connectors/http.connector.js +131 -0
- package/dist/connectors/index.d.ts +7 -0
- package/dist/connectors/index.d.ts.map +1 -0
- package/dist/connectors/index.js +12 -0
- package/dist/connectors/memory.connector.d.ts +38 -0
- package/dist/connectors/memory.connector.d.ts.map +1 -0
- package/dist/connectors/memory.connector.js +56 -0
- package/dist/connectors/memory.connector.test.d.ts +2 -0
- package/dist/connectors/memory.connector.test.d.ts.map +1 -0
- package/dist/connectors/memory.connector.test.js +43 -0
- package/dist/data.module.d.ts +30 -0
- package/dist/data.module.d.ts.map +1 -0
- package/dist/data.module.js +120 -0
- package/dist/data.module.test.d.ts +2 -0
- package/dist/data.module.test.d.ts.map +1 -0
- package/dist/data.module.test.js +28 -0
- package/dist/data.types.d.ts +67 -0
- package/dist/data.types.d.ts.map +1 -0
- package/dist/data.types.js +5 -0
- package/dist/decorators/index.d.ts +6 -0
- package/dist/decorators/index.d.ts.map +1 -0
- package/dist/decorators/index.js +24 -0
- package/dist/decorators/pii.decorator.d.ts +59 -0
- package/dist/decorators/pii.decorator.d.ts.map +1 -0
- package/dist/decorators/pii.decorator.js +197 -0
- package/dist/decorators/pii.decorator.test.d.ts +2 -0
- package/dist/decorators/pii.decorator.test.d.ts.map +1 -0
- package/dist/decorators/pii.decorator.test.js +150 -0
- package/dist/decorators/pipeline.decorator.d.ts +22 -0
- package/dist/decorators/pipeline.decorator.d.ts.map +1 -0
- package/dist/decorators/pipeline.decorator.js +42 -0
- package/dist/decorators/pipeline.decorator.test.d.ts +2 -0
- package/dist/decorators/pipeline.decorator.test.d.ts.map +1 -0
- package/dist/decorators/pipeline.decorator.test.js +104 -0
- package/dist/decorators/stream.decorator.d.ts +31 -0
- package/dist/decorators/stream.decorator.d.ts.map +1 -0
- package/dist/decorators/stream.decorator.js +48 -0
- package/dist/decorators/transform.decorator.d.ts +29 -0
- package/dist/decorators/transform.decorator.d.ts.map +1 -0
- package/dist/decorators/transform.decorator.js +41 -0
- package/dist/decorators/validate.decorator.d.ts +34 -0
- package/dist/decorators/validate.decorator.d.ts.map +1 -0
- package/dist/decorators/validate.decorator.js +49 -0
- package/dist/flink.service.d.ts +80 -0
- package/dist/flink.service.d.ts.map +1 -0
- package/dist/flink.service.js +134 -0
- package/dist/flink.service.test.d.ts +2 -0
- package/dist/flink.service.test.d.ts.map +1 -0
- package/dist/flink.service.test.js +60 -0
- package/dist/index.d.ts +32 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +96 -0
- package/dist/pipelines/etl.service.d.ts +59 -0
- package/dist/pipelines/etl.service.d.ts.map +1 -0
- package/dist/pipelines/etl.service.js +223 -0
- package/dist/pipelines/etl.service.test.d.ts +2 -0
- package/dist/pipelines/etl.service.test.d.ts.map +1 -0
- package/dist/pipelines/etl.service.test.js +319 -0
- package/dist/pipelines/pipeline.base.d.ts +24 -0
- package/dist/pipelines/pipeline.base.d.ts.map +1 -0
- package/dist/pipelines/pipeline.base.js +29 -0
- package/dist/pipelines/pipeline.base.test.d.ts +2 -0
- package/dist/pipelines/pipeline.base.test.d.ts.map +1 -0
- package/dist/pipelines/pipeline.base.test.js +38 -0
- package/dist/pipelines/pipeline.builder.d.ts +95 -0
- package/dist/pipelines/pipeline.builder.d.ts.map +1 -0
- package/dist/pipelines/pipeline.builder.js +212 -0
- package/dist/pipelines/pipeline.builder.test.d.ts +2 -0
- package/dist/pipelines/pipeline.builder.test.d.ts.map +1 -0
- package/dist/pipelines/pipeline.builder.test.js +185 -0
- package/dist/pipelines/stream.service.d.ts +12 -0
- package/dist/pipelines/stream.service.d.ts.map +1 -0
- package/dist/pipelines/stream.service.js +58 -0
- package/dist/pipelines/stream.service.test.d.ts +2 -0
- package/dist/pipelines/stream.service.test.d.ts.map +1 -0
- package/dist/pipelines/stream.service.test.js +103 -0
- package/dist/quality/quality.service.d.ts +87 -0
- package/dist/quality/quality.service.d.ts.map +1 -0
- package/dist/quality/quality.service.js +326 -0
- package/dist/quality/quality.service.test.d.ts +2 -0
- package/dist/quality/quality.service.test.d.ts.map +1 -0
- package/dist/quality/quality.service.test.js +128 -0
- package/dist/schema/schema.d.ts +127 -0
- package/dist/schema/schema.d.ts.map +1 -0
- package/dist/schema/schema.js +487 -0
- package/dist/schema/schema.test.d.ts +2 -0
- package/dist/schema/schema.test.d.ts.map +1 -0
- package/dist/schema/schema.test.js +411 -0
- package/dist/streaming/flink/flink.client.d.ts +96 -0
- package/dist/streaming/flink/flink.client.d.ts.map +1 -0
- package/dist/streaming/flink/flink.client.js +267 -0
- package/dist/streaming/flink/flink.client.test.d.ts +2 -0
- package/dist/streaming/flink/flink.client.test.d.ts.map +1 -0
- package/dist/streaming/flink/flink.client.test.js +59 -0
- package/dist/streaming/flink/flink.job.d.ts +29 -0
- package/dist/streaming/flink/flink.job.d.ts.map +1 -0
- package/dist/streaming/flink/flink.job.js +27 -0
- package/dist/streaming/flink/flink.job.test.d.ts +2 -0
- package/dist/streaming/flink/flink.job.test.d.ts.map +1 -0
- package/dist/streaming/flink/flink.job.test.js +37 -0
- package/dist/streaming/flink/flink.operators.d.ts +35 -0
- package/dist/streaming/flink/flink.operators.d.ts.map +1 -0
- package/dist/streaming/flink/flink.operators.js +43 -0
- package/dist/streaming/flink/flink.operators.test.d.ts +2 -0
- package/dist/streaming/flink/flink.operators.test.d.ts.map +1 -0
- package/dist/streaming/flink/flink.operators.test.js +38 -0
- package/dist/streaming/stream.builder.d.ts +22 -0
- package/dist/streaming/stream.builder.d.ts.map +1 -0
- package/dist/streaming/stream.builder.js +50 -0
- package/dist/streaming/stream.builder.test.d.ts +2 -0
- package/dist/streaming/stream.builder.test.d.ts.map +1 -0
- package/dist/streaming/stream.builder.test.js +59 -0
- package/dist/streaming/stream.processor.d.ts +66 -0
- package/dist/streaming/stream.processor.d.ts.map +1 -0
- package/dist/streaming/stream.processor.js +178 -0
- package/dist/streaming/stream.processor.test.d.ts +2 -0
- package/dist/streaming/stream.processor.test.d.ts.map +1 -0
- package/dist/streaming/stream.processor.test.js +151 -0
- package/dist/streaming/stream.processor.windowing.test.d.ts +2 -0
- package/dist/streaming/stream.processor.windowing.test.d.ts.map +1 -0
- package/dist/streaming/stream.processor.windowing.test.js +69 -0
- package/dist/telemetry/telemetry.d.ts +124 -0
- package/dist/telemetry/telemetry.d.ts.map +1 -0
- package/dist/telemetry/telemetry.js +259 -0
- package/dist/telemetry/telemetry.test.d.ts +2 -0
- package/dist/telemetry/telemetry.test.d.ts.map +1 -0
- package/dist/telemetry/telemetry.test.js +51 -0
- package/dist/testing/index.d.ts +12 -0
- package/dist/testing/index.d.ts.map +1 -0
- package/dist/testing/index.js +18 -0
- package/dist/testing/pipeline-test-harness.d.ts +40 -0
- package/dist/testing/pipeline-test-harness.d.ts.map +1 -0
- package/dist/testing/pipeline-test-harness.js +55 -0
- package/dist/testing/pipeline-test-harness.test.d.ts +2 -0
- package/dist/testing/pipeline-test-harness.test.d.ts.map +1 -0
- package/dist/testing/pipeline-test-harness.test.js +102 -0
- package/dist/testing/schema-faker.d.ts +32 -0
- package/dist/testing/schema-faker.d.ts.map +1 -0
- package/dist/testing/schema-faker.js +91 -0
- package/dist/testing/schema-faker.test.d.ts +2 -0
- package/dist/testing/schema-faker.test.d.ts.map +1 -0
- package/dist/testing/schema-faker.test.js +66 -0
- package/dist/transformers/built-in.transformers.d.ts +12 -0
- package/dist/transformers/built-in.transformers.d.ts.map +1 -0
- package/dist/transformers/built-in.transformers.js +75 -0
- package/dist/transformers/built-in.transformers.test.d.ts +2 -0
- package/dist/transformers/built-in.transformers.test.d.ts.map +1 -0
- package/dist/transformers/built-in.transformers.test.js +85 -0
- package/dist/transformers/transformer.service.d.ts +14 -0
- package/dist/transformers/transformer.service.d.ts.map +1 -0
- package/dist/transformers/transformer.service.js +65 -0
- package/dist/transformers/transformer.service.test.d.ts +2 -0
- package/dist/transformers/transformer.service.test.d.ts.map +1 -0
- package/dist/transformers/transformer.service.test.js +42 -0
- package/dist/validators/schema.validator.d.ts +21 -0
- package/dist/validators/schema.validator.d.ts.map +1 -0
- package/dist/validators/schema.validator.js +40 -0
- package/dist/validators/schema.validator.test.d.ts +2 -0
- package/dist/validators/schema.validator.test.d.ts.map +1 -0
- package/dist/validators/schema.validator.test.js +42 -0
- package/package.json +53 -0
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.CsvSink = exports.CsvSource = void 0;
|
|
4
|
+
const fs_1 = require("fs");
|
|
5
|
+
const readline_1 = require("readline");
|
|
6
|
+
/**
|
|
7
|
+
* CSV file data source — reads records from a CSV file.
|
|
8
|
+
*
|
|
9
|
+
* @example
|
|
10
|
+
* const source = new CsvSource({ filePath: './data.csv', hasHeader: true });
|
|
11
|
+
* await source.open();
|
|
12
|
+
* const records = await source.readAll();
|
|
13
|
+
* await source.close();
|
|
14
|
+
*/
|
|
15
|
+
class CsvSource {
    /**
     * @param {object} options
     * @param {string} options.filePath - Path to the CSV file to read.
     * @param {string} [options.delimiter=','] - Column delimiter (single character).
     * @param {boolean} [options.hasHeader=true] - Whether the file's first line is a header row.
     * @param {string[]} [options.headers] - Explicit column names; when provided they
     *   take precedence over names read from the file's header row.
     * @param {string} [options.name] - Connector name (defaults to `csv:<filePath>`).
     */
    constructor(options) {
        this.name = options.name ?? `csv:${options.filePath}`;
        this.filePath = options.filePath;
        this.delimiter = options.delimiter ?? ',';
        this.hasHeader = options.hasHeader ?? true;
        this.customHeaders = options.headers;
    }
    /**
     * Verifies the file exists before reading.
     * @throws {Error} when the file is missing.
     */
    async open() {
        if (!(0, fs_1.existsSync)(this.filePath)) {
            throw new Error(`CSV file not found: ${this.filePath}`);
        }
    }
    async close() {
        // No-op for file reads
    }
    /** Drains read() into an array. @returns {Promise<Record<string,string>[]>} */
    async readAll() {
        const records = [];
        for await (const record of this.read()) {
            records.push(record);
        }
        return records;
    }
    /**
     * Streams records one line at a time.
     *
     * Fixes over the previous version:
     * - When both `hasHeader` and custom `headers` are set, the file's header
     *   row is now skipped; previously it was yielded as a data record.
     * - The readline interface is closed in a `finally` block, so the
     *   underlying file handle is released even if iteration stops early.
     */
    async *read() {
        const rl = (0, readline_1.createInterface)({
            input: (0, fs_1.createReadStream)(this.filePath),
            crlfDelay: Infinity,
        });
        try {
            let headers = this.customHeaders ?? null;
            let isFirst = true;
            for await (const line of rl) {
                const first = isFirst;
                isFirst = false;
                if (first && this.hasHeader) {
                    // Header row: use it for column names unless custom headers
                    // override it — but never emit it as data.
                    if (!this.customHeaders) {
                        headers = this.parseLine(line);
                    }
                    continue;
                }
                const cols = this.parseLine(line);
                if (!headers) {
                    // Headerless file without custom names: synthesize col0, col1, ...
                    headers = cols.map((_, i) => `col${i}`);
                }
                const record = {};
                headers.forEach((h, i) => {
                    record[h] = cols[i] ?? '';
                });
                yield record;
            }
        }
        finally {
            rl.close();
        }
    }
    /**
     * Minimal RFC 4180 line parser: handles quoted fields containing the
     * delimiter and doubled quotes (`""`) as escaped quotes.
     * @param {string} line
     * @returns {string[]} column values
     */
    parseLine(line) {
        const result = [];
        let current = '';
        let inQuote = false;
        for (let i = 0; i < line.length; i++) {
            const ch = line[i];
            if (ch === '"') {
                if (inQuote && line[i + 1] === '"') {
                    current += '"'; // escaped quote inside quoted field
                    i++;
                }
                else {
                    inQuote = !inQuote;
                }
            }
            else if (ch === this.delimiter && !inQuote) {
                result.push(current);
                current = '';
            }
            else {
                current += ch;
            }
        }
        result.push(current);
        return result;
    }
}
|
|
90
|
+
exports.CsvSource = CsvSource;
|
|
91
|
+
/**
|
|
92
|
+
* CSV file data sink — writes records to a CSV file.
|
|
93
|
+
*
|
|
94
|
+
* @example
|
|
95
|
+
* const sink = new CsvSink({ filePath: './output.csv', writeHeader: true });
|
|
96
|
+
* await sink.open();
|
|
97
|
+
* await sink.writeBatch(records);
|
|
98
|
+
* await sink.close();
|
|
99
|
+
*/
|
|
100
|
+
class CsvSink {
    /**
     * @param {object} options
     * @param {string} options.filePath - Destination file path (truncated on open).
     * @param {string} [options.delimiter=','] - Column delimiter.
     * @param {boolean} [options.writeHeader=true] - Emit a header row before the first record.
     * @param {string} [options.name] - Connector name (defaults to `csv:<filePath>`).
     */
    constructor(options) {
        this.headers = null;
        this.stream = null;
        this.headerWritten = false;
        this.name = options.name ?? `csv:${options.filePath}`;
        this.filePath = options.filePath;
        this.delimiter = options.delimiter ?? ',';
        this.writeHeader = options.writeHeader ?? true;
    }
    /** Opens (and truncates) the destination file for writing. */
    async open() {
        this.stream = (0, fs_1.createWriteStream)(this.filePath, { encoding: 'utf8' });
    }
    /** Flushes and closes the underlying write stream. */
    async close() {
        return new Promise((resolve, reject) => {
            if (!this.stream) {
                resolve();
                return;
            }
            this.stream.end((err) => (err ? reject(err) : resolve()));
        });
    }
    /**
     * Writes one record. Column order is fixed by the first record's keys;
     * keys missing from later records are written as empty strings.
     * @throws {Error} when called before open().
     */
    async write(record) {
        if (!this.stream)
            throw new Error('CsvSink: call open() before write()');
        if (!this.headers) {
            this.headers = Object.keys(record);
        }
        if (this.writeHeader && !this.headerWritten) {
            this.writeLine(this.headers);
            this.headerWritten = true;
        }
        this.writeLine(this.headers.map((h) => String(record[h] ?? '')));
    }
    /** Writes each record in order. */
    async writeBatch(records) {
        for (const r of records)
            await this.write(r);
    }
    /**
     * Serializes one row, quoting fields per RFC 4180.
     * Fix: a field containing a bare carriage return is now quoted too —
     * previously only the delimiter, double quote, and LF triggered quoting,
     * so a CR could break the row structure of the output file.
     */
    writeLine(cols) {
        const needsQuoting = (c) => c.includes(this.delimiter) ||
            c.includes('"') ||
            c.includes('\n') ||
            c.includes('\r');
        const line = cols
            .map((c) => (needsQuoting(c) ? `"${c.replace(/"/g, '""')}"` : c))
            .join(this.delimiter) + '\n';
        this.stream.write(line);
    }
}
|
|
147
|
+
exports.CsvSink = CsvSink;
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import type { DataSource, DataSink } from './connector.interface';
|
|
2
|
+
export interface HttpSourceOptions {
    /** Endpoint to read from. */
    url: string;
    /** HTTP method for each page request (default: GET). */
    method?: 'GET' | 'POST';
    /** Extra request headers, merged over the default `Content-Type: application/json`. */
    headers?: Record<string, string>;
    /** Request body; JSON-serialized when present. */
    body?: unknown;
    /** JSON path to the array in the response (e.g. "data.items") */
    dataPath?: string;
    /** Pagination: next page URL extracted from response (e.g. "meta.next") */
    nextPagePath?: string;
    /** Connector name (default: `http:<url>`). */
    name?: string;
    /** Abort each request after this many milliseconds. */
    timeoutMs?: number;
}
|
|
14
|
+
export interface HttpSinkOptions {
    /** Endpoint to write to. */
    url: string;
    /** HTTP method used for each flush (default: POST). */
    method?: 'POST' | 'PUT' | 'PATCH';
    /** Extra request headers, merged over the default `Content-Type: application/json`. */
    headers?: Record<string, string>;
    /** Batch size — how many records to send per request (default: 1) */
    batchSize?: number;
    /** JSON field name to wrap records in (e.g. "records" → { records: [...] }) */
    bodyKey?: string;
    /** Connector name (default: `http:<url>`). */
    name?: string;
    /** Abort each request after this many milliseconds. */
    timeoutMs?: number;
}
|
|
25
|
+
/**
|
|
26
|
+
* HTTP API data source — reads records from a REST API.
|
|
27
|
+
* Supports pagination via `nextPagePath`.
|
|
28
|
+
*
|
|
29
|
+
* @example
|
|
30
|
+
* const source = new HttpSource({
|
|
31
|
+
* url: 'https://api.example.com/users',
|
|
32
|
+
* dataPath: 'data',
|
|
33
|
+
* nextPagePath: 'meta.next',
|
|
34
|
+
* });
|
|
35
|
+
*/
|
|
36
|
+
export declare class HttpSource implements DataSource<unknown> {
    /** Connector name (default: `http:<url>`). */
    readonly name: string;
    private readonly options;
    constructor(options: HttpSourceOptions);
    /** Validates connectivity with a single request; rejects on a non-OK status. */
    open(): Promise<void>;
    close(): Promise<void>;
    /** Drains read() into an array. */
    readAll(): Promise<unknown[]>;
    /** Streams records page by page, following `nextPagePath` when configured. */
    read(): AsyncGenerator<unknown>;
    private fetchPage;
}
|
|
46
|
+
/**
|
|
47
|
+
* HTTP API data sink — writes records to a REST API endpoint.
|
|
48
|
+
*
|
|
49
|
+
* @example
|
|
50
|
+
* const sink = new HttpSink({
|
|
51
|
+
* url: 'https://api.example.com/ingest',
|
|
52
|
+
* method: 'POST',
|
|
53
|
+
* batchSize: 100,
|
|
54
|
+
* bodyKey: 'records',
|
|
55
|
+
* });
|
|
56
|
+
*/
|
|
57
|
+
export declare class HttpSink implements DataSink<unknown> {
    /** Connector name (default: `http:<url>`). */
    readonly name: string;
    private readonly options;
    private buffer;
    constructor(options: HttpSinkOptions);
    open(): Promise<void>;
    /** Flushes any remaining buffered records before closing. */
    close(): Promise<void>;
    /** Buffers a record; flushes once `batchSize` records are buffered. */
    write(record: unknown): Promise<void>;
    /** Buffers the records and flushes immediately. */
    writeBatch(records: unknown[]): Promise<void>;
    private flush;
}
|
|
68
|
+
//# sourceMappingURL=http.connector.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"http.connector.d.ts","sourceRoot":"","sources":["../../src/connectors/http.connector.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,uBAAuB,CAAC;AAElE,MAAM,WAAW,iBAAiB;IAChC,GAAG,EAAE,MAAM,CAAC;IACZ,MAAM,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;IACxB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,IAAI,CAAC,EAAE,OAAO,CAAC;IACf,iEAAiE;IACjE,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,2EAA2E;IAC3E,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,eAAe;IAC9B,GAAG,EAAE,MAAM,CAAC;IACZ,MAAM,CAAC,EAAE,MAAM,GAAG,KAAK,GAAG,OAAO,CAAC;IAClC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,qEAAqE;IACrE,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,+EAA+E;IAC/E,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AASD;;;;;;;;;;GAUG;AACH,qBAAa,UAAW,YAAW,UAAU,CAAC,OAAO,CAAC;IACpD,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IACtB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAoB;gBAEhC,OAAO,EAAE,iBAAiB;IAKhC,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAMrB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAEtB,OAAO,IAAI,OAAO,CAAC,OAAO,EAAE,CAAC;IAM5B,IAAI,IAAI,cAAc,CAAC,OAAO,CAAC;IAwBtC,OAAO,CAAC,SAAS;CAYlB;AAED;;;;;;;;;;GAUG;AACH,qBAAa,QAAS,YAAW,QAAQ,CAAC,OAAO,CAAC;IAChD,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IACtB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAkB;IAC1C,OAAO,CAAC,MAAM,CAAiB;gBAEnB,OAAO,EAAE,eAAe;IAK9B,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAErB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAItB,KAAK,CAAC,MAAM,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAOrC,UAAU,CAAC,OAAO,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;YAKrC,KAAK;CA2BpB"}
|
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.HttpSink = exports.HttpSource = void 0;
|
|
4
|
+
/**
 * Resolves a dot-separated path (e.g. "meta.next") against a nested object.
 * Returns undefined as soon as an intermediate value is null, undefined,
 * or not an object.
 */
function getNestedValue(obj, path) {
    let current = obj;
    for (const segment of path.split('.')) {
        if (current === null || current === undefined || typeof current !== 'object') {
            return undefined;
        }
        current = current[segment];
    }
    return current;
}
|
|
11
|
+
/**
|
|
12
|
+
* HTTP API data source — reads records from a REST API.
|
|
13
|
+
* Supports pagination via `nextPagePath`.
|
|
14
|
+
*
|
|
15
|
+
* @example
|
|
16
|
+
* const source = new HttpSource({
|
|
17
|
+
* url: 'https://api.example.com/users',
|
|
18
|
+
* dataPath: 'data',
|
|
19
|
+
* nextPagePath: 'meta.next',
|
|
20
|
+
* });
|
|
21
|
+
*/
|
|
22
|
+
class HttpSource {
    /**
     * @param {HttpSourceOptions} options - See HttpSourceOptions; `url` is required.
     */
    constructor(options) {
        this.name = options.name ?? `http:${options.url}`;
        this.options = options;
    }
    /**
     * Validates connectivity by issuing one request to the configured URL.
     * @throws {Error} when the endpoint returns a non-OK status.
     */
    async open() {
        const res = await this.fetchPage(this.options.url);
        if (!res.ok)
            throw new Error(`HttpSource: Cannot reach ${this.options.url} (${res.status})`);
    }
    async close() { }
    /** Drains read() into an array. */
    async readAll() {
        const all = [];
        for await (const r of this.read())
            all.push(r);
        return all;
    }
    /**
     * Streams records page by page. When `dataPath` resolves to an array its
     * items are yielded individually and pagination continues via
     * `nextPagePath`; otherwise the whole body is yielded once and iteration
     * stops.
     * @throws {Error} on a non-OK response.
     */
    async *read() {
        let url = this.options.url;
        while (url) {
            const res = await this.fetchPage(url);
            if (!res.ok)
                throw new Error(`HttpSource: ${res.status} ${await res.text()}`);
            const body = await res.json();
            const items = this.options.dataPath ? getNestedValue(body, this.options.dataPath) : body;
            if (Array.isArray(items)) {
                for (const item of items)
                    yield item;
            }
            else {
                yield body;
                break;
            }
            url = this.options.nextPagePath
                ? (getNestedValue(body, this.options.nextPagePath) ?? null)
                : null;
        }
    }
    /**
     * Issues one HTTP request with an optional abort timeout.
     * Fix: the abort timer is now cleared when the request settles. Previously
     * it was never cancelled, so every fetch left a live timer that kept the
     * Node event loop alive for `timeoutMs` after the request completed.
     * @returns {Promise<Response>}
     */
    fetchPage(url) {
        const controller = new AbortController();
        let timer = null;
        if (this.options.timeoutMs) {
            timer = setTimeout(() => controller.abort(), this.options.timeoutMs);
        }
        const request = fetch(url, {
            method: this.options.method ?? 'GET',
            headers: { 'Content-Type': 'application/json', ...this.options.headers },
            body: this.options.body ? JSON.stringify(this.options.body) : undefined,
            signal: controller.signal,
        });
        return timer === null ? request : request.finally(() => clearTimeout(timer));
    }
}
|
|
74
|
+
exports.HttpSource = HttpSource;
|
|
75
|
+
/**
|
|
76
|
+
* HTTP API data sink — writes records to a REST API endpoint.
|
|
77
|
+
*
|
|
78
|
+
* @example
|
|
79
|
+
* const sink = new HttpSink({
|
|
80
|
+
* url: 'https://api.example.com/ingest',
|
|
81
|
+
* method: 'POST',
|
|
82
|
+
* batchSize: 100,
|
|
83
|
+
* bodyKey: 'records',
|
|
84
|
+
* });
|
|
85
|
+
*/
|
|
86
|
+
class HttpSink {
    /**
     * @param {HttpSinkOptions} options - See HttpSinkOptions; `url` is required.
     */
    constructor(options) {
        this.buffer = [];
        this.name = options.name ?? `http:${options.url}`;
        this.options = { batchSize: 1, ...options };
    }
    async open() { }
    /** Flushes any buffered records before closing. */
    async close() {
        if (this.buffer.length > 0)
            await this.flush();
    }
    /** Buffers one record; flushes once `batchSize` records are buffered. */
    async write(record) {
        this.buffer.push(record);
        if (this.buffer.length >= (this.options.batchSize ?? 1)) {
            await this.flush();
        }
    }
    /** Buffers the records and flushes immediately. */
    async writeBatch(records) {
        this.buffer.push(...records);
        await this.flush();
    }
    /**
     * Sends the buffered records in a single request. The buffer is cleared
     * only on success, so a failed flush can be retried.
     * Fix: the abort timer is cleared in a `finally` block once the request
     * settles; previously it leaked and kept the event loop alive for
     * `timeoutMs` after every flush.
     * @throws {Error} on a non-OK response.
     */
    async flush() {
        if (this.buffer.length === 0)
            return;
        const payload = this.options.bodyKey
            ? { [this.options.bodyKey]: this.buffer }
            : this.buffer.length === 1
                ? this.buffer[0]
                : this.buffer;
        const controller = new AbortController();
        let timer = null;
        if (this.options.timeoutMs) {
            timer = setTimeout(() => controller.abort(), this.options.timeoutMs);
        }
        try {
            const res = await fetch(this.options.url, {
                method: this.options.method ?? 'POST',
                headers: { 'Content-Type': 'application/json', ...this.options.headers },
                body: JSON.stringify(payload),
                signal: controller.signal,
            });
            if (!res.ok) {
                throw new Error(`HttpSink: ${res.status} ${await res.text()}`);
            }
        }
        finally {
            if (timer !== null)
                clearTimeout(timer);
        }
        this.buffer = [];
    }
}
|
|
131
|
+
exports.HttpSink = HttpSink;
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
/** Barrel exports for the built-in connectors: CSV file, HTTP API, and in-memory. */
export type { DataSource, DataSink, ConnectorOptions } from './connector.interface';
export { CsvSource, CsvSink } from './csv.connector';
export type { CsvSourceOptions, CsvSinkOptions } from './csv.connector';
export { HttpSource, HttpSink } from './http.connector';
export type { HttpSourceOptions, HttpSinkOptions } from './http.connector';
export { MemorySource, MemorySink } from './memory.connector';
|
|
7
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/connectors/index.ts"],"names":[],"mappings":"AAAA,YAAY,EAAE,UAAU,EAAE,QAAQ,EAAE,gBAAgB,EAAE,MAAM,uBAAuB,CAAC;AACpF,OAAO,EAAE,SAAS,EAAE,OAAO,EAAE,MAAM,iBAAiB,CAAC;AACrD,YAAY,EAAE,gBAAgB,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAC;AACxE,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AACxD,YAAY,EAAE,iBAAiB,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAC3E,OAAO,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
"use strict";
// Compiled barrel module (TypeScript emit): re-exports the built-in connectors
// (CSV, HTTP, in-memory) via live getter bindings so the underlying modules
// are only dereferenced on access.
Object.defineProperty(exports, "__esModule", { value: true });
exports.MemorySink = exports.MemorySource = exports.HttpSink = exports.HttpSource = exports.CsvSink = exports.CsvSource = void 0;
var csv_connector_1 = require("./csv.connector");
Object.defineProperty(exports, "CsvSource", { enumerable: true, get: function () { return csv_connector_1.CsvSource; } });
Object.defineProperty(exports, "CsvSink", { enumerable: true, get: function () { return csv_connector_1.CsvSink; } });
var http_connector_1 = require("./http.connector");
Object.defineProperty(exports, "HttpSource", { enumerable: true, get: function () { return http_connector_1.HttpSource; } });
Object.defineProperty(exports, "HttpSink", { enumerable: true, get: function () { return http_connector_1.HttpSink; } });
var memory_connector_1 = require("./memory.connector");
Object.defineProperty(exports, "MemorySource", { enumerable: true, get: function () { return memory_connector_1.MemorySource; } });
Object.defineProperty(exports, "MemorySink", { enumerable: true, get: function () { return memory_connector_1.MemorySink; } });
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import type { DataSource, DataSink } from './connector.interface';
|
|
2
|
+
/**
|
|
3
|
+
* In-memory data source — wraps an array for use in pipelines and tests.
|
|
4
|
+
*
|
|
5
|
+
* @example
|
|
6
|
+
* const source = new MemorySource([{ id: 1 }, { id: 2 }]);
|
|
7
|
+
* const records = await source.readAll();
|
|
8
|
+
*/
|
|
9
|
+
export declare class MemorySource<T = unknown> implements DataSource<T> {
    /** Connector name (default: "memory:source"). */
    readonly name: string;
    private readonly records;
    constructor(records: T[], name?: string);
    open(): Promise<void>;
    close(): Promise<void>;
    /** Returns a copy of all wrapped records. */
    readAll(): Promise<T[]>;
    /** Yields records one at a time in insertion order. */
    read(): AsyncGenerator<T>;
}
|
|
18
|
+
/**
|
|
19
|
+
* In-memory data sink — captures written records for inspection.
|
|
20
|
+
* Useful in tests and pipeline prototyping.
|
|
21
|
+
*
|
|
22
|
+
* @example
|
|
23
|
+
* const sink = new MemorySink<User>();
|
|
24
|
+
* await pipeline.run(source, sink);
|
|
25
|
+
* console.log(sink.records);
|
|
26
|
+
*/
|
|
27
|
+
export declare class MemorySink<T = unknown> implements DataSink<T> {
    /** Connector name (default: "memory:sink"). */
    readonly name: string;
    private _records;
    constructor(name?: string);
    open(): Promise<void>;
    close(): Promise<void>;
    /** Captures a single record. */
    write(record: T): Promise<void>;
    /** Captures every record in the batch, preserving order. */
    writeBatch(records: T[]): Promise<void>;
    /** All records written so far. */
    get records(): readonly T[];
    /** Discards all captured records. */
    clear(): void;
}
|
|
38
|
+
//# sourceMappingURL=memory.connector.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"memory.connector.d.ts","sourceRoot":"","sources":["../../src/connectors/memory.connector.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,uBAAuB,CAAC;AAElE;;;;;;GAMG;AACH,qBAAa,YAAY,CAAC,CAAC,GAAG,OAAO,CAAE,YAAW,UAAU,CAAC,CAAC,CAAC;IAC7D,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IACtB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAM;gBAElB,OAAO,EAAE,CAAC,EAAE,EAAE,IAAI,SAAkB;IAK1C,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IACrB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAEtB,OAAO,IAAI,OAAO,CAAC,CAAC,EAAE,CAAC;IAItB,IAAI,IAAI,cAAc,CAAC,CAAC,CAAC;CAGjC;AAED;;;;;;;;GAQG;AACH,qBAAa,UAAU,CAAC,CAAC,GAAG,OAAO,CAAE,YAAW,QAAQ,CAAC,CAAC,CAAC;IACzD,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC;IACtB,OAAO,CAAC,QAAQ,CAAW;gBAEf,IAAI,SAAgB;IAI1B,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IACrB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAEtB,KAAK,CAAC,MAAM,EAAE,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAI/B,UAAU,CAAC,OAAO,EAAE,CAAC,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAI7C,IAAI,OAAO,IAAI,SAAS,CAAC,EAAE,CAE1B;IAED,KAAK,IAAI,IAAI;CAGd"}
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.MemorySink = exports.MemorySource = void 0;
|
|
4
|
+
/**
|
|
5
|
+
* In-memory data source — wraps an array for use in pipelines and tests.
|
|
6
|
+
*
|
|
7
|
+
* @example
|
|
8
|
+
* const source = new MemorySource([{ id: 1 }, { id: 2 }]);
|
|
9
|
+
* const records = await source.readAll();
|
|
10
|
+
*/
|
|
11
|
+
/**
 * In-memory data source — wraps an array for use in pipelines and tests.
 * The input is copied defensively, so later mutation of the caller's array
 * does not change what the source yields.
 */
class MemorySource {
    constructor(records, name = 'memory:source') {
        this.name = name;
        this.records = Array.from(records);
    }
    async open() { }
    async close() { }
    /** Returns a fresh copy of all records. */
    async readAll() {
        return this.records.slice();
    }
    /** Yields records one by one in insertion order. */
    async *read() {
        yield* this.records;
    }
}
|
|
26
|
+
exports.MemorySource = MemorySource;
|
|
27
|
+
/**
|
|
28
|
+
* In-memory data sink — captures written records for inspection.
|
|
29
|
+
* Useful in tests and pipeline prototyping.
|
|
30
|
+
*
|
|
31
|
+
* @example
|
|
32
|
+
* const sink = new MemorySink<User>();
|
|
33
|
+
* await pipeline.run(source, sink);
|
|
34
|
+
* console.log(sink.records);
|
|
35
|
+
*/
|
|
36
|
+
/**
 * In-memory data sink — captures written records for inspection via the
 * `records` getter. Useful in tests and pipeline prototyping.
 */
class MemorySink {
    constructor(name = 'memory:sink') {
        this._records = [];
        this.name = name;
    }
    async open() { }
    async close() { }
    /** Appends a single record. */
    async write(record) {
        this._records.push(record);
    }
    /** Appends every record in the batch, preserving order. */
    async writeBatch(records) {
        for (const item of records) {
            this._records.push(item);
        }
    }
    /** All records written so far (live view — treat as read-only). */
    get records() {
        return this._records;
    }
    /** Discards everything written so far. */
    clear() {
        this._records = [];
    }
}
|
|
56
|
+
exports.MemorySink = MemorySink;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"memory.connector.test.d.ts","sourceRoot":"","sources":["../../src/connectors/memory.connector.test.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const memory_connector_1 = require("./memory.connector");
|
|
4
|
+
// Jest unit tests: MemorySource yields exactly the records it wraps,
// via both readAll() and the async-generator read().
describe('MemorySource', () => {
    it('readAll returns all records', async () => {
        const source = new memory_connector_1.MemorySource([{ id: 1 }, { id: 2 }]);
        await source.open();
        const records = await source.readAll();
        expect(records).toHaveLength(2);
        expect(records[0]).toEqual({ id: 1 });
        await source.close();
    });
    it('read yields records', async () => {
        const source = new memory_connector_1.MemorySource([1, 2, 3]);
        const out = [];
        for await (const r of source.read())
            out.push(r);
        expect(out).toEqual([1, 2, 3]);
    });
});
|
|
21
|
+
// Jest unit tests: MemorySink captures single writes and batches in order,
// and clear() empties the captured records.
describe('MemorySink', () => {
    it('captures written records', async () => {
        const sink = new memory_connector_1.MemorySink();
        await sink.open();
        await sink.write({ x: 1 });
        await sink.write({ x: 2 });
        await sink.close();
        expect(sink.records).toHaveLength(2);
        expect(sink.records[0]).toEqual({ x: 1 });
    });
    it('writeBatch captures batch', async () => {
        const sink = new memory_connector_1.MemorySink();
        await sink.open();
        await sink.writeBatch([1, 2, 3]);
        expect(sink.records).toEqual([1, 2, 3]);
    });
    it('clear resets records', async () => {
        const sink = new memory_connector_1.MemorySink();
        await sink.write(1);
        sink.clear();
        expect(sink.records).toHaveLength(0);
    });
});
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import type { FlinkClientConfig } from './streaming/flink/flink.client';
|
|
2
|
+
/** DI token under which the optional Flink client configuration is provided. */
export declare const DATA_FLINK_CONFIG: unique symbol;
/** Options accepted by {@link DataModule.forRoot}. */
export interface DataModuleOptions {
    /** Optional Flink REST client configuration; omit to run without Flink. */
    flink?: FlinkClientConfig;
}
|
|
6
|
+
/**
 * Module bundling the data-pipeline services (schema validation, ETL,
 * streaming, transformation, quality checks, Flink integration).
 */
export declare class DataModule {
    /** Options most recently passed to {@link forRoot}; read via {@link getOptions}. */
    private static options;
    /**
     * Configure DataModule with optional Flink connection
     *
     * @example
     * ```typescript
     * imports: [
     *   DataModule.forRoot({
     *     flink: {
     *       url: process.env.FLINK_REST_URL || 'http://localhost:8081',
     *       timeout: 30000,
     *     },
     *   }),
     * ]
     * ```
     */
    static forRoot(options?: DataModuleOptions): {
        module: typeof DataModule;
        providers: unknown[];
        exports: unknown[];
    };
    /** Returns the options captured by the last {@link forRoot} call (empty object by default). */
    static getOptions(): DataModuleOptions;
}
|
|
30
|
+
//# sourceMappingURL=data.module.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"data.module.d.ts","sourceRoot":"","sources":["../src/data.module.ts"],"names":[],"mappings":"AAUA,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,gCAAgC,CAAC;AAExE,eAAO,MAAM,iBAAiB,eAAoC,CAAC;AAEnE,MAAM,WAAW,iBAAiB;IAChC,KAAK,CAAC,EAAE,iBAAiB,CAAC;CAC3B;AAcD,qBAwBa,UAAU;IACrB,OAAO,CAAC,MAAM,CAAC,OAAO,CAAyB;IAE/C;;;;;;;;;;;;;;OAcG;IACH,MAAM,CAAC,OAAO,CAAC,OAAO,GAAE,iBAAsB,GAAG;QAC/C,MAAM,EAAE,OAAO,UAAU,CAAC;QAC1B,SAAS,EAAE,OAAO,EAAE,CAAC;QACrB,OAAO,EAAE,OAAO,EAAE,CAAC;KACpB;IAkCD,MAAM,CAAC,UAAU,IAAI,iBAAiB;CAGvC"}
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// tsc-emitted decorator helper. With fewer than 3 args it decorates a
// class (`target` is the constructor); otherwise it decorates a member,
// resolving the property descriptor when `desc` is null. Prefers the
// Reflect.decorate API when a metadata polyfill is loaded; otherwise
// applies decorators in reverse declaration order, letting each one's
// return value replace the running result.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
|
|
8
|
+
// tsc-emitted metadata helper: forwards to Reflect.metadata when a
// reflect-metadata polyfill is present, and is a no-op otherwise.
var __metadata = (this && this.__metadata) || function (k, v) {
    var reflectAvailable = typeof Reflect === "object" && typeof Reflect.metadata === "function";
    if (!reflectAvailable) {
        return undefined;
    }
    return Reflect.metadata(k, v);
};
|
|
11
|
+
// tsc-emitted parameter-decorator helper: binds `decorator` to a fixed
// argument position so it can run as part of the member decorator chain.
var __param = (this && this.__param) || function (paramIndex, decorator) {
    var applyAtIndex = function (target, key) {
        decorator(target, key, paramIndex);
    };
    return applyAtIndex;
};
|
|
14
|
+
var DataModule_1;
|
|
15
|
+
Object.defineProperty(exports, "__esModule", { value: true });
exports.DataModule = exports.DATA_FLINK_CONFIG = void 0;
// Service implementations registered as providers by the module below.
const core_1 = require("@hazeljs/core");
const schema_validator_1 = require("./validators/schema.validator");
const etl_service_1 = require("./pipelines/etl.service");
const pipeline_builder_1 = require("./pipelines/pipeline.builder");
const stream_service_1 = require("./pipelines/stream.service");
const stream_builder_1 = require("./streaming/stream.builder");
const stream_processor_1 = require("./streaming/stream.processor");
const transformer_service_1 = require("./transformers/transformer.service");
const quality_service_1 = require("./quality/quality.service");
const flink_service_1 = require("./flink.service");
// DI token for the optional Flink configuration (provided as null when
// forRoot() is called without a `flink` option).
exports.DATA_FLINK_CONFIG = Symbol('hazel:data:flink-config');
|
|
28
|
+
// Eagerly-instantiated provider whose only job is to push the injected
// Flink configuration into FlinkService when the container builds it.
let DataFlinkBootstrap = class DataFlinkBootstrap {
    constructor(flinkService, config) {
        this.flinkService = flinkService;
        this.config = config;
        // A null/undefined token value means Flink was not configured;
        // in that case the service is left unconfigured on purpose.
        if (this.config) {
            this.flinkService.configure(this.config);
        }
    }
};
// Wire up DI: param 0 resolved by type (FlinkService), param 1 by the
// DATA_FLINK_CONFIG token.
DataFlinkBootstrap = __decorate([
    (0, core_1.Injectable)(),
    __param(1, (0, core_1.Inject)(exports.DATA_FLINK_CONFIG)),
    __metadata("design:paramtypes", [flink_service_1.FlinkService, Object])
], DataFlinkBootstrap);
|
|
42
|
+
// Dynamic-module class. NOTE(review): the provider/export lists here are
// duplicated in the @HazelModule decorator applied below — keep them in sync.
let DataModule = DataModule_1 = class DataModule {
    /**
     * Configure DataModule with optional Flink connection
     *
     * @example
     * ```typescript
     * imports: [
     *   DataModule.forRoot({
     *     flink: {
     *       url: process.env.FLINK_REST_URL || 'http://localhost:8081',
     *       timeout: 30000,
     *     },
     *   }),
     * ]
     * ```
     */
    static forRoot(options = {}) {
        // Remember the options so they can be read back via getOptions().
        DataModule_1.options = options;
        const providers = [
            schema_validator_1.SchemaValidator,
            etl_service_1.ETLService,
            pipeline_builder_1.PipelineBuilder,
            stream_service_1.StreamService,
            stream_builder_1.StreamBuilder,
            stream_processor_1.StreamProcessor,
            transformer_service_1.TransformerService,
            quality_service_1.QualityService,
            flink_service_1.FlinkService,
            // Flink config under its token; null disables DataFlinkBootstrap's
            // configure() call.
            { provide: exports.DATA_FLINK_CONFIG, useValue: options.flink ?? null },
            DataFlinkBootstrap,
        ];
        return {
            module: DataModule_1,
            providers,
            // DataFlinkBootstrap and the config token are intentionally not
            // exported — they are internal wiring.
            exports: [
                schema_validator_1.SchemaValidator,
                etl_service_1.ETLService,
                pipeline_builder_1.PipelineBuilder,
                stream_service_1.StreamService,
                stream_builder_1.StreamBuilder,
                stream_processor_1.StreamProcessor,
                transformer_service_1.TransformerService,
                quality_service_1.QualityService,
                flink_service_1.FlinkService,
            ],
        };
    }
    /** Options captured by the last forRoot() call (empty object by default). */
    static getOptions() {
        return DataModule_1.options;
    }
};
|
|
93
|
+
exports.DataModule = DataModule;
// Default until forRoot() is called.
DataModule.options = {};
// Static @HazelModule registration for use without forRoot(); mirrors the
// provider/export lists built inside forRoot (minus the Flink config token
// and DataFlinkBootstrap, which only exist when forRoot wires them up).
exports.DataModule = DataModule = DataModule_1 = __decorate([
    (0, core_1.HazelModule)({
        providers: [
            schema_validator_1.SchemaValidator,
            etl_service_1.ETLService,
            pipeline_builder_1.PipelineBuilder,
            stream_service_1.StreamService,
            stream_builder_1.StreamBuilder,
            stream_processor_1.StreamProcessor,
            transformer_service_1.TransformerService,
            quality_service_1.QualityService,
            flink_service_1.FlinkService,
        ],
        exports: [
            schema_validator_1.SchemaValidator,
            etl_service_1.ETLService,
            pipeline_builder_1.PipelineBuilder,
            stream_service_1.StreamService,
            stream_builder_1.StreamBuilder,
            stream_processor_1.StreamProcessor,
            transformer_service_1.TransformerService,
            quality_service_1.QualityService,
            flink_service_1.FlinkService,
        ],
    })
], DataModule);
|