@hazeljs/data 0.2.0-beta.68 → 0.2.0-beta.69

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. package/README.md +175 -61
  2. package/dist/connectors/connector.interface.d.ts +29 -0
  3. package/dist/connectors/connector.interface.d.ts.map +1 -0
  4. package/dist/connectors/connector.interface.js +6 -0
  5. package/dist/connectors/csv.connector.d.ts +63 -0
  6. package/dist/connectors/csv.connector.d.ts.map +1 -0
  7. package/dist/connectors/csv.connector.js +147 -0
  8. package/dist/connectors/http.connector.d.ts +68 -0
  9. package/dist/connectors/http.connector.d.ts.map +1 -0
  10. package/dist/connectors/http.connector.js +131 -0
  11. package/dist/connectors/index.d.ts +7 -0
  12. package/dist/connectors/index.d.ts.map +1 -0
  13. package/dist/connectors/index.js +12 -0
  14. package/dist/connectors/memory.connector.d.ts +38 -0
  15. package/dist/connectors/memory.connector.d.ts.map +1 -0
  16. package/dist/connectors/memory.connector.js +56 -0
  17. package/dist/connectors/memory.connector.test.d.ts +2 -0
  18. package/dist/connectors/memory.connector.test.d.ts.map +1 -0
  19. package/dist/connectors/memory.connector.test.js +43 -0
  20. package/dist/data.types.d.ts +16 -0
  21. package/dist/data.types.d.ts.map +1 -1
  22. package/dist/decorators/index.d.ts +1 -0
  23. package/dist/decorators/index.d.ts.map +1 -1
  24. package/dist/decorators/index.js +8 -1
  25. package/dist/decorators/pii.decorator.d.ts +59 -0
  26. package/dist/decorators/pii.decorator.d.ts.map +1 -0
  27. package/dist/decorators/pii.decorator.js +197 -0
  28. package/dist/decorators/pii.decorator.test.d.ts +2 -0
  29. package/dist/decorators/pii.decorator.test.d.ts.map +1 -0
  30. package/dist/decorators/pii.decorator.test.js +150 -0
  31. package/dist/decorators/pipeline.decorator.js +1 -1
  32. package/dist/decorators/pipeline.decorator.test.js +8 -0
  33. package/dist/decorators/transform.decorator.d.ts +9 -1
  34. package/dist/decorators/transform.decorator.d.ts.map +1 -1
  35. package/dist/decorators/transform.decorator.js +4 -0
  36. package/dist/decorators/validate.decorator.d.ts +5 -1
  37. package/dist/decorators/validate.decorator.d.ts.map +1 -1
  38. package/dist/decorators/validate.decorator.js +4 -0
  39. package/dist/flink.service.d.ts +30 -0
  40. package/dist/flink.service.d.ts.map +1 -1
  41. package/dist/flink.service.js +50 -2
  42. package/dist/index.d.ts +13 -7
  43. package/dist/index.d.ts.map +1 -1
  44. package/dist/index.js +36 -8
  45. package/dist/pipelines/etl.service.d.ts +41 -2
  46. package/dist/pipelines/etl.service.d.ts.map +1 -1
  47. package/dist/pipelines/etl.service.js +143 -6
  48. package/dist/pipelines/etl.service.test.js +215 -0
  49. package/dist/pipelines/pipeline.builder.d.ts +86 -13
  50. package/dist/pipelines/pipeline.builder.d.ts.map +1 -1
  51. package/dist/pipelines/pipeline.builder.js +177 -27
  52. package/dist/pipelines/pipeline.builder.test.js +160 -12
  53. package/dist/pipelines/stream.service.test.js +49 -0
  54. package/dist/quality/quality.service.d.ts +67 -5
  55. package/dist/quality/quality.service.d.ts.map +1 -1
  56. package/dist/quality/quality.service.js +259 -20
  57. package/dist/quality/quality.service.test.js +94 -0
  58. package/dist/schema/schema.d.ts +92 -12
  59. package/dist/schema/schema.d.ts.map +1 -1
  60. package/dist/schema/schema.js +395 -83
  61. package/dist/schema/schema.test.js +292 -0
  62. package/dist/streaming/flink/flink.client.d.ts +41 -3
  63. package/dist/streaming/flink/flink.client.d.ts.map +1 -1
  64. package/dist/streaming/flink/flink.client.js +171 -8
  65. package/dist/streaming/flink/flink.client.test.js +2 -2
  66. package/dist/streaming/flink/flink.job.d.ts +2 -1
  67. package/dist/streaming/flink/flink.job.d.ts.map +1 -1
  68. package/dist/streaming/flink/flink.job.js +2 -2
  69. package/dist/streaming/stream.processor.d.ts +56 -2
  70. package/dist/streaming/stream.processor.d.ts.map +1 -1
  71. package/dist/streaming/stream.processor.js +149 -2
  72. package/dist/streaming/stream.processor.test.js +99 -0
  73. package/dist/streaming/stream.processor.windowing.test.d.ts +2 -0
  74. package/dist/streaming/stream.processor.windowing.test.d.ts.map +1 -0
  75. package/dist/streaming/stream.processor.windowing.test.js +69 -0
  76. package/dist/telemetry/telemetry.d.ts +124 -0
  77. package/dist/telemetry/telemetry.d.ts.map +1 -0
  78. package/dist/telemetry/telemetry.js +259 -0
  79. package/dist/telemetry/telemetry.test.d.ts +2 -0
  80. package/dist/telemetry/telemetry.test.d.ts.map +1 -0
  81. package/dist/telemetry/telemetry.test.js +51 -0
  82. package/dist/testing/index.d.ts +12 -0
  83. package/dist/testing/index.d.ts.map +1 -0
  84. package/dist/testing/index.js +18 -0
  85. package/dist/testing/pipeline-test-harness.d.ts +40 -0
  86. package/dist/testing/pipeline-test-harness.d.ts.map +1 -0
  87. package/dist/testing/pipeline-test-harness.js +55 -0
  88. package/dist/testing/pipeline-test-harness.test.d.ts +2 -0
  89. package/dist/testing/pipeline-test-harness.test.d.ts.map +1 -0
  90. package/dist/testing/pipeline-test-harness.test.js +102 -0
  91. package/dist/testing/schema-faker.d.ts +32 -0
  92. package/dist/testing/schema-faker.d.ts.map +1 -0
  93. package/dist/testing/schema-faker.js +91 -0
  94. package/dist/testing/schema-faker.test.d.ts +2 -0
  95. package/dist/testing/schema-faker.test.d.ts.map +1 -0
  96. package/dist/testing/schema-faker.test.js +66 -0
  97. package/dist/transformers/built-in.transformers.test.js +28 -0
  98. package/dist/transformers/transformer.service.test.js +10 -0
  99. package/package.json +2 -2
@@ -0,0 +1,259 @@
1
+ "use strict";
2
+ /**
3
+ * Telemetry — zero-dependency OpenTelemetry-compatible instrumentation.
4
+ *
5
+ * Works in two modes:
6
+ * 1. **Standalone** — emits structured events to an in-memory log / custom exporters.
7
+ * 2. **OTel** — when @opentelemetry/api is present in the host application,
8
+ * wraps each pipeline step in an OTel span automatically.
9
+ *
10
+ * The package itself does NOT list @opentelemetry/* as dependencies; it uses
11
+ * dynamic optional `require()` so the feature activates transparently when
12
+ * the host already has it installed.
13
+ */
14
+ Object.defineProperty(exports, "__esModule", { value: true });
15
+ exports.TelemetryService = void 0;
16
+ exports.createPrometheusExporter = createPrometheusExporter;
17
/**
 * Produce a random lowercase-hex identifier of `len` characters.
 * Used for W3C-trace-style ids: 32 chars for traces, 16 for spans.
 * Not cryptographically secure — telemetry correlation only.
 */
function generateId(len = 16) {
    let id = '';
    for (let i = 0; i < len; i++) {
        // Each nibble rendered as one hex digit (0-9a-f).
        id += Math.floor(Math.random() * 16).toString(16);
    }
    return id;
}
21
/**
 * Deterministic 32-bit content hash of any JSON-serializable value,
 * returned as a lowercase hex string (djb2 xor variant over the
 * serialized form). Values JSON.stringify cannot represent
 * (e.g. undefined) hash as the empty string.
 */
function simpleHash(value) {
    const serialized = JSON.stringify(value) ?? '';
    let acc = 5381;
    // Indexed UTF-16 code-unit loop (not for..of) so surrogate pairs
    // hash unit-by-unit; >>>0 keeps the accumulator an unsigned 32-bit int.
    for (let i = 0; i < serialized.length; i++) {
        acc = (((acc << 5) + acc) ^ serialized.charCodeAt(i)) >>> 0;
    }
    return acc.toString(16);
}
30
/**
 * TelemetryService — collect spans, metrics, and lineage for pipeline executions.
 *
 * @example
 * const telemetry = TelemetryService.getInstance();
 * telemetry.addSpanExporter((span) => console.log(span));
 * telemetry.enableLineage();
 *
 * // Automatically used by ETLService when registered
 * DataModule.forRoot({ telemetry: { enabled: true, serviceName: 'orders-pipeline' } });
 */
class TelemetryService {
    constructor(options = {}) {
        this.spanExporters = [];
        this.metricExporters = [];
        this.spans = [];
        this.metrics = [];
        this.lineageStore = [];
        this.lineageEnabled = false;
        // Loaded lazily if @opentelemetry/api is present in the host app
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        this.otelApi = null;
        this.serviceName = options.serviceName ?? 'hazeljs-pipeline';
        this.maxSpansInMemory = options.maxSpansInMemory ?? 1000;
        this.tryLoadOtel();
    }
    /** Process-wide singleton accessor; `options` only take effect on the first call. */
    static getInstance(options) {
        if (!TelemetryService.instance) {
            TelemetryService.instance = new TelemetryService(options);
        }
        return TelemetryService.instance;
    }
    /** Drop the singleton (primarily for test isolation). */
    static reset() {
        TelemetryService.instance = null;
    }
    /** Optional OTel integration: succeeds only if the host app installed @opentelemetry/api. */
    tryLoadOtel() {
        try {
            // eslint-disable-next-line @typescript-eslint/no-require-imports
            this.otelApi = require('@opentelemetry/api');
        }
        catch {
            this.otelApi = null;
        }
    }
    // ─── Configuration ────────────────────────────────────────────────────────
    addSpanExporter(exporter) {
        this.spanExporters.push(exporter);
        return this;
    }
    addMetricExporter(exporter) {
        this.metricExporters.push(exporter);
        return this;
    }
    enableLineage() {
        this.lineageEnabled = true;
        return this;
    }
    // ─── Span Tracking ────────────────────────────────────────────────────────
    /** Mint a new trace id + root span id pair for a pipeline run. */
    startTrace(_pipelineName) {
        const traceId = generateId(32);
        const rootSpanId = generateId(16);
        return { traceId, rootSpanId };
    }
    /**
     * Record a pipeline step span: buffer it (bounded), mirror it to OTel
     * when available, then fan it out to registered span exporters.
     * Exporter failures are swallowed so telemetry never breaks the pipeline.
     */
    async recordSpan(span) {
        // FIX: spread the input FIRST, then fill defaults. The previous order
        // (defaults first, `...span` after) let an explicitly-undefined
        // traceId/spanId property on the input clobber the generated ids.
        const full = {
            ...span,
            traceId: span.traceId ?? generateId(32),
            spanId: span.spanId ?? generateId(16),
        };
        if (this.spans.length >= this.maxSpansInMemory) {
            this.spans.shift();
        }
        this.spans.push(full);
        // OTel integration
        if (this.otelApi) {
            try {
                const tracer = this.otelApi['trace'].getTracer(this.serviceName);
                const otelSpan = tracer.startSpan(`pipeline.${full.pipeline}.step.${full.step}`, {
                    startTime: full.startTime,
                });
                otelSpan.setAttributes({
                    'hazel.pipeline': full.pipeline,
                    'hazel.step': full.step,
                    'hazel.step.name': full.stepName,
                    'hazel.service': this.serviceName,
                    ...full.attributes,
                });
                if (full.status === 'error' && full.error) {
                    otelSpan.recordException(new Error(full.error));
                    // 2 === SpanStatusCode.ERROR in @opentelemetry/api
                    otelSpan.setStatus({ code: 2, message: full.error });
                }
                otelSpan.end(full.endTime);
            }
            catch {
                /* OTel not properly configured */
            }
        }
        for (const exporter of this.spanExporters) {
            try {
                await Promise.resolve(exporter(full));
            }
            catch {
                /* noop */
            }
        }
    }
    // ─── Metrics ──────────────────────────────────────────────────────────────
    /** Record one metric point and fan it out to metric exporters (failures swallowed). */
    async recordMetric(name, value, labels = {}) {
        const point = {
            name,
            value,
            labels: { service: this.serviceName, ...labels },
            timestamp: Date.now(),
        };
        // FIX: bound the in-memory metric buffer the same way spans are bounded,
        // so a long-running pipeline cannot grow memory without limit.
        if (this.metrics.length >= this.maxSpansInMemory) {
            this.metrics.shift();
        }
        this.metrics.push(point);
        for (const exporter of this.metricExporters) {
            try {
                await Promise.resolve(exporter(point));
            }
            catch {
                /* noop */
            }
        }
    }
    /** Emit the standard per-step metric set (duration, records, errors, throughput). */
    async recordStepMetrics(pipeline, stepName, durationMs, success, recordCount = 1) {
        const labels = { pipeline, step: stepName };
        await this.recordMetric('hazel.pipeline.step.duration_ms', durationMs, labels);
        await this.recordMetric('hazel.pipeline.step.records', recordCount, labels);
        await this.recordMetric('hazel.pipeline.step.errors', success ? 0 : 1, labels);
        if (durationMs > 0 && recordCount > 0) {
            // records per second
            await this.recordMetric('hazel.pipeline.step.throughput', (recordCount / durationMs) * 1000, labels);
        }
    }
    // ─── Lineage ──────────────────────────────────────────────────────────────
    /** Create an empty lineage entry for one pipeline run. */
    startLineage(pipeline, input) {
        return {
            traceId: generateId(32),
            pipeline,
            input,
            steps: [],
            output: undefined,
            timestamp: new Date(),
        };
    }
    /** Append a step to a lineage entry; inputs/outputs are stored as content hashes, not values. */
    recordLineageStep(entry, step, name, input, output, durationMs) {
        if (!this.lineageEnabled)
            return;
        entry.steps.push({
            step,
            name,
            inputHash: simpleHash(input),
            outputHash: simpleHash(output),
            durationMs,
        });
    }
    /** Attach the final output and persist the entry (no-op unless lineage is enabled). */
    finalizeLineage(entry, output) {
        if (!this.lineageEnabled)
            return;
        entry.output = output;
        this.lineageStore.push(entry);
    }
    // ─── Inspection ───────────────────────────────────────────────────────────
    getSpans(pipeline) {
        return pipeline ? this.spans.filter((s) => s.pipeline === pipeline) : [...this.spans];
    }
    getMetrics(name) {
        return name ? this.metrics.filter((m) => m.name === name) : [...this.metrics];
    }
    getLineage(traceId) {
        return traceId
            ? this.lineageStore.filter((e) => e.traceId === traceId)
            : [...this.lineageStore];
    }
    /** Compute summary stats for a pipeline across all recorded spans. */
    getSummary(pipeline) {
        // Only root spans (step === 0) count as whole-pipeline runs.
        const pipelineSpans = this.spans.filter((s) => s.pipeline === pipeline && s.step === 0);
        if (pipelineSpans.length === 0)
            return { totalRuns: 0, successRate: 0, avgDurationMs: 0, p95DurationMs: 0 };
        const durations = pipelineSpans.map((s) => s.durationMs).sort((a, b) => a - b);
        const successes = pipelineSpans.filter((s) => s.status === 'ok').length;
        const p95Idx = Math.floor(durations.length * 0.95);
        return {
            totalRuns: pipelineSpans.length,
            successRate: parseFloat(((successes / pipelineSpans.length) * 100).toFixed(2)),
            avgDurationMs: parseFloat((durations.reduce((a, b) => a + b, 0) / durations.length).toFixed(2)),
            p95DurationMs: durations[p95Idx] ?? durations[durations.length - 1],
        };
    }
    /** Discard all buffered spans, metrics, and lineage entries. */
    clear() {
        this.spans = [];
        this.metrics = [];
        this.lineageStore = [];
    }
}
exports.TelemetryService = TelemetryService;
TelemetryService.instance = null;
226
/**
 * Prometheus-format metric exporter factory.
 * Returns a metric exporter that formats points as Prometheus text.
 *
 * @example
 * const { exporter, getText } = createPrometheusExporter();
 * telemetry.addMetricExporter(exporter);
 * // GET /metrics → getText()
 */
function createPrometheusExporter() {
    // Per-metric-name ring buffer of recent points (max 1000 each).
    const store = new Map();
    const exporter = (metric) => {
        const existing = store.get(metric.name) ?? [];
        existing.push(metric);
        if (existing.length > 1000)
            existing.shift();
        store.set(metric.name, existing);
    };
    // FIX: escape label values per the Prometheus text exposition format —
    // backslash, double-quote, and newline must be backslash-escaped, or a
    // value like `a"b` produces an unparseable /metrics payload.
    const escapeLabelValue = (v) => String(v)
        .replace(/\\/g, '\\\\')
        .replace(/"/g, '\\"')
        .replace(/\n/g, '\\n');
    const getText = () => {
        const lines = [];
        for (const [name, points] of store) {
            // Dots and dashes are invalid in Prometheus metric names.
            const promName = name.replace(/\./g, '_').replace(/-/g, '_');
            lines.push(`# TYPE ${promName} gauge`);
            // Gauge semantics: expose only the most recent point per series.
            for (const p of points.slice(-1)) {
                const labels = Object.entries(p.labels)
                    .map(([k, v]) => `${k}="${escapeLabelValue(v)}"`)
                    .join(',');
                lines.push(`${promName}{${labels}} ${p.value} ${p.timestamp}`);
            }
        }
        return lines.join('\n');
    };
    return { exporter, getText };
}
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=telemetry.test.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"telemetry.test.d.ts","sourceRoot":"","sources":["../../src/telemetry/telemetry.test.ts"],"names":[],"mappings":""}
@@ -0,0 +1,51 @@
1
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const telemetryModule = require("./telemetry");
describe('TelemetryService', () => {
    let telemetry;
    beforeEach(() => {
        // Fresh singleton per test so state never leaks between cases.
        telemetryModule.TelemetryService.reset();
        telemetry = telemetryModule.TelemetryService.getInstance({ serviceName: 'test-pipeline' });
        telemetry.clear();
    });
    it('records span', async () => {
        const span = {
            traceId: 't1',
            spanId: 's1',
            pipeline: 'orders',
            step: 1,
            stepName: 'validate',
            startTime: 0,
            endTime: 100,
            durationMs: 100,
            status: 'ok',
            attributes: {},
        };
        await telemetry.recordSpan(span);
        const recorded = telemetry.getSpans('orders');
        expect(recorded).toHaveLength(1);
        expect(recorded[0].durationMs).toBe(100);
    });
    it('records metric', async () => {
        await telemetry.recordMetric('test.metric', 42, { env: 'test' });
        const recorded = telemetry.getMetrics('test.metric');
        expect(recorded).toHaveLength(1);
        expect(recorded[0].value).toBe(42);
    });
    it('records step metrics', async () => {
        await telemetry.recordStepMetrics('orders', 'validate', 50, true, 10);
        const durations = telemetry.getMetrics('hazel.pipeline.step.duration_ms');
        expect(durations.length).toBeGreaterThanOrEqual(1);
    });
    it('getSummary returns pipeline stats', () => {
        telemetry.getSpans(); // ensure we have data structure
        const summary = telemetry.getSummary('orders');
        expect(summary).toHaveProperty('totalRuns');
        expect(summary).toHaveProperty('successRate');
    });
    it('createPrometheusExporter returns exporter and getText', () => {
        const { exporter, getText } = (0, telemetryModule.createPrometheusExporter)();
        exporter({ name: 'test', value: 1, labels: {}, timestamp: Date.now() });
        const text = getText();
        expect(text).toContain('test');
    });
});
@@ -0,0 +1,12 @@
1
+ /**
2
+ * Testing utilities for @hazeljs/data pipelines.
3
+ *
4
+ * - SchemaFaker: generate fake data from schemas
5
+ * - PipelineTestHarness: run pipelines and capture per-step events
6
+ * - MockSource / MockSink: use MemorySource and MemorySink from connectors
7
+ */
8
+ export { SchemaFaker } from './schema-faker';
9
+ export { PipelineTestHarness } from './pipeline-test-harness';
10
+ export type { StepSnapshot, PipelineRunResult } from './pipeline-test-harness';
11
+ export { MemorySource as MockSource, MemorySink as MockSink } from '../connectors/memory.connector';
12
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/testing/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,EAAE,mBAAmB,EAAE,MAAM,yBAAyB,CAAC;AAC9D,YAAY,EAAE,YAAY,EAAE,iBAAiB,EAAE,MAAM,yBAAyB,CAAC;AAG/E,OAAO,EAAE,YAAY,IAAI,UAAU,EAAE,UAAU,IAAI,QAAQ,EAAE,MAAM,gCAAgC,CAAC"}
@@ -0,0 +1,18 @@
1
+ "use strict";
2
+ /**
3
+ * Testing utilities for @hazeljs/data pipelines.
4
+ *
5
+ * - SchemaFaker: generate fake data from schemas
6
+ * - PipelineTestHarness: run pipelines and capture per-step events
7
+ * - MockSource / MockSink: use MemorySource and MemorySink from connectors
8
+ */
9
+ Object.defineProperty(exports, "__esModule", { value: true });
10
+ exports.MockSink = exports.MockSource = exports.PipelineTestHarness = exports.SchemaFaker = void 0;
11
+ var schema_faker_1 = require("./schema-faker");
12
+ Object.defineProperty(exports, "SchemaFaker", { enumerable: true, get: function () { return schema_faker_1.SchemaFaker; } });
13
+ var pipeline_test_harness_1 = require("./pipeline-test-harness");
14
+ Object.defineProperty(exports, "PipelineTestHarness", { enumerable: true, get: function () { return pipeline_test_harness_1.PipelineTestHarness; } });
15
+ // Re-export in-memory connectors as mock source/sink for tests
16
+ var memory_connector_1 = require("../connectors/memory.connector");
17
+ Object.defineProperty(exports, "MockSource", { enumerable: true, get: function () { return memory_connector_1.MemorySource; } });
18
+ Object.defineProperty(exports, "MockSink", { enumerable: true, get: function () { return memory_connector_1.MemorySink; } });
@@ -0,0 +1,40 @@
1
import type { ETLService } from '../pipelines/etl.service';
/** One captured per-step execution event from a harness run. */
export interface StepSnapshot {
    /** Step index within the pipeline (as reported by the ETL service). */
    step: number;
    /** Human-readable step name (e.g. the decorator's `name` option). */
    stepName: string;
    /** Wall-clock duration of the step in milliseconds. */
    durationMs: number;
    /** Whether the step completed without error. */
    success: boolean;
    /** True when the step was skipped rather than executed. */
    skipped?: boolean;
    /** Error message when the step failed. */
    error?: string;
}
/** Outcome of one harness run: final pipeline output plus per-step events. */
export interface PipelineRunResult<T = unknown> {
    result: T;
    events: StepSnapshot[];
    /** Total wall-clock duration of the whole run in milliseconds. */
    durationMs: number;
}
/**
 * Test harness for pipeline execution.
 * Wraps a pipeline, runs it, and captures per-step execution events.
 *
 * @example
 * const harness = PipelineTestHarness.create(etlService, myPipeline);
 * const { result, events } = await harness.run({ raw: 'data' });
 * expect(events.every(e => e.success)).toBe(true);
 */
export declare class PipelineTestHarness {
    private readonly etlService;
    private readonly pipelineInstance;
    private events;
    constructor(etlService: ETLService, pipelineInstance: object);
    /**
     * Run the pipeline with the given input.
     */
    run<T = unknown>(input: unknown): Promise<PipelineRunResult<T>>;
    /**
     * Run the pipeline and assert no step failed.
     * Throws if any step failed.
     */
    runAndAssertSuccess<T = unknown>(input: unknown): Promise<T>;
    /** Convenience factory — equivalent to `new PipelineTestHarness(...)`. */
    static create(etlService: ETLService, pipelineInstance: object): PipelineTestHarness;
}
//# sourceMappingURL=pipeline-test-harness.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"pipeline-test-harness.d.ts","sourceRoot":"","sources":["../../src/testing/pipeline-test-harness.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,0BAA0B,CAAC;AAG3D,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,MAAM,CAAC;IACjB,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,EAAE,OAAO,CAAC;IACjB,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,iBAAiB,CAAC,CAAC,GAAG,OAAO;IAC5C,MAAM,EAAE,CAAC,CAAC;IACV,MAAM,EAAE,YAAY,EAAE,CAAC;IACvB,UAAU,EAAE,MAAM,CAAC;CACpB;AAED;;;;;;;;GAQG;AACH,qBAAa,mBAAmB;IAI5B,OAAO,CAAC,QAAQ,CAAC,UAAU;IAC3B,OAAO,CAAC,QAAQ,CAAC,gBAAgB;IAJnC,OAAO,CAAC,MAAM,CAAsB;gBAGjB,UAAU,EAAE,UAAU,EACtB,gBAAgB,EAAE,MAAM;IAc3C;;OAEG;IACG,GAAG,CAAC,CAAC,GAAG,OAAO,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC;IAQrE;;;OAGG;IACG,mBAAmB,CAAC,CAAC,GAAG,OAAO,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,CAAC,CAAC;IAWlE,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE,UAAU,EAAE,gBAAgB,EAAE,MAAM,GAAG,mBAAmB;CAGrF"}
@@ -0,0 +1,55 @@
1
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.PipelineTestHarness = void 0;
/**
 * Test harness for pipeline execution.
 * Wraps a pipeline, runs it, and captures per-step execution events.
 *
 * @example
 * const harness = PipelineTestHarness.create(etlService, myPipeline);
 * const { result, events } = await harness.run({ raw: 'data' });
 * expect(events.every(e => e.success)).toBe(true);
 */
class PipelineTestHarness {
    constructor(etlService, pipelineInstance) {
        this.etlService = etlService;
        this.pipelineInstance = pipelineInstance;
        this.events = [];
        // Subscribe once; every completed step is snapshotted into `events`.
        // NOTE(review): the listener is never unregistered — presumably one
        // harness per short-lived test; confirm if harnesses are long-lived.
        this.etlService.onStepComplete((event) => {
            const snapshot = {
                step: event.step,
                stepName: event.stepName,
                durationMs: event.durationMs,
                success: event.success,
                skipped: event.skipped,
                error: event.error,
            };
            this.events.push(snapshot);
        });
    }
    /**
     * Run the pipeline with the given input.
     */
    async run(input) {
        this.events = [];
        const startedAt = Date.now();
        const result = await this.etlService.execute(this.pipelineInstance, input);
        const durationMs = Date.now() - startedAt;
        // Return a copy so later runs cannot mutate a caller's snapshot list.
        return { result, events: this.events.slice(), durationMs };
    }
    /**
     * Run the pipeline and assert no step failed.
     * Throws if any step failed.
     */
    async runAndAssertSuccess(input) {
        const { result, events } = await this.run(input);
        const failures = [];
        for (const event of events) {
            if (!event.success && !event.skipped) {
                failures.push(event);
            }
        }
        if (failures.length > 0) {
            const details = failures.map((e) => `${e.stepName}: ${e.error}`).join('; ');
            throw new Error(`Pipeline steps failed: ${details}`);
        }
        return result;
    }
    static create(etlService, pipelineInstance) {
        return new PipelineTestHarness(etlService, pipelineInstance);
    }
}
exports.PipelineTestHarness = PipelineTestHarness;
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=pipeline-test-harness.test.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"pipeline-test-harness.test.d.ts","sourceRoot":"","sources":["../../src/testing/pipeline-test-harness.test.ts"],"names":[],"mappings":""}
@@ -0,0 +1,102 @@
1
"use strict";
// TypeScript-emitted decorator helper (tslib-free emit) — applies
// decorators to a class or member and returns the decorated target.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// Emits design-time type metadata when reflect-metadata is loaded.
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
const decorators_1 = require("../decorators");
const etl_service_1 = require("../pipelines/etl.service");
const schema_validator_1 = require("../validators/schema.validator");
const pipeline_test_harness_1 = require("./pipeline-test-harness");
// Two-step fixture pipeline: step 1 doubles x, step 2 adds 1.
let TestPipeline = class TestPipeline {
    double(data) {
        return { ...data, x: data.x * 2 };
    }
    add(data) {
        return { ...data, x: data.x + 1 };
    }
};
__decorate([
    (0, decorators_1.Transform)({ step: 1, name: 'double' }),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [Object]),
    __metadata("design:returntype", void 0)
], TestPipeline.prototype, "double", null);
__decorate([
    (0, decorators_1.Transform)({ step: 2, name: 'add' }),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [Object]),
    __metadata("design:returntype", void 0)
], TestPipeline.prototype, "add", null);
TestPipeline = __decorate([
    (0, decorators_1.Pipeline)('TestPipeline')
], TestPipeline);
describe('PipelineTestHarness', () => {
    it('runs pipeline and captures events', async () => {
        const schemaValidator = new schema_validator_1.SchemaValidator();
        const etlService = new etl_service_1.ETLService(schemaValidator);
        const pipeline = new TestPipeline();
        const harness = pipeline_test_harness_1.PipelineTestHarness.create(etlService, pipeline);
        const { result, events, durationMs } = await harness.run({ x: 5 });
        // (5 * 2) + 1 = 11
        expect(result).toEqual({ x: 11 });
        expect(events).toHaveLength(2);
        expect(events.every((e) => e.success)).toBe(true);
        expect(durationMs).toBeGreaterThanOrEqual(0);
    });
    it('runAndAssertSuccess returns result', async () => {
        const schemaValidator = new schema_validator_1.SchemaValidator();
        const etlService = new etl_service_1.ETLService(schemaValidator);
        const harness = pipeline_test_harness_1.PipelineTestHarness.create(etlService, new TestPipeline());
        // (1 * 2) + 1 = 3
        const result = await harness.runAndAssertSuccess({ x: 1 });
        expect(result.x).toBe(3);
    });
    it('runAndAssertSuccess throws when step fails', async () => {
        // Single-step pipeline whose only step always throws.
        let FailingPipeline = class FailingPipeline {
            fail() {
                throw new Error('Step failed');
            }
        };
        __decorate([
            (0, decorators_1.Transform)({ step: 1, name: 'fail' }),
            __metadata("design:type", Function),
            __metadata("design:paramtypes", []),
            __metadata("design:returntype", void 0)
        ], FailingPipeline.prototype, "fail", null);
        FailingPipeline = __decorate([
            (0, decorators_1.Pipeline)('FailingPipeline')
        ], FailingPipeline);
        const schemaValidator = new schema_validator_1.SchemaValidator();
        const etlService = new etl_service_1.ETLService(schemaValidator);
        const harness = pipeline_test_harness_1.PipelineTestHarness.create(etlService, new FailingPipeline());
        // Without a DLQ the step error propagates out of execute() directly.
        await expect(harness.runAndAssertSuccess({})).rejects.toThrow('Step failed');
    });
    it('runAndAssertSuccess throws when step fails with DLQ (events show failure)', async () => {
        let DLQFailingPipeline = class DLQFailingPipeline {
            fail() {
                throw new Error('DLQ step failed');
            }
        };
        __decorate([
            (0, decorators_1.Transform)({
                step: 1,
                name: 'fail',
                dlq: { handler: () => { } },
            }),
            __metadata("design:type", Function),
            __metadata("design:paramtypes", []),
            __metadata("design:returntype", void 0)
        ], DLQFailingPipeline.prototype, "fail", null);
        DLQFailingPipeline = __decorate([
            (0, decorators_1.Pipeline)('DLQFailingPipeline')
        ], DLQFailingPipeline);
        const schemaValidator = new schema_validator_1.SchemaValidator();
        const etlService = new etl_service_1.ETLService(schemaValidator);
        const harness = pipeline_test_harness_1.PipelineTestHarness.create(etlService, new DLQFailingPipeline());
        // With a DLQ the error is swallowed by execute(); the harness itself
        // detects the failed step event and throws its aggregate error.
        await expect(harness.runAndAssertSuccess({})).rejects.toThrow('Pipeline steps failed');
    });
});
@@ -0,0 +1,32 @@
1
import type { BaseSchema } from '../schema/schema';
/**
 * Generate fake data that matches a schema.
 * Uses schema.toJsonSchema() to infer structure and produce valid sample data.
 *
 * @example
 * const UserSchema = Schema.object({ name: Schema.string(), age: Schema.number() });
 * const fake = SchemaFaker.generate(UserSchema);
 * // { name: "random-string-123", age: 42 }
 */
export declare class SchemaFaker {
    private readonly options;
    constructor(options?: {
        // Presumably bounds on generated array sizes — confirm against schema-faker.ts
        arrayMinLength?: number;
        arrayMaxLength?: number;
    });
    /**
     * Generate one fake record matching the schema.
     */
    generate<T>(schema: BaseSchema<T>): T;
    /**
     * Generate N fake records.
     */
    generateMany<T>(schema: BaseSchema<T>, count: number): T[];
    private generateFromJsonSchema;
    private genByType;
    private randomString;
    /** Static convenience method */
    static generate<T>(schema: BaseSchema<T>): T;
    static generateMany<T>(schema: BaseSchema<T>, count: number): T[];
}
//# sourceMappingURL=schema-faker.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"schema-faker.d.ts","sourceRoot":"","sources":["../../src/testing/schema-faker.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAEnD;;;;;;;;GAQG;AACH,qBAAa,WAAW;IACtB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAuD;gBAEnE,OAAO,GAAE;QAAE,cAAc,CAAC,EAAE,MAAM,CAAC;QAAC,cAAc,CAAC,EAAE,MAAM,CAAA;KAAO;IAI9E;;OAEG;IACH,QAAQ,CAAC,CAAC,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC;IAKrC;;OAEG;IACH,YAAY,CAAC,CAAC,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,MAAM,GAAG,CAAC,EAAE;IAI1D,OAAO,CAAC,sBAAsB;IAuC9B,OAAO,CAAC,SAAS;IAkBjB,OAAO,CAAC,YAAY;IAQpB,gCAAgC;IAChC,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC;IAI5C,MAAM,CAAC,YAAY,CAAC,CAAC,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,MAAM,GAAG,CAAC,EAAE;CAGlE"}