@async-fusion/data 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +0 -0
- package/README.md +0 -0
- package/dist/cjs/cli/deploy.d.ts +1 -0
- package/dist/cjs/cli/deploy.d.ts.map +1 -0
- package/dist/cjs/cli/monitor.d.ts +1 -0
- package/dist/cjs/cli/monitor.d.ts.map +1 -0
- package/dist/cjs/index.d.ts +29 -0
- package/dist/cjs/index.d.ts.map +1 -0
- package/dist/cjs/index.js +2 -0
- package/dist/cjs/index.js.map +1 -0
- package/dist/cjs/kafka/consumer.d.ts +19 -0
- package/dist/cjs/kafka/consumer.d.ts.map +1 -0
- package/dist/cjs/kafka/consumer.js +2 -0
- package/dist/cjs/kafka/consumer.js.map +1 -0
- package/dist/cjs/kafka/producer.d.ts +21 -0
- package/dist/cjs/kafka/producer.d.ts.map +1 -0
- package/dist/cjs/kafka/producer.js +2 -0
- package/dist/cjs/kafka/producer.js.map +1 -0
- package/dist/cjs/kafka/stream.d.ts +52 -0
- package/dist/cjs/kafka/stream.d.ts.map +1 -0
- package/dist/cjs/kafka/stream.js +2 -0
- package/dist/cjs/kafka/stream.js.map +1 -0
- package/dist/cjs/pipeline/builder.d.ts +32 -0
- package/dist/cjs/pipeline/builder.d.ts.map +1 -0
- package/dist/cjs/pipeline/builder.js +2 -0
- package/dist/cjs/pipeline/builder.js.map +1 -0
- package/dist/cjs/pipeline/connectors.d.ts +1 -0
- package/dist/cjs/pipeline/connectors.d.ts.map +1 -0
- package/dist/cjs/pipeline/monitoring.d.ts +17 -0
- package/dist/cjs/pipeline/monitoring.d.ts.map +1 -0
- package/dist/cjs/pipeline/monitoring.js +2 -0
- package/dist/cjs/pipeline/monitoring.js.map +1 -0
- package/dist/cjs/react/index.d.ts +4 -0
- package/dist/cjs/react/index.d.ts.map +1 -0
- package/dist/cjs/react/useKafkaTopic.d.ts +13 -0
- package/dist/cjs/react/useKafkaTopic.d.ts.map +1 -0
- package/dist/cjs/react/useRealtimeData.d.ts +16 -0
- package/dist/cjs/react/useRealtimeData.d.ts.map +1 -0
- package/dist/cjs/react/useSparkQuery.d.ts +12 -0
- package/dist/cjs/react/useSparkQuery.d.ts.map +1 -0
- package/dist/cjs/spark/client.d.ts +27 -0
- package/dist/cjs/spark/client.d.ts.map +1 -0
- package/dist/cjs/spark/client.js +2 -0
- package/dist/cjs/spark/client.js.map +1 -0
- package/dist/cjs/spark/sql.d.ts +9 -0
- package/dist/cjs/spark/sql.d.ts.map +1 -0
- package/dist/cjs/spark/sql.js +2 -0
- package/dist/cjs/spark/sql.js.map +1 -0
- package/dist/cjs/spark/streaming.d.ts +8 -0
- package/dist/cjs/spark/streaming.d.ts.map +1 -0
- package/dist/cjs/spark/streaming.js +2 -0
- package/dist/cjs/spark/streaming.js.map +1 -0
- package/dist/cjs/types/index.d.ts +47 -0
- package/dist/cjs/types/index.d.ts.map +1 -0
- package/dist/cjs/utils/error-handling.d.ts +27 -0
- package/dist/cjs/utils/error-handling.d.ts.map +1 -0
- package/dist/cjs/utils/error-handling.js +2 -0
- package/dist/cjs/utils/error-handling.js.map +1 -0
- package/dist/esm/cli/deploy.d.ts +1 -0
- package/dist/esm/cli/deploy.d.ts.map +1 -0
- package/dist/esm/cli/monitor.d.ts +1 -0
- package/dist/esm/cli/monitor.d.ts.map +1 -0
- package/dist/esm/index.d.ts +29 -0
- package/dist/esm/index.d.ts.map +1 -0
- package/dist/esm/index.js +2 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/kafka/consumer.d.ts +19 -0
- package/dist/esm/kafka/consumer.d.ts.map +1 -0
- package/dist/esm/kafka/consumer.js +2 -0
- package/dist/esm/kafka/consumer.js.map +1 -0
- package/dist/esm/kafka/producer.d.ts +21 -0
- package/dist/esm/kafka/producer.d.ts.map +1 -0
- package/dist/esm/kafka/producer.js +2 -0
- package/dist/esm/kafka/producer.js.map +1 -0
- package/dist/esm/kafka/stream.d.ts +52 -0
- package/dist/esm/kafka/stream.d.ts.map +1 -0
- package/dist/esm/kafka/stream.js +2 -0
- package/dist/esm/kafka/stream.js.map +1 -0
- package/dist/esm/pipeline/builder.d.ts +32 -0
- package/dist/esm/pipeline/builder.d.ts.map +1 -0
- package/dist/esm/pipeline/builder.js +2 -0
- package/dist/esm/pipeline/builder.js.map +1 -0
- package/dist/esm/pipeline/connectors.d.ts +1 -0
- package/dist/esm/pipeline/connectors.d.ts.map +1 -0
- package/dist/esm/pipeline/monitoring.d.ts +17 -0
- package/dist/esm/pipeline/monitoring.d.ts.map +1 -0
- package/dist/esm/pipeline/monitoring.js +2 -0
- package/dist/esm/pipeline/monitoring.js.map +1 -0
- package/dist/esm/react/index.d.ts +4 -0
- package/dist/esm/react/index.d.ts.map +1 -0
- package/dist/esm/react/useKafkaTopic.d.ts +13 -0
- package/dist/esm/react/useKafkaTopic.d.ts.map +1 -0
- package/dist/esm/react/useRealtimeData.d.ts +16 -0
- package/dist/esm/react/useRealtimeData.d.ts.map +1 -0
- package/dist/esm/react/useSparkQuery.d.ts +12 -0
- package/dist/esm/react/useSparkQuery.d.ts.map +1 -0
- package/dist/esm/spark/client.d.ts +27 -0
- package/dist/esm/spark/client.d.ts.map +1 -0
- package/dist/esm/spark/client.js +2 -0
- package/dist/esm/spark/client.js.map +1 -0
- package/dist/esm/spark/sql.d.ts +9 -0
- package/dist/esm/spark/sql.d.ts.map +1 -0
- package/dist/esm/spark/sql.js +2 -0
- package/dist/esm/spark/sql.js.map +1 -0
- package/dist/esm/spark/streaming.d.ts +8 -0
- package/dist/esm/spark/streaming.d.ts.map +1 -0
- package/dist/esm/spark/streaming.js +2 -0
- package/dist/esm/spark/streaming.js.map +1 -0
- package/dist/esm/types/index.d.ts +47 -0
- package/dist/esm/types/index.d.ts.map +1 -0
- package/dist/esm/utils/error-handling.d.ts +27 -0
- package/dist/esm/utils/error-handling.d.ts.map +1 -0
- package/dist/esm/utils/error-handling.js +2 -0
- package/dist/esm/utils/error-handling.js.map +1 -0
- package/package.json +68 -0
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
export interface KafkaConfig {
|
|
2
|
+
brokers: string[];
|
|
3
|
+
clientId: string;
|
|
4
|
+
ssl?: boolean;
|
|
5
|
+
sasl?: {
|
|
6
|
+
mechanism: 'plain' | 'scram-sha-256' | 'scram-sha-512';
|
|
7
|
+
username: string;
|
|
8
|
+
password: string;
|
|
9
|
+
};
|
|
10
|
+
}
|
|
11
|
+
export type KafkaJSConfig = any;
|
|
12
|
+
export interface SparkConfig {
|
|
13
|
+
master: string;
|
|
14
|
+
appName: string;
|
|
15
|
+
sparkConf?: Record<string, string>;
|
|
16
|
+
pythonPath?: string;
|
|
17
|
+
}
|
|
18
|
+
export interface PipelineConfig {
|
|
19
|
+
name: string;
|
|
20
|
+
checkpointLocation?: string;
|
|
21
|
+
parallelism?: number;
|
|
22
|
+
}
|
|
23
|
+
export interface Message<T = any> {
|
|
24
|
+
key?: string;
|
|
25
|
+
value: T;
|
|
26
|
+
timestamp: Date;
|
|
27
|
+
partition?: number;
|
|
28
|
+
offset?: number;
|
|
29
|
+
}
|
|
30
|
+
export interface SparkJob {
|
|
31
|
+
id: string;
|
|
32
|
+
status: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled';
|
|
33
|
+
progress: number;
|
|
34
|
+
startTime: Date;
|
|
35
|
+
endTime?: Date;
|
|
36
|
+
stages: SparkStage[];
|
|
37
|
+
}
|
|
38
|
+
export interface SparkStage {
|
|
39
|
+
id: number;
|
|
40
|
+
name: string;
|
|
41
|
+
completed: number;
|
|
42
|
+
total: number;
|
|
43
|
+
status: 'pending' | 'running' | 'completed' | 'failed';
|
|
44
|
+
}
|
|
45
|
+
export type PipelineSource = 'kafka' | 'file' | 'socket' | 'http';
|
|
46
|
+
export type PipelineSink = 'kafka' | 'console' | 'file' | 'database' | 'http';
|
|
47
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/types/index.ts"],"names":[],"mappings":"AACA,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,QAAQ,EAAE,MAAM,CAAC;IACjB,GAAG,CAAC,EAAE,OAAO,CAAC;IACd,IAAI,CAAC,EAAE;QACL,SAAS,EAAE,OAAO,GAAG,eAAe,GAAG,eAAe,CAAC;QACvD,QAAQ,EAAE,MAAM,CAAC;QACjB,QAAQ,EAAE,MAAM,CAAC;KAClB,CAAC;CACH;AAGD,MAAM,MAAM,aAAa,GAAG,GAAG,CAAC;AAEhC,MAAM,WAAW,WAAW;IAC1B,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACnC,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,MAAM,CAAC;IACb,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED,MAAM,WAAW,OAAO,CAAC,CAAC,GAAG,GAAG;IAC9B,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,CAAC,CAAC;IACT,SAAS,EAAE,IAAI,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,QAAQ;IACvB,EAAE,EAAE,MAAM,CAAC;IACX,MAAM,EAAE,SAAS,GAAG,SAAS,GAAG,WAAW,GAAG,QAAQ,GAAG,WAAW,CAAC;IACrE,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,EAAE,IAAI,CAAC;IAChB,OAAO,CAAC,EAAE,IAAI,CAAC;IACf,MAAM,EAAE,UAAU,EAAE,CAAC;CACtB;AAED,MAAM,WAAW,UAAU;IACzB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,MAAM,CAAC;IAClB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,SAAS,GAAG,SAAS,GAAG,WAAW,GAAG,QAAQ,CAAC;CACxD;AAED,MAAM,MAAM,cAAc,GAAG,OAAO,GAAG,MAAM,GAAG,QAAQ,GAAG,MAAM,CAAC;AAClE,MAAM,MAAM,YAAY,GAAG,OAAO,GAAG,SAAS,GAAG,MAAM,GAAG,UAAU,GAAG,MAAM,CAAC"}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
export declare class RetryableError extends Error {
|
|
2
|
+
readonly retryable: boolean;
|
|
3
|
+
constructor(message: string, retryable?: boolean);
|
|
4
|
+
}
|
|
5
|
+
export declare class FatalError extends Error {
|
|
6
|
+
constructor(message: string);
|
|
7
|
+
}
|
|
8
|
+
export declare function withRetry<T>(fn: () => Promise<T>, options?: {
|
|
9
|
+
maxRetries?: number;
|
|
10
|
+
delayMs?: number;
|
|
11
|
+
backoffMultiplier?: number;
|
|
12
|
+
shouldRetry?: (error: Error) => boolean;
|
|
13
|
+
}): Promise<T>;
|
|
14
|
+
export declare function sleep(ms: number): Promise<void>;
|
|
15
|
+
export declare class CircuitBreaker {
|
|
16
|
+
private readonly failureThreshold;
|
|
17
|
+
private readonly timeoutMs;
|
|
18
|
+
private failures;
|
|
19
|
+
private lastFailureTime;
|
|
20
|
+
private state;
|
|
21
|
+
constructor(failureThreshold?: number, timeoutMs?: number);
|
|
22
|
+
call<T>(fn: () => Promise<T>): Promise<T>;
|
|
23
|
+
private onSuccess;
|
|
24
|
+
private onFailure;
|
|
25
|
+
getState(): string;
|
|
26
|
+
}
|
|
27
|
+
//# sourceMappingURL=error-handling.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"error-handling.d.ts","sourceRoot":"","sources":["../../../src/utils/error-handling.ts"],"names":[],"mappings":"AAAA,qBAAa,cAAe,SAAQ,KAAK;aACQ,SAAS,EAAE,OAAO;gBAAnD,OAAO,EAAE,MAAM,EAAkB,SAAS,GAAE,OAAc;CAIzE;AAED,qBAAa,UAAW,SAAQ,KAAK;gBACrB,OAAO,EAAE,MAAM;CAI9B;AAED,wBAAsB,SAAS,CAAC,CAAC,EAC7B,EAAE,EAAE,MAAM,OAAO,CAAC,CAAC,CAAC,EACpB,OAAO,GAAE;IACL,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,OAAO,CAAC;CACtC,GACP,OAAO,CAAC,CAAC,CAAC,CA0BZ;AAED,wBAAgB,KAAK,CAAC,EAAE,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAE/C;AAED,qBAAa,cAAc;IAMnB,OAAO,CAAC,QAAQ,CAAC,gBAAgB;IACjC,OAAO,CAAC,QAAQ,CAAC,SAAS;IAN9B,OAAO,CAAC,QAAQ,CAAa;IAC7B,OAAO,CAAC,eAAe,CAAa;IACpC,OAAO,CAAC,KAAK,CAA6C;gBAGrC,gBAAgB,GAAE,MAAU,EAC5B,SAAS,GAAE,MAAc;IAGxC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,MAAM,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;IAqB/C,OAAO,CAAC,SAAS;IASjB,OAAO,CAAC,SAAS;IAUjB,QAAQ,IAAI,MAAM;CAGrB"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
"use strict";class t extends Error{constructor(t,e=!0){super(t),this.retryable=e,this.name="RetryableError"}}class e extends Error{constructor(t){super(t),this.name="FatalError"}}function r(t){return new Promise(e=>setTimeout(e,t))}exports.CircuitBreaker=class{constructor(t=5,e=6e4){this.failureThreshold=t,this.timeoutMs=e,this.failures=0,this.lastFailureTime=0,this.state="CLOSED"}async call(t){if("OPEN"===this.state){if(!(Date.now()-this.lastFailureTime>=this.timeoutMs))throw new Error("Circuit breaker is OPEN");this.state="HALF_OPEN",console.log("🔌 Circuit breaker half-open, testing...")}try{const e=await t();return this.onSuccess(),e}catch(t){throw this.onFailure(),t}}onSuccess(){"HALF_OPEN"===this.state&&(this.state="CLOSED",this.failures=0,console.log("✅ Circuit breaker closed")),this.failures=0}onFailure(){this.failures++,this.lastFailureTime=Date.now(),this.failures>=this.failureThreshold&&(this.state="OPEN",console.error(`🔴 Circuit breaker opened after ${this.failures} failures`))}getState(){return this.state}},exports.FatalError=e,exports.RetryableError=t,exports.sleep=r,exports.withRetry=async function(e,s={}){const i=s.maxRetries||3,a=s.delayMs||1e3,o=s.backoffMultiplier||2,l=s.shouldRetry||(e=>e instanceof t);let h,u=a;for(let t=1;t<=i;t++)try{return await e()}catch(e){if(h=e,!l(e)||t===i)throw e;console.warn(`Retry attempt ${t}/${i} after ${u}ms`),await r(u),u*=o}throw h};
|
|
2
|
+
//# sourceMappingURL=error-handling.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"error-handling.js","sources":["../../../../src/utils/error-handling.ts"],"sourcesContent":["export class RetryableError extends Error {\r\n constructor(message: string, public readonly retryable: boolean = true) {\r\n super(message);\r\n this.name = 'RetryableError';\r\n }\r\n}\r\n\r\nexport class FatalError extends Error {\r\n constructor(message: string) {\r\n super(message);\r\n this.name = 'FatalError';\r\n }\r\n}\r\n\r\nexport async function withRetry<T>(\r\n fn: () => Promise<T>,\r\n options: {\r\n maxRetries?: number;\r\n delayMs?: number;\r\n backoffMultiplier?: number;\r\n shouldRetry?: (error: Error) => boolean;\r\n } = {}\r\n): Promise<T> {\r\n const maxRetries = options.maxRetries || 3;\r\n const delayMs = options.delayMs || 1000;\r\n const backoffMultiplier = options.backoffMultiplier || 2;\r\n const shouldRetry = options.shouldRetry || ((error) => error instanceof RetryableError);\r\n \r\n let lastError: Error;\r\n let currentDelay = delayMs;\r\n \r\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\r\n try {\r\n return await fn();\r\n } catch (error) {\r\n lastError = error as Error;\r\n \r\n if (!shouldRetry(error as Error) || attempt === maxRetries) {\r\n throw error;\r\n }\r\n \r\n console.warn(`Retry attempt ${attempt}/${maxRetries} after ${currentDelay}ms`);\r\n await sleep(currentDelay);\r\n currentDelay *= backoffMultiplier;\r\n }\r\n }\r\n \r\n throw lastError!;\r\n}\r\n\r\nexport function sleep(ms: number): Promise<void> {\r\n return new Promise(resolve => setTimeout(resolve, ms));\r\n}\r\n\r\nexport class CircuitBreaker {\r\n private failures: number = 0;\r\n private lastFailureTime: number = 0;\r\n private state: 'CLOSED' | 'OPEN' | 'HALF_OPEN' = 'CLOSED';\r\n \r\n constructor(\r\n private readonly failureThreshold: number = 5,\r\n private readonly timeoutMs: number = 60000\r\n ) {}\r\n \r\n async call<T>(fn: () => Promise<T>): Promise<T> {\r\n if (this.state === 'OPEN') {\r\n const now = Date.now();\r\n if 
(now - this.lastFailureTime >= this.timeoutMs) {\r\n this.state = 'HALF_OPEN';\r\n console.log('🔌 Circuit breaker half-open, testing...');\r\n } else {\r\n throw new Error('Circuit breaker is OPEN');\r\n }\r\n }\r\n \r\n try {\r\n const result = await fn();\r\n this.onSuccess();\r\n return result;\r\n } catch (error) {\r\n this.onFailure();\r\n throw error;\r\n }\r\n }\r\n \r\n private onSuccess(): void {\r\n if (this.state === 'HALF_OPEN') {\r\n this.state = 'CLOSED';\r\n this.failures = 0;\r\n console.log('✅ Circuit breaker closed');\r\n }\r\n this.failures = 0;\r\n }\r\n \r\n private onFailure(): void {\r\n this.failures++;\r\n this.lastFailureTime = Date.now();\r\n \r\n if (this.failures >= this.failureThreshold) {\r\n this.state = 'OPEN';\r\n console.error(`🔴 Circuit breaker opened after ${this.failures} failures`);\r\n }\r\n }\r\n \r\n getState(): string {\r\n return this.state;\r\n }\r\n}\r\n"],"names":["RetryableError","Error","constructor","message","retryable","super","this","name","FatalError","sleep","ms","Promise","resolve","setTimeout","failureThreshold","timeoutMs","failures","lastFailureTime","state","call","fn","Date","now","console","log","result","onSuccess","error","onFailure","getState","async","options","maxRetries","delayMs","backoffMultiplier","shouldRetry","lastError","currentDelay","attempt","warn"],"mappings":"aAAM,MAAOA,UAAuBC,MAChC,WAAAC,CAAYC,EAAiCC,GAAqB,GAC9DC,MAAMF,GADmCG,KAASF,UAATA,EAEzCE,KAAKC,KAAO,gBACf,EAGC,MAAOC,UAAmBP,MAC5B,WAAAC,CAAYC,GACRE,MAAMF,GACNG,KAAKC,KAAO,YACf,EAuCC,SAAUE,EAAMC,GAClB,OAAO,IAAIC,QAAQC,GAAWC,WAAWD,EAASF,GACtD,8BAOI,WAAAR,CACqBY,EAA2B,EAC3BC,EAAoB,KADpBT,KAAgBQ,iBAAhBA,EACAR,KAASS,UAATA,EANbT,KAAQU,SAAW,EACnBV,KAAeW,gBAAW,EAC1BX,KAAKY,MAAoC,QAK7C,CAEJ,UAAMC,CAAQC,GACV,GAAmB,SAAfd,KAAKY,MAAkB,CAEvB,KADYG,KAAKC,MACPhB,KAAKW,iBAAmBX,KAAKS,WAInC,MAAM,IAAId,MAAM,2BAHhBK,KAAKY,MAAQ,YACbK,QAAQC,IAAI,2CAInB,CAED,IACI,MAAMC,QAAeL,IAErB,OADAd,KAAKoB,YACED,CACV,CAAC,MAAOE,GAEL,MADArB,KAAKsB,YACCD,CACT,CACJ,CAEO,SAA
AD,GACe,cAAfpB,KAAKY,QACLZ,KAAKY,MAAQ,SACbZ,KAAKU,SAAW,EAChBO,QAAQC,IAAI,6BAEhBlB,KAAKU,SAAW,CACnB,CAEO,SAAAY,GACJtB,KAAKU,WACLV,KAAKW,gBAAkBI,KAAKC,MAExBhB,KAAKU,UAAYV,KAAKQ,mBACtBR,KAAKY,MAAQ,OACbK,QAAQI,MAAM,mCAAmCrB,KAAKU,qBAE7D,CAED,QAAAa,GACI,OAAOvB,KAAKY,KACf,mFA5FEY,eACHV,EACAW,EAKI,IAEJ,MAAMC,EAAaD,EAAQC,YAAc,EACnCC,EAAUF,EAAQE,SAAW,IAC7BC,EAAoBH,EAAQG,mBAAqB,EACjDC,EAAcJ,EAAQI,aAAW,CAAMR,GAAUA,aAAiB3B,GAExE,IAAIoC,EACAC,EAAeJ,EAEnB,IAAK,IAAIK,EAAU,EAAGA,GAAWN,EAAYM,IACzC,IACI,aAAalB,GAChB,CAAC,MAAOO,GAGL,GAFAS,EAAYT,GAEPQ,EAAYR,IAAmBW,IAAYN,EAC5C,MAAML,EAGVJ,QAAQgB,KAAK,iBAAiBD,KAAWN,WAAoBK,aACvD5B,EAAM4B,GACZA,GAAgBH,CACnB,CAGL,MAAME,CACV"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
//# sourceMappingURL=deploy.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"deploy.d.ts","sourceRoot":"","sources":["../../../src/cli/deploy.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
//# sourceMappingURL=monitor.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"monitor.d.ts","sourceRoot":"","sources":["../../../src/cli/monitor.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
export declare const version = "1.0.0";
|
|
2
|
+
export declare const library: {
|
|
3
|
+
name: string;
|
|
4
|
+
version: string;
|
|
5
|
+
author: string;
|
|
6
|
+
license: string;
|
|
7
|
+
repository: string;
|
|
8
|
+
};
|
|
9
|
+
export declare function getLibraryInfo(): {
|
|
10
|
+
name: string;
|
|
11
|
+
version: string;
|
|
12
|
+
author: string;
|
|
13
|
+
description: string;
|
|
14
|
+
features: string[];
|
|
15
|
+
license: string;
|
|
16
|
+
repository: string;
|
|
17
|
+
};
|
|
18
|
+
export declare function hello(): string;
|
|
19
|
+
export { Producer } from './kafka/producer';
|
|
20
|
+
export { Consumer } from './kafka/consumer';
|
|
21
|
+
export { KafkaStream, GroupedStream } from './kafka/stream';
|
|
22
|
+
export { SparkClient } from './spark/client';
|
|
23
|
+
export { SparkStreaming } from './spark/streaming';
|
|
24
|
+
export { SparkSQL } from './spark/sql';
|
|
25
|
+
export { PipelineBuilder } from './pipeline/builder';
|
|
26
|
+
export { PipelineMonitor } from './pipeline/monitoring';
|
|
27
|
+
export { withRetry, sleep, CircuitBreaker, RetryableError, FatalError } from './utils/error-handling';
|
|
28
|
+
export * from './types';
|
|
29
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,OAAO,UAAU,CAAC;AAE/B,eAAO,MAAM,OAAO;;;;;;CAMnB,CAAC;AAEF,wBAAgB,cAAc;;;;;;;;EAoB7B;AAED,wBAAgB,KAAK,IAAI,MAAM,CAE9B;AAGD,OAAO,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAC5C,OAAO,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAC5C,OAAO,EAAE,WAAW,EAAE,aAAa,EAAE,MAAM,gBAAgB,CAAC;AAG5D,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACnD,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAGvC,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAGxD,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,cAAc,EAAE,cAAc,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAGtG,cAAc,SAAS,CAAC"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
export{Producer}from"./kafka/producer.js";export{Consumer}from"./kafka/consumer.js";export{GroupedStream,KafkaStream}from"./kafka/stream.js";export{SparkClient}from"./spark/client.js";export{SparkStreaming}from"./spark/streaming.js";export{SparkSQL}from"./spark/sql.js";export{PipelineBuilder}from"./pipeline/builder.js";export{PipelineMonitor}from"./pipeline/monitoring.js";export{CircuitBreaker,FatalError,RetryableError,sleep,withRetry}from"./utils/error-handling.js";const r="1.0.0",e={name:"@async-fusion/data",version:r,author:"Udayan Sharma",license:"MIT",repository:"https://github.com/hollermay/async-fusion-data"};function o(){return{name:e.name,version:e.version,author:e.author,description:"Unified data streaming library for Kafka and Spark",features:["Kafka Producer/Consumer with backpressure","Spark job submission and monitoring","Unified pipeline builder","React hooks for real-time data","TypeScript first","Built-in monitoring and metrics","Stream processing with windowing","Error handling and retries","Circuit breaker pattern"],license:e.license,repository:e.repository}}function i(){return`Hello from @async-fusion/data! Built with lots of Love! (errors and fixes :P) by ${e.author}`}export{o as getLibraryInfo,i as hello,e as library,r as version};
|
|
2
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sources":["../../../src/index.ts"],"sourcesContent":["// Built by Udayan Sharma\r\n\r\nexport const version = '1.0.0';\r\n\r\nexport const library = {\r\n name: '@async-fusion/data',\r\n version,\r\n author: 'Udayan Sharma',\r\n license: 'MIT',\r\n repository: 'https://github.com/hollermay/async-fusion-data'\r\n};\r\n\r\nexport function getLibraryInfo() {\r\n return {\r\n name: library.name,\r\n version: library.version,\r\n author: library.author,\r\n description: 'Unified data streaming library for Kafka and Spark',\r\n features: [\r\n 'Kafka Producer/Consumer with backpressure',\r\n 'Spark job submission and monitoring',\r\n 'Unified pipeline builder',\r\n 'React hooks for real-time data',\r\n 'TypeScript first',\r\n 'Built-in monitoring and metrics',\r\n 'Stream processing with windowing',\r\n 'Error handling and retries',\r\n 'Circuit breaker pattern'\r\n ],\r\n license: library.license,\r\n repository: library.repository\r\n };\r\n}\r\n\r\nexport function hello(): string {\r\n return `Hello from @async-fusion/data! Built with lots of Love! 
(errors and fixes :P) by ${library.author}`;\r\n}\r\n\r\n// Kafka exports\r\nexport { Producer } from './kafka/producer';\r\nexport { Consumer } from './kafka/consumer';\r\nexport { KafkaStream, GroupedStream } from './kafka/stream';\r\n\r\n// Spark exports\r\nexport { SparkClient } from './spark/client';\r\nexport { SparkStreaming } from './spark/streaming';\r\nexport { SparkSQL } from './spark/sql';\r\n\r\n// Pipeline exports\r\nexport { PipelineBuilder } from './pipeline/builder';\r\nexport { PipelineMonitor } from './pipeline/monitoring';\r\n\r\n// Utils\r\nexport { withRetry, sleep, CircuitBreaker, RetryableError, FatalError } from './utils/error-handling';\r\n\r\n// Types\r\nexport * from './types';"],"names":["version","library","name","author","license","repository","getLibraryInfo","description","features","hello"],"mappings":"udAEO,MAAMA,EAAU,QAEVC,EAAU,CACnBC,KAAM,qBACNF,UACAG,OAAQ,gBACRC,QAAS,MACTC,WAAY,2DAGAC,IACZ,MAAO,CACHJ,KAAMD,EAAQC,KACdF,QAASC,EAAQD,QACjBG,OAAQF,EAAQE,OAChBI,YAAa,qDACbC,SAAU,CACN,4CACA,sCACA,2BACA,iCACA,mBACA,kCACA,mCACA,6BACA,2BAEJJ,QAASH,EAAQG,QACjBC,WAAYJ,EAAQI,WAE5B,UAEgBI,IACZ,MAAO,oFAAoFR,EAAQE,QACvG"}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { KafkaConfig, Message } from '../types';
|
|
2
|
+
export type MessageHandler<T> = (message: Message<T>) => Promise<void>;
|
|
3
|
+
export declare class Consumer<T = any> {
|
|
4
|
+
private consumer;
|
|
5
|
+
private topic;
|
|
6
|
+
private groupId;
|
|
7
|
+
private handlers;
|
|
8
|
+
private isRunning;
|
|
9
|
+
private maxConcurrent;
|
|
10
|
+
private currentProcessing;
|
|
11
|
+
constructor(config: KafkaConfig, topic: string, groupId: string);
|
|
12
|
+
connect(): Promise<void>;
|
|
13
|
+
on(handler: MessageHandler<T>): this;
|
|
14
|
+
start(): Promise<void>;
|
|
15
|
+
stop(): Promise<void>;
|
|
16
|
+
setMaxConcurrent(limit: number): this;
|
|
17
|
+
seekToOffset(offset: number): Promise<void>;
|
|
18
|
+
}
|
|
19
|
+
//# sourceMappingURL=consumer.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"consumer.d.ts","sourceRoot":"","sources":["../../../src/kafka/consumer.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAEhD,MAAM,MAAM,cAAc,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;AAEvE,qBAAa,QAAQ,CAAC,CAAC,GAAG,GAAG;IAC3B,OAAO,CAAC,QAAQ,CAAgB;IAChC,OAAO,CAAC,KAAK,CAAS;IACtB,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,QAAQ,CAA2B;IAC3C,OAAO,CAAC,SAAS,CAAkB;IACnC,OAAO,CAAC,aAAa,CAAc;IACnC,OAAO,CAAC,iBAAiB,CAAa;gBAE1B,MAAM,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM;IAgBzD,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAK9B,EAAE,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC,CAAC,GAAG,IAAI;IAK9B,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAkCtB,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAK3B,gBAAgB,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAK/B,YAAY,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CAOlD"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
import{Kafka as s}from"kafkajs";class t{constructor(t,e,n){this.handlers=[],this.isRunning=!1,this.maxConcurrent=10,this.currentProcessing=0;const i={clientId:t.clientId,brokers:t.brokers};t.ssl&&(i.ssl=t.ssl),t.sasl&&(i.sasl=t.sasl);const r=new s(i);this.consumer=r.consumer({groupId:n}),this.topic=e,this.groupId=n}async connect(){await this.consumer.connect(),await this.consumer.subscribe({topic:this.topic,fromBeginning:!1})}on(s){return this.handlers.push(s),this}async start(){this.isRunning=!0,await this.consumer.run({eachMessage:async s=>{if(!this.isRunning)return;for(;this.currentProcessing>=this.maxConcurrent;)await new Promise(s=>setTimeout(s,100));const t={key:s.message.key?.toString(),value:JSON.parse(s.message.value?.toString()||"{}"),timestamp:new Date(s.message.timestamp||Date.now()),partition:s.partition,offset:Number(s.message.offset)};this.currentProcessing++;try{await Promise.all(this.handlers.map(s=>s(t)))}catch(s){throw console.error("Error processing message:",s),s}finally{this.currentProcessing--}}})}async stop(){this.isRunning=!1,await this.consumer.disconnect()}setMaxConcurrent(s){return this.maxConcurrent=s,this}async seekToOffset(s){await this.consumer.seek({topic:this.topic,partition:0,offset:s.toString()})}}export{t as Consumer};
|
|
2
|
+
//# sourceMappingURL=consumer.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"consumer.js","sources":["../../../../src/kafka/consumer.ts"],"sourcesContent":["import { Kafka, Consumer as KafkaConsumer, EachMessagePayload } from 'kafkajs';\r\nimport { KafkaConfig, Message } from '../types';\r\n\r\nexport type MessageHandler<T> = (message: Message<T>) => Promise<void>;\r\n\r\nexport class Consumer<T = any> {\r\n private consumer: KafkaConsumer;\r\n private topic: string;\r\n private groupId: string;\r\n private handlers: MessageHandler<T>[] = [];\r\n private isRunning: boolean = false;\r\n private maxConcurrent: number = 10;\r\n private currentProcessing: number = 0;\r\n\r\n constructor(config: KafkaConfig, topic: string, groupId: string) {\r\n // Use type assertion to bypass strict type checking\r\n const kafkaConfig: any = {\r\n clientId: config.clientId,\r\n brokers: config.brokers,\r\n };\r\n \r\n if (config.ssl) kafkaConfig.ssl = config.ssl;\r\n if (config.sasl) kafkaConfig.sasl = config.sasl;\r\n \r\n const kafka = new Kafka(kafkaConfig);\r\n this.consumer = kafka.consumer({ groupId });\r\n this.topic = topic;\r\n this.groupId = groupId;\r\n }\r\n\r\n async connect(): Promise<void> {\r\n await this.consumer.connect();\r\n await this.consumer.subscribe({ topic: this.topic, fromBeginning: false });\r\n }\r\n\r\n on(handler: MessageHandler<T>): this {\r\n this.handlers.push(handler);\r\n return this;\r\n }\r\n\r\n async start(): Promise<void> {\r\n this.isRunning = true;\r\n \r\n await this.consumer.run({\r\n eachMessage: async (payload: EachMessagePayload) => {\r\n if (!this.isRunning) return;\r\n \r\n // Backpressure control\r\n while (this.currentProcessing >= this.maxConcurrent) {\r\n await new Promise(resolve => setTimeout(resolve, 100));\r\n }\r\n \r\n const message: Message<T> = {\r\n key: payload.message.key?.toString(),\r\n value: JSON.parse(payload.message.value?.toString() || '{}'),\r\n timestamp: new Date(payload.message.timestamp || Date.now()),\r\n partition: payload.partition,\r\n offset: 
Number(payload.message.offset),\r\n };\r\n \r\n this.currentProcessing++;\r\n \r\n try {\r\n await Promise.all(this.handlers.map(handler => handler(message)));\r\n } catch (error) {\r\n console.error('Error processing message:', error);\r\n throw error;\r\n } finally {\r\n this.currentProcessing--;\r\n }\r\n },\r\n });\r\n }\r\n\r\n async stop(): Promise<void> {\r\n this.isRunning = false;\r\n await this.consumer.disconnect();\r\n }\r\n\r\n setMaxConcurrent(limit: number): this {\r\n this.maxConcurrent = limit;\r\n return this;\r\n }\r\n\r\n async seekToOffset(offset: number): Promise<void> {\r\n await this.consumer.seek({\r\n topic: this.topic,\r\n partition: 0,\r\n offset: offset.toString(),\r\n });\r\n }\r\n}"],"names":["Consumer","constructor","config","topic","groupId","this","handlers","isRunning","maxConcurrent","currentProcessing","kafkaConfig","clientId","brokers","ssl","sasl","kafka","Kafka","consumer","connect","subscribe","fromBeginning","on","handler","push","start","run","eachMessage","async","payload","Promise","resolve","setTimeout","message","key","toString","value","JSON","parse","timestamp","Date","now","partition","offset","Number","all","map","error","console","stop","disconnect","setMaxConcurrent","limit","seekToOffset","seek"],"mappings":"sCAKaA,EASX,WAAAC,CAAYC,EAAqBC,EAAeC,GALxCC,KAAQC,SAAwB,GAChCD,KAASE,WAAY,EACrBF,KAAaG,cAAW,GACxBH,KAAiBI,kBAAW,EAIlC,MAAMC,EAAmB,CACvBC,SAAUT,EAAOS,SACjBC,QAASV,EAAOU,SAGdV,EAAOW,MAAKH,EAAYG,IAAMX,EAAOW,KACrCX,EAAOY,OAAMJ,EAAYI,KAAOZ,EAAOY,MAE3C,MAAMC,EAAQ,IAAIC,EAAMN,GACxBL,KAAKY,SAAWF,EAAME,SAAS,CAAEb,YACjCC,KAAKF,MAAQA,EACbE,KAAKD,QAAUA,CAChB,CAED,aAAMc,SACEb,KAAKY,SAASC,gBACdb,KAAKY,SAASE,UAAU,CAAEhB,MAAOE,KAAKF,MAAOiB,eAAe,GACnE,CAED,EAAAC,CAAGC,GAED,OADAjB,KAAKC,SAASiB,KAAKD,GACZjB,IACR,CAED,WAAMmB,GACJnB,KAAKE,WAAY,QAEXF,KAAKY,SAASQ,IAAI,CACtBC,YAAaC,MAAOC,IAClB,IAAKvB,KAAKE,UAAW,OAGrB,KAAOF,KAAKI,mBAAqBJ,KAAKG,qBAC9B,IAAIqB,QAAQC,GAAWC,WAAWD,EAAS,MAGnD,MAAME,EAAsB,CAC1BC,IAAKL,EAAQI,QAAQC,KAAKC,WAC1B
C,MAAOC,KAAKC,MAAMT,EAAQI,QAAQG,OAAOD,YAAc,MACvDI,UAAW,IAAIC,KAAKX,EAAQI,QAAQM,WAAaC,KAAKC,OACtDC,UAAWb,EAAQa,UACnBC,OAAQC,OAAOf,EAAQI,QAAQU,SAGjCrC,KAAKI,oBAEL,UACQoB,QAAQe,IAAIvC,KAAKC,SAASuC,IAAIvB,GAAWA,EAAQU,IACxD,CAAC,MAAOc,GAEP,MADAC,QAAQD,MAAM,4BAA6BA,GACrCA,CACP,CAAS,QACRzC,KAAKI,mBACN,IAGN,CAED,UAAMuC,GACJ3C,KAAKE,WAAY,QACXF,KAAKY,SAASgC,YACrB,CAED,gBAAAC,CAAiBC,GAEf,OADA9C,KAAKG,cAAgB2C,EACd9C,IACR,CAED,kBAAM+C,CAAaV,SACXrC,KAAKY,SAASoC,KAAK,CACvBlD,MAAOE,KAAKF,MACZsC,UAAW,EACXC,OAAQA,EAAOR,YAElB"}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { RecordMetadata } from 'kafkajs';
|
|
2
|
+
import { KafkaConfig, Message } from '../types';
|
|
3
|
+
export declare class Producer<T = any> {
|
|
4
|
+
private producer;
|
|
5
|
+
private topic;
|
|
6
|
+
private batchSize;
|
|
7
|
+
private batchTimeout;
|
|
8
|
+
private messageQueue;
|
|
9
|
+
private batchTimer;
|
|
10
|
+
constructor(config: KafkaConfig, topic: string);
|
|
11
|
+
connect(): Promise<void>;
|
|
12
|
+
send(message: Message<T>): Promise<RecordMetadata[]>;
|
|
13
|
+
sendBatch(messages: Message<T>[]): Promise<RecordMetadata[]>;
|
|
14
|
+
sendBuffered(message: Message<T>): Promise<void>;
|
|
15
|
+
private flush;
|
|
16
|
+
private startBatchProcessor;
|
|
17
|
+
disconnect(): Promise<void>;
|
|
18
|
+
setBatchSize(size: number): this;
|
|
19
|
+
setBatchTimeout(ms: number): this;
|
|
20
|
+
}
|
|
21
|
+
//# sourceMappingURL=producer.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"producer.d.ts","sourceRoot":"","sources":["../../../src/kafka/producer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAoC,cAAc,EAAE,MAAM,SAAS,CAAC;AAC3E,OAAO,EAAE,WAAW,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAEhD,qBAAa,QAAQ,CAAC,CAAC,GAAG,GAAG;IAC3B,OAAO,CAAC,QAAQ,CAAgB;IAChC,OAAO,CAAC,KAAK,CAAS;IACtB,OAAO,CAAC,SAAS,CAAe;IAChC,OAAO,CAAC,YAAY,CAAgB;IACpC,OAAO,CAAC,YAAY,CAAoB;IACxC,OAAO,CAAC,UAAU,CAA+B;gBAErC,MAAM,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM;IAexC,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAKxB,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,cAAc,EAAE,CAAC;IAWpD,SAAS,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,OAAO,CAAC,cAAc,EAAE,CAAC;IAW5D,YAAY,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;YAQxC,KAAK;IASnB,OAAO,CAAC,mBAAmB;IAMrB,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAQjC,YAAY,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI;IAKhC,eAAe,CAAC,EAAE,EAAE,MAAM,GAAG,IAAI;CAIlC"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
import{Kafka as s}from"kafkajs";class t{constructor(t,e){this.batchSize=100,this.batchTimeout=1e3,this.messageQueue=[],this.batchTimer=null;const i={clientId:t.clientId,brokers:t.brokers};t.ssl&&(i.ssl=t.ssl),t.sasl&&(i.sasl=t.sasl);const a=new s(i);this.producer=a.producer(),this.topic=e}async connect(){await this.producer.connect(),this.startBatchProcessor()}async send(s){return this.producer.send({topic:this.topic,messages:[{key:s.key,value:JSON.stringify(s.value),timestamp:s.timestamp.toISOString()}]})}async sendBatch(s){return this.producer.send({topic:this.topic,messages:s.map(s=>({key:s.key,value:JSON.stringify(s.value),timestamp:s.timestamp.toISOString()}))})}async sendBuffered(s){this.messageQueue.push(s),this.messageQueue.length>=this.batchSize&&await this.flush()}async flush(){if(0===this.messageQueue.length)return;const s=[...this.messageQueue];this.messageQueue=[],await this.sendBatch(s)}startBatchProcessor(){this.batchTimer=setInterval(async()=>{await this.flush()},this.batchTimeout)}async disconnect(){this.batchTimer&&clearInterval(this.batchTimer),await this.flush(),await this.producer.disconnect()}setBatchSize(s){return this.batchSize=s,this}setBatchTimeout(s){return this.batchTimeout=s,this}}export{t as Producer};
|
|
2
|
+
//# sourceMappingURL=producer.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"producer.js","sources":["../../../../src/kafka/producer.ts"],"sourcesContent":["import { Kafka, Producer as KafkaProducer, RecordMetadata } from 'kafkajs';\r\nimport { KafkaConfig, Message } from '../types';\r\n\r\nexport class Producer<T = any> {\r\n private producer: KafkaProducer;\r\n private topic: string;\r\n private batchSize: number = 100;\r\n private batchTimeout: number = 1000;\r\n private messageQueue: Message<T>[] = [];\r\n private batchTimer: NodeJS.Timeout | null = null;\r\n\r\n constructor(config: KafkaConfig, topic: string) {\r\n // Use type assertion to bypass strict type checking\r\n const kafkaConfig: any = {\r\n clientId: config.clientId,\r\n brokers: config.brokers,\r\n };\r\n \r\n if (config.ssl) kafkaConfig.ssl = config.ssl;\r\n if (config.sasl) kafkaConfig.sasl = config.sasl;\r\n \r\n const kafka = new Kafka(kafkaConfig);\r\n this.producer = kafka.producer();\r\n this.topic = topic;\r\n }\r\n\r\n async connect(): Promise<void> {\r\n await this.producer.connect();\r\n this.startBatchProcessor();\r\n }\r\n\r\n async send(message: Message<T>): Promise<RecordMetadata[]> {\r\n return this.producer.send({\r\n topic: this.topic,\r\n messages: [{\r\n key: message.key,\r\n value: JSON.stringify(message.value),\r\n timestamp: message.timestamp.toISOString(),\r\n }],\r\n });\r\n }\r\n\r\n async sendBatch(messages: Message<T>[]): Promise<RecordMetadata[]> {\r\n return this.producer.send({\r\n topic: this.topic,\r\n messages: messages.map(msg => ({\r\n key: msg.key,\r\n value: JSON.stringify(msg.value),\r\n timestamp: msg.timestamp.toISOString(),\r\n })),\r\n });\r\n }\r\n\r\n async sendBuffered(message: Message<T>): Promise<void> {\r\n this.messageQueue.push(message);\r\n \r\n if (this.messageQueue.length >= this.batchSize) {\r\n await this.flush();\r\n }\r\n }\r\n\r\n private async flush(): Promise<void> {\r\n if (this.messageQueue.length === 0) return;\r\n \r\n const batch = [...this.messageQueue];\r\n this.messageQueue = [];\r\n 
\r\n await this.sendBatch(batch);\r\n }\r\n\r\n private startBatchProcessor(): void {\r\n this.batchTimer = setInterval(async () => {\r\n await this.flush();\r\n }, this.batchTimeout);\r\n }\r\n\r\n async disconnect(): Promise<void> {\r\n if (this.batchTimer) {\r\n clearInterval(this.batchTimer);\r\n }\r\n await this.flush();\r\n await this.producer.disconnect();\r\n }\r\n\r\n setBatchSize(size: number): this {\r\n this.batchSize = size;\r\n return this;\r\n }\r\n\r\n setBatchTimeout(ms: number): this {\r\n this.batchTimeout = ms;\r\n return this;\r\n }\r\n}"],"names":["Producer","constructor","config","topic","this","batchSize","batchTimeout","messageQueue","batchTimer","kafkaConfig","clientId","brokers","ssl","sasl","kafka","Kafka","producer","connect","startBatchProcessor","send","message","messages","key","value","JSON","stringify","timestamp","toISOString","sendBatch","map","msg","sendBuffered","push","length","flush","batch","setInterval","async","disconnect","clearInterval","setBatchSize","size","setBatchTimeout","ms"],"mappings":"sCAGaA,EAQX,WAAAC,CAAYC,EAAqBC,GALzBC,KAASC,UAAW,IACpBD,KAAYE,aAAW,IACvBF,KAAYG,aAAiB,GAC7BH,KAAUI,WAA0B,KAI1C,MAAMC,EAAmB,CACvBC,SAAUR,EAAOQ,SACjBC,QAAST,EAAOS,SAGdT,EAAOU,MAAKH,EAAYG,IAAMV,EAAOU,KACrCV,EAAOW,OAAMJ,EAAYI,KAAOX,EAAOW,MAE3C,MAAMC,EAAQ,IAAIC,EAAMN,GACxBL,KAAKY,SAAWF,EAAME,WACtBZ,KAAKD,MAAQA,CACd,CAED,aAAMc,SACEb,KAAKY,SAASC,UACpBb,KAAKc,qBACN,CAED,UAAMC,CAAKC,GACT,OAAOhB,KAAKY,SAASG,KAAK,CACxBhB,MAAOC,KAAKD,MACZkB,SAAU,CAAC,CACTC,IAAKF,EAAQE,IACbC,MAAOC,KAAKC,UAAUL,EAAQG,OAC9BG,UAAWN,EAAQM,UAAUC,iBAGlC,CAED,eAAMC,CAAUP,GACd,OAAOjB,KAAKY,SAASG,KAAK,CACxBhB,MAAOC,KAAKD,MACZkB,SAAUA,EAASQ,IAAIC,IAAQ,CAC7BR,IAAKQ,EAAIR,IACTC,MAAOC,KAAKC,UAAUK,EAAIP,OAC1BG,UAAWI,EAAIJ,UAAUC,kBAG9B,CAED,kBAAMI,CAAaX,GACjBhB,KAAKG,aAAayB,KAAKZ,GAEnBhB,KAAKG,aAAa0B,QAAU7B,KAAKC,iBAC7BD,KAAK8B,OAEd,CAEO,WAAMA,GACZ,GAAiC,IAA7B9B,KAAKG,aAAa0B,OAAc,OAEpC,MAAME,EAAQ,IAAI/B,KAAKG,cACvBH,KAAKG,aAAe,SAEdH,KAAKwB,UAAUO,EACtB,CAEO,mBAAAjB,GACNd,KAAKI,W
AAa4B,YAAYC,gBACtBjC,KAAK8B,SACV9B,KAAKE,aACT,CAED,gBAAMgC,GACAlC,KAAKI,YACP+B,cAAcnC,KAAKI,kBAEfJ,KAAK8B,cACL9B,KAAKY,SAASsB,YACrB,CAED,YAAAE,CAAaC,GAEX,OADArC,KAAKC,UAAYoC,EACVrC,IACR,CAED,eAAAsC,CAAgBC,GAEd,OADAvC,KAAKE,aAAeqC,EACbvC,IACR"}
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { Message } from '../types';
|
|
2
|
+
/** Tuning options for windowed stream operations. */
export interface StreamOptions {
|
|
3
|
+
    /** Window size in milliseconds. */
    windowSize?: number;
|
|
4
|
+
    /** Slide interval for windowed operations (ms). */
    slideInterval?: number;
|
|
5
|
+
    /** Watermark delay for late-arriving data (ms). */
    watermarkDelay?: number;
|
|
6
|
+
}
|
|
7
|
+
/**
 * Declarations for the fluent in-memory stream-processing DSL.
 * Operations (filter/map/aggregate/...) are composed lazily and applied to
 * each message's value when process() is called.
 */
export declare class KafkaStream<T = any> {
|
|
8
|
+
    /** Kafka topic this stream logically reads from. */
    private sourceTopic;
|
|
9
|
+
    /** Ordered per-record operation pipeline built by the fluent methods. */
    private operations;
|
|
10
|
+
    private windowDuration;
|
|
11
|
+
    private slideInterval;
|
|
12
|
+
    private watermarkDelay;
|
|
13
|
+
    private windowData;
|
|
14
|
+
    /** Running aggregate state, keyed by the JSON encoding of each record. */
    private aggregates;
|
|
15
|
+
    private stateStore;
|
|
16
|
+
    constructor(sourceTopic: string, options?: StreamOptions);
|
|
17
|
+
    /** Keeps only records matching the predicate; mutates and returns this stream. */
    filter(predicate: (data: T) => boolean): this;
|
|
18
|
+
    /** One-to-one transform; returns a NEW derived stream. */
    map<U>(transform: (data: T) => U): KafkaStream<U>;
|
|
19
|
+
    /** One-to-many transform; array results are flattened during processing. */
    flatMap<U>(transform: (data: T) => U[]): KafkaStream<U>;
|
|
20
|
+
    /** Configures window size/slide (ms); slide defaults to the window size. */
    window(sizeMs: number, slideMs?: number): this;
|
|
21
|
+
    /** Partitions records by key for grouped aggregations. */
    groupBy(keyExtractor: (data: T) => string): GroupedStream<T>;
|
|
22
|
+
    /** Stateful aggregation; state is keyed by the record's JSON encoding. */
    aggregate<U>(aggregator: (acc: U | undefined, curr: T) => U, initialValue?: U): KafkaStream<U>;
|
|
23
|
+
    /** NOTE(review): implementation stub — `right` is always null (no real join logic yet). */
    join<U>(otherStream: KafkaStream<U>, joinKey: (data: T) => string): KafkaStream<{
|
|
24
|
+
        left: T;
|
|
25
|
+
        right: U;
|
|
26
|
+
    }>;
|
|
27
|
+
    /** Side-effect callback per record; the record passes through unchanged. */
    foreach(callback: (data: T) => void): this;
|
|
28
|
+
    onError(errorHandler: (error: Error, data: any) => void): this;
|
|
29
|
+
    /** Runs one message's value through the pipeline; null/undefined short-circuits. */
    process(message: Message<T>): Promise<any>;
|
|
30
|
+
    /** Snapshot copy of the internal state store. */
    getState(): Map<string, any>;
|
|
31
|
+
    /** Clears aggregate, window and state maps. */
    reset(): void;
|
|
32
|
+
}
|
|
33
|
+
/**
 * Grouped view of a KafkaStream; each method returns a derived KafkaStream
 * that maintains per-key running state across processed records.
 */
export declare class GroupedStream<T> {
|
|
34
|
+
    private stream;
|
|
35
|
+
    private keyExtractor;
|
|
36
|
+
    /** Per-key accumulated state shared by the derived streams. */
    private groups;
|
|
37
|
+
    constructor(stream: KafkaStream<T>, keyExtractor: (data: T) => string);
|
|
38
|
+
    /** Folds records per key starting from initialValue. */
    reduce<U>(reducer: (acc: U, curr: T) => U, initialValue: U): KafkaStream<U>;
|
|
39
|
+
    /** Emits a running count per key. */
    count(): KafkaStream<{
|
|
40
|
+
        key: string;
|
|
41
|
+
        count: number;
|
|
42
|
+
    }>;
|
|
43
|
+
    /** Emits a running sum of the extracted value per key. */
    sum(valueExtractor: (data: T) => number): KafkaStream<{
|
|
44
|
+
        key: string;
|
|
45
|
+
        sum: number;
|
|
46
|
+
    }>;
|
|
47
|
+
    /** Emits a running average of the extracted value per key. */
    avg(valueExtractor: (data: T) => number): KafkaStream<{
|
|
48
|
+
        key: string;
|
|
49
|
+
        avg: number;
|
|
50
|
+
    }>;
|
|
51
|
+
}
|
|
52
|
+
//# sourceMappingURL=stream.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"stream.d.ts","sourceRoot":"","sources":["../../../src/kafka/stream.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAEnC,MAAM,WAAW,aAAa;IAC1B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED,qBAAa,WAAW,CAAC,CAAC,GAAG,GAAG;IAShB,OAAO,CAAC,WAAW;IAR/B,OAAO,CAAC,UAAU,CAAiC;IACnD,OAAO,CAAC,cAAc,CAAa;IACnC,OAAO,CAAC,aAAa,CAAa;IAClC,OAAO,CAAC,cAAc,CAAa;IACnC,OAAO,CAAC,UAAU,CAA8D;IAChF,OAAO,CAAC,UAAU,CAA+B;IACjD,OAAO,CAAC,UAAU,CAA+B;gBAE7B,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,aAAa;IAShE,MAAM,CAAC,SAAS,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,OAAO,GAAG,IAAI;IAM7C,GAAG,CAAC,CAAC,EAAE,SAAS,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC;IAUjD,OAAO,CAAC,CAAC,EAAE,SAAS,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,EAAE,GAAG,WAAW,CAAC,CAAC,CAAC;IAOvD,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI;IAO9C,OAAO,CAAC,YAAY,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,MAAM,GAAG,aAAa,CAAC,CAAC,CAAC;IAK5D,SAAS,CAAC,CAAC,EACP,UAAU,EAAE,CAAC,GAAG,EAAE,CAAC,GAAG,SAAS,EAAE,IAAI,EAAE,CAAC,KAAK,CAAC,EAC9C,YAAY,CAAC,EAAE,CAAC,GACjB,WAAW,CAAC,CAAC,CAAC;IAqBjB,IAAI,CAAC,CAAC,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,MAAM,GAAG,WAAW,CAAC;QAAE,IAAI,EAAE,CAAC,CAAC;QAAC,KAAK,EAAE,CAAC,CAAA;KAAE,CAAC;IActG,OAAO,CAAC,QAAQ,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,IAAI,GAAG,IAAI;IAS1C,OAAO,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAcxD,OAAO,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC;IAsBhD,QAAQ,IAAI,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC;IAK5B,KAAK,IAAI,IAAI;CAKhB;AAED,qBAAa,aAAa,CAAC,CAAC;IAIpB,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,YAAY;IAJxB,OAAO,CAAC,MAAM,CAAiC;gBAGnC,MAAM,EAAE,WAAW,CAAC,CAAC,CAAC,EACtB,YAAY,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,MAAM;IAI7C,MAAM,CAAC,CAAC,EACJ,OAAO,EAAE,CAAC,GAAG,EAAE,CAAC,EAAE,IAAI,EAAE,CAAC,KAAK,CAAC,EAC/B,YAAY,EAAE,CAAC,GAChB,WAAW,CAAC,CAAC,CAAC;IAqBjB,KAAK,IAAI,WAAW,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC;IAepD,GAAG,CAAC
,cAAc,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,MAAM,GAAG,WAAW,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,GAAG,EAAE,MAAM,CAAA;KAAE,CAAC;IAgBnF,GAAG,CAAC,cAAc,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,MAAM,GAAG,WAAW,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,GAAG,EAAE,MAAM,CAAA;KAAE,CAAC;CAgBtF"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
class t{constructor(t,r){this.sourceTopic=t,this.operations=[],this.windowDuration=0,this.slideInterval=0,this.watermarkDelay=0,this.windowData=new Map,this.aggregates=new Map,this.stateStore=new Map,r&&(this.windowDuration=r.windowSize||0,this.slideInterval=r.slideInterval||0,this.watermarkDelay=r.watermarkDelay||0)}filter(t){return this.operations.push(r=>t(r)?r:null),this}map(r){const s=new t(this.sourceTopic);return s.operations=[...this.operations,t=>r(t)],s.windowDuration=this.windowDuration,s.slideInterval=this.slideInterval,s.watermarkDelay=this.watermarkDelay,s}flatMap(r){const s=new t(this.sourceTopic);return s.operations=[...this.operations,t=>r(t)],s}window(t,r){return this.windowDuration=t,this.slideInterval=r||t,this}groupBy(t){return new r(this,t)}aggregate(r,s){const e=new t(this.sourceTopic);return e.operations=[...this.operations,t=>{const e=JSON.stringify(t);let o=this.aggregates.get(e);void 0===o&&void 0!==s&&(o=s);const i=r(o,t);return this.aggregates.set(e,i),i}],e}join(r,s){const e=new t(this.sourceTopic);return e.operations=[...this.operations,t=>(s(t),{left:t,right:null})],e}foreach(t){return this.operations.push(r=>(t(r),r)),this}onError(t){return this.operations.push(r=>{try{return r}catch(s){return t(s,r),null}}),this}async process(t){let r=t.value;for(const t of this.operations){if(null==r)break;try{r=Array.isArray(r)?r.flatMap(r=>t(r)).filter(Boolean):t(r)}catch(t){console.error("Stream processing error:",t),r=null}}return r}getState(){return new Map(this.stateStore)}reset(){this.aggregates.clear(),this.windowData.clear(),this.stateStore.clear()}}class r{constructor(t,r){this.stream=t,this.keyExtractor=r,this.groups=new Map}reduce(r,s){const e=new t(this.stream.sourceTopic);return e.operations=[...this.stream.operations,async t=>{const e=this.keyExtractor(t);let o=this.groups.get(e);return o||(o=[s],this.groups.set(e,o)),o[0]=r(o[0],t),o[0]}],e}count(){const r=new t(this.stream.sourceTopic);return 
r.operations=[...this.stream.operations,t=>{const r=this.keyExtractor(t);let s=this.groups.get(r)?.length||0;return this.groups.set(r,[...this.groups.get(r)||[],t]),{key:r,count:s+1}}],r}sum(r){const s=new t(this.stream.sourceTopic);return s.operations=[...this.stream.operations,t=>{const s=this.keyExtractor(t),e=r(t);let o=this.groups.get(s)?.[0]||0;return this.groups.set(s,[o+e]),{key:s,sum:o+e}}],s}avg(r){const s=new t(this.stream.sourceTopic);return s.operations=[...this.stream.operations,t=>{const s=this.keyExtractor(t),e=r(t);let o=this.groups.get(s)||[];o.push(e),this.groups.set(s,o);return{key:s,avg:o.reduce((t,r)=>t+r,0)/o.length}}],s}}export{r as GroupedStream,t as KafkaStream};
|
|
2
|
+
//# sourceMappingURL=stream.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"stream.js","sources":["../../../../src/kafka/stream.ts"],"sourcesContent":["import { Message } from '../types';\r\n\r\nexport interface StreamOptions {\r\n windowSize?: number; // Window size in milliseconds\r\n slideInterval?: number; // Slide interval for windowed operations\r\n watermarkDelay?: number; // Watermark delay for late data\r\n}\r\n\r\nexport class KafkaStream<T = any> {\r\n private operations: Array<(data: any) => any> = [];\r\n private windowDuration: number = 0;\r\n private slideInterval: number = 0;\r\n private watermarkDelay: number = 0;\r\n private windowData: Map<string, { data: any[], timestamp: number }> = new Map();\r\n private aggregates: Map<string, any> = new Map();\r\n private stateStore: Map<string, any> = new Map();\r\n\r\n constructor(private sourceTopic: string, options?: StreamOptions) {\r\n if (options) {\r\n this.windowDuration = options.windowSize || 0;\r\n this.slideInterval = options.slideInterval || 0;\r\n this.watermarkDelay = options.watermarkDelay || 0;\r\n }\r\n }\r\n\r\n // Filter records\r\n filter(predicate: (data: T) => boolean): this {\r\n this.operations.push((data: T) => predicate(data) ? 
data : null);\r\n return this;\r\n }\r\n\r\n // Map transformation\r\n map<U>(transform: (data: T) => U): KafkaStream<U> {\r\n const newStream = new KafkaStream<U>(this.sourceTopic);\r\n newStream.operations = [...this.operations, (data: T) => transform(data)];\r\n newStream.windowDuration = this.windowDuration;\r\n newStream.slideInterval = this.slideInterval;\r\n newStream.watermarkDelay = this.watermarkDelay;\r\n return newStream;\r\n }\r\n\r\n // FlatMap - one to many\r\n flatMap<U>(transform: (data: T) => U[]): KafkaStream<U> {\r\n const newStream = new KafkaStream<U>(this.sourceTopic);\r\n newStream.operations = [...this.operations, (data: T) => transform(data)];\r\n return newStream;\r\n }\r\n\r\n // Windowed operations\r\n window(sizeMs: number, slideMs?: number): this {\r\n this.windowDuration = sizeMs;\r\n this.slideInterval = slideMs || sizeMs;\r\n return this;\r\n }\r\n\r\n // Group by key\r\n groupBy(keyExtractor: (data: T) => string): GroupedStream<T> {\r\n return new GroupedStream<T>(this, keyExtractor);\r\n }\r\n\r\n // Aggregate with state\r\n aggregate<U>(\r\n aggregator: (acc: U | undefined, curr: T) => U,\r\n initialValue?: U\r\n ): KafkaStream<U> {\r\n const newStream = new KafkaStream<U>(this.sourceTopic);\r\n \r\n const aggregateOperation = (data: T) => {\r\n const key = JSON.stringify(data);\r\n let current = this.aggregates.get(key);\r\n \r\n if (current === undefined && initialValue !== undefined) {\r\n current = initialValue;\r\n }\r\n \r\n const result = aggregator(current, data);\r\n this.aggregates.set(key, result);\r\n return result;\r\n };\r\n \r\n newStream.operations = [...this.operations, aggregateOperation];\r\n return newStream;\r\n }\r\n\r\n // Join two streams\r\n join<U>(otherStream: KafkaStream<U>, joinKey: (data: T) => string): KafkaStream<{ left: T, right: U }> {\r\n const joinedStream = new KafkaStream<{ left: T, right: U }>(this.sourceTopic);\r\n \r\n const joinOperation = (data: T) => {\r\n const key = 
joinKey(data);\r\n // This would need actual stream joining logic\r\n return { left: data, right: null as any };\r\n };\r\n \r\n joinedStream.operations = [...this.operations, joinOperation];\r\n return joinedStream;\r\n }\r\n\r\n // Process each record with side effects\r\n foreach(callback: (data: T) => void): this {\r\n this.operations.push((data: T) => {\r\n callback(data);\r\n return data;\r\n });\r\n return this;\r\n }\r\n\r\n // Error handling for stream\r\n onError(errorHandler: (error: Error, data: any) => void): this {\r\n const errorOp = (data: any) => {\r\n try {\r\n return data;\r\n } catch (error) {\r\n errorHandler(error as Error, data);\r\n return null;\r\n }\r\n };\r\n this.operations.push(errorOp);\r\n return this;\r\n }\r\n\r\n // Process message through the stream pipeline\r\n async process(message: Message<T>): Promise<any> {\r\n let result: any = message.value;\r\n \r\n for (const op of this.operations) {\r\n if (result === null || result === undefined) break;\r\n \r\n try {\r\n if (Array.isArray(result)) {\r\n result = result.flatMap(item => op(item)).filter(Boolean);\r\n } else {\r\n result = op(result);\r\n }\r\n } catch (error) {\r\n console.error(`Stream processing error:`, error);\r\n result = null;\r\n }\r\n }\r\n \r\n return result;\r\n }\r\n\r\n // Get current state\r\n getState(): Map<string, any> {\r\n return new Map(this.stateStore);\r\n }\r\n\r\n // Reset stream state\r\n reset(): void {\r\n this.aggregates.clear();\r\n this.windowData.clear();\r\n this.stateStore.clear();\r\n }\r\n}\r\n\r\nexport class GroupedStream<T> {\r\n private groups: Map<string, any[]> = new Map();\r\n\r\n constructor(\r\n private stream: KafkaStream<T>,\r\n private keyExtractor: (data: T) => string\r\n ) {}\r\n\r\n // Reduce within groups\r\n reduce<U>(\r\n reducer: (acc: U, curr: T) => U,\r\n initialValue: U\r\n ): KafkaStream<U> {\r\n const newStream = new KafkaStream<U>(this.stream['sourceTopic']);\r\n \r\n const reduceOperation = async (data: T) => 
{\r\n const key = this.keyExtractor(data);\r\n let group = this.groups.get(key);\r\n \r\n if (!group) {\r\n group = [initialValue];\r\n this.groups.set(key, group);\r\n }\r\n \r\n group[0] = reducer(group[0] as U, data);\r\n return group[0];\r\n };\r\n \r\n newStream['operations'] = [...this.stream['operations'], reduceOperation];\r\n return newStream;\r\n }\r\n\r\n // Count within groups\r\n count(): KafkaStream<{ key: string, count: number }> {\r\n const newStream = new KafkaStream<{ key: string, count: number }>(this.stream['sourceTopic']);\r\n \r\n const countOperation = (data: T) => {\r\n const key = this.keyExtractor(data);\r\n let count = this.groups.get(key)?.length || 0;\r\n this.groups.set(key, [...(this.groups.get(key) || []), data]);\r\n return { key, count: count + 1 };\r\n };\r\n \r\n newStream['operations'] = [...this.stream['operations'], countOperation];\r\n return newStream;\r\n }\r\n\r\n // Sum within groups\r\n sum(valueExtractor: (data: T) => number): KafkaStream<{ key: string, sum: number }> {\r\n const newStream = new KafkaStream<{ key: string, sum: number }>(this.stream['sourceTopic']);\r\n \r\n const sumOperation = (data: T) => {\r\n const key = this.keyExtractor(data);\r\n const value = valueExtractor(data);\r\n let current = this.groups.get(key)?.[0] as number || 0;\r\n this.groups.set(key, [current + value]);\r\n return { key, sum: current + value };\r\n };\r\n \r\n newStream['operations'] = [...this.stream['operations'], sumOperation];\r\n return newStream;\r\n }\r\n\r\n // Average within groups\r\n avg(valueExtractor: (data: T) => number): KafkaStream<{ key: string, avg: number }> {\r\n const newStream = new KafkaStream<{ key: string, avg: number }>(this.stream['sourceTopic']);\r\n \r\n const avgOperation = (data: T) => {\r\n const key = this.keyExtractor(data);\r\n const value = valueExtractor(data);\r\n let group = this.groups.get(key) || [];\r\n group.push(value);\r\n this.groups.set(key, group);\r\n const sum = group.reduce((a, b) 
=> a + b, 0);\r\n return { key, avg: sum / group.length };\r\n };\r\n \r\n newStream['operations'] = [...this.stream['operations'], avgOperation];\r\n return newStream;\r\n }\r\n}\r\n"],"names":["KafkaStream","constructor","sourceTopic","options","this","operations","windowDuration","slideInterval","watermarkDelay","windowData","Map","aggregates","stateStore","windowSize","filter","predicate","push","data","map","transform","newStream","flatMap","window","sizeMs","slideMs","groupBy","keyExtractor","GroupedStream","aggregate","aggregator","initialValue","key","JSON","stringify","current","get","undefined","result","set","join","otherStream","joinKey","joinedStream","left","right","foreach","callback","onError","errorHandler","error","process","message","value","op","Array","isArray","item","Boolean","console","getState","reset","clear","stream","groups","reduce","reducer","async","group","count","length","sum","valueExtractor","avg","a","b"],"mappings":"MAQaA,EAST,WAAAC,CAAoBC,EAAqBC,GAArBC,KAAWF,YAAXA,EARZE,KAAUC,WAA8B,GACxCD,KAAcE,eAAW,EACzBF,KAAaG,cAAW,EACxBH,KAAcI,eAAW,EACzBJ,KAAAK,WAA8D,IAAIC,IAClEN,KAAAO,WAA+B,IAAID,IACnCN,KAAAQ,WAA+B,IAAIF,IAGnCP,IACAC,KAAKE,eAAiBH,EAAQU,YAAc,EAC5CT,KAAKG,cAAgBJ,EAAQI,eAAiB,EAC9CH,KAAKI,eAAiBL,EAAQK,gBAAkB,EAEvD,CAGD,MAAAM,CAAOC,GAEH,OADAX,KAAKC,WAAWW,KAAMC,GAAYF,EAAUE,GAAQA,EAAO,MACpDb,IACV,CAGD,GAAAc,CAAOC,GACH,MAAMC,EAAY,IAAIpB,EAAeI,KAAKF,aAK1C,OAJAkB,EAAUf,WAAa,IAAID,KAAKC,WAAaY,GAAYE,EAAUF,IACnEG,EAAUd,eAAiBF,KAAKE,eAChCc,EAAUb,cAAgBH,KAAKG,cAC/Ba,EAAUZ,eAAiBJ,KAAKI,eACzBY,CACV,CAGD,OAAAC,CAAWF,GACP,MAAMC,EAAY,IAAIpB,EAAeI,KAAKF,aAE1C,OADAkB,EAAUf,WAAa,IAAID,KAAKC,WAAaY,GAAYE,EAAUF,IAC5DG,CACV,CAGD,MAAAE,CAAOC,EAAgBC,GAGnB,OAFApB,KAAKE,eAAiBiB,EACtBnB,KAAKG,cAAgBiB,GAAWD,EACzBnB,IACV,CAGD,OAAAqB,CAAQC,GACJ,OAAO,IAAIC,EAAiBvB,KAAMsB,EACrC,CAGD,SAAAE,CACIC,EACAC,GAEA,MAAMV,EAAY,IAAIpB,EAAeI,KAAKF,aAgB1C,OADAkB,EAAUf,WAAa,IAAID,KAAKC,WAbJY,IACxB,MAAMc,EAAMC,KAAKC,UAAUhB,GAC3B,IAAIiB,EAAU9B,KAAKO,WAAWwB,IAAIJ,QAElBK,IAAZF,QA
A0CE,IAAjBN,IACzBI,EAAUJ,GAGd,MAAMO,EAASR,EAAWK,EAASjB,GAEnC,OADAb,KAAKO,WAAW2B,IAAIP,EAAKM,GAClBA,IAIJjB,CACV,CAGD,IAAAmB,CAAQC,EAA6BC,GACjC,MAAMC,EAAe,IAAI1C,EAAmCI,KAAKF,aASjE,OADAwC,EAAarC,WAAa,IAAID,KAAKC,WANZY,IACPwB,EAAQxB,GAEb,CAAE0B,KAAM1B,EAAM2B,MAAO,QAIzBF,CACV,CAGD,OAAAG,CAAQC,GAKJ,OAJA1C,KAAKC,WAAWW,KAAMC,IAClB6B,EAAS7B,GACFA,IAEJb,IACV,CAGD,OAAA2C,CAAQC,GAUJ,OADA5C,KAAKC,WAAWW,KARCC,IACb,IACI,OAAOA,CACV,CAAC,MAAOgC,GAEL,OADAD,EAAaC,EAAgBhC,GACtB,IACV,IAGEb,IACV,CAGD,aAAM8C,CAAQC,GACV,IAAId,EAAcc,EAAQC,MAE1B,IAAK,MAAMC,KAAMjD,KAAKC,WAAY,CAC9B,GAAIgC,QAAyC,MAE7C,IAEQA,EADAiB,MAAMC,QAAQlB,GACLA,EAAOhB,QAAQmC,GAAQH,EAAGG,IAAO1C,OAAO2C,SAExCJ,EAAGhB,EAEnB,CAAC,MAAOY,GACLS,QAAQT,MAAM,2BAA4BA,GAC1CZ,EAAS,IACZ,CACJ,CAED,OAAOA,CACV,CAGD,QAAAsB,GACI,OAAO,IAAIjD,IAAIN,KAAKQ,WACvB,CAGD,KAAAgD,GACIxD,KAAKO,WAAWkD,QAChBzD,KAAKK,WAAWoD,QAChBzD,KAAKQ,WAAWiD,OACnB,QAGQlC,EAGT,WAAA1B,CACY6D,EACApC,GADAtB,KAAM0D,OAANA,EACA1D,KAAYsB,aAAZA,EAJJtB,KAAA2D,OAA6B,IAAIrD,GAKrC,CAGJ,MAAAsD,CACIC,EACAnC,GAEA,MAAMV,EAAY,IAAIpB,EAAeI,KAAK0D,OAAoB,aAgB9D,OADA1C,EAAsB,WAAI,IAAIhB,KAAK0D,OAAmB,WAb9BI,MAAOjD,IAC3B,MAAMc,EAAM3B,KAAKsB,aAAaT,GAC9B,IAAIkD,EAAQ/D,KAAK2D,OAAO5B,IAAIJ,GAQ5B,OANKoC,IACDA,EAAQ,CAACrC,GACT1B,KAAK2D,OAAOzB,IAAIP,EAAKoC,IAGzBA,EAAM,GAAKF,EAAQE,EAAM,GAASlD,GAC3BkD,EAAM,KAIV/C,CACV,CAGD,KAAAgD,GACI,MAAMhD,EAAY,IAAIpB,EAA4CI,KAAK0D,OAAoB,aAU3F,OADA1C,EAAsB,WAAI,IAAIhB,KAAK0D,OAAmB,WAP9B7C,IACpB,MAAMc,EAAM3B,KAAKsB,aAAaT,GAC9B,IAAImD,EAAQhE,KAAK2D,OAAO5B,IAAIJ,IAAMsC,QAAU,EAE5C,OADAjE,KAAK2D,OAAOzB,IAAIP,EAAK,IAAK3B,KAAK2D,OAAO5B,IAAIJ,IAAQ,GAAKd,IAChD,CAAEc,MAAKqC,MAAOA,EAAQ,KAI1BhD,CACV,CAGD,GAAAkD,CAAIC,GACA,MAAMnD,EAAY,IAAIpB,EAA0CI,KAAK0D,OAAoB,aAWzF,OADA1C,EAAsB,WAAI,IAAIhB,KAAK0D,OAAmB,WARhC7C,IAClB,MAAMc,EAAM3B,KAAKsB,aAAaT,GACxBmC,EAAQmB,EAAetD,GAC7B,IAAIiB,EAAU9B,KAAK2D,OAAO5B,IAAIJ,KAAO,IAAgB,EAErD,OADA3B,KAAK2D,OAAOzB,IAAIP,EAAK,CAACG,EAAUkB,IACzB,CAAErB,MAAKuC,IAAKpC,EAAUkB,KAI1BhC,CACV,CAGD,GAAAoD,CAAID,GACA,MAAMnD,EAAY,IAAIpB,EAA0CI,KAAK0D,OAAoB,aAazF,
OADA1C,EAAsB,WAAI,IAAIhB,KAAK0D,OAAmB,WAVhC7C,IAClB,MAAMc,EAAM3B,KAAKsB,aAAaT,GACxBmC,EAAQmB,EAAetD,GAC7B,IAAIkD,EAAQ/D,KAAK2D,OAAO5B,IAAIJ,IAAQ,GACpCoC,EAAMnD,KAAKoC,GACXhD,KAAK2D,OAAOzB,IAAIP,EAAKoC,GAErB,MAAO,CAAEpC,MAAKyC,IADFL,EAAMH,OAAO,CAACS,EAAGC,IAAMD,EAAIC,EAAG,GACjBP,EAAME,UAI5BjD,CACV"}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import { PipelineConfig, PipelineSource, PipelineSink } from '../types';
|
|
2
|
+
/** Retry/backoff settings applied per record. */
export interface RetryConfig {
|
|
3
|
+
    /** Maximum delivery attempts per record (implementation default: 3). */
    maxAttempts: number;
|
|
4
|
+
    /** Base delay before the first retry, in ms (default: 1000). */
    delayMs: number;
|
|
5
|
+
    /** Exponential factor applied per additional attempt (default: 2). */
    backoffMultiplier: number;
|
|
6
|
+
}
|
|
7
|
+
/** Optional pipeline behaviour knobs. */
export interface PipelineOptions {
|
|
8
|
+
    retryConfig?: RetryConfig;
|
|
9
|
+
    /** Invoked on pipeline failure and on records that exhaust their retries. */
    errorHandler?: (error: Error, context: any) => void;
|
|
10
|
+
    /** NOTE(review): declared but not read by the current implementation — confirm. */
    maxConcurrent?: number;
|
|
11
|
+
}
|
|
12
|
+
/**
 * Fluent source -> transform -> sink pipeline runner with per-record
 * retry/backoff, basic metrics and a console summary.
 */
export declare class PipelineBuilder {
|
|
13
|
+
    private config;
|
|
14
|
+
    private sources;
|
|
15
|
+
    private transforms;
|
|
16
|
+
    private sinks;
|
|
17
|
+
    private options;
|
|
18
|
+
    /** processed/errors/retries counters plus start/end timestamps. */
    private metrics;
|
|
19
|
+
    constructor(config: PipelineConfig, options?: PipelineOptions);
|
|
20
|
+
    /** Registers a source; returns this for chaining. */
    source(type: PipelineSource, config: any): this;
|
|
21
|
+
    /** Appends a record transformation; applied in registration order. */
    transform(transformFn: (data: any) => any): this;
|
|
22
|
+
    /** Registers a sink; every record is written to every sink. */
    sink(type: PipelineSink, config: any): this;
|
|
23
|
+
    /** Executes the pipeline; rejects (after invoking errorHandler) on failure. */
    run(): Promise<void>;
|
|
24
|
+
    private processSource;
|
|
25
|
+
    private processRecord;
|
|
26
|
+
    private writeToSink;
|
|
27
|
+
    private sleep;
|
|
28
|
+
    private printSummary;
|
|
29
|
+
    /** Static description of the pipeline graph (source/sink types, counts). */
    lineage(): any;
|
|
30
|
+
    /** Metrics snapshot including run duration in ms (null before a run). */
    getMetrics(): any;
|
|
31
|
+
}
|
|
32
|
+
//# sourceMappingURL=builder.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"builder.d.ts","sourceRoot":"","sources":["../../../src/pipeline/builder.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,cAAc,EAAE,YAAY,EAAW,MAAM,UAAU,CAAC;AAEjF,MAAM,WAAW,WAAW;IACxB,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,MAAM,CAAC;IAChB,iBAAiB,EAAE,MAAM,CAAC;CAC7B;AAED,MAAM,WAAW,eAAe;IAC5B,WAAW,CAAC,EAAE,WAAW,CAAC;IAC1B,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,GAAG,KAAK,IAAI,CAAC;IACpD,aAAa,CAAC,EAAE,MAAM,CAAC;CAC1B;AAED,qBAAa,eAAe;IACxB,OAAO,CAAC,MAAM,CAAiB;IAC/B,OAAO,CAAC,OAAO,CAAoD;IACnE,OAAO,CAAC,UAAU,CAAiC;IACnD,OAAO,CAAC,KAAK,CAAkD;IAC/D,OAAO,CAAC,OAAO,CAAkB;IACjC,OAAO,CAAC,OAAO,CAMb;gBAEU,MAAM,EAAE,cAAc,EAAE,OAAO,CAAC,EAAE,eAAe;IAmB7D,MAAM,CAAC,IAAI,EAAE,cAAc,EAAE,MAAM,EAAE,GAAG,GAAG,IAAI;IAK/C,SAAS,CAAC,WAAW,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,GAAG,GAAG,IAAI;IAKhD,IAAI,CAAC,IAAI,EAAE,YAAY,EAAE,MAAM,EAAE,GAAG,GAAG,IAAI;IAKrC,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC;YAuBZ,aAAa;YAeb,aAAa;YA2Cb,WAAW;YAuBX,KAAK;IAInB,OAAO,CAAC,YAAY;IAkBpB,OAAO,IAAI,GAAG;IAUd,UAAU,IAAI,GAAG;CAQpB"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
class e{constructor(e,s){this.sources=[],this.transforms=[],this.sinks=[],this.config=e,this.options=s||{retryConfig:{maxAttempts:3,delayMs:1e3,backoffMultiplier:2},maxConcurrent:10},this.metrics={processed:0,errors:0,retries:0,startTime:null,endTime:null}}source(e,s){return this.sources.push({type:e,config:s}),this}transform(e){return this.transforms.push(e),this}sink(e,s){return this.sinks.push({type:e,config:s}),this}async run(){this.metrics.startTime=new Date,console.log(`🚀 Starting pipeline: ${this.config.name}`),console.log(` Sources: ${this.sources.map(e=>e.type).join(", ")}`),console.log(` Transforms: ${this.transforms.length}`),console.log(` Sinks: ${this.sinks.map(e=>e.type).join(", ")}`);try{for(const e of this.sources)await this.processSource(e);this.metrics.endTime=new Date,this.printSummary()}catch(e){throw console.error("❌ Pipeline failed:",e),this.options.errorHandler&&this.options.errorHandler(e,{pipeline:this.config.name}),e}}async processSource(e){console.log(`📡 Processing source: ${e.type}`);const s=[{id:1,name:"Record 1",value:100},{id:2,name:"Record 2",value:200},{id:3,name:"Record 3",value:300}];for(const e of s)await this.processRecord(e)}async processRecord(e,s=1){try{let s=e;for(const e of this.transforms)try{s=e(s)}catch(e){throw console.error("Transform error:",e),e}for(const e of this.sinks)await this.writeToSink(e,s);this.metrics.processed++}catch(t){const o=this.options.retryConfig?.maxAttempts||3;if(s<o){const t=(this.options.retryConfig?.delayMs||1e3)*Math.pow(this.options.retryConfig?.backoffMultiplier||2,s-1);console.warn(`⚠️ Retry ${s}/${o} after ${t}ms`),this.metrics.retries++,await this.sleep(t),await this.processRecord(e,s+1)}else console.error(`❌ Failed to process record after ${o} attempts:`,e),this.metrics.errors++,this.options.errorHandler&&this.options.errorHandler(t,{record:e,attempt:s})}}async 
writeToSink(e,s){switch(e.type){case"console":console.log(`[${e.type}]`,JSON.stringify(s,null,2));break;case"file":console.log(`📁 Writing to file: ${e.config.filePath}`);break;case"kafka":console.log(`📤 Sending to Kafka topic: ${e.config.topic}`);break;case"database":console.log(`💾 Writing to database: ${e.config.table}`);break;default:console.log(`📤 Sending to ${e.type}`)}await this.sleep(10)}async sleep(e){return new Promise(s=>setTimeout(s,e))}printSummary(){const e=this.metrics.startTime&&this.metrics.endTime?this.metrics.endTime.getTime()-this.metrics.startTime.getTime():0;console.log(""),console.log("=".repeat(50)),console.log("📊 Pipeline Summary"),console.log("=".repeat(50)),console.log(` Name: ${this.config.name}`),console.log(` Duration: ${e}ms`),console.log(` Records processed: ${this.metrics.processed}`),console.log(` Errors: ${this.metrics.errors}`),console.log(` Retries: ${this.metrics.retries}`),console.log(` Success rate: ${(this.metrics.processed/(this.metrics.processed+this.metrics.errors)*100).toFixed(2)}%`),console.log("=".repeat(50))}lineage(){return{name:this.config.name,sources:this.sources.map(e=>e.type),transforms:this.transforms.length,sinks:this.sinks.map(e=>e.type),timestamp:(new Date).toISOString()}}getMetrics(){return{...this.metrics,duration:this.metrics.startTime&&this.metrics.endTime?this.metrics.endTime.getTime()-this.metrics.startTime.getTime():null}}}export{e as PipelineBuilder};
|
|
2
|
+
//# sourceMappingURL=builder.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"builder.js","sources":["../../../../src/pipeline/builder.ts"],"sourcesContent":["import { PipelineConfig, PipelineSource, PipelineSink, Message } from '../types';\r\n\r\nexport interface RetryConfig {\r\n maxAttempts: number;\r\n delayMs: number;\r\n backoffMultiplier: number;\r\n}\r\n\r\nexport interface PipelineOptions {\r\n retryConfig?: RetryConfig;\r\n errorHandler?: (error: Error, context: any) => void;\r\n maxConcurrent?: number;\r\n}\r\n\r\nexport class PipelineBuilder {\r\n private config: PipelineConfig;\r\n private sources: Array<{ type: PipelineSource; config: any }> = [];\r\n private transforms: Array<(data: any) => any> = [];\r\n private sinks: Array<{ type: PipelineSink; config: any }> = [];\r\n private options: PipelineOptions;\r\n private metrics: {\r\n processed: number;\r\n errors: number;\r\n retries: number;\r\n startTime: Date | null;\r\n endTime: Date | null;\r\n };\r\n\r\n constructor(config: PipelineConfig, options?: PipelineOptions) {\r\n this.config = config;\r\n this.options = options || {\r\n retryConfig: {\r\n maxAttempts: 3,\r\n delayMs: 1000,\r\n backoffMultiplier: 2\r\n },\r\n maxConcurrent: 10\r\n };\r\n this.metrics = {\r\n processed: 0,\r\n errors: 0,\r\n retries: 0,\r\n startTime: null,\r\n endTime: null\r\n };\r\n }\r\n\r\n source(type: PipelineSource, config: any): this {\r\n this.sources.push({ type, config });\r\n return this;\r\n }\r\n\r\n transform(transformFn: (data: any) => any): this {\r\n this.transforms.push(transformFn);\r\n return this;\r\n }\r\n\r\n sink(type: PipelineSink, config: any): this {\r\n this.sinks.push({ type, config });\r\n return this;\r\n }\r\n\r\n async run(): Promise<void> {\r\n this.metrics.startTime = new Date();\r\n console.log(`🚀 Starting pipeline: ${this.config.name}`);\r\n console.log(` Sources: ${this.sources.map(s => s.type).join(', ')}`);\r\n console.log(` Transforms: ${this.transforms.length}`);\r\n console.log(` Sinks: ${this.sinks.map(s => s.type).join(', ')}`);\r\n 
\r\n try {\r\n for (const source of this.sources) {\r\n await this.processSource(source);\r\n }\r\n \r\n this.metrics.endTime = new Date();\r\n this.printSummary();\r\n } catch (error) {\r\n console.error(`❌ Pipeline failed:`, error);\r\n if (this.options.errorHandler) {\r\n this.options.errorHandler(error as Error, { pipeline: this.config.name });\r\n }\r\n throw error;\r\n }\r\n }\r\n\r\n private async processSource(source: { type: PipelineSource; config: any }): Promise<void> {\r\n console.log(`📡 Processing source: ${source.type}`);\r\n \r\n // Simulate data processing with retry logic\r\n const mockData = [\r\n { id: 1, name: 'Record 1', value: 100 },\r\n { id: 2, name: 'Record 2', value: 200 },\r\n { id: 3, name: 'Record 3', value: 300 }\r\n ];\r\n \r\n for (const record of mockData) {\r\n await this.processRecord(record);\r\n }\r\n }\r\n\r\n private async processRecord(record: any, attempt: number = 1): Promise<void> {\r\n try {\r\n // Apply transformations with error handling\r\n let result = record;\r\n for (const transform of this.transforms) {\r\n try {\r\n result = transform(result);\r\n } catch (transformError) {\r\n console.error(`Transform error:`, transformError);\r\n throw transformError;\r\n }\r\n }\r\n \r\n // Send to sinks\r\n for (const sink of this.sinks) {\r\n await this.writeToSink(sink, result);\r\n }\r\n \r\n this.metrics.processed++;\r\n \r\n } catch (error) {\r\n const maxAttempts = this.options.retryConfig?.maxAttempts || 3;\r\n \r\n if (attempt < maxAttempts) {\r\n const delay = (this.options.retryConfig?.delayMs || 1000) * \r\n Math.pow(this.options.retryConfig?.backoffMultiplier || 2, attempt - 1);\r\n \r\n console.warn(`⚠️ Retry ${attempt}/${maxAttempts} after ${delay}ms`);\r\n this.metrics.retries++;\r\n \r\n await this.sleep(delay);\r\n await this.processRecord(record, attempt + 1);\r\n } else {\r\n console.error(`❌ Failed to process record after ${maxAttempts} attempts:`, record);\r\n this.metrics.errors++;\r\n \r\n if 
(this.options.errorHandler) {\r\n this.options.errorHandler(error as Error, { record, attempt });\r\n }\r\n }\r\n }\r\n }\r\n\r\n private async writeToSink(sink: { type: PipelineSink; config: any }, data: any): Promise<void> {\r\n switch (sink.type) {\r\n case 'console':\r\n console.log(`[${sink.type}]`, JSON.stringify(data, null, 2));\r\n break;\r\n case 'file':\r\n // Simulate file write\r\n console.log(`📁 Writing to file: ${sink.config.filePath}`);\r\n break;\r\n case 'kafka':\r\n console.log(`📤 Sending to Kafka topic: ${sink.config.topic}`);\r\n break;\r\n case 'database':\r\n console.log(`💾 Writing to database: ${sink.config.table}`);\r\n break;\r\n default:\r\n console.log(`📤 Sending to ${sink.type}`);\r\n }\r\n \r\n // Simulate async operation\r\n await this.sleep(10);\r\n }\r\n\r\n private async sleep(ms: number): Promise<void> {\r\n return new Promise(resolve => setTimeout(resolve, ms));\r\n }\r\n\r\n private printSummary(): void {\r\n const duration = this.metrics.startTime && this.metrics.endTime \r\n ? 
this.metrics.endTime.getTime() - this.metrics.startTime.getTime()\r\n : 0;\r\n \r\n console.log('');\r\n console.log('='.repeat(50));\r\n console.log('📊 Pipeline Summary');\r\n console.log('='.repeat(50));\r\n console.log(` Name: ${this.config.name}`);\r\n console.log(` Duration: ${duration}ms`);\r\n console.log(` Records processed: ${this.metrics.processed}`);\r\n console.log(` Errors: ${this.metrics.errors}`);\r\n console.log(` Retries: ${this.metrics.retries}`);\r\n console.log(` Success rate: ${((this.metrics.processed / (this.metrics.processed + this.metrics.errors)) * 100).toFixed(2)}%`);\r\n console.log('='.repeat(50));\r\n }\r\n\r\n lineage(): any {\r\n return {\r\n name: this.config.name,\r\n sources: this.sources.map(s => s.type),\r\n transforms: this.transforms.length,\r\n sinks: this.sinks.map(s => s.type),\r\n timestamp: new Date().toISOString()\r\n };\r\n }\r\n\r\n getMetrics(): any {\r\n return {\r\n ...this.metrics,\r\n duration: this.metrics.startTime && this.metrics.endTime \r\n ? 
this.metrics.endTime.getTime() - this.metrics.startTime.getTime()\r\n : null\r\n };\r\n }\r\n}"],"names":["PipelineBuilder","constructor","config","options","this","sources","transforms","sinks","retryConfig","maxAttempts","delayMs","backoffMultiplier","maxConcurrent","metrics","processed","errors","retries","startTime","endTime","source","type","push","transform","transformFn","sink","run","Date","console","log","name","map","s","join","length","processSource","printSummary","error","errorHandler","pipeline","mockData","id","value","record","processRecord","attempt","result","transformError","writeToSink","delay","Math","pow","warn","sleep","data","JSON","stringify","filePath","topic","table","ms","Promise","resolve","setTimeout","duration","getTime","repeat","toFixed","lineage","timestamp","toISOString","getMetrics"],"mappings":"MAcaA,EAcT,WAAAC,CAAYC,EAAwBC,GAZ5BC,KAAOC,QAAiD,GACxDD,KAAUE,WAA8B,GACxCF,KAAKG,MAA+C,GAWxDH,KAAKF,OAASA,EACdE,KAAKD,QAAUA,GAAW,CACtBK,YAAa,CACTC,YAAa,EACbC,QAAS,IACTC,kBAAmB,GAEvBC,cAAe,IAEnBR,KAAKS,QAAU,CACXC,UAAW,EACXC,OAAQ,EACRC,QAAS,EACTC,UAAW,KACXC,QAAS,KAEhB,CAED,MAAAC,CAAOC,EAAsBlB,GAEzB,OADAE,KAAKC,QAAQgB,KAAK,CAAED,OAAMlB,WACnBE,IACV,CAED,SAAAkB,CAAUC,GAEN,OADAnB,KAAKE,WAAWe,KAAKE,GACdnB,IACV,CAED,IAAAoB,CAAKJ,EAAoBlB,GAErB,OADAE,KAAKG,MAAMc,KAAK,CAAED,OAAMlB,WACjBE,IACV,CAED,SAAMqB,GACFrB,KAAKS,QAAQI,UAAY,IAAIS,KAC7BC,QAAQC,IAAI,yBAAyBxB,KAAKF,OAAO2B,QACjDF,QAAQC,IAAI,eAAexB,KAAKC,QAAQyB,IAAIC,GAAKA,EAAEX,MAAMY,KAAK,SAC9DL,QAAQC,IAAI,kBAAkBxB,KAAKE,WAAW2B,UAC9CN,QAAQC,IAAI,aAAaxB,KAAKG,MAAMuB,IAAIC,GAAKA,EAAEX,MAAMY,KAAK,SAE1D,IACI,IAAK,MAAMb,KAAUf,KAAKC,cAChBD,KAAK8B,cAAcf,GAG7Bf,KAAKS,QAAQK,QAAU,IAAIQ,KAC3BtB,KAAK+B,cACR,CAAC,MAAOC,GAKL,MAJAT,QAAQS,MAAM,qBAAsBA,GAChChC,KAAKD,QAAQkC,cACbjC,KAAKD,QAAQkC,aAAaD,EAAgB,CAAEE,SAAUlC,KAAKF,OAAO2B,OAEhEO,CACT,CACJ,CAEO,mBAAMF,CAAcf,GACxBQ,QAAQC,IAAI,yBAAyBT,EAAOC,QAG5C,MAAMmB,EAAW,CACb,CAAEC,GAAI,EAAGX,KAAM,WAAYY,MAAO,KAClC,CAAED,GAAI,EAAGX,KAAM,WAAYY,MAAO,KAClC,CAAED,GAAI,EAAGX,KAAM,
WAAYY,MAAO,MAGtC,IAAK,MAAMC,KAAUH,QACXnC,KAAKuC,cAAcD,EAEhC,CAEO,mBAAMC,CAAcD,EAAaE,EAAkB,GACvD,IAEI,IAAIC,EAASH,EACb,IAAK,MAAMpB,KAAalB,KAAKE,WACzB,IACIuC,EAASvB,EAAUuB,EACtB,CAAC,MAAOC,GAEL,MADAnB,QAAQS,MAAM,mBAAoBU,GAC5BA,CACT,CAIL,IAAK,MAAMtB,KAAQpB,KAAKG,YACdH,KAAK2C,YAAYvB,EAAMqB,GAGjCzC,KAAKS,QAAQC,WAEhB,CAAC,MAAOsB,GACL,MAAM3B,EAAcL,KAAKD,QAAQK,aAAaC,aAAe,EAE7D,GAAImC,EAAUnC,EAAa,CACvB,MAAMuC,GAAS5C,KAAKD,QAAQK,aAAaE,SAAW,KACvCuC,KAAKC,IAAI9C,KAAKD,QAAQK,aAAaG,mBAAqB,EAAGiC,EAAU,GAElFjB,QAAQwB,KAAK,YAAYP,KAAWnC,WAAqBuC,OACzD5C,KAAKS,QAAQG,gBAEPZ,KAAKgD,MAAMJ,SACX5C,KAAKuC,cAAcD,EAAQE,EAAU,EAC9C,MACGjB,QAAQS,MAAM,oCAAoC3B,cAAyBiC,GAC3EtC,KAAKS,QAAQE,SAETX,KAAKD,QAAQkC,cACbjC,KAAKD,QAAQkC,aAAaD,EAAgB,CAAEM,SAAQE,WAG/D,CACJ,CAEO,iBAAMG,CAAYvB,EAA2C6B,GACjE,OAAQ7B,EAAKJ,MACT,IAAK,UACDO,QAAQC,IAAI,IAAIJ,EAAKJ,QAASkC,KAAKC,UAAUF,EAAM,KAAM,IACzD,MACJ,IAAK,OAED1B,QAAQC,IAAI,uBAAuBJ,EAAKtB,OAAOsD,YAC/C,MACJ,IAAK,QACD7B,QAAQC,IAAI,8BAA8BJ,EAAKtB,OAAOuD,SACtD,MACJ,IAAK,WACD9B,QAAQC,IAAI,2BAA2BJ,EAAKtB,OAAOwD,SACnD,MACJ,QACI/B,QAAQC,IAAI,iBAAiBJ,EAAKJ,cAIpChB,KAAKgD,MAAM,GACpB,CAEO,WAAMA,CAAMO,GAChB,OAAO,IAAIC,QAAQC,GAAWC,WAAWD,EAASF,GACrD,CAEO,YAAAxB,GACJ,MAAM4B,EAAW3D,KAAKS,QAAQI,WAAab,KAAKS,QAAQK,QAClDd,KAAKS,QAAQK,QAAQ8C,UAAY5D,KAAKS,QAAQI,UAAU+C,UACxD,EAENrC,QAAQC,IAAI,IACZD,QAAQC,IAAI,IAAIqC,OAAO,KACvBtC,QAAQC,IAAI,uBACZD,QAAQC,IAAI,IAAIqC,OAAO,KACvBtC,QAAQC,IAAI,YAAYxB,KAAKF,OAAO2B,QACpCF,QAAQC,IAAI,gBAAgBmC,OAC5BpC,QAAQC,IAAI,yBAAyBxB,KAAKS,QAAQC,aAClDa,QAAQC,IAAI,cAAcxB,KAAKS,QAAQE,UACvCY,QAAQC,IAAI,eAAexB,KAAKS,QAAQG,WACxCW,QAAQC,IAAI,qBAAsBxB,KAAKS,QAAQC,WAAaV,KAAKS,QAAQC,UAAYV,KAAKS,QAAQE,QAAW,KAAKmD,QAAQ,OAC1HvC,QAAQC,IAAI,IAAIqC,OAAO,IAC1B,CAED,OAAAE,GACI,MAAO,CACHtC,KAAMzB,KAAKF,OAAO2B,KAClBxB,QAASD,KAAKC,QAAQyB,IAAIC,GAAKA,EAAEX,MACjCd,WAAYF,KAAKE,WAAW2B,OAC5B1B,MAAOH,KAAKG,MAAMuB,IAAIC,GAAKA,EAAEX,MAC7BgD,WAAW,IAAI1C,MAAO2C,cAE7B,CAED,UAAAC,GACI,MAAO,IACAlE,KAAKS,QACRkD,SAAU3D,KAAKS,QAAQI,WAAab,KAAKS,QAAQK,QAC3Cd,KAAKS,QAAQK,QAAQ8C,UAAY5D,KAAKS,QAAQI,UA
AU+C,UACxD,KAEb"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
//# sourceMappingURL=connectors.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"connectors.d.ts","sourceRoot":"","sources":["../../../src/pipeline/connectors.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
export declare class PipelineMonitor {
|
|
2
|
+
private metrics;
|
|
3
|
+
private startTime;
|
|
4
|
+
recordMetric(name: string, value: number): void;
|
|
5
|
+
getMetrics(): {
|
|
6
|
+
uptime: number;
|
|
7
|
+
metrics: Record<string, {
|
|
8
|
+
count: number;
|
|
9
|
+
recent: any[];
|
|
10
|
+
average: number;
|
|
11
|
+
max: number;
|
|
12
|
+
min: number;
|
|
13
|
+
}>;
|
|
14
|
+
};
|
|
15
|
+
exportToPrometheus(): Promise<string>;
|
|
16
|
+
}
|
|
17
|
+
//# sourceMappingURL=monitoring.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"monitoring.d.ts","sourceRoot":"","sources":["../../../src/pipeline/monitoring.ts"],"names":[],"mappings":"AAAA,qBAAa,eAAe;IAC1B,OAAO,CAAC,OAAO,CAA+B;IAC9C,OAAO,CAAC,SAAS,CAAoB;IAErC,YAAY,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,GAAG,IAAI;IAU/C,UAAU,IAAI;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE;YAAE,KAAK,EAAE,MAAM,CAAC;YAAC,MAAM,EAAE,GAAG,EAAE,CAAC;YAAC,OAAO,EAAE,MAAM,CAAC;YAAC,GAAG,EAAE,MAAM,CAAC;YAAC,GAAG,EAAE,MAAM,CAAA;SAAE,CAAC,CAAA;KAAE;IAoBhI,kBAAkB,IAAI,OAAO,CAAC,MAAM,CAAC;CAY5C"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
class e{constructor(){this.metrics=new Map,this.startTime=new Date}recordMetric(e,t){this.metrics.has(e)||this.metrics.set(e,[]),this.metrics.get(e).push({value:t,timestamp:new Date})}getMetrics(){const e={uptime:Date.now()-this.startTime.getTime(),metrics:{}};for(const[t,s]of this.metrics){const r=s.slice(-100);e.metrics[t]={count:s.length,recent:r,average:r.reduce((e,t)=>e+t.value,0)/r.length,max:Math.max(...r.map(e=>e.value)),min:Math.min(...r.map(e=>e.value))}}return e}async exportToPrometheus(){const e=this.getMetrics();let t="";for(const[s,r]of Object.entries(e.metrics))t+=`# HELP ${s} Pipeline metric\n`,t+=`# TYPE ${s} gauge\n`,t+=`${s} ${r.average}\n`;return t}}export{e as PipelineMonitor};
|
|
2
|
+
//# sourceMappingURL=monitoring.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"monitoring.js","sources":["../../../../src/pipeline/monitoring.ts"],"sourcesContent":["export class PipelineMonitor {\r\n private metrics: Map<string, any> = new Map();\r\n private startTime: Date = new Date();\r\n\r\n recordMetric(name: string, value: number): void {\r\n if (!this.metrics.has(name)) {\r\n this.metrics.set(name, []);\r\n }\r\n this.metrics.get(name).push({\r\n value,\r\n timestamp: new Date(),\r\n });\r\n }\r\n\r\n getMetrics(): { uptime: number; metrics: Record<string, { count: number; recent: any[]; average: number; max: number; min: number }> } {\r\n const result: { uptime: number; metrics: Record<string, { count: number; recent: any[]; average: number; max: number; min: number }> } = {\r\n uptime: Date.now() - this.startTime.getTime(),\r\n metrics: {} as Record<string, { count: number; recent: any[]; average: number; max: number; min: number }>,\r\n };\r\n \r\n for (const [name, values] of this.metrics) {\r\n const recentValues = values.slice(-100);\r\n result.metrics[name] = {\r\n count: values.length,\r\n recent: recentValues,\r\n average: recentValues.reduce((a: number, b: any) => a + b.value, 0) / recentValues.length,\r\n max: Math.max(...recentValues.map((v: any) => v.value)),\r\n min: Math.min(...recentValues.map((v: any) => v.value)),\r\n };\r\n }\r\n \r\n return result;\r\n }\r\n\r\n async exportToPrometheus(): Promise<string> {\r\n const metrics = this.getMetrics();\r\n let output = '';\r\n \r\n for (const [name, data] of Object.entries(metrics.metrics)) {\r\n output += `# HELP ${name} Pipeline metric\\n`;\r\n output += `# TYPE ${name} gauge\\n`;\r\n output += `${name} ${data.average}\\n`;\r\n }\r\n \r\n return output;\r\n 
}\r\n}"],"names":["PipelineMonitor","constructor","this","metrics","Map","startTime","Date","recordMetric","name","value","has","set","get","push","timestamp","getMetrics","result","uptime","now","getTime","values","recentValues","slice","count","length","recent","average","reduce","a","b","max","Math","map","v","min","exportToPrometheus","output","data","Object","entries"],"mappings":"MAAaA,EAAb,WAAAC,GACUC,KAAAC,QAA4B,IAAIC,IAChCF,KAAAG,UAAkB,IAAIC,IA4C/B,CA1CC,YAAAC,CAAaC,EAAcC,GACpBP,KAAKC,QAAQO,IAAIF,IACpBN,KAAKC,QAAQQ,IAAIH,EAAM,IAEzBN,KAAKC,QAAQS,IAAIJ,GAAMK,KAAK,CAC1BJ,QACAK,UAAW,IAAIR,MAElB,CAED,UAAAS,GACE,MAAMC,EAAmI,CACvIC,OAAQX,KAAKY,MAAQhB,KAAKG,UAAUc,UACpChB,QAAS,CAAiG,GAG5G,IAAK,MAAOK,EAAMY,KAAWlB,KAAKC,QAAS,CACzC,MAAMkB,EAAeD,EAAOE,OAAO,KACnCN,EAAOb,QAAQK,GAAQ,CACrBe,MAAOH,EAAOI,OACdC,OAAQJ,EACRK,QAASL,EAAaM,OAAO,CAACC,EAAWC,IAAWD,EAAIC,EAAEpB,MAAO,GAAKY,EAAaG,OACnFM,IAAKC,KAAKD,OAAOT,EAAaW,IAAKC,GAAWA,EAAExB,QAChDyB,IAAKH,KAAKG,OAAOb,EAAaW,IAAKC,GAAWA,EAAExB,QAEnD,CAED,OAAOO,CACR,CAED,wBAAMmB,GACJ,MAAMhC,EAAUD,KAAKa,aACrB,IAAIqB,EAAS,GAEb,IAAK,MAAO5B,EAAM6B,KAASC,OAAOC,QAAQpC,EAAQA,SAChDiC,GAAU,UAAU5B,sBACpB4B,GAAU,UAAU5B,YACpB4B,GAAU,GAAG5B,KAAQ6B,EAAKX,YAG5B,OAAOU,CACR"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/react/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC"}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
interface UseKafkaTopicOptions {
|
|
2
|
+
deserialize?: (data: any) => any;
|
|
3
|
+
maxMessages?: number;
|
|
4
|
+
reconnectOnError?: boolean;
|
|
5
|
+
}
|
|
6
|
+
export declare function useKafkaTopic<T = any>(topic: string, options?: UseKafkaTopicOptions): {
|
|
7
|
+
data: T[];
|
|
8
|
+
isConnected: boolean;
|
|
9
|
+
error: Error;
|
|
10
|
+
clearData: () => void;
|
|
11
|
+
};
|
|
12
|
+
export {};
|
|
13
|
+
//# sourceMappingURL=useKafkaTopic.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"useKafkaTopic.d.ts","sourceRoot":"","sources":["../../../src/react/useKafkaTopic.ts"],"names":[],"mappings":"AAIA,UAAU,oBAAoB;IAC5B,WAAW,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,GAAG,CAAC;IACjC,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,gBAAgB,CAAC,EAAE,OAAO,CAAC;CAC5B;AAED,wBAAgB,aAAa,CAAC,CAAC,GAAG,GAAG,EACnC,KAAK,EAAE,MAAM,EACb,OAAO,GAAE,oBAAyB;;;;;EAsDnC"}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
interface UseRealtimeDataOptions {
|
|
2
|
+
kafkaTopic: string;
|
|
3
|
+
sparkQuery: string;
|
|
4
|
+
refreshInterval?: number;
|
|
5
|
+
transform?: (data: any) => any;
|
|
6
|
+
}
|
|
7
|
+
export declare function useRealtimeData<T = any>(options: UseRealtimeDataOptions): {
|
|
8
|
+
realtimeData: any[];
|
|
9
|
+
aggregatedData: any[];
|
|
10
|
+
isConnected: boolean;
|
|
11
|
+
loading: boolean;
|
|
12
|
+
refetch: () => void;
|
|
13
|
+
chartRef: import("react").MutableRefObject<HTMLDivElement>;
|
|
14
|
+
};
|
|
15
|
+
export {};
|
|
16
|
+
//# sourceMappingURL=useRealtimeData.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"useRealtimeData.d.ts","sourceRoot":"","sources":["../../../src/react/useRealtimeData.ts"],"names":[],"mappings":"AAIA,UAAU,sBAAsB;IAC9B,UAAU,EAAE,MAAM,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,GAAG,CAAC;CAChC;AAED,wBAAgB,eAAe,CAAC,CAAC,GAAG,GAAG,EAAE,OAAO,EAAE,sBAAsB;;;;;;;EA6BvE"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
interface UseSparkQueryOptions {
|
|
2
|
+
refreshInterval?: number;
|
|
3
|
+
enabled?: boolean;
|
|
4
|
+
}
|
|
5
|
+
export declare function useSparkQuery(sqlQuery: string, options?: UseSparkQueryOptions): {
|
|
6
|
+
data: any[];
|
|
7
|
+
loading: boolean;
|
|
8
|
+
error: Error;
|
|
9
|
+
refetch: () => void;
|
|
10
|
+
};
|
|
11
|
+
export {};
|
|
12
|
+
//# sourceMappingURL=useSparkQuery.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"useSparkQuery.d.ts","sourceRoot":"","sources":["../../../src/react/useSparkQuery.ts"],"names":[],"mappings":"AAGA,UAAU,oBAAoB;IAC5B,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,OAAO,CAAC,EAAE,OAAO,CAAC;CACnB;AAED,wBAAgB,aAAa,CAC3B,QAAQ,EAAE,MAAM,EAChB,OAAO,GAAE,oBAAyB;;;;;EAuCnC"}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import { SparkConfig, SparkJob } from '../types';
|
|
2
|
+
export declare class SparkClient {
|
|
3
|
+
private baseUrl;
|
|
4
|
+
private headers;
|
|
5
|
+
private retryConfig;
|
|
6
|
+
constructor(config: SparkConfig, retryConfig?: {
|
|
7
|
+
maxRetries: number;
|
|
8
|
+
retryDelay: number;
|
|
9
|
+
});
|
|
10
|
+
submitJob(jobCode: string, jobName: string, options?: {
|
|
11
|
+
timeout?: number;
|
|
12
|
+
retries?: number;
|
|
13
|
+
}): Promise<SparkJob>;
|
|
14
|
+
getJobStatus(jobId: string): Promise<SparkJob>;
|
|
15
|
+
cancelJob(jobId: string): Promise<boolean>;
|
|
16
|
+
monitorJob(jobId: string, timeoutMs?: number): Promise<SparkJob>;
|
|
17
|
+
runPythonScript(scriptPath: string, args?: string[], options?: {
|
|
18
|
+
timeout?: number;
|
|
19
|
+
}): Promise<SparkJob>;
|
|
20
|
+
submitSQLQuery(sql: string, options?: {
|
|
21
|
+
database?: string;
|
|
22
|
+
timeout?: number;
|
|
23
|
+
}): Promise<any[]>;
|
|
24
|
+
private sleep;
|
|
25
|
+
healthCheck(): Promise<boolean>;
|
|
26
|
+
}
|
|
27
|
+
//# sourceMappingURL=client.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"client.d.ts","sourceRoot":"","sources":["../../../src/spark/client.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,QAAQ,EAAc,MAAM,UAAU,CAAC;AAE7D,qBAAa,WAAW;IACpB,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,OAAO,CAAyB;IACxC,OAAO,CAAC,WAAW,CAA6C;gBAEpD,MAAM,EAAE,WAAW,EAAE,WAAW,CAAC,EAAE;QAAE,UAAU,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAA;KAAE;IAQnF,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,QAAQ,CAAC;IAiDhH,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,QAAQ,CAAC;IAwB9C,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAmB1C,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,SAAS,GAAE,MAAe,GAAG,OAAO,CAAC,QAAQ,CAAC;IA2BxE,eAAe,CAAC,UAAU,EAAE,MAAM,EAAE,IAAI,GAAE,MAAM,EAAO,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,QAAQ,CAAC;IA2B3G,cAAc,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,QAAQ,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC;IAkBpG,OAAO,CAAC,KAAK;IAIP,WAAW,IAAI,OAAO,CAAC,OAAO,CAAC;CAQxC"}
|