@async-fusion/data 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (115)
  1. package/LICENSE +0 -0
  2. package/README.md +0 -0
  3. package/dist/cjs/cli/deploy.d.ts +1 -0
  4. package/dist/cjs/cli/deploy.d.ts.map +1 -0
  5. package/dist/cjs/cli/monitor.d.ts +1 -0
  6. package/dist/cjs/cli/monitor.d.ts.map +1 -0
  7. package/dist/cjs/index.d.ts +29 -0
  8. package/dist/cjs/index.d.ts.map +1 -0
  9. package/dist/cjs/index.js +2 -0
  10. package/dist/cjs/index.js.map +1 -0
  11. package/dist/cjs/kafka/consumer.d.ts +19 -0
  12. package/dist/cjs/kafka/consumer.d.ts.map +1 -0
  13. package/dist/cjs/kafka/consumer.js +2 -0
  14. package/dist/cjs/kafka/consumer.js.map +1 -0
  15. package/dist/cjs/kafka/producer.d.ts +21 -0
  16. package/dist/cjs/kafka/producer.d.ts.map +1 -0
  17. package/dist/cjs/kafka/producer.js +2 -0
  18. package/dist/cjs/kafka/producer.js.map +1 -0
  19. package/dist/cjs/kafka/stream.d.ts +52 -0
  20. package/dist/cjs/kafka/stream.d.ts.map +1 -0
  21. package/dist/cjs/kafka/stream.js +2 -0
  22. package/dist/cjs/kafka/stream.js.map +1 -0
  23. package/dist/cjs/pipeline/builder.d.ts +32 -0
  24. package/dist/cjs/pipeline/builder.d.ts.map +1 -0
  25. package/dist/cjs/pipeline/builder.js +2 -0
  26. package/dist/cjs/pipeline/builder.js.map +1 -0
  27. package/dist/cjs/pipeline/connectors.d.ts +1 -0
  28. package/dist/cjs/pipeline/connectors.d.ts.map +1 -0
  29. package/dist/cjs/pipeline/monitoring.d.ts +17 -0
  30. package/dist/cjs/pipeline/monitoring.d.ts.map +1 -0
  31. package/dist/cjs/pipeline/monitoring.js +2 -0
  32. package/dist/cjs/pipeline/monitoring.js.map +1 -0
  33. package/dist/cjs/react/index.d.ts +4 -0
  34. package/dist/cjs/react/index.d.ts.map +1 -0
  35. package/dist/cjs/react/useKafkaTopic.d.ts +13 -0
  36. package/dist/cjs/react/useKafkaTopic.d.ts.map +1 -0
  37. package/dist/cjs/react/useRealtimeData.d.ts +16 -0
  38. package/dist/cjs/react/useRealtimeData.d.ts.map +1 -0
  39. package/dist/cjs/react/useSparkQuery.d.ts +12 -0
  40. package/dist/cjs/react/useSparkQuery.d.ts.map +1 -0
  41. package/dist/cjs/spark/client.d.ts +27 -0
  42. package/dist/cjs/spark/client.d.ts.map +1 -0
  43. package/dist/cjs/spark/client.js +2 -0
  44. package/dist/cjs/spark/client.js.map +1 -0
  45. package/dist/cjs/spark/sql.d.ts +9 -0
  46. package/dist/cjs/spark/sql.d.ts.map +1 -0
  47. package/dist/cjs/spark/sql.js +2 -0
  48. package/dist/cjs/spark/sql.js.map +1 -0
  49. package/dist/cjs/spark/streaming.d.ts +8 -0
  50. package/dist/cjs/spark/streaming.d.ts.map +1 -0
  51. package/dist/cjs/spark/streaming.js +2 -0
  52. package/dist/cjs/spark/streaming.js.map +1 -0
  53. package/dist/cjs/types/index.d.ts +47 -0
  54. package/dist/cjs/types/index.d.ts.map +1 -0
  55. package/dist/cjs/utils/error-handling.d.ts +27 -0
  56. package/dist/cjs/utils/error-handling.d.ts.map +1 -0
  57. package/dist/cjs/utils/error-handling.js +2 -0
  58. package/dist/cjs/utils/error-handling.js.map +1 -0
  59. package/dist/esm/cli/deploy.d.ts +1 -0
  60. package/dist/esm/cli/deploy.d.ts.map +1 -0
  61. package/dist/esm/cli/monitor.d.ts +1 -0
  62. package/dist/esm/cli/monitor.d.ts.map +1 -0
  63. package/dist/esm/index.d.ts +29 -0
  64. package/dist/esm/index.d.ts.map +1 -0
  65. package/dist/esm/index.js +2 -0
  66. package/dist/esm/index.js.map +1 -0
  67. package/dist/esm/kafka/consumer.d.ts +19 -0
  68. package/dist/esm/kafka/consumer.d.ts.map +1 -0
  69. package/dist/esm/kafka/consumer.js +2 -0
  70. package/dist/esm/kafka/consumer.js.map +1 -0
  71. package/dist/esm/kafka/producer.d.ts +21 -0
  72. package/dist/esm/kafka/producer.d.ts.map +1 -0
  73. package/dist/esm/kafka/producer.js +2 -0
  74. package/dist/esm/kafka/producer.js.map +1 -0
  75. package/dist/esm/kafka/stream.d.ts +52 -0
  76. package/dist/esm/kafka/stream.d.ts.map +1 -0
  77. package/dist/esm/kafka/stream.js +2 -0
  78. package/dist/esm/kafka/stream.js.map +1 -0
  79. package/dist/esm/pipeline/builder.d.ts +32 -0
  80. package/dist/esm/pipeline/builder.d.ts.map +1 -0
  81. package/dist/esm/pipeline/builder.js +2 -0
  82. package/dist/esm/pipeline/builder.js.map +1 -0
  83. package/dist/esm/pipeline/connectors.d.ts +1 -0
  84. package/dist/esm/pipeline/connectors.d.ts.map +1 -0
  85. package/dist/esm/pipeline/monitoring.d.ts +17 -0
  86. package/dist/esm/pipeline/monitoring.d.ts.map +1 -0
  87. package/dist/esm/pipeline/monitoring.js +2 -0
  88. package/dist/esm/pipeline/monitoring.js.map +1 -0
  89. package/dist/esm/react/index.d.ts +4 -0
  90. package/dist/esm/react/index.d.ts.map +1 -0
  91. package/dist/esm/react/useKafkaTopic.d.ts +13 -0
  92. package/dist/esm/react/useKafkaTopic.d.ts.map +1 -0
  93. package/dist/esm/react/useRealtimeData.d.ts +16 -0
  94. package/dist/esm/react/useRealtimeData.d.ts.map +1 -0
  95. package/dist/esm/react/useSparkQuery.d.ts +12 -0
  96. package/dist/esm/react/useSparkQuery.d.ts.map +1 -0
  97. package/dist/esm/spark/client.d.ts +27 -0
  98. package/dist/esm/spark/client.d.ts.map +1 -0
  99. package/dist/esm/spark/client.js +2 -0
  100. package/dist/esm/spark/client.js.map +1 -0
  101. package/dist/esm/spark/sql.d.ts +9 -0
  102. package/dist/esm/spark/sql.d.ts.map +1 -0
  103. package/dist/esm/spark/sql.js +2 -0
  104. package/dist/esm/spark/sql.js.map +1 -0
  105. package/dist/esm/spark/streaming.d.ts +8 -0
  106. package/dist/esm/spark/streaming.d.ts.map +1 -0
  107. package/dist/esm/spark/streaming.js +2 -0
  108. package/dist/esm/spark/streaming.js.map +1 -0
  109. package/dist/esm/types/index.d.ts +47 -0
  110. package/dist/esm/types/index.d.ts.map +1 -0
  111. package/dist/esm/utils/error-handling.d.ts +27 -0
  112. package/dist/esm/utils/error-handling.d.ts.map +1 -0
  113. package/dist/esm/utils/error-handling.js +2 -0
  114. package/dist/esm/utils/error-handling.js.map +1 -0
  115. package/package.json +68 -0
package/LICENSE ADDED
File without changes
package/README.md ADDED
File without changes
package/dist/cjs/cli/deploy.d.ts ADDED
@@ -0,0 +1 @@
+ //# sourceMappingURL=deploy.d.ts.map
package/dist/cjs/cli/deploy.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"deploy.d.ts","sourceRoot":"","sources":["../../../src/cli/deploy.ts"],"names":[],"mappings":""}
package/dist/cjs/cli/monitor.d.ts ADDED
@@ -0,0 +1 @@
+ //# sourceMappingURL=monitor.d.ts.map
package/dist/cjs/cli/monitor.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"monitor.d.ts","sourceRoot":"","sources":["../../../src/cli/monitor.ts"],"names":[],"mappings":""}
package/dist/cjs/index.d.ts ADDED
@@ -0,0 +1,29 @@
+ export declare const version = "1.0.0";
+ export declare const library: {
+     name: string;
+     version: string;
+     author: string;
+     license: string;
+     repository: string;
+ };
+ export declare function getLibraryInfo(): {
+     name: string;
+     version: string;
+     author: string;
+     description: string;
+     features: string[];
+     license: string;
+     repository: string;
+ };
+ export declare function hello(): string;
+ export { Producer } from './kafka/producer';
+ export { Consumer } from './kafka/consumer';
+ export { KafkaStream, GroupedStream } from './kafka/stream';
+ export { SparkClient } from './spark/client';
+ export { SparkStreaming } from './spark/streaming';
+ export { SparkSQL } from './spark/sql';
+ export { PipelineBuilder } from './pipeline/builder';
+ export { PipelineMonitor } from './pipeline/monitoring';
+ export { withRetry, sleep, CircuitBreaker, RetryableError, FatalError } from './utils/error-handling';
+ export * from './types';
+ //# sourceMappingURL=index.d.ts.map
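
The index.d.ts above is the package's entire public surface: metadata helpers plus re-exports of the Kafka, Spark, pipeline, and error-handling modules. A minimal consumption sketch (package name as published; output comments paraphrase the implementation shown below):

    import { version, hello, getLibraryInfo } from '@async-fusion/data';

    console.log(version);  // "1.0.0"
    console.log(hello());  // greeting string that embeds library.author
    // Feature list, license, and repository as declared above.
    const info = getLibraryInfo();
    console.log(info.features.join(', '));
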
package/dist/cjs/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,OAAO,UAAU,CAAC;AAE/B,eAAO,MAAM,OAAO;;;;;;CAMnB,CAAC;AAEF,wBAAgB,cAAc;;;;;;;;EAoB7B;AAED,wBAAgB,KAAK,IAAI,MAAM,CAE9B;AAGD,OAAO,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAC5C,OAAO,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAC5C,OAAO,EAAE,WAAW,EAAE,aAAa,EAAE,MAAM,gBAAgB,CAAC;AAG5D,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACnD,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAGvC,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAGxD,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,cAAc,EAAE,cAAc,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAGtG,cAAc,SAAS,CAAC"}
package/dist/cjs/index.js ADDED
@@ -0,0 +1,2 @@
+ "use strict";var r=require("./kafka/producer.js"),e=require("./kafka/consumer.js"),i=require("./kafka/stream.js"),t=require("./spark/client.js"),a=require("./spark/streaming.js"),o=require("./spark/sql.js"),s=require("./pipeline/builder.js"),n=require("./pipeline/monitoring.js"),p=require("./utils/error-handling.js");const u="1.0.0",l={name:"@async-fusion/data",version:u,author:"Udayan Sharma",license:"MIT",repository:"https://github.com/hollermay/async-fusion-data"};exports.Producer=r.Producer,exports.Consumer=e.Consumer,exports.GroupedStream=i.GroupedStream,exports.KafkaStream=i.KafkaStream,exports.SparkClient=t.SparkClient,exports.SparkStreaming=a.SparkStreaming,exports.SparkSQL=o.SparkSQL,exports.PipelineBuilder=s.PipelineBuilder,exports.PipelineMonitor=n.PipelineMonitor,exports.CircuitBreaker=p.CircuitBreaker,exports.FatalError=p.FatalError,exports.RetryableError=p.RetryableError,exports.sleep=p.sleep,exports.withRetry=p.withRetry,exports.getLibraryInfo=function(){return{name:l.name,version:l.version,author:l.author,description:"Unified data streaming library for Kafka and Spark",features:["Kafka Producer/Consumer with backpressure","Spark job submission and monitoring","Unified pipeline builder","React hooks for real-time data","TypeScript first","Built-in monitoring and metrics","Stream processing with windowing","Error handling and retries","Circuit breaker pattern"],license:l.license,repository:l.repository}},exports.hello=function(){return`Hello from @async-fusion/data! Built with lots of Love! (errors and fixes :P) by ${l.author}`},exports.library=l,exports.version=u;
+ //# sourceMappingURL=index.js.map
package/dist/cjs/index.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.js","sources":["../../../src/index.ts"],"sourcesContent":["// Built by Udayan Sharma\r\n\r\nexport const version = '1.0.0';\r\n\r\nexport const library = {\r\n name: '@async-fusion/data',\r\n version,\r\n author: 'Udayan Sharma',\r\n license: 'MIT',\r\n repository: 'https://github.com/hollermay/async-fusion-data'\r\n};\r\n\r\nexport function getLibraryInfo() {\r\n return {\r\n name: library.name,\r\n version: library.version,\r\n author: library.author,\r\n description: 'Unified data streaming library for Kafka and Spark',\r\n features: [\r\n 'Kafka Producer/Consumer with backpressure',\r\n 'Spark job submission and monitoring',\r\n 'Unified pipeline builder',\r\n 'React hooks for real-time data',\r\n 'TypeScript first',\r\n 'Built-in monitoring and metrics',\r\n 'Stream processing with windowing',\r\n 'Error handling and retries',\r\n 'Circuit breaker pattern'\r\n ],\r\n license: library.license,\r\n repository: library.repository\r\n };\r\n}\r\n\r\nexport function hello(): string {\r\n return `Hello from @async-fusion/data! Built with lots of Love! (errors and fixes :P) by ${library.author}`;\r\n}\r\n\r\n// Kafka exports\r\nexport { Producer } from './kafka/producer';\r\nexport { Consumer } from './kafka/consumer';\r\nexport { KafkaStream, GroupedStream } from './kafka/stream';\r\n\r\n// Spark exports\r\nexport { SparkClient } from './spark/client';\r\nexport { SparkStreaming } from './spark/streaming';\r\nexport { SparkSQL } from './spark/sql';\r\n\r\n// Pipeline exports\r\nexport { PipelineBuilder } from './pipeline/builder';\r\nexport { PipelineMonitor } from './pipeline/monitoring';\r\n\r\n// Utils\r\nexport { withRetry, sleep, CircuitBreaker, RetryableError, FatalError } from './utils/error-handling';\r\n\r\n// Types\r\nexport * from './types';"],"names":["version","library","name","author","license","repository","description","features"],"mappings":"+TAEO,MAAMA,EAAU,QAEVC,EAAU,CACnBC,KAAM,qBACNF,UACAG,OAAQ,gBACRC,QAAS,MACTC,WAAY,kjBAIZ,MAAO,CACHH,KAAMD,EAAQC,KACdF,QAASC,EAAQD,QACjBG,OAAQF,EAAQE,OAChBG,YAAa,qDACbC,SAAU,CACN,4CACA,sCACA,2BACA,iCACA,mBACA,kCACA,mCACA,6BACA,2BAEJH,QAASH,EAAQG,QACjBC,WAAYJ,EAAQI,WAE5B,2BAGI,MAAO,oFAAoFJ,EAAQE,QACvG"}
package/dist/cjs/kafka/consumer.d.ts ADDED
@@ -0,0 +1,19 @@
+ import { KafkaConfig, Message } from '../types';
+ export type MessageHandler<T> = (message: Message<T>) => Promise<void>;
+ export declare class Consumer<T = any> {
+     private consumer;
+     private topic;
+     private groupId;
+     private handlers;
+     private isRunning;
+     private maxConcurrent;
+     private currentProcessing;
+     constructor(config: KafkaConfig, topic: string, groupId: string);
+     connect(): Promise<void>;
+     on(handler: MessageHandler<T>): this;
+     start(): Promise<void>;
+     stop(): Promise<void>;
+     setMaxConcurrent(limit: number): this;
+     seekToOffset(offset: number): Promise<void>;
+ }
+ //# sourceMappingURL=consumer.d.ts.map
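
Read together with the minified consumer.js below, these declarations describe a kafkajs-backed consumer: the constructor takes a KafkaConfig ({ clientId, brokers, ssl?, sasl? }), a topic, and a group id; every handler registered with on() runs per message, and setMaxConcurrent() caps in-flight processing (the backpressure loop polls every 100 ms). A hedged sketch; the broker address, topic, and group id are placeholders:

    import { Consumer } from '@async-fusion/data';

    type Event = { id: number };

    const consumer = new Consumer<Event>(
      { clientId: 'demo-app', brokers: ['localhost:9092'] }, // placeholder config
      'events',     // topic
      'demo-group'  // consumer group id
    );

    consumer
      .on(async (msg) => console.log(msg.offset, msg.value.id))
      .setMaxConcurrent(5); // at most 5 messages processed concurrently

    await consumer.connect(); // connects and subscribes (fromBeginning: false)
    await consumer.start();
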
package/dist/cjs/kafka/consumer.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"consumer.d.ts","sourceRoot":"","sources":["../../../src/kafka/consumer.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAEhD,MAAM,MAAM,cAAc,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;AAEvE,qBAAa,QAAQ,CAAC,CAAC,GAAG,GAAG;IAC3B,OAAO,CAAC,QAAQ,CAAgB;IAChC,OAAO,CAAC,KAAK,CAAS;IACtB,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,QAAQ,CAA2B;IAC3C,OAAO,CAAC,SAAS,CAAkB;IACnC,OAAO,CAAC,aAAa,CAAc;IACnC,OAAO,CAAC,iBAAiB,CAAa;gBAE1B,MAAM,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM;IAgBzD,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAK9B,EAAE,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC,CAAC,GAAG,IAAI;IAK9B,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAkCtB,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAK3B,gBAAgB,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAK/B,YAAY,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CAOlD"}
package/dist/cjs/kafka/consumer.js ADDED
@@ -0,0 +1,2 @@
+ "use strict";var s=require("kafkajs");exports.Consumer=class{constructor(t,e,n){this.handlers=[],this.isRunning=!1,this.maxConcurrent=10,this.currentProcessing=0;const i={clientId:t.clientId,brokers:t.brokers};t.ssl&&(i.ssl=t.ssl),t.sasl&&(i.sasl=t.sasl);const r=new s.Kafka(i);this.consumer=r.consumer({groupId:n}),this.topic=e,this.groupId=n}async connect(){await this.consumer.connect(),await this.consumer.subscribe({topic:this.topic,fromBeginning:!1})}on(s){return this.handlers.push(s),this}async start(){this.isRunning=!0,await this.consumer.run({eachMessage:async s=>{if(!this.isRunning)return;for(;this.currentProcessing>=this.maxConcurrent;)await new Promise(s=>setTimeout(s,100));const t={key:s.message.key?.toString(),value:JSON.parse(s.message.value?.toString()||"{}"),timestamp:new Date(s.message.timestamp||Date.now()),partition:s.partition,offset:Number(s.message.offset)};this.currentProcessing++;try{await Promise.all(this.handlers.map(s=>s(t)))}catch(s){throw console.error("Error processing message:",s),s}finally{this.currentProcessing--}}})}async stop(){this.isRunning=!1,await this.consumer.disconnect()}setMaxConcurrent(s){return this.maxConcurrent=s,this}async seekToOffset(s){await this.consumer.seek({topic:this.topic,partition:0,offset:s.toString()})}};
+ //# sourceMappingURL=consumer.js.map
package/dist/cjs/kafka/consumer.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"consumer.js","sources":["../../../../src/kafka/consumer.ts"],"sourcesContent":["import { Kafka, Consumer as KafkaConsumer, EachMessagePayload } from 'kafkajs';\r\nimport { KafkaConfig, Message } from '../types';\r\n\r\nexport type MessageHandler<T> = (message: Message<T>) => Promise<void>;\r\n\r\nexport class Consumer<T = any> {\r\n private consumer: KafkaConsumer;\r\n private topic: string;\r\n private groupId: string;\r\n private handlers: MessageHandler<T>[] = [];\r\n private isRunning: boolean = false;\r\n private maxConcurrent: number = 10;\r\n private currentProcessing: number = 0;\r\n\r\n constructor(config: KafkaConfig, topic: string, groupId: string) {\r\n // Use type assertion to bypass strict type checking\r\n const kafkaConfig: any = {\r\n clientId: config.clientId,\r\n brokers: config.brokers,\r\n };\r\n \r\n if (config.ssl) kafkaConfig.ssl = config.ssl;\r\n if (config.sasl) kafkaConfig.sasl = config.sasl;\r\n \r\n const kafka = new Kafka(kafkaConfig);\r\n this.consumer = kafka.consumer({ groupId });\r\n this.topic = topic;\r\n this.groupId = groupId;\r\n }\r\n\r\n async connect(): Promise<void> {\r\n await this.consumer.connect();\r\n await this.consumer.subscribe({ topic: this.topic, fromBeginning: false });\r\n }\r\n\r\n on(handler: MessageHandler<T>): this {\r\n this.handlers.push(handler);\r\n return this;\r\n }\r\n\r\n async start(): Promise<void> {\r\n this.isRunning = true;\r\n \r\n await this.consumer.run({\r\n eachMessage: async (payload: EachMessagePayload) => {\r\n if (!this.isRunning) return;\r\n \r\n // Backpressure control\r\n while (this.currentProcessing >= this.maxConcurrent) {\r\n await new Promise(resolve => setTimeout(resolve, 100));\r\n }\r\n \r\n const message: Message<T> = {\r\n key: payload.message.key?.toString(),\r\n value: JSON.parse(payload.message.value?.toString() || '{}'),\r\n timestamp: new Date(payload.message.timestamp || Date.now()),\r\n partition: payload.partition,\r\n offset: Number(payload.message.offset),\r\n };\r\n \r\n this.currentProcessing++;\r\n \r\n try {\r\n await Promise.all(this.handlers.map(handler => handler(message)));\r\n } catch (error) {\r\n console.error('Error processing message:', error);\r\n throw error;\r\n } finally {\r\n this.currentProcessing--;\r\n }\r\n },\r\n });\r\n }\r\n\r\n async stop(): Promise<void> {\r\n this.isRunning = false;\r\n await this.consumer.disconnect();\r\n }\r\n\r\n setMaxConcurrent(limit: number): this {\r\n this.maxConcurrent = limit;\r\n return this;\r\n }\r\n\r\n async seekToOffset(offset: number): Promise<void> {\r\n await this.consumer.seek({\r\n topic: this.topic,\r\n partition: 0,\r\n offset: offset.toString(),\r\n });\r\n 
}\r\n}"],"names":["constructor","config","topic","groupId","this","handlers","isRunning","maxConcurrent","currentProcessing","kafkaConfig","clientId","brokers","ssl","sasl","kafka","Kafka","consumer","connect","subscribe","fromBeginning","on","handler","push","start","run","eachMessage","async","payload","Promise","resolve","setTimeout","message","key","toString","value","JSON","parse","timestamp","Date","now","partition","offset","Number","all","map","error","console","stop","disconnect","setMaxConcurrent","limit","seekToOffset","seek"],"mappings":"6DAcE,WAAAA,CAAYC,EAAqBC,EAAeC,GALxCC,KAAQC,SAAwB,GAChCD,KAASE,WAAY,EACrBF,KAAaG,cAAW,GACxBH,KAAiBI,kBAAW,EAIlC,MAAMC,EAAmB,CACvBC,SAAUT,EAAOS,SACjBC,QAASV,EAAOU,SAGdV,EAAOW,MAAKH,EAAYG,IAAMX,EAAOW,KACrCX,EAAOY,OAAMJ,EAAYI,KAAOZ,EAAOY,MAE3C,MAAMC,EAAQ,IAAIC,QAAMN,GACxBL,KAAKY,SAAWF,EAAME,SAAS,CAAEb,YACjCC,KAAKF,MAAQA,EACbE,KAAKD,QAAUA,CAChB,CAED,aAAMc,SACEb,KAAKY,SAASC,gBACdb,KAAKY,SAASE,UAAU,CAAEhB,MAAOE,KAAKF,MAAOiB,eAAe,GACnE,CAED,EAAAC,CAAGC,GAED,OADAjB,KAAKC,SAASiB,KAAKD,GACZjB,IACR,CAED,WAAMmB,GACJnB,KAAKE,WAAY,QAEXF,KAAKY,SAASQ,IAAI,CACtBC,YAAaC,MAAOC,IAClB,IAAKvB,KAAKE,UAAW,OAGrB,KAAOF,KAAKI,mBAAqBJ,KAAKG,qBAC9B,IAAIqB,QAAQC,GAAWC,WAAWD,EAAS,MAGnD,MAAME,EAAsB,CAC1BC,IAAKL,EAAQI,QAAQC,KAAKC,WAC1BC,MAAOC,KAAKC,MAAMT,EAAQI,QAAQG,OAAOD,YAAc,MACvDI,UAAW,IAAIC,KAAKX,EAAQI,QAAQM,WAAaC,KAAKC,OACtDC,UAAWb,EAAQa,UACnBC,OAAQC,OAAOf,EAAQI,QAAQU,SAGjCrC,KAAKI,oBAEL,UACQoB,QAAQe,IAAIvC,KAAKC,SAASuC,IAAIvB,GAAWA,EAAQU,IACxD,CAAC,MAAOc,GAEP,MADAC,QAAQD,MAAM,4BAA6BA,GACrCA,CACP,CAAS,QACRzC,KAAKI,mBACN,IAGN,CAED,UAAMuC,GACJ3C,KAAKE,WAAY,QACXF,KAAKY,SAASgC,YACrB,CAED,gBAAAC,CAAiBC,GAEf,OADA9C,KAAKG,cAAgB2C,EACd9C,IACR,CAED,kBAAM+C,CAAaV,SACXrC,KAAKY,SAASoC,KAAK,CACvBlD,MAAOE,KAAKF,MACZsC,UAAW,EACXC,OAAQA,EAAOR,YAElB"}
package/dist/cjs/kafka/producer.d.ts ADDED
@@ -0,0 +1,21 @@
+ import { RecordMetadata } from 'kafkajs';
+ import { KafkaConfig, Message } from '../types';
+ export declare class Producer<T = any> {
+     private producer;
+     private topic;
+     private batchSize;
+     private batchTimeout;
+     private messageQueue;
+     private batchTimer;
+     constructor(config: KafkaConfig, topic: string);
+     connect(): Promise<void>;
+     send(message: Message<T>): Promise<RecordMetadata[]>;
+     sendBatch(messages: Message<T>[]): Promise<RecordMetadata[]>;
+     sendBuffered(message: Message<T>): Promise<void>;
+     private flush;
+     private startBatchProcessor;
+     disconnect(): Promise<void>;
+     setBatchSize(size: number): this;
+     setBatchTimeout(ms: number): this;
+ }
+ //# sourceMappingURL=producer.d.ts.map
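
The producer mirrors the consumer's constructor and offers two send paths: send()/sendBatch() go straight to the topic, while sendBuffered() queues messages until batchSize (default 100) or batchTimeout (default 1000 ms) triggers a flush, and disconnect() flushes whatever remains. The Message<T> shape used here ({ value, timestamp: Date, key? }) is inferred from the implementations in this diff; broker and topic values are placeholders:

    import { Producer } from '@async-fusion/data';

    const producer = new Producer<{ id: number }>(
      { clientId: 'demo-app', brokers: ['localhost:9092'] }, // placeholder config
      'events'
    )
      .setBatchSize(50)       // flush the buffer at 50 queued messages...
      .setBatchTimeout(500);  // ...or every 500 ms, whichever comes first
    await producer.connect(); // also starts the periodic flush timer

    // Direct path: one send() per record.
    await producer.send({ value: { id: 1 }, timestamp: new Date() });

    // Buffered path: queued until a size or time threshold is hit.
    await producer.sendBuffered({ value: { id: 2 }, timestamp: new Date() });

    await producer.disconnect(); // clears the timer and flushes the queue
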
package/dist/cjs/kafka/producer.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"producer.d.ts","sourceRoot":"","sources":["../../../src/kafka/producer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAoC,cAAc,EAAE,MAAM,SAAS,CAAC;AAC3E,OAAO,EAAE,WAAW,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAEhD,qBAAa,QAAQ,CAAC,CAAC,GAAG,GAAG;IAC3B,OAAO,CAAC,QAAQ,CAAgB;IAChC,OAAO,CAAC,KAAK,CAAS;IACtB,OAAO,CAAC,SAAS,CAAe;IAChC,OAAO,CAAC,YAAY,CAAgB;IACpC,OAAO,CAAC,YAAY,CAAoB;IACxC,OAAO,CAAC,UAAU,CAA+B;gBAErC,MAAM,EAAE,WAAW,EAAE,KAAK,EAAE,MAAM;IAexC,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAKxB,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,cAAc,EAAE,CAAC;IAWpD,SAAS,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,OAAO,CAAC,cAAc,EAAE,CAAC;IAW5D,YAAY,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;YAQxC,KAAK;IASnB,OAAO,CAAC,mBAAmB;IAMrB,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAQjC,YAAY,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI;IAKhC,eAAe,CAAC,EAAE,EAAE,MAAM,GAAG,IAAI;CAIlC"}
package/dist/cjs/kafka/producer.js ADDED
@@ -0,0 +1,2 @@
+ "use strict";var s=require("kafkajs");exports.Producer=class{constructor(t,e){this.batchSize=100,this.batchTimeout=1e3,this.messageQueue=[],this.batchTimer=null;const i={clientId:t.clientId,brokers:t.brokers};t.ssl&&(i.ssl=t.ssl),t.sasl&&(i.sasl=t.sasl);const a=new s.Kafka(i);this.producer=a.producer(),this.topic=e}async connect(){await this.producer.connect(),this.startBatchProcessor()}async send(s){return this.producer.send({topic:this.topic,messages:[{key:s.key,value:JSON.stringify(s.value),timestamp:s.timestamp.toISOString()}]})}async sendBatch(s){return this.producer.send({topic:this.topic,messages:s.map(s=>({key:s.key,value:JSON.stringify(s.value),timestamp:s.timestamp.toISOString()}))})}async sendBuffered(s){this.messageQueue.push(s),this.messageQueue.length>=this.batchSize&&await this.flush()}async flush(){if(0===this.messageQueue.length)return;const s=[...this.messageQueue];this.messageQueue=[],await this.sendBatch(s)}startBatchProcessor(){this.batchTimer=setInterval(async()=>{await this.flush()},this.batchTimeout)}async disconnect(){this.batchTimer&&clearInterval(this.batchTimer),await this.flush(),await this.producer.disconnect()}setBatchSize(s){return this.batchSize=s,this}setBatchTimeout(s){return this.batchTimeout=s,this}};
+ //# sourceMappingURL=producer.js.map
package/dist/cjs/kafka/producer.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"producer.js","sources":["../../../../src/kafka/producer.ts"],"sourcesContent":["import { Kafka, Producer as KafkaProducer, RecordMetadata } from 'kafkajs';\r\nimport { KafkaConfig, Message } from '../types';\r\n\r\nexport class Producer<T = any> {\r\n private producer: KafkaProducer;\r\n private topic: string;\r\n private batchSize: number = 100;\r\n private batchTimeout: number = 1000;\r\n private messageQueue: Message<T>[] = [];\r\n private batchTimer: NodeJS.Timeout | null = null;\r\n\r\n constructor(config: KafkaConfig, topic: string) {\r\n // Use type assertion to bypass strict type checking\r\n const kafkaConfig: any = {\r\n clientId: config.clientId,\r\n brokers: config.brokers,\r\n };\r\n \r\n if (config.ssl) kafkaConfig.ssl = config.ssl;\r\n if (config.sasl) kafkaConfig.sasl = config.sasl;\r\n \r\n const kafka = new Kafka(kafkaConfig);\r\n this.producer = kafka.producer();\r\n this.topic = topic;\r\n }\r\n\r\n async connect(): Promise<void> {\r\n await this.producer.connect();\r\n this.startBatchProcessor();\r\n }\r\n\r\n async send(message: Message<T>): Promise<RecordMetadata[]> {\r\n return this.producer.send({\r\n topic: this.topic,\r\n messages: [{\r\n key: message.key,\r\n value: JSON.stringify(message.value),\r\n timestamp: message.timestamp.toISOString(),\r\n }],\r\n });\r\n }\r\n\r\n async sendBatch(messages: Message<T>[]): Promise<RecordMetadata[]> {\r\n return this.producer.send({\r\n topic: this.topic,\r\n messages: messages.map(msg => ({\r\n key: msg.key,\r\n value: JSON.stringify(msg.value),\r\n timestamp: msg.timestamp.toISOString(),\r\n })),\r\n });\r\n }\r\n\r\n async sendBuffered(message: Message<T>): Promise<void> {\r\n this.messageQueue.push(message);\r\n \r\n if (this.messageQueue.length >= this.batchSize) {\r\n await this.flush();\r\n }\r\n }\r\n\r\n private async flush(): Promise<void> {\r\n if (this.messageQueue.length === 0) return;\r\n \r\n const batch = [...this.messageQueue];\r\n this.messageQueue = [];\r\n \r\n await this.sendBatch(batch);\r\n }\r\n\r\n private startBatchProcessor(): void {\r\n this.batchTimer = setInterval(async () => {\r\n await this.flush();\r\n }, this.batchTimeout);\r\n }\r\n\r\n async disconnect(): Promise<void> {\r\n if (this.batchTimer) {\r\n clearInterval(this.batchTimer);\r\n }\r\n await this.flush();\r\n await this.producer.disconnect();\r\n }\r\n\r\n setBatchSize(size: number): this {\r\n this.batchSize = size;\r\n return this;\r\n }\r\n\r\n setBatchTimeout(ms: number): this {\r\n this.batchTimeout = ms;\r\n return this;\r\n 
}\r\n}"],"names":["constructor","config","topic","this","batchSize","batchTimeout","messageQueue","batchTimer","kafkaConfig","clientId","brokers","ssl","sasl","kafka","Kafka","producer","connect","startBatchProcessor","send","message","messages","key","value","JSON","stringify","timestamp","toISOString","sendBatch","map","msg","sendBuffered","push","length","flush","batch","setInterval","async","disconnect","clearInterval","setBatchSize","size","setBatchTimeout","ms"],"mappings":"6DAWE,WAAAA,CAAYC,EAAqBC,GALzBC,KAASC,UAAW,IACpBD,KAAYE,aAAW,IACvBF,KAAYG,aAAiB,GAC7BH,KAAUI,WAA0B,KAI1C,MAAMC,EAAmB,CACvBC,SAAUR,EAAOQ,SACjBC,QAAST,EAAOS,SAGdT,EAAOU,MAAKH,EAAYG,IAAMV,EAAOU,KACrCV,EAAOW,OAAMJ,EAAYI,KAAOX,EAAOW,MAE3C,MAAMC,EAAQ,IAAIC,QAAMN,GACxBL,KAAKY,SAAWF,EAAME,WACtBZ,KAAKD,MAAQA,CACd,CAED,aAAMc,SACEb,KAAKY,SAASC,UACpBb,KAAKc,qBACN,CAED,UAAMC,CAAKC,GACT,OAAOhB,KAAKY,SAASG,KAAK,CACxBhB,MAAOC,KAAKD,MACZkB,SAAU,CAAC,CACTC,IAAKF,EAAQE,IACbC,MAAOC,KAAKC,UAAUL,EAAQG,OAC9BG,UAAWN,EAAQM,UAAUC,iBAGlC,CAED,eAAMC,CAAUP,GACd,OAAOjB,KAAKY,SAASG,KAAK,CACxBhB,MAAOC,KAAKD,MACZkB,SAAUA,EAASQ,IAAIC,IAAQ,CAC7BR,IAAKQ,EAAIR,IACTC,MAAOC,KAAKC,UAAUK,EAAIP,OAC1BG,UAAWI,EAAIJ,UAAUC,kBAG9B,CAED,kBAAMI,CAAaX,GACjBhB,KAAKG,aAAayB,KAAKZ,GAEnBhB,KAAKG,aAAa0B,QAAU7B,KAAKC,iBAC7BD,KAAK8B,OAEd,CAEO,WAAMA,GACZ,GAAiC,IAA7B9B,KAAKG,aAAa0B,OAAc,OAEpC,MAAME,EAAQ,IAAI/B,KAAKG,cACvBH,KAAKG,aAAe,SAEdH,KAAKwB,UAAUO,EACtB,CAEO,mBAAAjB,GACNd,KAAKI,WAAa4B,YAAYC,gBACtBjC,KAAK8B,SACV9B,KAAKE,aACT,CAED,gBAAMgC,GACAlC,KAAKI,YACP+B,cAAcnC,KAAKI,kBAEfJ,KAAK8B,cACL9B,KAAKY,SAASsB,YACrB,CAED,YAAAE,CAAaC,GAEX,OADArC,KAAKC,UAAYoC,EACVrC,IACR,CAED,eAAAsC,CAAgBC,GAEd,OADAvC,KAAKE,aAAeqC,EACbvC,IACR"}
package/dist/cjs/kafka/stream.d.ts ADDED
@@ -0,0 +1,52 @@
+ import { Message } from '../types';
+ export interface StreamOptions {
+     windowSize?: number;
+     slideInterval?: number;
+     watermarkDelay?: number;
+ }
+ export declare class KafkaStream<T = any> {
+     private sourceTopic;
+     private operations;
+     private windowDuration;
+     private slideInterval;
+     private watermarkDelay;
+     private windowData;
+     private aggregates;
+     private stateStore;
+     constructor(sourceTopic: string, options?: StreamOptions);
+     filter(predicate: (data: T) => boolean): this;
+     map<U>(transform: (data: T) => U): KafkaStream<U>;
+     flatMap<U>(transform: (data: T) => U[]): KafkaStream<U>;
+     window(sizeMs: number, slideMs?: number): this;
+     groupBy(keyExtractor: (data: T) => string): GroupedStream<T>;
+     aggregate<U>(aggregator: (acc: U | undefined, curr: T) => U, initialValue?: U): KafkaStream<U>;
+     join<U>(otherStream: KafkaStream<U>, joinKey: (data: T) => string): KafkaStream<{
+         left: T;
+         right: U;
+     }>;
+     foreach(callback: (data: T) => void): this;
+     onError(errorHandler: (error: Error, data: any) => void): this;
+     process(message: Message<T>): Promise<any>;
+     getState(): Map<string, any>;
+     reset(): void;
+ }
+ export declare class GroupedStream<T> {
+     private stream;
+     private keyExtractor;
+     private groups;
+     constructor(stream: KafkaStream<T>, keyExtractor: (data: T) => string);
+     reduce<U>(reducer: (acc: U, curr: T) => U, initialValue: U): KafkaStream<U>;
+     count(): KafkaStream<{
+         key: string;
+         count: number;
+     }>;
+     sum(valueExtractor: (data: T) => number): KafkaStream<{
+         key: string;
+         sum: number;
+     }>;
+     avg(valueExtractor: (data: T) => number): KafkaStream<{
+         key: string;
+         avg: number;
+     }>;
+ }
+ //# sourceMappingURL=stream.d.ts.map
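
KafkaStream builds an in-memory operation list: stateless steps (filter, foreach) return this, transforming steps (map, aggregate) return a new typed stream, and groupBy() hands off to GroupedStream for count/sum/avg reducers. process() replays a single message's value through the chain. A sketch under those declarations; the topic name and record type are made up:

    import { KafkaStream } from '@async-fusion/data';

    type Click = { userId: string; durationMs: number };

    // Stateless chain: filter/foreach return `this`, so the stream stays typed as Click.
    const clicks = new KafkaStream<Click>('clicks')
      .filter((c) => c.durationMs > 0)
      .foreach((c) => console.log('saw click from', c.userId));

    // Stateful chain via groupBy(); per the declarations this yields
    // KafkaStream<{ key: string; count: number }>.
    const perUser = new KafkaStream<Click>('clicks')
      .groupBy((c) => c.userId)
      .count();

    // process() runs one message's value through the operation list.
    const result = await clicks.process({
      value: { userId: 'u1', durationMs: 120 },
      timestamp: new Date(),
    });
    console.log(result); // the record, having passed the filter
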
package/dist/cjs/kafka/stream.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"stream.d.ts","sourceRoot":"","sources":["../../../src/kafka/stream.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAEnC,MAAM,WAAW,aAAa;IAC1B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED,qBAAa,WAAW,CAAC,CAAC,GAAG,GAAG;IAShB,OAAO,CAAC,WAAW;IAR/B,OAAO,CAAC,UAAU,CAAiC;IACnD,OAAO,CAAC,cAAc,CAAa;IACnC,OAAO,CAAC,aAAa,CAAa;IAClC,OAAO,CAAC,cAAc,CAAa;IACnC,OAAO,CAAC,UAAU,CAA8D;IAChF,OAAO,CAAC,UAAU,CAA+B;IACjD,OAAO,CAAC,UAAU,CAA+B;gBAE7B,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,aAAa;IAShE,MAAM,CAAC,SAAS,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,OAAO,GAAG,IAAI;IAM7C,GAAG,CAAC,CAAC,EAAE,SAAS,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC;IAUjD,OAAO,CAAC,CAAC,EAAE,SAAS,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,EAAE,GAAG,WAAW,CAAC,CAAC,CAAC;IAOvD,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI;IAO9C,OAAO,CAAC,YAAY,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,MAAM,GAAG,aAAa,CAAC,CAAC,CAAC;IAK5D,SAAS,CAAC,CAAC,EACP,UAAU,EAAE,CAAC,GAAG,EAAE,CAAC,GAAG,SAAS,EAAE,IAAI,EAAE,CAAC,KAAK,CAAC,EAC9C,YAAY,CAAC,EAAE,CAAC,GACjB,WAAW,CAAC,CAAC,CAAC;IAqBjB,IAAI,CAAC,CAAC,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,MAAM,GAAG,WAAW,CAAC;QAAE,IAAI,EAAE,CAAC,CAAC;QAAC,KAAK,EAAE,CAAC,CAAA;KAAE,CAAC;IActG,OAAO,CAAC,QAAQ,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,IAAI,GAAG,IAAI;IAS1C,OAAO,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAcxD,OAAO,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC;IAsBhD,QAAQ,IAAI,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC;IAK5B,KAAK,IAAI,IAAI;CAKhB;AAED,qBAAa,aAAa,CAAC,CAAC;IAIpB,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,YAAY;IAJxB,OAAO,CAAC,MAAM,CAAiC;gBAGnC,MAAM,EAAE,WAAW,CAAC,CAAC,CAAC,EACtB,YAAY,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,MAAM;IAI7C,MAAM,CAAC,CAAC,EACJ,OAAO,EAAE,CAAC,GAAG,EAAE,CAAC,EAAE,IAAI,EAAE,CAAC,KAAK,CAAC,EAC/B,YAAY,EAAE,CAAC,GAChB,WAAW,CAAC,CAAC,CAAC;IAqBjB,KAAK,IAAI,WAAW,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC;IAepD,GAAG,CAAC,cAAc,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,MAAM,GAAG,WAAW,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,GAAG,EAAE,MAAM,CAAA;KAAE,CAAC;IAgBnF,GAAG,CAAC,cAAc,EAAE,CAAC,IAAI,EAAE,CAAC,KAAK,MAAM,GAAG,WAAW,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,GAAG,EAAE,MAAM,CAAA;KAAE,CAAC;CAgBtF"}
package/dist/cjs/kafka/stream.js ADDED
@@ -0,0 +1,2 @@
+ "use strict";class t{constructor(t,r){this.sourceTopic=t,this.operations=[],this.windowDuration=0,this.slideInterval=0,this.watermarkDelay=0,this.windowData=new Map,this.aggregates=new Map,this.stateStore=new Map,r&&(this.windowDuration=r.windowSize||0,this.slideInterval=r.slideInterval||0,this.watermarkDelay=r.watermarkDelay||0)}filter(t){return this.operations.push(r=>t(r)?r:null),this}map(r){const e=new t(this.sourceTopic);return e.operations=[...this.operations,t=>r(t)],e.windowDuration=this.windowDuration,e.slideInterval=this.slideInterval,e.watermarkDelay=this.watermarkDelay,e}flatMap(r){const e=new t(this.sourceTopic);return e.operations=[...this.operations,t=>r(t)],e}window(t,r){return this.windowDuration=t,this.slideInterval=r||t,this}groupBy(t){return new r(this,t)}aggregate(r,e){const s=new t(this.sourceTopic);return s.operations=[...this.operations,t=>{const s=JSON.stringify(t);let o=this.aggregates.get(s);void 0===o&&void 0!==e&&(o=e);const i=r(o,t);return this.aggregates.set(s,i),i}],s}join(r,e){const s=new t(this.sourceTopic);return s.operations=[...this.operations,t=>(e(t),{left:t,right:null})],s}foreach(t){return this.operations.push(r=>(t(r),r)),this}onError(t){return this.operations.push(r=>{try{return r}catch(e){return t(e,r),null}}),this}async process(t){let r=t.value;for(const t of this.operations){if(null==r)break;try{r=Array.isArray(r)?r.flatMap(r=>t(r)).filter(Boolean):t(r)}catch(t){console.error("Stream processing error:",t),r=null}}return r}getState(){return new Map(this.stateStore)}reset(){this.aggregates.clear(),this.windowData.clear(),this.stateStore.clear()}}class r{constructor(t,r){this.stream=t,this.keyExtractor=r,this.groups=new Map}reduce(r,e){const s=new t(this.stream.sourceTopic);return s.operations=[...this.stream.operations,async t=>{const s=this.keyExtractor(t);let o=this.groups.get(s);return o||(o=[e],this.groups.set(s,o)),o[0]=r(o[0],t),o[0]}],s}count(){const r=new t(this.stream.sourceTopic);return r.operations=[...this.stream.operations,t=>{const r=this.keyExtractor(t);let e=this.groups.get(r)?.length||0;return this.groups.set(r,[...this.groups.get(r)||[],t]),{key:r,count:e+1}}],r}sum(r){const e=new t(this.stream.sourceTopic);return e.operations=[...this.stream.operations,t=>{const e=this.keyExtractor(t),s=r(t);let o=this.groups.get(e)?.[0]||0;return this.groups.set(e,[o+s]),{key:e,sum:o+s}}],e}avg(r){const e=new t(this.stream.sourceTopic);return e.operations=[...this.stream.operations,t=>{const e=this.keyExtractor(t),s=r(t);let o=this.groups.get(e)||[];o.push(s),this.groups.set(e,o);return{key:e,avg:o.reduce((t,r)=>t+r,0)/o.length}}],e}}exports.GroupedStream=r,exports.KafkaStream=t;
+ //# sourceMappingURL=stream.js.map
package/dist/cjs/kafka/stream.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"stream.js","sources":["../../../../src/kafka/stream.ts"],"sourcesContent":["import { Message } from '../types';\r\n\r\nexport interface StreamOptions {\r\n windowSize?: number; // Window size in milliseconds\r\n slideInterval?: number; // Slide interval for windowed operations\r\n watermarkDelay?: number; // Watermark delay for late data\r\n}\r\n\r\nexport class KafkaStream<T = any> {\r\n private operations: Array<(data: any) => any> = [];\r\n private windowDuration: number = 0;\r\n private slideInterval: number = 0;\r\n private watermarkDelay: number = 0;\r\n private windowData: Map<string, { data: any[], timestamp: number }> = new Map();\r\n private aggregates: Map<string, any> = new Map();\r\n private stateStore: Map<string, any> = new Map();\r\n\r\n constructor(private sourceTopic: string, options?: StreamOptions) {\r\n if (options) {\r\n this.windowDuration = options.windowSize || 0;\r\n this.slideInterval = options.slideInterval || 0;\r\n this.watermarkDelay = options.watermarkDelay || 0;\r\n }\r\n }\r\n\r\n // Filter records\r\n filter(predicate: (data: T) => boolean): this {\r\n this.operations.push((data: T) => predicate(data) ? data : null);\r\n return this;\r\n }\r\n\r\n // Map transformation\r\n map<U>(transform: (data: T) => U): KafkaStream<U> {\r\n const newStream = new KafkaStream<U>(this.sourceTopic);\r\n newStream.operations = [...this.operations, (data: T) => transform(data)];\r\n newStream.windowDuration = this.windowDuration;\r\n newStream.slideInterval = this.slideInterval;\r\n newStream.watermarkDelay = this.watermarkDelay;\r\n return newStream;\r\n }\r\n\r\n // FlatMap - one to many\r\n flatMap<U>(transform: (data: T) => U[]): KafkaStream<U> {\r\n const newStream = new KafkaStream<U>(this.sourceTopic);\r\n newStream.operations = [...this.operations, (data: T) => transform(data)];\r\n return newStream;\r\n }\r\n\r\n // Windowed operations\r\n window(sizeMs: number, slideMs?: number): this {\r\n this.windowDuration = sizeMs;\r\n this.slideInterval = slideMs || sizeMs;\r\n return this;\r\n }\r\n\r\n // Group by key\r\n groupBy(keyExtractor: (data: T) => string): GroupedStream<T> {\r\n return new GroupedStream<T>(this, keyExtractor);\r\n }\r\n\r\n // Aggregate with state\r\n aggregate<U>(\r\n aggregator: (acc: U | undefined, curr: T) => U,\r\n initialValue?: U\r\n ): KafkaStream<U> {\r\n const newStream = new KafkaStream<U>(this.sourceTopic);\r\n \r\n const aggregateOperation = (data: T) => {\r\n const key = JSON.stringify(data);\r\n let current = this.aggregates.get(key);\r\n \r\n if (current === undefined && initialValue !== undefined) {\r\n current = initialValue;\r\n }\r\n \r\n const result = aggregator(current, data);\r\n this.aggregates.set(key, result);\r\n return result;\r\n };\r\n \r\n newStream.operations = [...this.operations, aggregateOperation];\r\n return newStream;\r\n }\r\n\r\n // Join two streams\r\n join<U>(otherStream: KafkaStream<U>, joinKey: (data: T) => string): KafkaStream<{ left: T, right: U }> {\r\n const joinedStream = new KafkaStream<{ left: T, right: U }>(this.sourceTopic);\r\n \r\n const joinOperation = (data: T) => {\r\n const key = joinKey(data);\r\n // This would need actual stream joining logic\r\n return { left: data, right: null as any };\r\n };\r\n \r\n joinedStream.operations = [...this.operations, joinOperation];\r\n return joinedStream;\r\n }\r\n\r\n // Process each record with side effects\r\n foreach(callback: (data: T) => void): this {\r\n this.operations.push((data: T) => {\r\n callback(data);\r\n return 
data;\r\n });\r\n return this;\r\n }\r\n\r\n // Error handling for stream\r\n onError(errorHandler: (error: Error, data: any) => void): this {\r\n const errorOp = (data: any) => {\r\n try {\r\n return data;\r\n } catch (error) {\r\n errorHandler(error as Error, data);\r\n return null;\r\n }\r\n };\r\n this.operations.push(errorOp);\r\n return this;\r\n }\r\n\r\n // Process message through the stream pipeline\r\n async process(message: Message<T>): Promise<any> {\r\n let result: any = message.value;\r\n \r\n for (const op of this.operations) {\r\n if (result === null || result === undefined) break;\r\n \r\n try {\r\n if (Array.isArray(result)) {\r\n result = result.flatMap(item => op(item)).filter(Boolean);\r\n } else {\r\n result = op(result);\r\n }\r\n } catch (error) {\r\n console.error(`Stream processing error:`, error);\r\n result = null;\r\n }\r\n }\r\n \r\n return result;\r\n }\r\n\r\n // Get current state\r\n getState(): Map<string, any> {\r\n return new Map(this.stateStore);\r\n }\r\n\r\n // Reset stream state\r\n reset(): void {\r\n this.aggregates.clear();\r\n this.windowData.clear();\r\n this.stateStore.clear();\r\n }\r\n}\r\n\r\nexport class GroupedStream<T> {\r\n private groups: Map<string, any[]> = new Map();\r\n\r\n constructor(\r\n private stream: KafkaStream<T>,\r\n private keyExtractor: (data: T) => string\r\n ) {}\r\n\r\n // Reduce within groups\r\n reduce<U>(\r\n reducer: (acc: U, curr: T) => U,\r\n initialValue: U\r\n ): KafkaStream<U> {\r\n const newStream = new KafkaStream<U>(this.stream['sourceTopic']);\r\n \r\n const reduceOperation = async (data: T) => {\r\n const key = this.keyExtractor(data);\r\n let group = this.groups.get(key);\r\n \r\n if (!group) {\r\n group = [initialValue];\r\n this.groups.set(key, group);\r\n }\r\n \r\n group[0] = reducer(group[0] as U, data);\r\n return group[0];\r\n };\r\n \r\n newStream['operations'] = [...this.stream['operations'], reduceOperation];\r\n return newStream;\r\n }\r\n\r\n // Count within groups\r\n count(): KafkaStream<{ key: string, count: number }> {\r\n const newStream = new KafkaStream<{ key: string, count: number }>(this.stream['sourceTopic']);\r\n \r\n const countOperation = (data: T) => {\r\n const key = this.keyExtractor(data);\r\n let count = this.groups.get(key)?.length || 0;\r\n this.groups.set(key, [...(this.groups.get(key) || []), data]);\r\n return { key, count: count + 1 };\r\n };\r\n \r\n newStream['operations'] = [...this.stream['operations'], countOperation];\r\n return newStream;\r\n }\r\n\r\n // Sum within groups\r\n sum(valueExtractor: (data: T) => number): KafkaStream<{ key: string, sum: number }> {\r\n const newStream = new KafkaStream<{ key: string, sum: number }>(this.stream['sourceTopic']);\r\n \r\n const sumOperation = (data: T) => {\r\n const key = this.keyExtractor(data);\r\n const value = valueExtractor(data);\r\n let current = this.groups.get(key)?.[0] as number || 0;\r\n this.groups.set(key, [current + value]);\r\n return { key, sum: current + value };\r\n };\r\n \r\n newStream['operations'] = [...this.stream['operations'], sumOperation];\r\n return newStream;\r\n }\r\n\r\n // Average within groups\r\n avg(valueExtractor: (data: T) => number): KafkaStream<{ key: string, avg: number }> {\r\n const newStream = new KafkaStream<{ key: string, avg: number }>(this.stream['sourceTopic']);\r\n \r\n const avgOperation = (data: T) => {\r\n const key = this.keyExtractor(data);\r\n const value = valueExtractor(data);\r\n let group = this.groups.get(key) || [];\r\n group.push(value);\r\n 
this.groups.set(key, group);\r\n const sum = group.reduce((a, b) => a + b, 0);\r\n return { key, avg: sum / group.length };\r\n };\r\n \r\n newStream['operations'] = [...this.stream['operations'], avgOperation];\r\n return newStream;\r\n }\r\n}\r\n"],"names":["KafkaStream","constructor","sourceTopic","options","this","operations","windowDuration","slideInterval","watermarkDelay","windowData","Map","aggregates","stateStore","windowSize","filter","predicate","push","data","map","transform","newStream","flatMap","window","sizeMs","slideMs","groupBy","keyExtractor","GroupedStream","aggregate","aggregator","initialValue","key","JSON","stringify","current","get","undefined","result","set","join","otherStream","joinKey","joinedStream","left","right","foreach","callback","onError","errorHandler","error","process","message","value","op","Array","isArray","item","Boolean","console","getState","reset","clear","stream","groups","reduce","reducer","async","group","count","length","sum","valueExtractor","avg","a","b"],"mappings":"mBAQaA,EAST,WAAAC,CAAoBC,EAAqBC,GAArBC,KAAWF,YAAXA,EARZE,KAAUC,WAA8B,GACxCD,KAAcE,eAAW,EACzBF,KAAaG,cAAW,EACxBH,KAAcI,eAAW,EACzBJ,KAAAK,WAA8D,IAAIC,IAClEN,KAAAO,WAA+B,IAAID,IACnCN,KAAAQ,WAA+B,IAAIF,IAGnCP,IACAC,KAAKE,eAAiBH,EAAQU,YAAc,EAC5CT,KAAKG,cAAgBJ,EAAQI,eAAiB,EAC9CH,KAAKI,eAAiBL,EAAQK,gBAAkB,EAEvD,CAGD,MAAAM,CAAOC,GAEH,OADAX,KAAKC,WAAWW,KAAMC,GAAYF,EAAUE,GAAQA,EAAO,MACpDb,IACV,CAGD,GAAAc,CAAOC,GACH,MAAMC,EAAY,IAAIpB,EAAeI,KAAKF,aAK1C,OAJAkB,EAAUf,WAAa,IAAID,KAAKC,WAAaY,GAAYE,EAAUF,IACnEG,EAAUd,eAAiBF,KAAKE,eAChCc,EAAUb,cAAgBH,KAAKG,cAC/Ba,EAAUZ,eAAiBJ,KAAKI,eACzBY,CACV,CAGD,OAAAC,CAAWF,GACP,MAAMC,EAAY,IAAIpB,EAAeI,KAAKF,aAE1C,OADAkB,EAAUf,WAAa,IAAID,KAAKC,WAAaY,GAAYE,EAAUF,IAC5DG,CACV,CAGD,MAAAE,CAAOC,EAAgBC,GAGnB,OAFApB,KAAKE,eAAiBiB,EACtBnB,KAAKG,cAAgBiB,GAAWD,EACzBnB,IACV,CAGD,OAAAqB,CAAQC,GACJ,OAAO,IAAIC,EAAiBvB,KAAMsB,EACrC,CAGD,SAAAE,CACIC,EACAC,GAEA,MAAMV,EAAY,IAAIpB,EAAeI,KAAKF,aAgB1C,OADAkB,EAAUf,WAAa,IAAID,KAAKC,WAbJY,IACxB,MAAMc,EAAMC,KAAKC,UAAUhB,GAC3B,IAAIiB,EAAU9B,KAAKO,WAAWwB,IAAIJ,QAElBK,IAAZF,QAA0CE,IAAjBN,IACzBI,EAAUJ,GAGd,MAAMO,EAASR,EAAWK,EAASjB,GAEnC,OADAb,KAAKO,WAAW2B,IAAIP,EAAKM,GAClBA,IAIJjB,CACV,CAGD,IAAAmB,CAAQC,EAA6BC,GACjC,MAAMC,EAAe,IAAI1C,EAAmCI,KAAKF,aASjE,OADAwC,EAAarC,WAAa,IAAID,KAAKC,WANZY,IACPwB,EAAQxB,GAEb,CAAE0B,KAAM1B,EAAM2B,MAAO,QAIzBF,CACV,CAGD,OAAAG,CAAQC,GAKJ,OAJA1C,KAAKC,WAAWW,KAAMC,IAClB6B,EAAS7B,GACFA,IAEJb,IACV,CAGD,OAAA2C,CAAQC,GAUJ,OADA5C,KAAKC,WAAWW,KARCC,IACb,IACI,OAAOA,CACV,CAAC,MAAOgC,GAEL,OADAD,EAAaC,EAAgBhC,GACtB,IACV,IAGEb,IACV,CAGD,aAAM8C,CAAQC,GACV,IAAId,EAAcc,EAAQC,MAE1B,IAAK,MAAMC,KAAMjD,KAAKC,WAAY,CAC9B,GAAIgC,QAAyC,MAE7C,IAEQA,EADAiB,MAAMC,QAAQlB,GACLA,EAAOhB,QAAQmC,GAAQH,EAAGG,IAAO1C,OAAO2C,SAExCJ,EAAGhB,EAEnB,CAAC,MAAOY,GACLS,QAAQT,MAAM,2BAA4BA,GAC1CZ,EAAS,IACZ,CACJ,CAED,OAAOA,CACV,CAGD,QAAAsB,GACI,OAAO,IAAIjD,IAAIN,KAAKQ,WACvB,CAGD,KAAAgD,GACIxD,KAAKO,WAAWkD,QAChBzD,KAAKK,WAAWoD,QAChBzD,KAAKQ,WAAWiD,OACnB,QAGQlC,EAGT,WAAA1B,CACY6D,EACApC,GADAtB,KAAM0D,OAANA,EACA1D,KAAYsB,aAAZA,EAJJtB,KAAA2D,OAA6B,IAAIrD,GAKrC,CAGJ,MAAAsD,CACIC,EACAnC,GAEA,MAAMV,EAAY,IAAIpB,EAAeI,KAAK0D,OAAoB,aAgB9D,OADA1C,EAAsB,WAAI,IAAIhB,KAAK0D,OAAmB,WAb9BI,MAAOjD,IAC3B,MAAMc,EAAM3B,KAAKsB,aAAaT,GAC9B,IAAIkD,EAAQ/D,KAAK2D,OAAO5B,IAAIJ,GAQ5B,OANKoC,IACDA,EAAQ,CAACrC,GACT1B,KAAK2D,OAAOzB,IAAIP,EAAKoC,IAGzBA,EAAM,GAAKF,EAAQE,EAAM,GAASlD,GAC3BkD,EAAM,KAIV/C,CACV,CAGD,KAAAgD,GACI,MAAMhD,EAAY,IAAIpB,EAA4CI,KAAK0D,OAAoB,aAU3F,OADA1C,EAAsB,WAAI,IAAIhB,KAAK0D,OAAmB,WAP9B7C,IACpB,MAAMc,EAAM3B,KAAKsB,aAAaT,GAC9B,IAAImD,EAAQhE,KAAK2D,OAAO5B,IAAIJ,IAAMs
C,QAAU,EAE5C,OADAjE,KAAK2D,OAAOzB,IAAIP,EAAK,IAAK3B,KAAK2D,OAAO5B,IAAIJ,IAAQ,GAAKd,IAChD,CAAEc,MAAKqC,MAAOA,EAAQ,KAI1BhD,CACV,CAGD,GAAAkD,CAAIC,GACA,MAAMnD,EAAY,IAAIpB,EAA0CI,KAAK0D,OAAoB,aAWzF,OADA1C,EAAsB,WAAI,IAAIhB,KAAK0D,OAAmB,WARhC7C,IAClB,MAAMc,EAAM3B,KAAKsB,aAAaT,GACxBmC,EAAQmB,EAAetD,GAC7B,IAAIiB,EAAU9B,KAAK2D,OAAO5B,IAAIJ,KAAO,IAAgB,EAErD,OADA3B,KAAK2D,OAAOzB,IAAIP,EAAK,CAACG,EAAUkB,IACzB,CAAErB,MAAKuC,IAAKpC,EAAUkB,KAI1BhC,CACV,CAGD,GAAAoD,CAAID,GACA,MAAMnD,EAAY,IAAIpB,EAA0CI,KAAK0D,OAAoB,aAazF,OADA1C,EAAsB,WAAI,IAAIhB,KAAK0D,OAAmB,WAVhC7C,IAClB,MAAMc,EAAM3B,KAAKsB,aAAaT,GACxBmC,EAAQmB,EAAetD,GAC7B,IAAIkD,EAAQ/D,KAAK2D,OAAO5B,IAAIJ,IAAQ,GACpCoC,EAAMnD,KAAKoC,GACXhD,KAAK2D,OAAOzB,IAAIP,EAAKoC,GAErB,MAAO,CAAEpC,MAAKyC,IADFL,EAAMH,OAAO,CAACS,EAAGC,IAAMD,EAAIC,EAAG,GACjBP,EAAME,UAI5BjD,CACV"}
package/dist/cjs/pipeline/builder.d.ts ADDED
@@ -0,0 +1,32 @@
+ import { PipelineConfig, PipelineSource, PipelineSink } from '../types';
+ export interface RetryConfig {
+     maxAttempts: number;
+     delayMs: number;
+     backoffMultiplier: number;
+ }
+ export interface PipelineOptions {
+     retryConfig?: RetryConfig;
+     errorHandler?: (error: Error, context: any) => void;
+     maxConcurrent?: number;
+ }
+ export declare class PipelineBuilder {
+     private config;
+     private sources;
+     private transforms;
+     private sinks;
+     private options;
+     private metrics;
+     constructor(config: PipelineConfig, options?: PipelineOptions);
+     source(type: PipelineSource, config: any): this;
+     transform(transformFn: (data: any) => any): this;
+     sink(type: PipelineSink, config: any): this;
+     run(): Promise<void>;
+     private processSource;
+     private processRecord;
+     private writeToSink;
+     private sleep;
+     private printSummary;
+     lineage(): any;
+     getMetrics(): any;
+ }
+ //# sourceMappingURL=builder.d.ts.map
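
PipelineBuilder's fluent API collects sources, transforms, and sinks; run() then pushes records through with exponential-backoff retries and prints a summary. PipelineConfig, PipelineSource, and PipelineSink live in ./types, which this diff excerpt doesn't show, so the casts below are placeholders; the minified builder.js shows run() reading config.name and writeToSink() special-casing 'console', 'file', 'kafka', and 'database' sinks:

    import { PipelineBuilder } from '@async-fusion/data';

    const pipeline = new PipelineBuilder(
      { name: 'orders-etl' } as any, // placeholder; the full PipelineConfig shape is in ./types
      {
        retryConfig: { maxAttempts: 3, delayMs: 1000, backoffMultiplier: 2 },
        errorHandler: (err, ctx) => console.error('pipeline error:', err, ctx),
      }
    )
      .source('kafka' as any, { topic: 'orders' })
      .transform((rec) => ({ ...rec, value: rec.value * 1.2 })) // enrich each record
      .sink('console' as any, {});

    await pipeline.run();               // retries failures with backoff, then prints a summary
    console.log(pipeline.getMetrics()); // processed/errors/retries plus duration
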
package/dist/cjs/pipeline/builder.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"builder.d.ts","sourceRoot":"","sources":["../../../src/pipeline/builder.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,cAAc,EAAE,YAAY,EAAW,MAAM,UAAU,CAAC;AAEjF,MAAM,WAAW,WAAW;IACxB,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,MAAM,CAAC;IAChB,iBAAiB,EAAE,MAAM,CAAC;CAC7B;AAED,MAAM,WAAW,eAAe;IAC5B,WAAW,CAAC,EAAE,WAAW,CAAC;IAC1B,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,GAAG,KAAK,IAAI,CAAC;IACpD,aAAa,CAAC,EAAE,MAAM,CAAC;CAC1B;AAED,qBAAa,eAAe;IACxB,OAAO,CAAC,MAAM,CAAiB;IAC/B,OAAO,CAAC,OAAO,CAAoD;IACnE,OAAO,CAAC,UAAU,CAAiC;IACnD,OAAO,CAAC,KAAK,CAAkD;IAC/D,OAAO,CAAC,OAAO,CAAkB;IACjC,OAAO,CAAC,OAAO,CAMb;gBAEU,MAAM,EAAE,cAAc,EAAE,OAAO,CAAC,EAAE,eAAe;IAmB7D,MAAM,CAAC,IAAI,EAAE,cAAc,EAAE,MAAM,EAAE,GAAG,GAAG,IAAI;IAK/C,SAAS,CAAC,WAAW,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,GAAG,GAAG,IAAI;IAKhD,IAAI,CAAC,IAAI,EAAE,YAAY,EAAE,MAAM,EAAE,GAAG,GAAG,IAAI;IAKrC,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC;YAuBZ,aAAa;YAeb,aAAa;YA2Cb,WAAW;YAuBX,KAAK;IAInB,OAAO,CAAC,YAAY;IAkBpB,OAAO,IAAI,GAAG;IAUd,UAAU,IAAI,GAAG;CAQpB"}
package/dist/cjs/pipeline/builder.js ADDED
@@ -0,0 +1,2 @@
+ "use strict";exports.PipelineBuilder=class{constructor(e,s){this.sources=[],this.transforms=[],this.sinks=[],this.config=e,this.options=s||{retryConfig:{maxAttempts:3,delayMs:1e3,backoffMultiplier:2},maxConcurrent:10},this.metrics={processed:0,errors:0,retries:0,startTime:null,endTime:null}}source(e,s){return this.sources.push({type:e,config:s}),this}transform(e){return this.transforms.push(e),this}sink(e,s){return this.sinks.push({type:e,config:s}),this}async run(){this.metrics.startTime=new Date,console.log(`🚀 Starting pipeline: ${this.config.name}`),console.log(` Sources: ${this.sources.map(e=>e.type).join(", ")}`),console.log(` Transforms: ${this.transforms.length}`),console.log(` Sinks: ${this.sinks.map(e=>e.type).join(", ")}`);try{for(const e of this.sources)await this.processSource(e);this.metrics.endTime=new Date,this.printSummary()}catch(e){throw console.error("❌ Pipeline failed:",e),this.options.errorHandler&&this.options.errorHandler(e,{pipeline:this.config.name}),e}}async processSource(e){console.log(`📡 Processing source: ${e.type}`);const s=[{id:1,name:"Record 1",value:100},{id:2,name:"Record 2",value:200},{id:3,name:"Record 3",value:300}];for(const e of s)await this.processRecord(e)}async processRecord(e,s=1){try{let s=e;for(const e of this.transforms)try{s=e(s)}catch(e){throw console.error("Transform error:",e),e}for(const e of this.sinks)await this.writeToSink(e,s);this.metrics.processed++}catch(t){const o=this.options.retryConfig?.maxAttempts||3;if(s<o){const t=(this.options.retryConfig?.delayMs||1e3)*Math.pow(this.options.retryConfig?.backoffMultiplier||2,s-1);console.warn(`⚠️ Retry ${s}/${o} after ${t}ms`),this.metrics.retries++,await this.sleep(t),await this.processRecord(e,s+1)}else console.error(`❌ Failed to process record after ${o} attempts:`,e),this.metrics.errors++,this.options.errorHandler&&this.options.errorHandler(t,{record:e,attempt:s})}}async writeToSink(e,s){switch(e.type){case"console":console.log(`[${e.type}]`,JSON.stringify(s,null,2));break;case"file":console.log(`📁 Writing to file: ${e.config.filePath}`);break;case"kafka":console.log(`📤 Sending to Kafka topic: ${e.config.topic}`);break;case"database":console.log(`💾 Writing to database: ${e.config.table}`);break;default:console.log(`📤 Sending to ${e.type}`)}await this.sleep(10)}async sleep(e){return new Promise(s=>setTimeout(s,e))}printSummary(){const e=this.metrics.startTime&&this.metrics.endTime?this.metrics.endTime.getTime()-this.metrics.startTime.getTime():0;console.log(""),console.log("=".repeat(50)),console.log("📊 Pipeline Summary"),console.log("=".repeat(50)),console.log(` Name: ${this.config.name}`),console.log(` Duration: ${e}ms`),console.log(` Records processed: ${this.metrics.processed}`),console.log(` Errors: ${this.metrics.errors}`),console.log(` Retries: ${this.metrics.retries}`),console.log(` Success rate: ${(this.metrics.processed/(this.metrics.processed+this.metrics.errors)*100).toFixed(2)}%`),console.log("=".repeat(50))}lineage(){return{name:this.config.name,sources:this.sources.map(e=>e.type),transforms:this.transforms.length,sinks:this.sinks.map(e=>e.type),timestamp:(new Date).toISOString()}}getMetrics(){return{...this.metrics,duration:this.metrics.startTime&&this.metrics.endTime?this.metrics.endTime.getTime()-this.metrics.startTime.getTime():null}}};
+ //# sourceMappingURL=builder.js.map
package/dist/cjs/pipeline/builder.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"builder.js","sources":["../../../../src/pipeline/builder.ts"],"sourcesContent":["import { PipelineConfig, PipelineSource, PipelineSink, Message } from '../types';\r\n\r\nexport interface RetryConfig {\r\n maxAttempts: number;\r\n delayMs: number;\r\n backoffMultiplier: number;\r\n}\r\n\r\nexport interface PipelineOptions {\r\n retryConfig?: RetryConfig;\r\n errorHandler?: (error: Error, context: any) => void;\r\n maxConcurrent?: number;\r\n}\r\n\r\nexport class PipelineBuilder {\r\n private config: PipelineConfig;\r\n private sources: Array<{ type: PipelineSource; config: any }> = [];\r\n private transforms: Array<(data: any) => any> = [];\r\n private sinks: Array<{ type: PipelineSink; config: any }> = [];\r\n private options: PipelineOptions;\r\n private metrics: {\r\n processed: number;\r\n errors: number;\r\n retries: number;\r\n startTime: Date | null;\r\n endTime: Date | null;\r\n };\r\n\r\n constructor(config: PipelineConfig, options?: PipelineOptions) {\r\n this.config = config;\r\n this.options = options || {\r\n retryConfig: {\r\n maxAttempts: 3,\r\n delayMs: 1000,\r\n backoffMultiplier: 2\r\n },\r\n maxConcurrent: 10\r\n };\r\n this.metrics = {\r\n processed: 0,\r\n errors: 0,\r\n retries: 0,\r\n startTime: null,\r\n endTime: null\r\n };\r\n }\r\n\r\n source(type: PipelineSource, config: any): this {\r\n this.sources.push({ type, config });\r\n return this;\r\n }\r\n\r\n transform(transformFn: (data: any) => any): this {\r\n this.transforms.push(transformFn);\r\n return this;\r\n }\r\n\r\n sink(type: PipelineSink, config: any): this {\r\n this.sinks.push({ type, config });\r\n return this;\r\n }\r\n\r\n async run(): Promise<void> {\r\n this.metrics.startTime = new Date();\r\n console.log(`🚀 Starting pipeline: ${this.config.name}`);\r\n console.log(` Sources: ${this.sources.map(s => s.type).join(', ')}`);\r\n console.log(` Transforms: ${this.transforms.length}`);\r\n console.log(` Sinks: ${this.sinks.map(s => s.type).join(', ')}`);\r\n \r\n try {\r\n for (const source of this.sources) {\r\n await this.processSource(source);\r\n }\r\n \r\n this.metrics.endTime = new Date();\r\n this.printSummary();\r\n } catch (error) {\r\n console.error(`❌ Pipeline failed:`, error);\r\n if (this.options.errorHandler) {\r\n this.options.errorHandler(error as Error, { pipeline: this.config.name });\r\n }\r\n throw error;\r\n }\r\n }\r\n\r\n private async processSource(source: { type: PipelineSource; config: any }): Promise<void> {\r\n console.log(`📡 Processing source: ${source.type}`);\r\n \r\n // Simulate data processing with retry logic\r\n const mockData = [\r\n { id: 1, name: 'Record 1', value: 100 },\r\n { id: 2, name: 'Record 2', value: 200 },\r\n { id: 3, name: 'Record 3', value: 300 }\r\n ];\r\n \r\n for (const record of mockData) {\r\n await this.processRecord(record);\r\n }\r\n }\r\n\r\n private async processRecord(record: any, attempt: number = 1): Promise<void> {\r\n try {\r\n // Apply transformations with error handling\r\n let result = record;\r\n for (const transform of this.transforms) {\r\n try {\r\n result = transform(result);\r\n } catch (transformError) {\r\n console.error(`Transform error:`, transformError);\r\n throw transformError;\r\n }\r\n }\r\n \r\n // Send to sinks\r\n for (const sink of this.sinks) {\r\n await this.writeToSink(sink, result);\r\n }\r\n \r\n this.metrics.processed++;\r\n \r\n } catch (error) {\r\n const maxAttempts = this.options.retryConfig?.maxAttempts || 3;\r\n \r\n if (attempt < maxAttempts) {\r\n const delay = 
(this.options.retryConfig?.delayMs || 1000) * \r\n Math.pow(this.options.retryConfig?.backoffMultiplier || 2, attempt - 1);\r\n \r\n console.warn(`⚠️ Retry ${attempt}/${maxAttempts} after ${delay}ms`);\r\n this.metrics.retries++;\r\n \r\n await this.sleep(delay);\r\n await this.processRecord(record, attempt + 1);\r\n } else {\r\n console.error(`❌ Failed to process record after ${maxAttempts} attempts:`, record);\r\n this.metrics.errors++;\r\n \r\n if (this.options.errorHandler) {\r\n this.options.errorHandler(error as Error, { record, attempt });\r\n }\r\n }\r\n }\r\n }\r\n\r\n private async writeToSink(sink: { type: PipelineSink; config: any }, data: any): Promise<void> {\r\n switch (sink.type) {\r\n case 'console':\r\n console.log(`[${sink.type}]`, JSON.stringify(data, null, 2));\r\n break;\r\n case 'file':\r\n // Simulate file write\r\n console.log(`📁 Writing to file: ${sink.config.filePath}`);\r\n break;\r\n case 'kafka':\r\n console.log(`📤 Sending to Kafka topic: ${sink.config.topic}`);\r\n break;\r\n case 'database':\r\n console.log(`💾 Writing to database: ${sink.config.table}`);\r\n break;\r\n default:\r\n console.log(`📤 Sending to ${sink.type}`);\r\n }\r\n \r\n // Simulate async operation\r\n await this.sleep(10);\r\n }\r\n\r\n private async sleep(ms: number): Promise<void> {\r\n return new Promise(resolve => setTimeout(resolve, ms));\r\n }\r\n\r\n private printSummary(): void {\r\n const duration = this.metrics.startTime && this.metrics.endTime \r\n ? this.metrics.endTime.getTime() - this.metrics.startTime.getTime()\r\n : 0;\r\n \r\n console.log('');\r\n console.log('='.repeat(50));\r\n console.log('📊 Pipeline Summary');\r\n console.log('='.repeat(50));\r\n console.log(` Name: ${this.config.name}`);\r\n console.log(` Duration: ${duration}ms`);\r\n console.log(` Records processed: ${this.metrics.processed}`);\r\n console.log(` Errors: ${this.metrics.errors}`);\r\n console.log(` Retries: ${this.metrics.retries}`);\r\n console.log(` Success rate: ${((this.metrics.processed / (this.metrics.processed + this.metrics.errors)) * 100).toFixed(2)}%`);\r\n console.log('='.repeat(50));\r\n }\r\n\r\n lineage(): any {\r\n return {\r\n name: this.config.name,\r\n sources: this.sources.map(s => s.type),\r\n transforms: this.transforms.length,\r\n sinks: this.sinks.map(s => s.type),\r\n timestamp: new Date().toISOString()\r\n };\r\n }\r\n\r\n getMetrics(): any {\r\n return {\r\n ...this.metrics,\r\n duration: this.metrics.startTime && this.metrics.endTime \r\n ? 
this.metrics.endTime.getTime() - this.metrics.startTime.getTime()\r\n : null\r\n };\r\n }\r\n}"],"names":["constructor","config","options","this","sources","transforms","sinks","retryConfig","maxAttempts","delayMs","backoffMultiplier","maxConcurrent","metrics","processed","errors","retries","startTime","endTime","source","type","push","transform","transformFn","sink","run","Date","console","log","name","map","s","join","length","processSource","printSummary","error","errorHandler","pipeline","mockData","id","value","record","processRecord","attempt","result","transformError","writeToSink","delay","Math","pow","warn","sleep","data","JSON","stringify","filePath","topic","table","ms","Promise","resolve","setTimeout","duration","getTime","repeat","toFixed","lineage","timestamp","toISOString","getMetrics"],"mappings":"2CA4BI,WAAAA,CAAYC,EAAwBC,GAZ5BC,KAAOC,QAAiD,GACxDD,KAAUE,WAA8B,GACxCF,KAAKG,MAA+C,GAWxDH,KAAKF,OAASA,EACdE,KAAKD,QAAUA,GAAW,CACtBK,YAAa,CACTC,YAAa,EACbC,QAAS,IACTC,kBAAmB,GAEvBC,cAAe,IAEnBR,KAAKS,QAAU,CACXC,UAAW,EACXC,OAAQ,EACRC,QAAS,EACTC,UAAW,KACXC,QAAS,KAEhB,CAED,MAAAC,CAAOC,EAAsBlB,GAEzB,OADAE,KAAKC,QAAQgB,KAAK,CAAED,OAAMlB,WACnBE,IACV,CAED,SAAAkB,CAAUC,GAEN,OADAnB,KAAKE,WAAWe,KAAKE,GACdnB,IACV,CAED,IAAAoB,CAAKJ,EAAoBlB,GAErB,OADAE,KAAKG,MAAMc,KAAK,CAAED,OAAMlB,WACjBE,IACV,CAED,SAAMqB,GACFrB,KAAKS,QAAQI,UAAY,IAAIS,KAC7BC,QAAQC,IAAI,yBAAyBxB,KAAKF,OAAO2B,QACjDF,QAAQC,IAAI,eAAexB,KAAKC,QAAQyB,IAAIC,GAAKA,EAAEX,MAAMY,KAAK,SAC9DL,QAAQC,IAAI,kBAAkBxB,KAAKE,WAAW2B,UAC9CN,QAAQC,IAAI,aAAaxB,KAAKG,MAAMuB,IAAIC,GAAKA,EAAEX,MAAMY,KAAK,SAE1D,IACI,IAAK,MAAMb,KAAUf,KAAKC,cAChBD,KAAK8B,cAAcf,GAG7Bf,KAAKS,QAAQK,QAAU,IAAIQ,KAC3BtB,KAAK+B,cACR,CAAC,MAAOC,GAKL,MAJAT,QAAQS,MAAM,qBAAsBA,GAChChC,KAAKD,QAAQkC,cACbjC,KAAKD,QAAQkC,aAAaD,EAAgB,CAAEE,SAAUlC,KAAKF,OAAO2B,OAEhEO,CACT,CACJ,CAEO,mBAAMF,CAAcf,GACxBQ,QAAQC,IAAI,yBAAyBT,EAAOC,QAG5C,MAAMmB,EAAW,CACb,CAAEC,GAAI,EAAGX,KAAM,WAAYY,MAAO,KAClC,CAAED,GAAI,EAAGX,KAAM,WAAYY,MAAO,KAClC,CAAED,GAAI,EAAGX,KAAM,WAAYY,MAAO,MAGtC,IAAK,MAAMC,KAAUH,QACXnC,KAAKuC,cAAcD,EAEhC,CAEO,mBAAMC,CAAcD,EAAaE,EAAkB,GACvD,IAEI,IAAIC,EAASH,EACb,IAAK,MAAMpB,KAAalB,KAAKE,WACzB,IACIuC,EAASvB,EAAUuB,EACtB,CAAC,MAAOC,GAEL,MADAnB,QAAQS,MAAM,mBAAoBU,GAC5BA,CACT,CAIL,IAAK,MAAMtB,KAAQpB,KAAKG,YACdH,KAAK2C,YAAYvB,EAAMqB,GAGjCzC,KAAKS,QAAQC,WAEhB,CAAC,MAAOsB,GACL,MAAM3B,EAAcL,KAAKD,QAAQK,aAAaC,aAAe,EAE7D,GAAImC,EAAUnC,EAAa,CACvB,MAAMuC,GAAS5C,KAAKD,QAAQK,aAAaE,SAAW,KACvCuC,KAAKC,IAAI9C,KAAKD,QAAQK,aAAaG,mBAAqB,EAAGiC,EAAU,GAElFjB,QAAQwB,KAAK,YAAYP,KAAWnC,WAAqBuC,OACzD5C,KAAKS,QAAQG,gBAEPZ,KAAKgD,MAAMJ,SACX5C,KAAKuC,cAAcD,EAAQE,EAAU,EAC9C,MACGjB,QAAQS,MAAM,oCAAoC3B,cAAyBiC,GAC3EtC,KAAKS,QAAQE,SAETX,KAAKD,QAAQkC,cACbjC,KAAKD,QAAQkC,aAAaD,EAAgB,CAAEM,SAAQE,WAG/D,CACJ,CAEO,iBAAMG,CAAYvB,EAA2C6B,GACjE,OAAQ7B,EAAKJ,MACT,IAAK,UACDO,QAAQC,IAAI,IAAIJ,EAAKJ,QAASkC,KAAKC,UAAUF,EAAM,KAAM,IACzD,MACJ,IAAK,OAED1B,QAAQC,IAAI,uBAAuBJ,EAAKtB,OAAOsD,YAC/C,MACJ,IAAK,QACD7B,QAAQC,IAAI,8BAA8BJ,EAAKtB,OAAOuD,SACtD,MACJ,IAAK,WACD9B,QAAQC,IAAI,2BAA2BJ,EAAKtB,OAAOwD,SACnD,MACJ,QACI/B,QAAQC,IAAI,iBAAiBJ,EAAKJ,cAIpChB,KAAKgD,MAAM,GACpB,CAEO,WAAMA,CAAMO,GAChB,OAAO,IAAIC,QAAQC,GAAWC,WAAWD,EAASF,GACrD,CAEO,YAAAxB,GACJ,MAAM4B,EAAW3D,KAAKS,QAAQI,WAAab,KAAKS,QAAQK,QAClDd,KAAKS,QAAQK,QAAQ8C,UAAY5D,KAAKS,QAAQI,UAAU+C,UACxD,EAENrC,QAAQC,IAAI,IACZD,QAAQC,IAAI,IAAIqC,OAAO,KACvBtC,QAAQC,IAAI,uBACZD,QAAQC,IAAI,IAAIqC,OAAO,KACvBtC,QAAQC,IAAI,YAAYxB,KAAKF,OAAO2B,QACpCF,QAAQC,IAAI,gBAAgBmC,OAC5BpC,QAAQC,IAAI,yBAAyBxB,KAAKS,QAAQC,aAClDa,QAAQC,IAAI,cAAcxB,KAAKS,QAAQE,UACvCY,QAAQC,IAAI,eAAexB,KAAKS,QAAQG,WACxCW,QAAQC,IAAI,qBAAsBxB,K
AAKS,QAAQC,WAAaV,KAAKS,QAAQC,UAAYV,KAAKS,QAAQE,QAAW,KAAKmD,QAAQ,OAC1HvC,QAAQC,IAAI,IAAIqC,OAAO,IAC1B,CAED,OAAAE,GACI,MAAO,CACHtC,KAAMzB,KAAKF,OAAO2B,KAClBxB,QAASD,KAAKC,QAAQyB,IAAIC,GAAKA,EAAEX,MACjCd,WAAYF,KAAKE,WAAW2B,OAC5B1B,MAAOH,KAAKG,MAAMuB,IAAIC,GAAKA,EAAEX,MAC7BgD,WAAW,IAAI1C,MAAO2C,cAE7B,CAED,UAAAC,GACI,MAAO,IACAlE,KAAKS,QACRkD,SAAU3D,KAAKS,QAAQI,WAAab,KAAKS,QAAQK,QAC3Cd,KAAKS,QAAQK,QAAQ8C,UAAY5D,KAAKS,QAAQI,UAAU+C,UACxD,KAEb"}
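The identifiers in the builder source map above (source, transform, sink, run, a retryConfig with maxAttempts/delayMs/backoffMultiplier, maxConcurrent, errorHandler, getMetrics) point to a fluent pipeline API with per-record retries backed off exponentially (delayMs * backoffMultiplier^(attempt - 1)). A minimal sketch of that API, assuming the class is exported from the package root as PipelineBuilder (a hypothetical name; the actual declaration is in dist/cjs/pipeline/builder.d.ts, earlier in this diff) and assuming the 'kafka'/'console' type strings:

```ts
// Hypothetical sketch: the class name, import path, and the source/sink type
// strings are assumptions, not confirmed by this part of the diff.
import { PipelineBuilder } from '@async-fusion/data';

const pipeline = new PipelineBuilder(
  { name: 'orders-etl' }, // the pipeline config carries at least a `name`
  {
    retryConfig: { maxAttempts: 3, delayMs: 1000, backoffMultiplier: 2 },
    maxConcurrent: 10,
    errorHandler: (error, context) => console.error('pipeline failed', error, context),
  }
)
  .source('kafka', { topic: 'orders' })
  .transform((record) => ({ ...record, processedAt: Date.now() })) // transforms apply in order
  .sink('console', {});

await pipeline.run();               // logs a run summary when it finishes
console.log(pipeline.getMetrics()); // { processed, errors, retries, ..., duration }
```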
@@ -0,0 +1 @@
+ //# sourceMappingURL=connectors.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"connectors.d.ts","sourceRoot":"","sources":["../../../src/pipeline/connectors.ts"],"names":[],"mappings":""}
@@ -0,0 +1,17 @@
+ export declare class PipelineMonitor {
+     private metrics;
+     private startTime;
+     recordMetric(name: string, value: number): void;
+     getMetrics(): {
+         uptime: number;
+         metrics: Record<string, {
+             count: number;
+             recent: any[];
+             average: number;
+             max: number;
+             min: number;
+         }>;
+     };
+     exportToPrometheus(): Promise<string>;
+ }
+ //# sourceMappingURL=monitoring.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"monitoring.d.ts","sourceRoot":"","sources":["../../../src/pipeline/monitoring.ts"],"names":[],"mappings":"AAAA,qBAAa,eAAe;IAC1B,OAAO,CAAC,OAAO,CAA+B;IAC9C,OAAO,CAAC,SAAS,CAAoB;IAErC,YAAY,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,GAAG,IAAI;IAU/C,UAAU,IAAI;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE;YAAE,KAAK,EAAE,MAAM,CAAC;YAAC,MAAM,EAAE,GAAG,EAAE,CAAC;YAAC,OAAO,EAAE,MAAM,CAAC;YAAC,GAAG,EAAE,MAAM,CAAC;YAAC,GAAG,EAAE,MAAM,CAAA;SAAE,CAAC,CAAA;KAAE;IAoBhI,kBAAkB,IAAI,OAAO,CAAC,MAAM,CAAC;CAY5C"}
@@ -0,0 +1,2 @@
+ "use strict";exports.PipelineMonitor=class{constructor(){this.metrics=new Map,this.startTime=new Date}recordMetric(e,t){this.metrics.has(e)||this.metrics.set(e,[]),this.metrics.get(e).push({value:t,timestamp:new Date})}getMetrics(){const e={uptime:Date.now()-this.startTime.getTime(),metrics:{}};for(const[t,s]of this.metrics){const i=s.slice(-100);e.metrics[t]={count:s.length,recent:i,average:i.reduce((e,t)=>e+t.value,0)/i.length,max:Math.max(...i.map(e=>e.value)),min:Math.min(...i.map(e=>e.value))}}return e}async exportToPrometheus(){const e=this.getMetrics();let t="";for(const[s,i]of Object.entries(e.metrics))t+=`# HELP ${s} Pipeline metric\n`,t+=`# TYPE ${s} gauge\n`,t+=`${s} ${i.average}\n`;return t}};
+ //# sourceMappingURL=monitoring.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"monitoring.js","sources":["../../../../src/pipeline/monitoring.ts"],"sourcesContent":["export class PipelineMonitor {\r\n private metrics: Map<string, any> = new Map();\r\n private startTime: Date = new Date();\r\n\r\n recordMetric(name: string, value: number): void {\r\n if (!this.metrics.has(name)) {\r\n this.metrics.set(name, []);\r\n }\r\n this.metrics.get(name).push({\r\n value,\r\n timestamp: new Date(),\r\n });\r\n }\r\n\r\n getMetrics(): { uptime: number; metrics: Record<string, { count: number; recent: any[]; average: number; max: number; min: number }> } {\r\n const result: { uptime: number; metrics: Record<string, { count: number; recent: any[]; average: number; max: number; min: number }> } = {\r\n uptime: Date.now() - this.startTime.getTime(),\r\n metrics: {} as Record<string, { count: number; recent: any[]; average: number; max: number; min: number }>,\r\n };\r\n \r\n for (const [name, values] of this.metrics) {\r\n const recentValues = values.slice(-100);\r\n result.metrics[name] = {\r\n count: values.length,\r\n recent: recentValues,\r\n average: recentValues.reduce((a: number, b: any) => a + b.value, 0) / recentValues.length,\r\n max: Math.max(...recentValues.map((v: any) => v.value)),\r\n min: Math.min(...recentValues.map((v: any) => v.value)),\r\n };\r\n }\r\n \r\n return result;\r\n }\r\n\r\n async exportToPrometheus(): Promise<string> {\r\n const metrics = this.getMetrics();\r\n let output = '';\r\n \r\n for (const [name, data] of Object.entries(metrics.metrics)) {\r\n output += `# HELP ${name} Pipeline metric\\n`;\r\n output += `# TYPE ${name} gauge\\n`;\r\n output += `${name} ${data.average}\\n`;\r\n }\r\n \r\n return output;\r\n }\r\n}"],"names":["constructor","this","metrics","Map","startTime","Date","recordMetric","name","value","has","set","get","push","timestamp","getMetrics","result","uptime","now","getTime","values","recentValues","slice","count","length","recent","average","reduce","a","b","max","Math","map","v","min","exportToPrometheus","output","data","Object","entries"],"mappings":"2CAAA,WAAAA,GACUC,KAAAC,QAA4B,IAAIC,IAChCF,KAAAG,UAAkB,IAAIC,IA4C/B,CA1CC,YAAAC,CAAaC,EAAcC,GACpBP,KAAKC,QAAQO,IAAIF,IACpBN,KAAKC,QAAQQ,IAAIH,EAAM,IAEzBN,KAAKC,QAAQS,IAAIJ,GAAMK,KAAK,CAC1BJ,QACAK,UAAW,IAAIR,MAElB,CAED,UAAAS,GACE,MAAMC,EAAmI,CACvIC,OAAQX,KAAKY,MAAQhB,KAAKG,UAAUc,UACpChB,QAAS,CAAiG,GAG5G,IAAK,MAAOK,EAAMY,KAAWlB,KAAKC,QAAS,CACzC,MAAMkB,EAAeD,EAAOE,OAAO,KACnCN,EAAOb,QAAQK,GAAQ,CACrBe,MAAOH,EAAOI,OACdC,OAAQJ,EACRK,QAASL,EAAaM,OAAO,CAACC,EAAWC,IAAWD,EAAIC,EAAEpB,MAAO,GAAKY,EAAaG,OACnFM,IAAKC,KAAKD,OAAOT,EAAaW,IAAKC,GAAWA,EAAExB,QAChDyB,IAAKH,KAAKG,OAAOb,EAAaW,IAAKC,GAAWA,EAAExB,QAEnD,CAED,OAAOO,CACR,CAED,wBAAMmB,GACJ,MAAMhC,EAAUD,KAAKa,aACrB,IAAIqB,EAAS,GAEb,IAAK,MAAO5B,EAAM6B,KAASC,OAAOC,QAAQpC,EAAQA,SAChDiC,GAAU,UAAU5B,sBACpB4B,GAAU,UAAU5B,YACpB4B,GAAU,GAAG5B,KAAQ6B,EAAKX,YAG5B,OAAOU,CACR"}
@@ -0,0 +1,4 @@
+ export { useKafkaTopic } from './useKafkaTopic';
+ export { useSparkQuery } from './useSparkQuery';
+ export { useRealtimeData } from './useRealtimeData';
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/react/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC"}
@@ -0,0 +1,13 @@
+ interface UseKafkaTopicOptions {
+     deserialize?: (data: any) => any;
+     maxMessages?: number;
+     reconnectOnError?: boolean;
+ }
+ export declare function useKafkaTopic<T = any>(topic: string, options?: UseKafkaTopicOptions): {
+     data: T[];
+     isConnected: boolean;
+     error: Error;
+     clearData: () => void;
+ };
+ export {};
+ //# sourceMappingURL=useKafkaTopic.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"useKafkaTopic.d.ts","sourceRoot":"","sources":["../../../src/react/useKafkaTopic.ts"],"names":[],"mappings":"AAIA,UAAU,oBAAoB;IAC5B,WAAW,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,GAAG,CAAC;IACjC,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,gBAAgB,CAAC,EAAE,OAAO,CAAC;CAC5B;AAED,wBAAgB,aAAa,CAAC,CAAC,GAAG,GAAG,EACnC,KAAK,EAAE,MAAM,EACb,OAAO,GAAE,oBAAyB;;;;;EAsDnC"}
@@ -0,0 +1,16 @@
+ interface UseRealtimeDataOptions {
+     kafkaTopic: string;
+     sparkQuery: string;
+     refreshInterval?: number;
+     transform?: (data: any) => any;
+ }
+ export declare function useRealtimeData<T = any>(options: UseRealtimeDataOptions): {
+     realtimeData: any[];
+     aggregatedData: any[];
+     isConnected: boolean;
+     loading: boolean;
+     refetch: () => void;
+     chartRef: import("react").MutableRefObject<HTMLDivElement>;
+ };
+ export {};
+ //# sourceMappingURL=useRealtimeData.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"useRealtimeData.d.ts","sourceRoot":"","sources":["../../../src/react/useRealtimeData.ts"],"names":[],"mappings":"AAIA,UAAU,sBAAsB;IAC9B,UAAU,EAAE,MAAM,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;IACnB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,GAAG,CAAC;CAChC;AAED,wBAAgB,eAAe,CAAC,CAAC,GAAG,GAAG,EAAE,OAAO,EAAE,sBAAsB;;;;;;;EA6BvE"}
@@ -0,0 +1,12 @@
+ interface UseSparkQueryOptions {
+     refreshInterval?: number;
+     enabled?: boolean;
+ }
+ export declare function useSparkQuery(sqlQuery: string, options?: UseSparkQueryOptions): {
+     data: any[];
+     loading: boolean;
+     error: Error;
+     refetch: () => void;
+ };
+ export {};
+ //# sourceMappingURL=useSparkQuery.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"useSparkQuery.d.ts","sourceRoot":"","sources":["../../../src/react/useSparkQuery.ts"],"names":[],"mappings":"AAGA,UAAU,oBAAoB;IAC5B,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,OAAO,CAAC,EAAE,OAAO,CAAC;CACnB;AAED,wBAAgB,aAAa,CAC3B,QAAQ,EAAE,MAAM,EAChB,OAAO,GAAE,oBAAyB;;;;;EAuCnC"}
@@ -0,0 +1,27 @@
+ import { SparkConfig, SparkJob } from '../types';
+ export declare class SparkClient {
+     private baseUrl;
+     private headers;
+     private retryConfig;
+     constructor(config: SparkConfig, retryConfig?: {
+         maxRetries: number;
+         retryDelay: number;
+     });
+     submitJob(jobCode: string, jobName: string, options?: {
+         timeout?: number;
+         retries?: number;
+     }): Promise<SparkJob>;
+     getJobStatus(jobId: string): Promise<SparkJob>;
+     cancelJob(jobId: string): Promise<boolean>;
+     monitorJob(jobId: string, timeoutMs?: number): Promise<SparkJob>;
+     runPythonScript(scriptPath: string, args?: string[], options?: {
+         timeout?: number;
+     }): Promise<SparkJob>;
+     submitSQLQuery(sql: string, options?: {
+         database?: string;
+         timeout?: number;
+     }): Promise<any[]>;
+     private sleep;
+     healthCheck(): Promise<boolean>;
+ }
+ //# sourceMappingURL=client.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"client.d.ts","sourceRoot":"","sources":["../../../src/spark/client.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,QAAQ,EAAc,MAAM,UAAU,CAAC;AAE7D,qBAAa,WAAW;IACpB,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,OAAO,CAAyB;IACxC,OAAO,CAAC,WAAW,CAA6C;gBAEpD,MAAM,EAAE,WAAW,EAAE,WAAW,CAAC,EAAE;QAAE,UAAU,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,MAAM,CAAA;KAAE;IAQnF,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,QAAQ,CAAC;IAiDhH,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,QAAQ,CAAC;IAwB9C,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAmB1C,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,SAAS,GAAE,MAAe,GAAG,OAAO,CAAC,QAAQ,CAAC;IA2BxE,eAAe,CAAC,UAAU,EAAE,MAAM,EAAE,IAAI,GAAE,MAAM,EAAO,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,QAAQ,CAAC;IA2B3G,cAAc,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,QAAQ,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC;IAkBpG,OAAO,CAAC,KAAK;IAIP,WAAW,IAAI,OAAO,CAAC,OAAO,CAAC;CAQxC"}
@@ -0,0 +1,2 @@
+ "use strict";exports.SparkClient=class{constructor(t,s){this.baseUrl=`${t.master}/api/v1`,this.headers={"Content-Type":"application/json"},this.retryConfig=s||{maxRetries:3,retryDelay:1e3}}async submitJob(t,s,e){const r=e?.retries||this.retryConfig.maxRetries;for(let o=1;o<=r;o++)try{const r=await fetch(`${this.baseUrl}/submissions/create`,{method:"POST",headers:this.headers,body:JSON.stringify({action:"CreateSubmissionRequest",appResource:t,mainClass:"org.apache.spark.deploy.SparkSubmit",appArgs:[s],sparkProperties:{"spark.app.name":s,"spark.master":this.baseUrl}})});if(!r.ok)throw new Error(`HTTP ${r.status}: ${r.statusText}`);const o={id:(await r.json()).submissionId,status:"pending",progress:0,startTime:new Date,stages:[]};return e?.timeout&&this.monitorJob(o.id,e.timeout).catch(console.error),o}catch(t){if(console.error(`Job submission attempt ${o} failed:`,t),o===r)throw t;await this.sleep(this.retryConfig.retryDelay*o)}throw new Error("Job submission failed after all retries")}async getJobStatus(t){try{const s=await fetch(`${this.baseUrl}/submissions/status/${t}`);if(!s.ok)throw new Error(`Failed to get job status: ${s.status}`);const e=await s.json();return{id:t,status:e.driverState?.toLowerCase()||"unknown",progress:e.progress||0,startTime:new Date(e.submissionTime||Date.now()),endTime:e.completionTime?new Date(e.completionTime):void 0,stages:e.stages||[]}}catch(t){throw console.error("Error getting job status:",t),t}}async cancelJob(t){try{const s=await fetch(`${this.baseUrl}/submissions/kill/${t}`,{method:"POST"});return s.ok?(console.log(`✅ Job ${t} cancelled successfully`),!0):(console.error(`Failed to cancel job ${t}: ${s.status}`),!1)}catch(t){return console.error("Error cancelling job:",t),!1}}async monitorJob(t,s=3e5){const e=Date.now();let r=-1;for(;Date.now()-e<s;){const s=await this.getJobStatus(t);if(s.progress!==r&&(console.log(`📊 Job ${t} progress: ${s.progress}%`),r=s.progress),"completed"===s.status||"failed"===s.status)return console.log(`✅ Job ${t} ${s.status}`),s;await this.sleep(2e3)}return console.warn(`⚠️ Job ${t} monitoring timed out after ${s}ms`),this.getJobStatus(t)}async runPythonScript(t,s=[],e){if(!require("fs").existsSync(t))throw new Error(`Python script not found: ${t}`);const r=`\nfrom pyspark.sql import SparkSession\nimport sys\nimport json\n\nspark = SparkSession.builder.getOrCreate()\n\ntry:\n with open('${t}', 'r') as f:\n code = f.read()\n exec(code)\n print("✅ Python script executed successfully")\nexcept Exception as e:\n print(f"❌ Error executing script: {e}")\n sys.exit(1)\n `;return this.submitJob(r,`python-${Date.now()}`,e)}async submitSQLQuery(t,s){const e=`\nfrom pyspark.sql import SparkSession\n\nspark = SparkSession.builder.getOrCreate()\n\n${s?.database?`spark.sql("USE ${s.database}")`:""}\n\nresult = spark.sql("""${t.replace(/"/g,'\\"')}""")\nresult.show()\nprint(result.collect())\n `,r=await this.submitJob(e,`sql-${Date.now()}`,s);return await this.monitorJob(r.id,s?.timeout),[]}sleep(t){return new Promise(s=>setTimeout(s,t))}async healthCheck(){try{return(await fetch(`${this.baseUrl}/`)).ok}catch{return!1}}};
+ //# sourceMappingURL=client.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"client.js","sources":["../../../../src/spark/client.ts"],"sourcesContent":["\r\nimport { SparkConfig, SparkJob, SparkStage } from '../types';\r\n\r\nexport class SparkClient {\r\n private baseUrl: string;\r\n private headers: Record<string, string>;\r\n private retryConfig: { maxRetries: number; retryDelay: number };\r\n\r\n constructor(config: SparkConfig, retryConfig?: { maxRetries: number; retryDelay: number }) {\r\n this.baseUrl = `${config.master}/api/v1`;\r\n this.headers = {\r\n 'Content-Type': 'application/json',\r\n };\r\n this.retryConfig = retryConfig || { maxRetries: 3, retryDelay: 1000 };\r\n }\r\n\r\n async submitJob(jobCode: string, jobName: string, options?: { timeout?: number; retries?: number }): Promise<SparkJob> {\r\n const maxRetries = options?.retries || this.retryConfig.maxRetries;\r\n \r\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\r\n try {\r\n const response = await fetch(`${this.baseUrl}/submissions/create`, {\r\n method: 'POST',\r\n headers: this.headers,\r\n body: JSON.stringify({\r\n action: 'CreateSubmissionRequest',\r\n appResource: jobCode,\r\n mainClass: 'org.apache.spark.deploy.SparkSubmit',\r\n appArgs: [jobName],\r\n sparkProperties: {\r\n 'spark.app.name': jobName,\r\n 'spark.master': this.baseUrl,\r\n },\r\n }),\r\n });\r\n\r\n if (!response.ok) {\r\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\r\n }\r\n\r\n const data = await response.json();\r\n \r\n const job: SparkJob = {\r\n id: data.submissionId,\r\n status: 'pending',\r\n progress: 0,\r\n startTime: new Date(),\r\n stages: [],\r\n };\r\n\r\n // Start monitoring if timeout specified\r\n if (options?.timeout) {\r\n this.monitorJob(job.id, options.timeout).catch(console.error);\r\n }\r\n\r\n return job;\r\n } catch (error) {\r\n console.error(`Job submission attempt ${attempt} failed:`, error);\r\n if (attempt === maxRetries) throw error;\r\n await this.sleep(this.retryConfig.retryDelay * attempt);\r\n }\r\n }\r\n throw new Error('Job submission failed after all retries');\r\n }\r\n\r\n async getJobStatus(jobId: string): Promise<SparkJob> {\r\n try {\r\n const response = await fetch(`${this.baseUrl}/submissions/status/${jobId}`);\r\n \r\n if (!response.ok) {\r\n throw new Error(`Failed to get job status: ${response.status}`);\r\n }\r\n \r\n const data = await response.json();\r\n \r\n return {\r\n id: jobId,\r\n status: data.driverState?.toLowerCase() || 'unknown',\r\n progress: data.progress || 0,\r\n startTime: new Date(data.submissionTime || Date.now()),\r\n endTime: data.completionTime ? 
new Date(data.completionTime) : undefined,\r\n stages: data.stages || [],\r\n };\r\n } catch (error) {\r\n console.error(`Error getting job status:`, error);\r\n throw error;\r\n }\r\n }\r\n\r\n async cancelJob(jobId: string): Promise<boolean> {\r\n try {\r\n const response = await fetch(`${this.baseUrl}/submissions/kill/${jobId}`, {\r\n method: 'POST',\r\n });\r\n \r\n if (response.ok) {\r\n console.log(`✅ Job ${jobId} cancelled successfully`);\r\n return true;\r\n }\r\n \r\n console.error(`Failed to cancel job ${jobId}: ${response.status}`);\r\n return false;\r\n } catch (error) {\r\n console.error(`Error cancelling job:`, error);\r\n return false;\r\n }\r\n }\r\n\r\n async monitorJob(jobId: string, timeoutMs: number = 300000): Promise<SparkJob> {\r\n const startTime = Date.now();\r\n let lastProgress = -1;\r\n \r\n while (Date.now() - startTime < timeoutMs) {\r\n const job = await this.getJobStatus(jobId);\r\n \r\n // Log progress changes\r\n if (job.progress !== lastProgress) {\r\n console.log(`📊 Job ${jobId} progress: ${job.progress}%`);\r\n lastProgress = job.progress;\r\n }\r\n \r\n // Check completion\r\n if (job.status === 'completed' || job.status === 'failed') {\r\n console.log(`✅ Job ${jobId} ${job.status}`);\r\n return job;\r\n }\r\n \r\n // Wait before next poll\r\n await this.sleep(2000);\r\n }\r\n \r\n console.warn(`⚠️ Job ${jobId} monitoring timed out after ${timeoutMs}ms`);\r\n return this.getJobStatus(jobId);\r\n }\r\n\r\n async runPythonScript(scriptPath: string, args: string[] = [], options?: { timeout?: number }): Promise<SparkJob> {\r\n // Validate script exists\r\n const fs = require('fs');\r\n if (!fs.existsSync(scriptPath)) {\r\n throw new Error(`Python script not found: ${scriptPath}`);\r\n }\r\n \r\n const jobCode = `\r\nfrom pyspark.sql import SparkSession\r\nimport sys\r\nimport json\r\n\r\nspark = SparkSession.builder.getOrCreate()\r\n\r\ntry:\r\n with open('${scriptPath}', 'r') as f:\r\n code = f.read()\r\n exec(code)\r\n print(\"✅ Python script executed successfully\")\r\nexcept Exception as e:\r\n print(f\"❌ Error executing script: {e}\")\r\n sys.exit(1)\r\n `;\r\n \r\n return this.submitJob(jobCode, `python-${Date.now()}`, options);\r\n }\r\n\r\n async submitSQLQuery(sql: string, options?: { database?: string; timeout?: number }): Promise<any[]> {\r\n const queryJob = `\r\nfrom pyspark.sql import SparkSession\r\n\r\nspark = SparkSession.builder.getOrCreate()\r\n\r\n${options?.database ? 
`spark.sql(\"USE ${options.database}\")` : ''}\r\n\r\nresult = spark.sql(\"\"\"${sql.replace(/\"/g, '\\\\\"')}\"\"\")\r\nresult.show()\r\nprint(result.collect())\r\n `;\r\n \r\n const job = await this.submitJob(queryJob, `sql-${Date.now()}`, options);\r\n await this.monitorJob(job.id, options?.timeout);\r\n return [];\r\n }\r\n\r\n private sleep(ms: number): Promise<void> {\r\n return new Promise(resolve => setTimeout(resolve, ms));\r\n }\r\n\r\n async healthCheck(): Promise<boolean> {\r\n try {\r\n const response = await fetch(`${this.baseUrl}/`);\r\n return response.ok;\r\n } catch {\r\n return false;\r\n }\r\n }\r\n}"],"names":["constructor","config","retryConfig","this","baseUrl","master","headers","maxRetries","retryDelay","submitJob","jobCode","jobName","options","retries","attempt","response","fetch","method","body","JSON","stringify","action","appResource","mainClass","appArgs","sparkProperties","ok","Error","status","statusText","job","id","json","submissionId","progress","startTime","Date","stages","timeout","monitorJob","catch","console","error","sleep","getJobStatus","jobId","data","driverState","toLowerCase","submissionTime","now","endTime","completionTime","undefined","cancelJob","log","timeoutMs","lastProgress","warn","runPythonScript","scriptPath","args","require","existsSync","submitSQLQuery","sql","queryJob","database","replace","ms","Promise","resolve","setTimeout","healthCheck"],"mappings":"uCAQI,WAAAA,CAAYC,EAAqBC,GAC7BC,KAAKC,QAAU,GAAGH,EAAOI,gBACzBF,KAAKG,QAAU,CACX,eAAgB,oBAEpBH,KAAKD,YAAcA,GAAe,CAAEK,WAAY,EAAGC,WAAY,IAClE,CAED,eAAMC,CAAUC,EAAiBC,EAAiBC,GAC9C,MAAML,EAAaK,GAASC,SAAWV,KAAKD,YAAYK,WAExD,IAAK,IAAIO,EAAU,EAAGA,GAAWP,EAAYO,IACzC,IACI,MAAMC,QAAiBC,MAAM,GAAGb,KAAKC,6BAA8B,CAC/Da,OAAQ,OACRX,QAASH,KAAKG,QACdY,KAAMC,KAAKC,UAAU,CACjBC,OAAQ,0BACRC,YAAaZ,EACba,UAAW,sCACXC,QAAS,CAACb,GACVc,gBAAiB,CACb,iBAAkBd,EAClB,eAAgBR,KAAKC,aAKjC,IAAKW,EAASW,GACV,MAAM,IAAIC,MAAM,QAAQZ,EAASa,WAAWb,EAASc,cAGzD,MAEMC,EAAgB,CAClBC,UAHehB,EAASiB,QAGfC,aACTL,OAAQ,UACRM,SAAU,EACVC,UAAW,IAAIC,KACfC,OAAQ,IAQZ,OAJIzB,GAAS0B,SACTnC,KAAKoC,WAAWT,EAAIC,GAAInB,EAAQ0B,SAASE,MAAMC,QAAQC,OAGpDZ,CACV,CAAC,MAAOY,GAEL,GADAD,QAAQC,MAAM,0BAA0B5B,YAAmB4B,GACvD5B,IAAYP,EAAY,MAAMmC,QAC5BvC,KAAKwC,MAAMxC,KAAKD,YAAYM,WAAaM,EAClD,CAEL,MAAM,IAAIa,MAAM,0CACnB,CAED,kBAAMiB,CAAaC,GACf,IACI,MAAM9B,QAAiBC,MAAM,GAAGb,KAAKC,8BAA8ByC,KAEnE,IAAK9B,EAASW,GACV,MAAM,IAAIC,MAAM,6BAA6BZ,EAASa,UAG1D,MAAMkB,QAAa/B,EAASiB,OAE5B,MAAO,CACHD,GAAIc,EACJjB,OAAQkB,EAAKC,aAAaC,eAAiB,UAC3Cd,SAAUY,EAAKZ,UAAY,EAC3BC,UAAW,IAAIC,KAAKU,EAAKG,gBAAkBb,KAAKc,OAChDC,QAASL,EAAKM,eAAiB,IAAIhB,KAAKU,EAAKM,qBAAkBC,EAC/DhB,OAAQS,EAAKT,QAAU,GAE9B,CAAC,MAAOK,GAEL,MADAD,QAAQC,MAAM,4BAA6BA,GACrCA,CACT,CACJ,CAED,eAAMY,CAAUT,GACZ,IACI,MAAM9B,QAAiBC,MAAM,GAAGb,KAAKC,4BAA4ByC,IAAS,CACtE5B,OAAQ,SAGZ,OAAIF,EAASW,IACTe,QAAQc,IAAI,SAASV,6BACd,IAGXJ,QAAQC,MAAM,wBAAwBG,MAAU9B,EAASa,WAClD,EACV,CAAC,MAAOc,GAEL,OADAD,QAAQC,MAAM,wBAAyBA,IAChC,CACV,CACJ,CAED,gBAAMH,CAAWM,EAAeW,EAAoB,KAChD,MAAMrB,EAAYC,KAAKc,MACvB,IAAIO,GAAgB,EAEpB,KAAOrB,KAAKc,MAAQf,EAAYqB,GAAW,CACvC,MAAM1B,QAAY3B,KAAKyC,aAAaC,GASpC,GANIf,EAAII,WAAauB,IACjBhB,QAAQc,IAAI,UAAUV,eAAmBf,EAAII,aAC7CuB,EAAe3B,EAAII,UAIJ,cAAfJ,EAAIF,QAAyC,WAAfE,EAAIF,OAElC,OADAa,QAAQc,IAAI,SAASV,KAASf,EAAIF,UAC3BE,QAIL3B,KAAKwC,MAAM,IACpB,CAGD,OADAF,QAAQiB,KAAK,UAAUb,gCAAoCW,OACpDrD,KAAKyC,aAAaC,EAC5B,CAED,qBAAMc,CAAgBC,EAAoBC,EAAiB,GAAIjD,GAG3D,IADWkD,QAAQ,MACXC,WAAWH,GACf,MAAM,IAAIjC,MAAM,4BAA4BiC,KAGhD,MAAMlD,EAAU,yIAQPkD,8MAST,OAAOzD,KAAKM,UAAUC,EAAS,UAAU0B,KAAKc,QAAStC,EAC1D,CAED,oBAAMoD,CAAeC,EAAarD,GAC9B,MAAMsD,EA
AW,2FAKvBtD,GAASuD,SAAW,kBAAkBvD,EAAQuD,aAAe,+BAEvCF,EAAIG,QAAQ,KAAM,+DAK5BtC,QAAY3B,KAAKM,UAAUyD,EAAU,OAAO9B,KAAKc,QAAStC,GAEhE,aADMT,KAAKoC,WAAWT,EAAIC,GAAInB,GAAS0B,SAChC,EACV,CAEO,KAAAK,CAAM0B,GACV,OAAO,IAAIC,QAAQC,GAAWC,WAAWD,EAASF,GACrD,CAED,iBAAMI,GACF,IAEI,aADuBzD,MAAM,GAAGb,KAAKC,aACrBsB,EACnB,CAAC,MACE,OAAO,CACV,CACJ"}
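The map above carries the full SparkClient source: the base URL is `${config.master}/api/v1`, submitJob retries with a linearly growing delay (retryDelay * attempt), and monitorJob polls every 2 s up to a default 300000 ms timeout. Note that, as published, submitSQLQuery monitors the job but always resolves to an empty array. A sketch (SparkConfig fields beyond `master` and `appName` are not shown in this part of the diff):

```ts
import { SparkClient } from '@async-fusion/data'; // entry point assumed

const spark = new SparkClient(
  { master: 'http://spark-master:6066', appName: 'fusion-demo' }, // minimal assumed config
  { maxRetries: 5, retryDelay: 2_000 }
);

if (await spark.healthCheck()) {
  const job = await spark.submitJob('print("hello")', 'hello-job', { retries: 2 });
  const finished = await spark.monitorJob(job.id, 60_000); // polls status every 2 s
  if (finished.status === 'failed') {
    await spark.cancelJob(job.id);
  }
}

const rows = await spark.submitSQLQuery('SELECT 1', { timeout: 30_000 });
console.log(rows); // [] -- the published implementation discards query results
```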
@@ -0,0 +1,9 @@
+ import { SparkConfig } from '../types/index';
+ export declare class SparkSQL {
+     private config;
+     constructor(config: SparkConfig);
+     query(sql: string): Promise<any[]>;
+     createTemporaryView(tableName: string, kafkaTopic: string): Promise<void>;
+     registerUDF(name: string, pythonFunction: string): Promise<void>;
+ }
+ //# sourceMappingURL=sql.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"sql.d.ts","sourceRoot":"","sources":["../../../src/spark/sql.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C,qBAAa,QAAQ;IACnB,OAAO,CAAC,MAAM,CAAc;gBAEhB,MAAM,EAAE,WAAW;IAIzB,KAAK,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC;IAWlC,mBAAmB,CAAC,SAAS,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAczE,WAAW,CAAC,IAAI,EAAE,MAAM,EAAE,cAAc,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CASvE"}
@@ -0,0 +1,2 @@
+ "use strict";exports.SparkSQL=class{constructor(s){this.config=s}async query(s){const t=await fetch(`${this.config.master}/api/v1/sql`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({sql:s})});return(await t.json()).rows||[]}async createTemporaryView(s,t){const n=`\n CREATE TEMPORARY VIEW ${s}\n USING kafka\n OPTIONS (\n kafka.bootstrap.servers = "localhost:9092",\n subscribe = "${t}",\n startingOffsets = "latest"\n )\n `;await this.query(n)}async registerUDF(s,t){const n=`\n CREATE OR REPLACE FUNCTION ${s}\n USING PYTHON\n AS '${t}'\n `;await this.query(n)}};
+ //# sourceMappingURL=sql.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"sql.js","sources":["../../../../src/spark/sql.ts"],"sourcesContent":["import { SparkConfig } from '../types/index';\r\n\r\nexport class SparkSQL {\r\n private config: SparkConfig;\r\n\r\n constructor(config: SparkConfig) {\r\n this.config = config;\r\n }\r\n\r\n async query(sql: string): Promise<any[]> {\r\n const response = await fetch(`${this.config.master}/api/v1/sql`, {\r\n method: 'POST',\r\n headers: { 'Content-Type': 'application/json' },\r\n body: JSON.stringify({ sql }),\r\n });\r\n \r\n const data = await response.json();\r\n return data.rows || [];\r\n }\r\n\r\n async createTemporaryView(tableName: string, kafkaTopic: string): Promise<void> {\r\n const createViewSQL = `\r\n CREATE TEMPORARY VIEW ${tableName}\r\n USING kafka\r\n OPTIONS (\r\n kafka.bootstrap.servers = \"localhost:9092\",\r\n subscribe = \"${kafkaTopic}\",\r\n startingOffsets = \"latest\"\r\n )\r\n `;\r\n \r\n await this.query(createViewSQL);\r\n }\r\n\r\n async registerUDF(name: string, pythonFunction: string): Promise<void> {\r\n const registerSQL = `\r\n CREATE OR REPLACE FUNCTION ${name}\r\n USING PYTHON\r\n AS '${pythonFunction}'\r\n `;\r\n \r\n await this.query(registerSQL);\r\n }\r\n}"],"names":["constructor","config","this","query","sql","response","fetch","master","method","headers","body","JSON","stringify","json","rows","createTemporaryView","tableName","kafkaTopic","createViewSQL","registerUDF","name","pythonFunction","registerSQL"],"mappings":"oCAKE,WAAAA,CAAYC,GACVC,KAAKD,OAASA,CACf,CAED,WAAME,CAAMC,GACV,MAAMC,QAAiBC,MAAM,GAAGJ,KAAKD,OAAOM,oBAAqB,CAC/DC,OAAQ,OACRC,QAAS,CAAE,eAAgB,oBAC3BC,KAAMC,KAAKC,UAAU,CAAER,UAIzB,aADmBC,EAASQ,QAChBC,MAAQ,EACrB,CAED,yBAAMC,CAAoBC,EAAmBC,GAC3C,MAAMC,EAAgB,iCACIF,oHAIPC,+DAKbf,KAAKC,MAAMe,EAClB,CAED,iBAAMC,CAAYC,EAAcC,GAC9B,MAAMC,EAAc,sCACWF,oCAEvBC,iBAGFnB,KAAKC,MAAMmB,EAClB"}
@@ -0,0 +1,8 @@
+ import { SparkConfig } from '../types/index';
+ export declare class SparkStreaming {
+     private config;
+     constructor(config: SparkConfig);
+     createStreamingQuery(sourceTopic: string, sinkTopic: string, transformation: string): Promise<string>;
+     stopStreamingQuery(queryId: string): Promise<void>;
+ }
+ //# sourceMappingURL=streaming.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"streaming.d.ts","sourceRoot":"","sources":["../../../src/spark/streaming.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C,qBAAa,cAAc;IACzB,OAAO,CAAC,MAAM,CAAc;gBAEhB,MAAM,EAAE,WAAW;IAIzB,oBAAoB,CACxB,WAAW,EAAE,MAAM,EACnB,SAAS,EAAE,MAAM,EACjB,cAAc,EAAE,MAAM,GACrB,OAAO,CAAC,MAAM,CAAC;IAgDZ,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CAKzD"}
@@ -0,0 +1,2 @@
+ "use strict";exports.SparkStreaming=class{constructor(a){this.config=a}async createStreamingQuery(a,t,n){const s=`\n from pyspark.sql import SparkSession\n from pyspark.sql.functions import *\n \n spark = SparkSession.builder \\\n .appName("${this.config.appName}") \\\n .config("spark.sql.streaming.checkpointLocation", "/checkpoint") \\\n .getOrCreate()\n \n # Read from Kafka\n df = spark.readStream.format("kafka") \\\n .option("kafka.bootstrap.servers", "localhost:9092") \\\n .option("subscribe", "${a}") \\\n .load()\n \n # Parse JSON\n parsed = df.select(from_json(col("value").cast("string"), \n "schema").alias("data")).select("data.*")\n \n # Apply transformation\n result = ${n}\n \n # Write to Kafka\n query = result.select(to_json(struct("*")).alias("value")) \\\n .writeStream.format("kafka") \\\n .option("kafka.bootstrap.servers", "localhost:9092") \\\n .option("topic", "${t}") \\\n .outputMode("append") \\\n .start()\n \n query.awaitTermination()\n `,e=await fetch(`${this.config.master}/api/v1/submissions/create`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"CreateSubmissionRequest",appResource:s})});return(await e.json()).submissionId}async stopStreamingQuery(a){await fetch(`${this.config.master}/api/v1/streaming/queries/${a}/stop`,{method:"POST"})}};
+ //# sourceMappingURL=streaming.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"streaming.js","sources":["../../../../src/spark/streaming.ts"],"sourcesContent":["import { SparkConfig } from '../types/index';\r\n\r\nexport class SparkStreaming {\r\n private config: SparkConfig;\r\n\r\n constructor(config: SparkConfig) {\r\n this.config = config;\r\n }\r\n\r\n async createStreamingQuery(\r\n sourceTopic: string,\r\n sinkTopic: string,\r\n transformation: string\r\n ): Promise<string> {\r\n const sparkCode = `\r\n from pyspark.sql import SparkSession\r\n from pyspark.sql.functions import *\r\n \r\n spark = SparkSession.builder \\\\\r\n .appName(\"${this.config.appName}\") \\\\\r\n .config(\"spark.sql.streaming.checkpointLocation\", \"/checkpoint\") \\\\\r\n .getOrCreate()\r\n \r\n # Read from Kafka\r\n df = spark.readStream.format(\"kafka\") \\\\\r\n .option(\"kafka.bootstrap.servers\", \"localhost:9092\") \\\\\r\n .option(\"subscribe\", \"${sourceTopic}\") \\\\\r\n .load()\r\n \r\n # Parse JSON\r\n parsed = df.select(from_json(col(\"value\").cast(\"string\"), \r\n \"schema\").alias(\"data\")).select(\"data.*\")\r\n \r\n # Apply transformation\r\n result = ${transformation}\r\n \r\n # Write to Kafka\r\n query = result.select(to_json(struct(\"*\")).alias(\"value\")) \\\\\r\n .writeStream.format(\"kafka\") \\\\\r\n .option(\"kafka.bootstrap.servers\", \"localhost:9092\") \\\\\r\n .option(\"topic\", \"${sinkTopic}\") \\\\\r\n .outputMode(\"append\") \\\\\r\n .start()\r\n \r\n query.awaitTermination()\r\n `;\r\n \r\n // Submit to Spark cluster\r\n const response = await fetch(`${this.config.master}/api/v1/submissions/create`, {\r\n method: 'POST',\r\n headers: { 'Content-Type': 'application/json' },\r\n body: JSON.stringify({\r\n action: 'CreateSubmissionRequest',\r\n appResource: sparkCode,\r\n }),\r\n });\r\n \r\n const data = await response.json();\r\n return data.submissionId;\r\n }\r\n\r\n async stopStreamingQuery(queryId: string): Promise<void> {\r\n await fetch(`${this.config.master}/api/v1/streaming/queries/${queryId}/stop`, {\r\n method: 'POST',\r\n });\r\n }\r\n}"],"names":["constructor","config","this","createStreamingQuery","sourceTopic","sinkTopic","transformation","sparkCode","appName","response","fetch","master","method","headers","body","JSON","stringify","action","appResource","json","submissionId","stopStreamingQuery","queryId"],"mappings":"0CAKE,WAAAA,CAAYC,GACVC,KAAKD,OAASA,CACf,CAED,0BAAME,CACJC,EACAC,EACAC,GAEA,MAAMC,EAAY,+JAKAL,KAAKD,OAAOO,8SAOAJ,sOAQjBE,qPAMaD,+GAQpBI,QAAiBC,MAAM,GAAGR,KAAKD,OAAOU,mCAAoC,CAC9EC,OAAQ,OACRC,QAAS,CAAE,eAAgB,oBAC3BC,KAAMC,KAAKC,UAAU,CACnBC,OAAQ,0BACRC,YAAaX,MAKjB,aADmBE,EAASU,QAChBC,YACb,CAED,wBAAMC,CAAmBC,SACjBZ,MAAM,GAAGR,KAAKD,OAAOU,mCAAmCW,SAAgB,CAC5EV,OAAQ,QAEX"}