@async-fusion/data 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +0 -0
- package/README.md +0 -0
- package/dist/cjs/cli/deploy.d.ts +1 -0
- package/dist/cjs/cli/deploy.d.ts.map +1 -0
- package/dist/cjs/cli/monitor.d.ts +1 -0
- package/dist/cjs/cli/monitor.d.ts.map +1 -0
- package/dist/cjs/index.d.ts +29 -0
- package/dist/cjs/index.d.ts.map +1 -0
- package/dist/cjs/index.js +2 -0
- package/dist/cjs/index.js.map +1 -0
- package/dist/cjs/kafka/consumer.d.ts +19 -0
- package/dist/cjs/kafka/consumer.d.ts.map +1 -0
- package/dist/cjs/kafka/consumer.js +2 -0
- package/dist/cjs/kafka/consumer.js.map +1 -0
- package/dist/cjs/kafka/producer.d.ts +21 -0
- package/dist/cjs/kafka/producer.d.ts.map +1 -0
- package/dist/cjs/kafka/producer.js +2 -0
- package/dist/cjs/kafka/producer.js.map +1 -0
- package/dist/cjs/kafka/stream.d.ts +52 -0
- package/dist/cjs/kafka/stream.d.ts.map +1 -0
- package/dist/cjs/kafka/stream.js +2 -0
- package/dist/cjs/kafka/stream.js.map +1 -0
- package/dist/cjs/pipeline/builder.d.ts +32 -0
- package/dist/cjs/pipeline/builder.d.ts.map +1 -0
- package/dist/cjs/pipeline/builder.js +2 -0
- package/dist/cjs/pipeline/builder.js.map +1 -0
- package/dist/cjs/pipeline/connectors.d.ts +1 -0
- package/dist/cjs/pipeline/connectors.d.ts.map +1 -0
- package/dist/cjs/pipeline/monitoring.d.ts +17 -0
- package/dist/cjs/pipeline/monitoring.d.ts.map +1 -0
- package/dist/cjs/pipeline/monitoring.js +2 -0
- package/dist/cjs/pipeline/monitoring.js.map +1 -0
- package/dist/cjs/react/index.d.ts +4 -0
- package/dist/cjs/react/index.d.ts.map +1 -0
- package/dist/cjs/react/useKafkaTopic.d.ts +13 -0
- package/dist/cjs/react/useKafkaTopic.d.ts.map +1 -0
- package/dist/cjs/react/useRealtimeData.d.ts +16 -0
- package/dist/cjs/react/useRealtimeData.d.ts.map +1 -0
- package/dist/cjs/react/useSparkQuery.d.ts +12 -0
- package/dist/cjs/react/useSparkQuery.d.ts.map +1 -0
- package/dist/cjs/spark/client.d.ts +27 -0
- package/dist/cjs/spark/client.d.ts.map +1 -0
- package/dist/cjs/spark/client.js +2 -0
- package/dist/cjs/spark/client.js.map +1 -0
- package/dist/cjs/spark/sql.d.ts +9 -0
- package/dist/cjs/spark/sql.d.ts.map +1 -0
- package/dist/cjs/spark/sql.js +2 -0
- package/dist/cjs/spark/sql.js.map +1 -0
- package/dist/cjs/spark/streaming.d.ts +8 -0
- package/dist/cjs/spark/streaming.d.ts.map +1 -0
- package/dist/cjs/spark/streaming.js +2 -0
- package/dist/cjs/spark/streaming.js.map +1 -0
- package/dist/cjs/types/index.d.ts +47 -0
- package/dist/cjs/types/index.d.ts.map +1 -0
- package/dist/cjs/utils/error-handling.d.ts +27 -0
- package/dist/cjs/utils/error-handling.d.ts.map +1 -0
- package/dist/cjs/utils/error-handling.js +2 -0
- package/dist/cjs/utils/error-handling.js.map +1 -0
- package/dist/esm/cli/deploy.d.ts +1 -0
- package/dist/esm/cli/deploy.d.ts.map +1 -0
- package/dist/esm/cli/monitor.d.ts +1 -0
- package/dist/esm/cli/monitor.d.ts.map +1 -0
- package/dist/esm/index.d.ts +29 -0
- package/dist/esm/index.d.ts.map +1 -0
- package/dist/esm/index.js +2 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/kafka/consumer.d.ts +19 -0
- package/dist/esm/kafka/consumer.d.ts.map +1 -0
- package/dist/esm/kafka/consumer.js +2 -0
- package/dist/esm/kafka/consumer.js.map +1 -0
- package/dist/esm/kafka/producer.d.ts +21 -0
- package/dist/esm/kafka/producer.d.ts.map +1 -0
- package/dist/esm/kafka/producer.js +2 -0
- package/dist/esm/kafka/producer.js.map +1 -0
- package/dist/esm/kafka/stream.d.ts +52 -0
- package/dist/esm/kafka/stream.d.ts.map +1 -0
- package/dist/esm/kafka/stream.js +2 -0
- package/dist/esm/kafka/stream.js.map +1 -0
- package/dist/esm/pipeline/builder.d.ts +32 -0
- package/dist/esm/pipeline/builder.d.ts.map +1 -0
- package/dist/esm/pipeline/builder.js +2 -0
- package/dist/esm/pipeline/builder.js.map +1 -0
- package/dist/esm/pipeline/connectors.d.ts +1 -0
- package/dist/esm/pipeline/connectors.d.ts.map +1 -0
- package/dist/esm/pipeline/monitoring.d.ts +17 -0
- package/dist/esm/pipeline/monitoring.d.ts.map +1 -0
- package/dist/esm/pipeline/monitoring.js +2 -0
- package/dist/esm/pipeline/monitoring.js.map +1 -0
- package/dist/esm/react/index.d.ts +4 -0
- package/dist/esm/react/index.d.ts.map +1 -0
- package/dist/esm/react/useKafkaTopic.d.ts +13 -0
- package/dist/esm/react/useKafkaTopic.d.ts.map +1 -0
- package/dist/esm/react/useRealtimeData.d.ts +16 -0
- package/dist/esm/react/useRealtimeData.d.ts.map +1 -0
- package/dist/esm/react/useSparkQuery.d.ts +12 -0
- package/dist/esm/react/useSparkQuery.d.ts.map +1 -0
- package/dist/esm/spark/client.d.ts +27 -0
- package/dist/esm/spark/client.d.ts.map +1 -0
- package/dist/esm/spark/client.js +2 -0
- package/dist/esm/spark/client.js.map +1 -0
- package/dist/esm/spark/sql.d.ts +9 -0
- package/dist/esm/spark/sql.d.ts.map +1 -0
- package/dist/esm/spark/sql.js +2 -0
- package/dist/esm/spark/sql.js.map +1 -0
- package/dist/esm/spark/streaming.d.ts +8 -0
- package/dist/esm/spark/streaming.d.ts.map +1 -0
- package/dist/esm/spark/streaming.js +2 -0
- package/dist/esm/spark/streaming.js.map +1 -0
- package/dist/esm/types/index.d.ts +47 -0
- package/dist/esm/types/index.d.ts.map +1 -0
- package/dist/esm/utils/error-handling.d.ts +27 -0
- package/dist/esm/utils/error-handling.d.ts.map +1 -0
- package/dist/esm/utils/error-handling.js +2 -0
- package/dist/esm/utils/error-handling.js.map +1 -0
- package/package.json +68 -0
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
class t{constructor(t,s){this.baseUrl=`${t.master}/api/v1`,this.headers={"Content-Type":"application/json"},this.retryConfig=s||{maxRetries:3,retryDelay:1e3}}async submitJob(t,s,e){const r=e?.retries||this.retryConfig.maxRetries;for(let o=1;o<=r;o++)try{const r=await fetch(`${this.baseUrl}/submissions/create`,{method:"POST",headers:this.headers,body:JSON.stringify({action:"CreateSubmissionRequest",appResource:t,mainClass:"org.apache.spark.deploy.SparkSubmit",appArgs:[s],sparkProperties:{"spark.app.name":s,"spark.master":this.baseUrl}})});if(!r.ok)throw new Error(`HTTP ${r.status}: ${r.statusText}`);const o={id:(await r.json()).submissionId,status:"pending",progress:0,startTime:new Date,stages:[]};return e?.timeout&&this.monitorJob(o.id,e.timeout).catch(console.error),o}catch(t){if(console.error(`Job submission attempt ${o} failed:`,t),o===r)throw t;await this.sleep(this.retryConfig.retryDelay*o)}throw new Error("Job submission failed after all retries")}async getJobStatus(t){try{const s=await fetch(`${this.baseUrl}/submissions/status/${t}`);if(!s.ok)throw new Error(`Failed to get job status: ${s.status}`);const e=await s.json();return{id:t,status:e.driverState?.toLowerCase()||"unknown",progress:e.progress||0,startTime:new Date(e.submissionTime||Date.now()),endTime:e.completionTime?new Date(e.completionTime):void 0,stages:e.stages||[]}}catch(t){throw console.error("Error getting job status:",t),t}}async cancelJob(t){try{const s=await fetch(`${this.baseUrl}/submissions/kill/${t}`,{method:"POST"});return s.ok?(console.log(`✅ Job ${t} cancelled successfully`),!0):(console.error(`Failed to cancel job ${t}: ${s.status}`),!1)}catch(t){return console.error("Error cancelling job:",t),!1}}async monitorJob(t,s=3e5){const e=Date.now();let r=-1;for(;Date.now()-e<s;){const s=await this.getJobStatus(t);if(s.progress!==r&&(console.log(`📊 Job ${t} progress: ${s.progress}%`),r=s.progress),"completed"===s.status||"failed"===s.status)return console.log(`✅ Job ${t} 
${s.status}`),s;await this.sleep(2e3)}return console.warn(`⚠️ Job ${t} monitoring timed out after ${s}ms`),this.getJobStatus(t)}async runPythonScript(t,s=[],e){if(!require("fs").existsSync(t))throw new Error(`Python script not found: ${t}`);const r=`\nfrom pyspark.sql import SparkSession\nimport sys\nimport json\n\nspark = SparkSession.builder.getOrCreate()\n\ntry:\n with open('${t}', 'r') as f:\n code = f.read()\n exec(code)\n print("✅ Python script executed successfully")\nexcept Exception as e:\n print(f"❌ Error executing script: {e}")\n sys.exit(1)\n `;return this.submitJob(r,`python-${Date.now()}`,e)}async submitSQLQuery(t,s){const e=`\nfrom pyspark.sql import SparkSession\n\nspark = SparkSession.builder.getOrCreate()\n\n${s?.database?`spark.sql("USE ${s.database}")`:""}\n\nresult = spark.sql("""${t.replace(/"/g,'\\"')}""")\nresult.show()\nprint(result.collect())\n `,r=await this.submitJob(e,`sql-${Date.now()}`,s);return await this.monitorJob(r.id,s?.timeout),[]}sleep(t){return new Promise(s=>setTimeout(s,t))}async healthCheck(){try{return(await fetch(`${this.baseUrl}/`)).ok}catch{return!1}}}export{t as SparkClient};
|
|
2
|
+
//# sourceMappingURL=client.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"client.js","sources":["../../../../src/spark/client.ts"],"sourcesContent":["\r\nimport { SparkConfig, SparkJob, SparkStage } from '../types';\r\n\r\nexport class SparkClient {\r\n private baseUrl: string;\r\n private headers: Record<string, string>;\r\n private retryConfig: { maxRetries: number; retryDelay: number };\r\n\r\n constructor(config: SparkConfig, retryConfig?: { maxRetries: number; retryDelay: number }) {\r\n this.baseUrl = `${config.master}/api/v1`;\r\n this.headers = {\r\n 'Content-Type': 'application/json',\r\n };\r\n this.retryConfig = retryConfig || { maxRetries: 3, retryDelay: 1000 };\r\n }\r\n\r\n async submitJob(jobCode: string, jobName: string, options?: { timeout?: number; retries?: number }): Promise<SparkJob> {\r\n const maxRetries = options?.retries || this.retryConfig.maxRetries;\r\n \r\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\r\n try {\r\n const response = await fetch(`${this.baseUrl}/submissions/create`, {\r\n method: 'POST',\r\n headers: this.headers,\r\n body: JSON.stringify({\r\n action: 'CreateSubmissionRequest',\r\n appResource: jobCode,\r\n mainClass: 'org.apache.spark.deploy.SparkSubmit',\r\n appArgs: [jobName],\r\n sparkProperties: {\r\n 'spark.app.name': jobName,\r\n 'spark.master': this.baseUrl,\r\n },\r\n }),\r\n });\r\n\r\n if (!response.ok) {\r\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\r\n }\r\n\r\n const data = await response.json();\r\n \r\n const job: SparkJob = {\r\n id: data.submissionId,\r\n status: 'pending',\r\n progress: 0,\r\n startTime: new Date(),\r\n stages: [],\r\n };\r\n\r\n // Start monitoring if timeout specified\r\n if (options?.timeout) {\r\n this.monitorJob(job.id, options.timeout).catch(console.error);\r\n }\r\n\r\n return job;\r\n } catch (error) {\r\n console.error(`Job submission attempt ${attempt} failed:`, error);\r\n if (attempt === maxRetries) throw error;\r\n await this.sleep(this.retryConfig.retryDelay * attempt);\r\n }\r\n }\r\n 
throw new Error('Job submission failed after all retries');\r\n }\r\n\r\n async getJobStatus(jobId: string): Promise<SparkJob> {\r\n try {\r\n const response = await fetch(`${this.baseUrl}/submissions/status/${jobId}`);\r\n \r\n if (!response.ok) {\r\n throw new Error(`Failed to get job status: ${response.status}`);\r\n }\r\n \r\n const data = await response.json();\r\n \r\n return {\r\n id: jobId,\r\n status: data.driverState?.toLowerCase() || 'unknown',\r\n progress: data.progress || 0,\r\n startTime: new Date(data.submissionTime || Date.now()),\r\n endTime: data.completionTime ? new Date(data.completionTime) : undefined,\r\n stages: data.stages || [],\r\n };\r\n } catch (error) {\r\n console.error(`Error getting job status:`, error);\r\n throw error;\r\n }\r\n }\r\n\r\n async cancelJob(jobId: string): Promise<boolean> {\r\n try {\r\n const response = await fetch(`${this.baseUrl}/submissions/kill/${jobId}`, {\r\n method: 'POST',\r\n });\r\n \r\n if (response.ok) {\r\n console.log(`✅ Job ${jobId} cancelled successfully`);\r\n return true;\r\n }\r\n \r\n console.error(`Failed to cancel job ${jobId}: ${response.status}`);\r\n return false;\r\n } catch (error) {\r\n console.error(`Error cancelling job:`, error);\r\n return false;\r\n }\r\n }\r\n\r\n async monitorJob(jobId: string, timeoutMs: number = 300000): Promise<SparkJob> {\r\n const startTime = Date.now();\r\n let lastProgress = -1;\r\n \r\n while (Date.now() - startTime < timeoutMs) {\r\n const job = await this.getJobStatus(jobId);\r\n \r\n // Log progress changes\r\n if (job.progress !== lastProgress) {\r\n console.log(`📊 Job ${jobId} progress: ${job.progress}%`);\r\n lastProgress = job.progress;\r\n }\r\n \r\n // Check completion\r\n if (job.status === 'completed' || job.status === 'failed') {\r\n console.log(`✅ Job ${jobId} ${job.status}`);\r\n return job;\r\n }\r\n \r\n // Wait before next poll\r\n await this.sleep(2000);\r\n }\r\n \r\n console.warn(`⚠️ Job ${jobId} monitoring timed out after 
${timeoutMs}ms`);\r\n return this.getJobStatus(jobId);\r\n }\r\n\r\n async runPythonScript(scriptPath: string, args: string[] = [], options?: { timeout?: number }): Promise<SparkJob> {\r\n // Validate script exists\r\n const fs = require('fs');\r\n if (!fs.existsSync(scriptPath)) {\r\n throw new Error(`Python script not found: ${scriptPath}`);\r\n }\r\n \r\n const jobCode = `\r\nfrom pyspark.sql import SparkSession\r\nimport sys\r\nimport json\r\n\r\nspark = SparkSession.builder.getOrCreate()\r\n\r\ntry:\r\n with open('${scriptPath}', 'r') as f:\r\n code = f.read()\r\n exec(code)\r\n print(\"✅ Python script executed successfully\")\r\nexcept Exception as e:\r\n print(f\"❌ Error executing script: {e}\")\r\n sys.exit(1)\r\n `;\r\n \r\n return this.submitJob(jobCode, `python-${Date.now()}`, options);\r\n }\r\n\r\n async submitSQLQuery(sql: string, options?: { database?: string; timeout?: number }): Promise<any[]> {\r\n const queryJob = `\r\nfrom pyspark.sql import SparkSession\r\n\r\nspark = SparkSession.builder.getOrCreate()\r\n\r\n${options?.database ? 
`spark.sql(\"USE ${options.database}\")` : ''}\r\n\r\nresult = spark.sql(\"\"\"${sql.replace(/\"/g, '\\\\\"')}\"\"\")\r\nresult.show()\r\nprint(result.collect())\r\n `;\r\n \r\n const job = await this.submitJob(queryJob, `sql-${Date.now()}`, options);\r\n await this.monitorJob(job.id, options?.timeout);\r\n return [];\r\n }\r\n\r\n private sleep(ms: number): Promise<void> {\r\n return new Promise(resolve => setTimeout(resolve, ms));\r\n }\r\n\r\n async healthCheck(): Promise<boolean> {\r\n try {\r\n const response = await fetch(`${this.baseUrl}/`);\r\n return response.ok;\r\n } catch {\r\n return false;\r\n }\r\n }\r\n}"],"names":["SparkClient","constructor","config","retryConfig","this","baseUrl","master","headers","maxRetries","retryDelay","submitJob","jobCode","jobName","options","retries","attempt","response","fetch","method","body","JSON","stringify","action","appResource","mainClass","appArgs","sparkProperties","ok","Error","status","statusText","job","id","json","submissionId","progress","startTime","Date","stages","timeout","monitorJob","catch","console","error","sleep","getJobStatus","jobId","data","driverState","toLowerCase","submissionTime","now","endTime","completionTime","undefined","cancelJob","log","timeoutMs","lastProgress","warn","runPythonScript","scriptPath","args","require","existsSync","submitSQLQuery","sql","queryJob","database","replace","ms","Promise","resolve","setTimeout","healthCheck"],"mappings":"MAGaA,EAKT,WAAAC,CAAYC,EAAqBC,GAC7BC,KAAKC,QAAU,GAAGH,EAAOI,gBACzBF,KAAKG,QAAU,CACX,eAAgB,oBAEpBH,KAAKD,YAAcA,GAAe,CAAEK,WAAY,EAAGC,WAAY,IAClE,CAED,eAAMC,CAAUC,EAAiBC,EAAiBC,GAC9C,MAAML,EAAaK,GAASC,SAAWV,KAAKD,YAAYK,WAExD,IAAK,IAAIO,EAAU,EAAGA,GAAWP,EAAYO,IACzC,IACI,MAAMC,QAAiBC,MAAM,GAAGb,KAAKC,6BAA8B,CAC/Da,OAAQ,OACRX,QAASH,KAAKG,QACdY,KAAMC,KAAKC,UAAU,CACjBC,OAAQ,0BACRC,YAAaZ,EACba,UAAW,sCACXC,QAAS,CAACb,GACVc,gBAAiB,CACb,iBAAkBd,EAClB,eAAgBR,KAAKC,aAKjC,IAAKW,EAASW,GACV,MAAM,IAAIC,MAAM,QAAQZ,EAASa,WAAWb,EAASc,cAGzD,MAEMC,EAAgB,CAClBC,UAHehB,E
AASiB,QAGfC,aACTL,OAAQ,UACRM,SAAU,EACVC,UAAW,IAAIC,KACfC,OAAQ,IAQZ,OAJIzB,GAAS0B,SACTnC,KAAKoC,WAAWT,EAAIC,GAAInB,EAAQ0B,SAASE,MAAMC,QAAQC,OAGpDZ,CACV,CAAC,MAAOY,GAEL,GADAD,QAAQC,MAAM,0BAA0B5B,YAAmB4B,GACvD5B,IAAYP,EAAY,MAAMmC,QAC5BvC,KAAKwC,MAAMxC,KAAKD,YAAYM,WAAaM,EAClD,CAEL,MAAM,IAAIa,MAAM,0CACnB,CAED,kBAAMiB,CAAaC,GACf,IACI,MAAM9B,QAAiBC,MAAM,GAAGb,KAAKC,8BAA8ByC,KAEnE,IAAK9B,EAASW,GACV,MAAM,IAAIC,MAAM,6BAA6BZ,EAASa,UAG1D,MAAMkB,QAAa/B,EAASiB,OAE5B,MAAO,CACHD,GAAIc,EACJjB,OAAQkB,EAAKC,aAAaC,eAAiB,UAC3Cd,SAAUY,EAAKZ,UAAY,EAC3BC,UAAW,IAAIC,KAAKU,EAAKG,gBAAkBb,KAAKc,OAChDC,QAASL,EAAKM,eAAiB,IAAIhB,KAAKU,EAAKM,qBAAkBC,EAC/DhB,OAAQS,EAAKT,QAAU,GAE9B,CAAC,MAAOK,GAEL,MADAD,QAAQC,MAAM,4BAA6BA,GACrCA,CACT,CACJ,CAED,eAAMY,CAAUT,GACZ,IACI,MAAM9B,QAAiBC,MAAM,GAAGb,KAAKC,4BAA4ByC,IAAS,CACtE5B,OAAQ,SAGZ,OAAIF,EAASW,IACTe,QAAQc,IAAI,SAASV,6BACd,IAGXJ,QAAQC,MAAM,wBAAwBG,MAAU9B,EAASa,WAClD,EACV,CAAC,MAAOc,GAEL,OADAD,QAAQC,MAAM,wBAAyBA,IAChC,CACV,CACJ,CAED,gBAAMH,CAAWM,EAAeW,EAAoB,KAChD,MAAMrB,EAAYC,KAAKc,MACvB,IAAIO,GAAgB,EAEpB,KAAOrB,KAAKc,MAAQf,EAAYqB,GAAW,CACvC,MAAM1B,QAAY3B,KAAKyC,aAAaC,GASpC,GANIf,EAAII,WAAauB,IACjBhB,QAAQc,IAAI,UAAUV,eAAmBf,EAAII,aAC7CuB,EAAe3B,EAAII,UAIJ,cAAfJ,EAAIF,QAAyC,WAAfE,EAAIF,OAElC,OADAa,QAAQc,IAAI,SAASV,KAASf,EAAIF,UAC3BE,QAIL3B,KAAKwC,MAAM,IACpB,CAGD,OADAF,QAAQiB,KAAK,UAAUb,gCAAoCW,OACpDrD,KAAKyC,aAAaC,EAC5B,CAED,qBAAMc,CAAgBC,EAAoBC,EAAiB,GAAIjD,GAG3D,IADWkD,QAAQ,MACXC,WAAWH,GACf,MAAM,IAAIjC,MAAM,4BAA4BiC,KAGhD,MAAMlD,EAAU,yIAQPkD,8MAST,OAAOzD,KAAKM,UAAUC,EAAS,UAAU0B,KAAKc,QAAStC,EAC1D,CAED,oBAAMoD,CAAeC,EAAarD,GAC9B,MAAMsD,EAAW,2FAKvBtD,GAASuD,SAAW,kBAAkBvD,EAAQuD,aAAe,+BAEvCF,EAAIG,QAAQ,KAAM,+DAK5BtC,QAAY3B,KAAKM,UAAUyD,EAAU,OAAO9B,KAAKc,QAAStC,GAEhE,aADMT,KAAKoC,WAAWT,EAAIC,GAAInB,GAAS0B,SAChC,EACV,CAEO,KAAAK,CAAM0B,GACV,OAAO,IAAIC,QAAQC,GAAWC,WAAWD,EAASF,GACrD,CAED,iBAAMI,GACF,IAEI,aADuBzD,MAAM,GAAGb,KAAKC,aACrBsB,EACnB,CAAC,MACE,OAAO,CACV,CACJ"}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import { SparkConfig } from '../types/index';
|
|
2
|
+
export declare class SparkSQL {
|
|
3
|
+
private config;
|
|
4
|
+
constructor(config: SparkConfig);
|
|
5
|
+
query(sql: string): Promise<any[]>;
|
|
6
|
+
createTemporaryView(tableName: string, kafkaTopic: string): Promise<void>;
|
|
7
|
+
registerUDF(name: string, pythonFunction: string): Promise<void>;
|
|
8
|
+
}
|
|
9
|
+
//# sourceMappingURL=sql.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sql.d.ts","sourceRoot":"","sources":["../../../src/spark/sql.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C,qBAAa,QAAQ;IACnB,OAAO,CAAC,MAAM,CAAc;gBAEhB,MAAM,EAAE,WAAW;IAIzB,KAAK,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC;IAWlC,mBAAmB,CAAC,SAAS,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAczE,WAAW,CAAC,IAAI,EAAE,MAAM,EAAE,cAAc,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CASvE"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
class s{constructor(s){this.config=s}async query(s){const t=await fetch(`${this.config.master}/api/v1/sql`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({sql:s})});return(await t.json()).rows||[]}async createTemporaryView(s,t){const n=`\n CREATE TEMPORARY VIEW ${s}\n USING kafka\n OPTIONS (\n kafka.bootstrap.servers = "localhost:9092",\n subscribe = "${t}",\n startingOffsets = "latest"\n )\n `;await this.query(n)}async registerUDF(s,t){const n=`\n CREATE OR REPLACE FUNCTION ${s}\n USING PYTHON\n AS '${t}'\n `;await this.query(n)}}export{s as SparkSQL};
|
|
2
|
+
//# sourceMappingURL=sql.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sql.js","sources":["../../../../src/spark/sql.ts"],"sourcesContent":["import { SparkConfig } from '../types/index';\r\n\r\nexport class SparkSQL {\r\n private config: SparkConfig;\r\n\r\n constructor(config: SparkConfig) {\r\n this.config = config;\r\n }\r\n\r\n async query(sql: string): Promise<any[]> {\r\n const response = await fetch(`${this.config.master}/api/v1/sql`, {\r\n method: 'POST',\r\n headers: { 'Content-Type': 'application/json' },\r\n body: JSON.stringify({ sql }),\r\n });\r\n \r\n const data = await response.json();\r\n return data.rows || [];\r\n }\r\n\r\n async createTemporaryView(tableName: string, kafkaTopic: string): Promise<void> {\r\n const createViewSQL = `\r\n CREATE TEMPORARY VIEW ${tableName}\r\n USING kafka\r\n OPTIONS (\r\n kafka.bootstrap.servers = \"localhost:9092\",\r\n subscribe = \"${kafkaTopic}\",\r\n startingOffsets = \"latest\"\r\n )\r\n `;\r\n \r\n await this.query(createViewSQL);\r\n }\r\n\r\n async registerUDF(name: string, pythonFunction: string): Promise<void> {\r\n const registerSQL = `\r\n CREATE OR REPLACE FUNCTION ${name}\r\n USING PYTHON\r\n AS '${pythonFunction}'\r\n `;\r\n \r\n await this.query(registerSQL);\r\n }\r\n}"],"names":["SparkSQL","constructor","config","this","query","sql","response","fetch","master","method","headers","body","JSON","stringify","json","rows","createTemporaryView","tableName","kafkaTopic","createViewSQL","registerUDF","name","pythonFunction","registerSQL"],"mappings":"MAEaA,EAGX,WAAAC,CAAYC,GACVC,KAAKD,OAASA,CACf,CAED,WAAME,CAAMC,GACV,MAAMC,QAAiBC,MAAM,GAAGJ,KAAKD,OAAOM,oBAAqB,CAC/DC,OAAQ,OACRC,QAAS,CAAE,eAAgB,oBAC3BC,KAAMC,KAAKC,UAAU,CAAER,UAIzB,aADmBC,EAASQ,QAChBC,MAAQ,EACrB,CAED,yBAAMC,CAAoBC,EAAmBC,GAC3C,MAAMC,EAAgB,iCACIF,oHAIPC,+DAKbf,KAAKC,MAAMe,EAClB,CAED,iBAAMC,CAAYC,EAAcC,GAC9B,MAAMC,EAAc,sCACWF,oCAEvBC,iBAGFnB,KAAKC,MAAMmB,EAClB"}
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import { SparkConfig } from '../types/index';
|
|
2
|
+
export declare class SparkStreaming {
|
|
3
|
+
private config;
|
|
4
|
+
constructor(config: SparkConfig);
|
|
5
|
+
createStreamingQuery(sourceTopic: string, sinkTopic: string, transformation: string): Promise<string>;
|
|
6
|
+
stopStreamingQuery(queryId: string): Promise<void>;
|
|
7
|
+
}
|
|
8
|
+
//# sourceMappingURL=streaming.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"streaming.d.ts","sourceRoot":"","sources":["../../../src/spark/streaming.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C,qBAAa,cAAc;IACzB,OAAO,CAAC,MAAM,CAAc;gBAEhB,MAAM,EAAE,WAAW;IAIzB,oBAAoB,CACxB,WAAW,EAAE,MAAM,EACnB,SAAS,EAAE,MAAM,EACjB,cAAc,EAAE,MAAM,GACrB,OAAO,CAAC,MAAM,CAAC;IAgDZ,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CAKzD"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
class a{constructor(a){this.config=a}async createStreamingQuery(a,t,n){const s=`\n from pyspark.sql import SparkSession\n from pyspark.sql.functions import *\n \n spark = SparkSession.builder \\\n .appName("${this.config.appName}") \\\n .config("spark.sql.streaming.checkpointLocation", "/checkpoint") \\\n .getOrCreate()\n \n # Read from Kafka\n df = spark.readStream.format("kafka") \\\n .option("kafka.bootstrap.servers", "localhost:9092") \\\n .option("subscribe", "${a}") \\\n .load()\n \n # Parse JSON\n parsed = df.select(from_json(col("value").cast("string"), \n "schema").alias("data")).select("data.*")\n \n # Apply transformation\n result = ${n}\n \n # Write to Kafka\n query = result.select(to_json(struct("*")).alias("value")) \\\n .writeStream.format("kafka") \\\n .option("kafka.bootstrap.servers", "localhost:9092") \\\n .option("topic", "${t}") \\\n .outputMode("append") \\\n .start()\n \n query.awaitTermination()\n `,e=await fetch(`${this.config.master}/api/v1/submissions/create`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({action:"CreateSubmissionRequest",appResource:s})});return(await e.json()).submissionId}async stopStreamingQuery(a){await fetch(`${this.config.master}/api/v1/streaming/queries/${a}/stop`,{method:"POST"})}}export{a as SparkStreaming};
|
|
2
|
+
//# sourceMappingURL=streaming.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"streaming.js","sources":["../../../../src/spark/streaming.ts"],"sourcesContent":["import { SparkConfig } from '../types/index';\r\n\r\nexport class SparkStreaming {\r\n private config: SparkConfig;\r\n\r\n constructor(config: SparkConfig) {\r\n this.config = config;\r\n }\r\n\r\n async createStreamingQuery(\r\n sourceTopic: string,\r\n sinkTopic: string,\r\n transformation: string\r\n ): Promise<string> {\r\n const sparkCode = `\r\n from pyspark.sql import SparkSession\r\n from pyspark.sql.functions import *\r\n \r\n spark = SparkSession.builder \\\\\r\n .appName(\"${this.config.appName}\") \\\\\r\n .config(\"spark.sql.streaming.checkpointLocation\", \"/checkpoint\") \\\\\r\n .getOrCreate()\r\n \r\n # Read from Kafka\r\n df = spark.readStream.format(\"kafka\") \\\\\r\n .option(\"kafka.bootstrap.servers\", \"localhost:9092\") \\\\\r\n .option(\"subscribe\", \"${sourceTopic}\") \\\\\r\n .load()\r\n \r\n # Parse JSON\r\n parsed = df.select(from_json(col(\"value\").cast(\"string\"), \r\n \"schema\").alias(\"data\")).select(\"data.*\")\r\n \r\n # Apply transformation\r\n result = ${transformation}\r\n \r\n # Write to Kafka\r\n query = result.select(to_json(struct(\"*\")).alias(\"value\")) \\\\\r\n .writeStream.format(\"kafka\") \\\\\r\n .option(\"kafka.bootstrap.servers\", \"localhost:9092\") \\\\\r\n .option(\"topic\", \"${sinkTopic}\") \\\\\r\n .outputMode(\"append\") \\\\\r\n .start()\r\n \r\n query.awaitTermination()\r\n `;\r\n \r\n // Submit to Spark cluster\r\n const response = await fetch(`${this.config.master}/api/v1/submissions/create`, {\r\n method: 'POST',\r\n headers: { 'Content-Type': 'application/json' },\r\n body: JSON.stringify({\r\n action: 'CreateSubmissionRequest',\r\n appResource: sparkCode,\r\n }),\r\n });\r\n \r\n const data = await response.json();\r\n return data.submissionId;\r\n }\r\n\r\n async stopStreamingQuery(queryId: string): Promise<void> {\r\n await 
fetch(`${this.config.master}/api/v1/streaming/queries/${queryId}/stop`, {\r\n method: 'POST',\r\n });\r\n }\r\n}"],"names":["SparkStreaming","constructor","config","this","createStreamingQuery","sourceTopic","sinkTopic","transformation","sparkCode","appName","response","fetch","master","method","headers","body","JSON","stringify","action","appResource","json","submissionId","stopStreamingQuery","queryId"],"mappings":"MAEaA,EAGX,WAAAC,CAAYC,GACVC,KAAKD,OAASA,CACf,CAED,0BAAME,CACJC,EACAC,EACAC,GAEA,MAAMC,EAAY,+JAKAL,KAAKD,OAAOO,8SAOAJ,sOAQjBE,qPAMaD,+GAQpBI,QAAiBC,MAAM,GAAGR,KAAKD,OAAOU,mCAAoC,CAC9EC,OAAQ,OACRC,QAAS,CAAE,eAAgB,oBAC3BC,KAAMC,KAAKC,UAAU,CACnBC,OAAQ,0BACRC,YAAaX,MAKjB,aADmBE,EAASU,QAChBC,YACb,CAED,wBAAMC,CAAmBC,SACjBZ,MAAM,GAAGR,KAAKD,OAAOU,mCAAmCW,SAAgB,CAC5EV,OAAQ,QAEX"}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
export interface KafkaConfig {
|
|
2
|
+
brokers: string[];
|
|
3
|
+
clientId: string;
|
|
4
|
+
ssl?: boolean;
|
|
5
|
+
sasl?: {
|
|
6
|
+
mechanism: 'plain' | 'scram-sha-256' | 'scram-sha-512';
|
|
7
|
+
username: string;
|
|
8
|
+
password: string;
|
|
9
|
+
};
|
|
10
|
+
}
|
|
11
|
+
export type KafkaJSConfig = any;
|
|
12
|
+
export interface SparkConfig {
|
|
13
|
+
master: string;
|
|
14
|
+
appName: string;
|
|
15
|
+
sparkConf?: Record<string, string>;
|
|
16
|
+
pythonPath?: string;
|
|
17
|
+
}
|
|
18
|
+
export interface PipelineConfig {
|
|
19
|
+
name: string;
|
|
20
|
+
checkpointLocation?: string;
|
|
21
|
+
parallelism?: number;
|
|
22
|
+
}
|
|
23
|
+
export interface Message<T = any> {
|
|
24
|
+
key?: string;
|
|
25
|
+
value: T;
|
|
26
|
+
timestamp: Date;
|
|
27
|
+
partition?: number;
|
|
28
|
+
offset?: number;
|
|
29
|
+
}
|
|
30
|
+
export interface SparkJob {
|
|
31
|
+
id: string;
|
|
32
|
+
status: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled';
|
|
33
|
+
progress: number;
|
|
34
|
+
startTime: Date;
|
|
35
|
+
endTime?: Date;
|
|
36
|
+
stages: SparkStage[];
|
|
37
|
+
}
|
|
38
|
+
export interface SparkStage {
|
|
39
|
+
id: number;
|
|
40
|
+
name: string;
|
|
41
|
+
completed: number;
|
|
42
|
+
total: number;
|
|
43
|
+
status: 'pending' | 'running' | 'completed' | 'failed';
|
|
44
|
+
}
|
|
45
|
+
export type PipelineSource = 'kafka' | 'file' | 'socket' | 'http';
|
|
46
|
+
export type PipelineSink = 'kafka' | 'console' | 'file' | 'database' | 'http';
|
|
47
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/types/index.ts"],"names":[],"mappings":"AACA,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,QAAQ,EAAE,MAAM,CAAC;IACjB,GAAG,CAAC,EAAE,OAAO,CAAC;IACd,IAAI,CAAC,EAAE;QACL,SAAS,EAAE,OAAO,GAAG,eAAe,GAAG,eAAe,CAAC;QACvD,QAAQ,EAAE,MAAM,CAAC;QACjB,QAAQ,EAAE,MAAM,CAAC;KAClB,CAAC;CACH;AAGD,MAAM,MAAM,aAAa,GAAG,GAAG,CAAC;AAEhC,MAAM,WAAW,WAAW;IAC1B,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACnC,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,MAAM,CAAC;IACb,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED,MAAM,WAAW,OAAO,CAAC,CAAC,GAAG,GAAG;IAC9B,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,CAAC,CAAC;IACT,SAAS,EAAE,IAAI,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,QAAQ;IACvB,EAAE,EAAE,MAAM,CAAC;IACX,MAAM,EAAE,SAAS,GAAG,SAAS,GAAG,WAAW,GAAG,QAAQ,GAAG,WAAW,CAAC;IACrE,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,EAAE,IAAI,CAAC;IAChB,OAAO,CAAC,EAAE,IAAI,CAAC;IACf,MAAM,EAAE,UAAU,EAAE,CAAC;CACtB;AAED,MAAM,WAAW,UAAU;IACzB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,MAAM,CAAC;IAClB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,SAAS,GAAG,SAAS,GAAG,WAAW,GAAG,QAAQ,CAAC;CACxD;AAED,MAAM,MAAM,cAAc,GAAG,OAAO,GAAG,MAAM,GAAG,QAAQ,GAAG,MAAM,CAAC;AAClE,MAAM,MAAM,YAAY,GAAG,OAAO,GAAG,SAAS,GAAG,MAAM,GAAG,UAAU,GAAG,MAAM,CAAC"}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
export declare class RetryableError extends Error {
|
|
2
|
+
readonly retryable: boolean;
|
|
3
|
+
constructor(message: string, retryable?: boolean);
|
|
4
|
+
}
|
|
5
|
+
export declare class FatalError extends Error {
|
|
6
|
+
constructor(message: string);
|
|
7
|
+
}
|
|
8
|
+
export declare function withRetry<T>(fn: () => Promise<T>, options?: {
|
|
9
|
+
maxRetries?: number;
|
|
10
|
+
delayMs?: number;
|
|
11
|
+
backoffMultiplier?: number;
|
|
12
|
+
shouldRetry?: (error: Error) => boolean;
|
|
13
|
+
}): Promise<T>;
|
|
14
|
+
export declare function sleep(ms: number): Promise<void>;
|
|
15
|
+
export declare class CircuitBreaker {
|
|
16
|
+
private readonly failureThreshold;
|
|
17
|
+
private readonly timeoutMs;
|
|
18
|
+
private failures;
|
|
19
|
+
private lastFailureTime;
|
|
20
|
+
private state;
|
|
21
|
+
constructor(failureThreshold?: number, timeoutMs?: number);
|
|
22
|
+
call<T>(fn: () => Promise<T>): Promise<T>;
|
|
23
|
+
private onSuccess;
|
|
24
|
+
private onFailure;
|
|
25
|
+
getState(): string;
|
|
26
|
+
}
|
|
27
|
+
//# sourceMappingURL=error-handling.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"error-handling.d.ts","sourceRoot":"","sources":["../../../src/utils/error-handling.ts"],"names":[],"mappings":"AAAA,qBAAa,cAAe,SAAQ,KAAK;aACQ,SAAS,EAAE,OAAO;gBAAnD,OAAO,EAAE,MAAM,EAAkB,SAAS,GAAE,OAAc;CAIzE;AAED,qBAAa,UAAW,SAAQ,KAAK;gBACrB,OAAO,EAAE,MAAM;CAI9B;AAED,wBAAsB,SAAS,CAAC,CAAC,EAC7B,EAAE,EAAE,MAAM,OAAO,CAAC,CAAC,CAAC,EACpB,OAAO,GAAE;IACL,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,OAAO,CAAC;CACtC,GACP,OAAO,CAAC,CAAC,CAAC,CA0BZ;AAED,wBAAgB,KAAK,CAAC,EAAE,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAE/C;AAED,qBAAa,cAAc;IAMnB,OAAO,CAAC,QAAQ,CAAC,gBAAgB;IACjC,OAAO,CAAC,QAAQ,CAAC,SAAS;IAN9B,OAAO,CAAC,QAAQ,CAAa;IAC7B,OAAO,CAAC,eAAe,CAAa;IACpC,OAAO,CAAC,KAAK,CAA6C;gBAGrC,gBAAgB,GAAE,MAAU,EAC5B,SAAS,GAAE,MAAc;IAGxC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,MAAM,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC;IAqB/C,OAAO,CAAC,SAAS;IASjB,OAAO,CAAC,SAAS;IAUjB,QAAQ,IAAI,MAAM;CAGrB"}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
/** Error type that `withRetry`'s default policy treats as retryable. */
class RetryableError extends Error {
    constructor(message, retryable = true) {
        super(message);
        this.retryable = retryable;
        this.name = "RetryableError";
    }
}

/** Error type that is never retried by the default policy. */
class FatalError extends Error {
    constructor(message) {
        super(message);
        this.name = "FatalError";
    }
}

/**
 * Runs `fn`, retrying failed attempts with exponential backoff.
 *
 * Fixes over the previous build: numeric options use `??` instead of `||`,
 * so an explicit `delayMs: 0` (retry immediately) is honored rather than
 * silently becoming 1000 ms; `maxRetries` is clamped to at least 1 so the
 * loop can never fall through and `throw undefined`.
 *
 * @param {() => Promise<*>} fn Operation to run.
 * @param {{maxRetries?: number, delayMs?: number, backoffMultiplier?: number,
 *          shouldRetry?: (error: Error) => boolean}} [options]
 * @returns {Promise<*>} Result of the first successful attempt.
 * @throws The last error once attempts are exhausted, or immediately when
 *   `shouldRetry` rejects the error.
 */
async function withRetry(fn, options = {}) {
    // Clamp to >= 1 attempt so `lastError` is always set before the final throw.
    const maxRetries = Math.max(1, options.maxRetries ?? 3);
    const delayMs = options.delayMs ?? 1000;
    const backoffMultiplier = options.backoffMultiplier ?? 2;
    const shouldRetry = options.shouldRetry ?? ((error) => error instanceof RetryableError);

    let lastError;
    let currentDelay = delayMs;
    for (let attempt = 1; attempt <= maxRetries; attempt++) {
        try {
            return await fn();
        } catch (error) {
            lastError = error;
            // Non-retryable errors and the final attempt propagate immediately.
            if (!shouldRetry(error) || attempt === maxRetries) {
                throw error;
            }
            console.warn(`Retry attempt ${attempt}/${maxRetries} after ${currentDelay}ms`);
            await sleep(currentDelay);
            currentDelay *= backoffMultiplier;
        }
    }
    throw lastError;
}

/** Resolves after `ms` milliseconds. */
function sleep(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
}

/**
 * Minimal circuit breaker. CLOSED → OPEN after `failureThreshold`
 * consecutive failures; OPEN → HALF_OPEN once `timeoutMs` has elapsed;
 * HALF_OPEN closes again on the first successful probe.
 */
class CircuitBreaker {
    /**
     * @param {number} [failureThreshold=5] Failures before the breaker opens.
     * @param {number} [timeoutMs=60000] Cool-down before a half-open probe.
     */
    constructor(failureThreshold = 5, timeoutMs = 60000) {
        this.failureThreshold = failureThreshold;
        this.timeoutMs = timeoutMs;
        this.failures = 0;
        this.lastFailureTime = 0;
        this.state = "CLOSED";
    }

    /** Executes `fn` through the breaker; rejects immediately while OPEN. */
    async call(fn) {
        if (this.state === "OPEN") {
            if (Date.now() - this.lastFailureTime >= this.timeoutMs) {
                this.state = "HALF_OPEN";
                console.log("🔌 Circuit breaker half-open, testing...");
            } else {
                throw new Error("Circuit breaker is OPEN");
            }
        }
        try {
            const result = await fn();
            this.onSuccess();
            return result;
        } catch (error) {
            this.onFailure();
            throw error;
        }
    }

    /** Success resets the failure count; a half-open probe also closes the breaker. */
    onSuccess() {
        if (this.state === "HALF_OPEN") {
            this.state = "CLOSED";
            console.log("✅ Circuit breaker closed");
        }
        this.failures = 0;
    }

    /** Records a failure and opens the breaker once the threshold is reached. */
    onFailure() {
        this.failures++;
        this.lastFailureTime = Date.now();
        if (this.failures >= this.failureThreshold) {
            this.state = "OPEN";
            console.error(`🔴 Circuit breaker opened after ${this.failures} failures`);
        }
    }

    /** @returns {"CLOSED"|"OPEN"|"HALF_OPEN"} current state */
    getState() {
        return this.state;
    }
}

export { CircuitBreaker, FatalError, RetryableError, sleep, withRetry };
|
|
2
|
+
//# sourceMappingURL=error-handling.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"error-handling.js","sources":["../../../../src/utils/error-handling.ts"],"sourcesContent":["export class RetryableError extends Error {\r\n constructor(message: string, public readonly retryable: boolean = true) {\r\n super(message);\r\n this.name = 'RetryableError';\r\n }\r\n}\r\n\r\nexport class FatalError extends Error {\r\n constructor(message: string) {\r\n super(message);\r\n this.name = 'FatalError';\r\n }\r\n}\r\n\r\nexport async function withRetry<T>(\r\n fn: () => Promise<T>,\r\n options: {\r\n maxRetries?: number;\r\n delayMs?: number;\r\n backoffMultiplier?: number;\r\n shouldRetry?: (error: Error) => boolean;\r\n } = {}\r\n): Promise<T> {\r\n const maxRetries = options.maxRetries || 3;\r\n const delayMs = options.delayMs || 1000;\r\n const backoffMultiplier = options.backoffMultiplier || 2;\r\n const shouldRetry = options.shouldRetry || ((error) => error instanceof RetryableError);\r\n \r\n let lastError: Error;\r\n let currentDelay = delayMs;\r\n \r\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\r\n try {\r\n return await fn();\r\n } catch (error) {\r\n lastError = error as Error;\r\n \r\n if (!shouldRetry(error as Error) || attempt === maxRetries) {\r\n throw error;\r\n }\r\n \r\n console.warn(`Retry attempt ${attempt}/${maxRetries} after ${currentDelay}ms`);\r\n await sleep(currentDelay);\r\n currentDelay *= backoffMultiplier;\r\n }\r\n }\r\n \r\n throw lastError!;\r\n}\r\n\r\nexport function sleep(ms: number): Promise<void> {\r\n return new Promise(resolve => setTimeout(resolve, ms));\r\n}\r\n\r\nexport class CircuitBreaker {\r\n private failures: number = 0;\r\n private lastFailureTime: number = 0;\r\n private state: 'CLOSED' | 'OPEN' | 'HALF_OPEN' = 'CLOSED';\r\n \r\n constructor(\r\n private readonly failureThreshold: number = 5,\r\n private readonly timeoutMs: number = 60000\r\n ) {}\r\n \r\n async call<T>(fn: () => Promise<T>): Promise<T> {\r\n if (this.state === 'OPEN') {\r\n const now = Date.now();\r\n if 
(now - this.lastFailureTime >= this.timeoutMs) {\r\n this.state = 'HALF_OPEN';\r\n console.log('🔌 Circuit breaker half-open, testing...');\r\n } else {\r\n throw new Error('Circuit breaker is OPEN');\r\n }\r\n }\r\n \r\n try {\r\n const result = await fn();\r\n this.onSuccess();\r\n return result;\r\n } catch (error) {\r\n this.onFailure();\r\n throw error;\r\n }\r\n }\r\n \r\n private onSuccess(): void {\r\n if (this.state === 'HALF_OPEN') {\r\n this.state = 'CLOSED';\r\n this.failures = 0;\r\n console.log('✅ Circuit breaker closed');\r\n }\r\n this.failures = 0;\r\n }\r\n \r\n private onFailure(): void {\r\n this.failures++;\r\n this.lastFailureTime = Date.now();\r\n \r\n if (this.failures >= this.failureThreshold) {\r\n this.state = 'OPEN';\r\n console.error(`🔴 Circuit breaker opened after ${this.failures} failures`);\r\n }\r\n }\r\n \r\n getState(): string {\r\n return this.state;\r\n }\r\n}\r\n"],"names":["RetryableError","Error","constructor","message","retryable","super","this","name","FatalError","async","withRetry","fn","options","maxRetries","delayMs","backoffMultiplier","shouldRetry","error","lastError","currentDelay","attempt","console","warn","sleep","ms","Promise","resolve","setTimeout","CircuitBreaker","failureThreshold","timeoutMs","failures","lastFailureTime","state","call","Date","now","log","result","onSuccess","onFailure","getState"],"mappings":"AAAM,MAAOA,UAAuBC,MAChC,WAAAC,CAAYC,EAAiCC,GAAqB,GAC9DC,MAAMF,GADmCG,KAASF,UAATA,EAEzCE,KAAKC,KAAO,gBACf,EAGC,MAAOC,UAAmBP,MAC5B,WAAAC,CAAYC,GACRE,MAAMF,GACNG,KAAKC,KAAO,YACf,EAGEE,eAAeC,EAClBC,EACAC,EAKI,IAEJ,MAAMC,EAAaD,EAAQC,YAAc,EACnCC,EAAUF,EAAQE,SAAW,IAC7BC,EAAoBH,EAAQG,mBAAqB,EACjDC,EAAcJ,EAAQI,aAAW,CAAMC,GAAUA,aAAiBjB,GAExE,IAAIkB,EACAC,EAAeL,EAEnB,IAAK,IAAIM,EAAU,EAAGA,GAAWP,EAAYO,IACzC,IACI,aAAaT,GAChB,CAAC,MAAOM,GAGL,GAFAC,EAAYD,GAEPD,EAAYC,IAAmBG,IAAYP,EAC5C,MAAMI,EAGVI,QAAQC,KAAK,iBAAiBF,KAAWP,WAAoBM,aACvDI,EAAMJ,GACZA,GAAgBJ,CACnB,CAGL,MAAMG,CACV,CAEM,SAAUK,EAAMC,GAClB,OAAO,IAAIC,QAAQC,GAAWC
,WAAWD,EAASF,GACtD,OAEaI,EAKT,WAAA1B,CACqB2B,EAA2B,EAC3BC,EAAoB,KADpBxB,KAAgBuB,iBAAhBA,EACAvB,KAASwB,UAATA,EANbxB,KAAQyB,SAAW,EACnBzB,KAAe0B,gBAAW,EAC1B1B,KAAK2B,MAAoC,QAK7C,CAEJ,UAAMC,CAAQvB,GACV,GAAmB,SAAfL,KAAK2B,MAAkB,CAEvB,KADYE,KAAKC,MACP9B,KAAK0B,iBAAmB1B,KAAKwB,WAInC,MAAM,IAAI7B,MAAM,2BAHhBK,KAAK2B,MAAQ,YACbZ,QAAQgB,IAAI,2CAInB,CAED,IACI,MAAMC,QAAe3B,IAErB,OADAL,KAAKiC,YACED,CACV,CAAC,MAAOrB,GAEL,MADAX,KAAKkC,YACCvB,CACT,CACJ,CAEO,SAAAsB,GACe,cAAfjC,KAAK2B,QACL3B,KAAK2B,MAAQ,SACb3B,KAAKyB,SAAW,EAChBV,QAAQgB,IAAI,6BAEhB/B,KAAKyB,SAAW,CACnB,CAEO,SAAAS,GACJlC,KAAKyB,WACLzB,KAAK0B,gBAAkBG,KAAKC,MAExB9B,KAAKyB,UAAYzB,KAAKuB,mBACtBvB,KAAK2B,MAAQ,OACbZ,QAAQJ,MAAM,mCAAmCX,KAAKyB,qBAE7D,CAED,QAAAU,GACI,OAAOnC,KAAK2B,KACf"}
|
package/package.json
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@async-fusion/data",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "Unified data streaming library for Kafka and Spark with React hooks",
|
|
5
|
+
"main": "dist/cjs/index.js",
|
|
6
|
+
"module": "dist/esm/index.js",
|
|
7
|
+
"types": "dist/types/index.d.ts",
|
|
8
|
+
"exports": {
|
|
9
|
+
".": {
|
|
10
|
+
"import": "./dist/esm/index.js",
|
|
11
|
+
"require": "./dist/cjs/index.js",
|
|
12
|
+
"types": "./dist/types/index.d.ts"
|
|
13
|
+
},
|
|
14
|
+
"./react": {
|
|
15
|
+
"import": "./dist/esm/react/index.js",
|
|
16
|
+
"require": "./dist/cjs/react/index.js",
|
|
17
|
+
"types": "./dist/types/react/index.d.ts"
|
|
18
|
+
}
|
|
19
|
+
},
|
|
20
|
+
"files": [
|
|
21
|
+
"dist",
|
|
22
|
+
"README.md",
|
|
23
|
+
"LICENSE"
|
|
24
|
+
],
|
|
25
|
+
"scripts": {
|
|
26
|
+
"build": "rollup -c",
|
|
27
|
+
"dev": "rollup -c -w",
|
|
28
|
+
"test": "jest",
|
|
29
|
+
"prepublishOnly": "npm run build",
|
|
30
|
+
"type-check": "tsc --noEmit"
|
|
31
|
+
},
|
|
32
|
+
"keywords": [
|
|
33
|
+
"kafka",
|
|
34
|
+
"spark",
|
|
35
|
+
"streaming",
|
|
36
|
+
"realtime",
|
|
37
|
+
"data-pipeline",
|
|
38
|
+
"react-hooks"
|
|
39
|
+
],
|
|
40
|
+
"author": "Your Name",
|
|
41
|
+
"license": "MIT",
|
|
42
|
+
"dependencies": {
|
|
43
|
+
"kafkajs": "^2.2.4"
|
|
44
|
+
},
|
|
45
|
+
"peerDependencies": {
|
|
46
|
+
"react": ">=16.8.0"
|
|
47
|
+
},
|
|
48
|
+
"peerDependenciesMeta": {
|
|
49
|
+
"react": {
|
|
50
|
+
"optional": true
|
|
51
|
+
}
|
|
52
|
+
},
|
|
53
|
+
"devDependencies": {
|
|
54
|
+
"@rollup/plugin-commonjs": "^25.0.0",
|
|
55
|
+
"@rollup/plugin-node-resolve": "^15.0.0",
|
|
56
|
+
"@rollup/plugin-terser": "^1.0.0",
|
|
57
|
+
"@rollup/plugin-typescript": "^11.0.0",
|
|
58
|
+
"@types/jest": "^29.5.0",
|
|
59
|
+
"@types/node": "^20.0.0",
|
|
60
|
+
"@types/react": "^18.2.0",
|
|
61
|
+
"jest": "^29.5.0",
|
|
62
|
+
"rimraf": "^6.1.3",
|
|
63
|
+
"rollup": "^3.20.0",
|
|
64
|
+
"ts-jest": "^29.1.0",
|
|
65
|
+
"tslib": "^2.5.0",
|
|
66
|
+
"typescript": "^5.0.0"
|
|
67
|
+
}
|
|
68
|
+
}
|