@databricks/appkit 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (166) hide show
  1. package/CLAUDE.md +3 -0
  2. package/DCO +25 -0
  3. package/LICENSE +203 -0
  4. package/NOTICE.md +73 -0
  5. package/README.md +35 -0
  6. package/bin/setup-claude.js +190 -0
  7. package/dist/_virtual/rolldown_runtime.js +39 -0
  8. package/dist/analytics/analytics.d.ts +31 -0
  9. package/dist/analytics/analytics.d.ts.map +1 -0
  10. package/dist/analytics/analytics.js +149 -0
  11. package/dist/analytics/analytics.js.map +1 -0
  12. package/dist/analytics/defaults.js +17 -0
  13. package/dist/analytics/defaults.js.map +1 -0
  14. package/dist/analytics/index.js +3 -0
  15. package/dist/analytics/query.js +50 -0
  16. package/dist/analytics/query.js.map +1 -0
  17. package/dist/analytics/types.d.ts +9 -0
  18. package/dist/analytics/types.d.ts.map +1 -0
  19. package/dist/app/index.d.ts +23 -0
  20. package/dist/app/index.d.ts.map +1 -0
  21. package/dist/app/index.js +49 -0
  22. package/dist/app/index.js.map +1 -0
  23. package/dist/appkit/package.js +7 -0
  24. package/dist/appkit/package.js.map +1 -0
  25. package/dist/cache/defaults.js +14 -0
  26. package/dist/cache/defaults.js.map +1 -0
  27. package/dist/cache/index.d.ts +119 -0
  28. package/dist/cache/index.d.ts.map +1 -0
  29. package/dist/cache/index.js +307 -0
  30. package/dist/cache/index.js.map +1 -0
  31. package/dist/cache/storage/defaults.js +16 -0
  32. package/dist/cache/storage/defaults.js.map +1 -0
  33. package/dist/cache/storage/index.js +4 -0
  34. package/dist/cache/storage/memory.js +87 -0
  35. package/dist/cache/storage/memory.js.map +1 -0
  36. package/dist/cache/storage/persistent.js +211 -0
  37. package/dist/cache/storage/persistent.js.map +1 -0
  38. package/dist/connectors/index.js +6 -0
  39. package/dist/connectors/lakebase/client.js +348 -0
  40. package/dist/connectors/lakebase/client.js.map +1 -0
  41. package/dist/connectors/lakebase/defaults.js +13 -0
  42. package/dist/connectors/lakebase/defaults.js.map +1 -0
  43. package/dist/connectors/lakebase/index.js +3 -0
  44. package/dist/connectors/sql-warehouse/client.js +284 -0
  45. package/dist/connectors/sql-warehouse/client.js.map +1 -0
  46. package/dist/connectors/sql-warehouse/defaults.js +12 -0
  47. package/dist/connectors/sql-warehouse/defaults.js.map +1 -0
  48. package/dist/connectors/sql-warehouse/index.js +3 -0
  49. package/dist/core/appkit.d.ts +14 -0
  50. package/dist/core/appkit.d.ts.map +1 -0
  51. package/dist/core/appkit.js +66 -0
  52. package/dist/core/appkit.js.map +1 -0
  53. package/dist/core/index.js +3 -0
  54. package/dist/index.d.ts +15 -0
  55. package/dist/index.js +21 -0
  56. package/dist/index.js.map +1 -0
  57. package/dist/plugin/dev-reader.d.ts +20 -0
  58. package/dist/plugin/dev-reader.d.ts.map +1 -0
  59. package/dist/plugin/dev-reader.js +63 -0
  60. package/dist/plugin/dev-reader.js.map +1 -0
  61. package/dist/plugin/index.js +4 -0
  62. package/dist/plugin/interceptors/cache.js +15 -0
  63. package/dist/plugin/interceptors/cache.js.map +1 -0
  64. package/dist/plugin/interceptors/retry.js +32 -0
  65. package/dist/plugin/interceptors/retry.js.map +1 -0
  66. package/dist/plugin/interceptors/telemetry.js +33 -0
  67. package/dist/plugin/interceptors/telemetry.js.map +1 -0
  68. package/dist/plugin/interceptors/timeout.js +35 -0
  69. package/dist/plugin/interceptors/timeout.js.map +1 -0
  70. package/dist/plugin/plugin.d.ts +43 -0
  71. package/dist/plugin/plugin.d.ts.map +1 -0
  72. package/dist/plugin/plugin.js +119 -0
  73. package/dist/plugin/plugin.js.map +1 -0
  74. package/dist/plugin/to-plugin.d.ts +7 -0
  75. package/dist/plugin/to-plugin.d.ts.map +1 -0
  76. package/dist/plugin/to-plugin.js +12 -0
  77. package/dist/plugin/to-plugin.js.map +1 -0
  78. package/dist/server/base-server.js +24 -0
  79. package/dist/server/base-server.js.map +1 -0
  80. package/dist/server/index.d.ts +100 -0
  81. package/dist/server/index.d.ts.map +1 -0
  82. package/dist/server/index.js +224 -0
  83. package/dist/server/index.js.map +1 -0
  84. package/dist/server/remote-tunnel/denied.html +68 -0
  85. package/dist/server/remote-tunnel/gate.js +51 -0
  86. package/dist/server/remote-tunnel/gate.js.map +1 -0
  87. package/dist/server/remote-tunnel/index.html +165 -0
  88. package/dist/server/remote-tunnel/remote-tunnel-controller.js +100 -0
  89. package/dist/server/remote-tunnel/remote-tunnel-controller.js.map +1 -0
  90. package/dist/server/remote-tunnel/remote-tunnel-manager.js +320 -0
  91. package/dist/server/remote-tunnel/remote-tunnel-manager.js.map +1 -0
  92. package/dist/server/remote-tunnel/wait.html +158 -0
  93. package/dist/server/static-server.js +47 -0
  94. package/dist/server/static-server.js.map +1 -0
  95. package/dist/server/types.d.ts +14 -0
  96. package/dist/server/types.d.ts.map +1 -0
  97. package/dist/server/utils.js +70 -0
  98. package/dist/server/utils.js.map +1 -0
  99. package/dist/server/vite-dev-server.js +103 -0
  100. package/dist/server/vite-dev-server.js.map +1 -0
  101. package/dist/shared/src/cache.d.ts +62 -0
  102. package/dist/shared/src/cache.d.ts.map +1 -0
  103. package/dist/shared/src/execute.d.ts +46 -0
  104. package/dist/shared/src/execute.d.ts.map +1 -0
  105. package/dist/shared/src/plugin.d.ts +50 -0
  106. package/dist/shared/src/plugin.d.ts.map +1 -0
  107. package/dist/shared/src/sql/helpers.d.ts +160 -0
  108. package/dist/shared/src/sql/helpers.d.ts.map +1 -0
  109. package/dist/shared/src/sql/helpers.js +103 -0
  110. package/dist/shared/src/sql/helpers.js.map +1 -0
  111. package/dist/shared/src/sql/types.d.ts +34 -0
  112. package/dist/shared/src/sql/types.d.ts.map +1 -0
  113. package/dist/shared/src/tunnel.d.ts +30 -0
  114. package/dist/shared/src/tunnel.d.ts.map +1 -0
  115. package/dist/stream/arrow-stream-processor.js +154 -0
  116. package/dist/stream/arrow-stream-processor.js.map +1 -0
  117. package/dist/stream/buffers.js +88 -0
  118. package/dist/stream/buffers.js.map +1 -0
  119. package/dist/stream/defaults.js +14 -0
  120. package/dist/stream/defaults.js.map +1 -0
  121. package/dist/stream/index.js +6 -0
  122. package/dist/stream/sse-writer.js +61 -0
  123. package/dist/stream/sse-writer.js.map +1 -0
  124. package/dist/stream/stream-manager.d.ts +27 -0
  125. package/dist/stream/stream-manager.d.ts.map +1 -0
  126. package/dist/stream/stream-manager.js +191 -0
  127. package/dist/stream/stream-manager.js.map +1 -0
  128. package/dist/stream/stream-registry.js +54 -0
  129. package/dist/stream/stream-registry.js.map +1 -0
  130. package/dist/stream/types.js +14 -0
  131. package/dist/stream/types.js.map +1 -0
  132. package/dist/stream/validator.js +25 -0
  133. package/dist/stream/validator.js.map +1 -0
  134. package/dist/telemetry/config.js +20 -0
  135. package/dist/telemetry/config.js.map +1 -0
  136. package/dist/telemetry/index.d.ts +4 -0
  137. package/dist/telemetry/index.js +8 -0
  138. package/dist/telemetry/instrumentations.js +38 -0
  139. package/dist/telemetry/instrumentations.js.map +1 -0
  140. package/dist/telemetry/noop.js +54 -0
  141. package/dist/telemetry/noop.js.map +1 -0
  142. package/dist/telemetry/telemetry-manager.js +113 -0
  143. package/dist/telemetry/telemetry-manager.js.map +1 -0
  144. package/dist/telemetry/telemetry-provider.js +82 -0
  145. package/dist/telemetry/telemetry-provider.js.map +1 -0
  146. package/dist/telemetry/types.d.ts +74 -0
  147. package/dist/telemetry/types.d.ts.map +1 -0
  148. package/dist/type-generator/vite-plugin.d.ts +22 -0
  149. package/dist/type-generator/vite-plugin.d.ts.map +1 -0
  150. package/dist/type-generator/vite-plugin.js +49 -0
  151. package/dist/type-generator/vite-plugin.js.map +1 -0
  152. package/dist/utils/databricks-client-middleware.d.ts +17 -0
  153. package/dist/utils/databricks-client-middleware.d.ts.map +1 -0
  154. package/dist/utils/databricks-client-middleware.js +117 -0
  155. package/dist/utils/databricks-client-middleware.js.map +1 -0
  156. package/dist/utils/env-validator.js +14 -0
  157. package/dist/utils/env-validator.js.map +1 -0
  158. package/dist/utils/index.js +26 -0
  159. package/dist/utils/index.js.map +1 -0
  160. package/dist/utils/merge.js +25 -0
  161. package/dist/utils/merge.js.map +1 -0
  162. package/dist/utils/vite-config-merge.js +22 -0
  163. package/dist/utils/vite-config-merge.js.map +1 -0
  164. package/llms.txt +193 -0
  165. package/package.json +70 -0
  166. package/scripts/postinstall.js +6 -0
@@ -0,0 +1,30 @@
1
import { WebSocket } from "ws";

//#region ../shared/src/tunnel.d.ts
/**
 * Per-connection state for a remote tunnel: the owning user, the approval
 * status of viewers, and bookkeeping for in-flight requests routed over
 * the WebSocket.
 */
interface TunnelConnection {
  /** The WebSocket carrying tunnel traffic for this connection. */
  ws: WebSocket;
  /** Identifier of the user that owns this tunnel. */
  owner: string;
  /** Viewers whose access has been approved. */
  approvedViewers: Set<string>;
  /** Viewers whose access requests are still awaiting a decision. */
  pendingRequests: Set<string>;
  /** Viewers whose access requests were rejected. */
  rejectedViewers: Set<string>;
  /** In-flight fetch requests, keyed by request id. */
  pendingFetches: Map<string, PendingFetch>;
  /** In-flight file reads, keyed by request id. */
  pendingFileReads: Map<string, PendingFileRead>;
  // NOTE(review): presumably the id of the request whose binary body frame is
  // expected next on the socket, or null when none is pending — confirm
  // against remote-tunnel-manager usage.
  waitingForBinaryBody: string | null;
}
/**
 * Promise plumbing for one pending fetch: settlement callbacks plus the
 * timeout handle that bounds how long the tunnel waits for a response.
 */
interface PendingFetch {
  resolve: (data: any) => void;
  reject: (err: Error) => void;
  /** Timer handle used to abandon the request if no response arrives. */
  timeout: NodeJS.Timeout;
  /** Response status/headers, optionally recorded before the body arrives. */
  metadata?: {
    status: number;
    headers: Record<string, any>;
  };
}
/**
 * Promise plumbing for one pending file read over the tunnel.
 */
interface PendingFileRead {
  resolve: (content: string) => void;
  reject: (err: Error) => void;
  /** Timer handle used to abandon the read if no content arrives. */
  timeout: NodeJS.Timeout;
}
//#endregion
export { TunnelConnection };
//# sourceMappingURL=tunnel.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"tunnel.d.ts","names":[],"sources":["../../../../shared/src/tunnel.ts"],"sourcesContent":[],"mappings":";;;UAEiB,gBAAA;MACX;EADW,KAAA,EAAA,MAAA;EAAgB,eAAA,EAGd,GAHc,CAAA,MAAA,CAAA;iBAC3B,EAGa,GAHb,CAAA,MAAA,CAAA;iBAEa,EAEA,GAFA,CAAA,MAAA,CAAA;gBACA,EAED,GAFC,CAAA,MAAA,EAEW,YAFX,CAAA;kBACA,EAEC,GAFD,CAAA,MAAA,EAEa,eAFb,CAAA;sBACW,EAAA,MAAA,GAAA,IAAA;;AACE,UAIf,YAAA,CAJe;SAAZ,EAAA,CAAA,IAAA,EAAA,GAAA,EAAA,GAAA,IAAA;EAAG,MAAA,EAAA,CAAA,GAAA,EAMP,KANO,EAAA,GAAA,IAAA;EAIN,OAAA,EAGN,MAAA,CAAO,OAHW;EAAA,QAAA,CAAA,EAAA;IAEb,MAAA,EAAA,MAAA;IACL,OAAO,EAGL,MAHK,CAAA,MAAA,EAAA,GAAA,CAAA;;;AAOD,UAAA,eAAA,CAAe;EAAA,OAAA,EAAA,CAAA,OAAA,EAAA,MAAA,EAAA,GAAA,IAAA;QAEhB,EAAA,CAAA,GAAA,EAAA,KAAA,EAAA,GAAA,IAAA;SACL,EAAA,MAAA,CAAO,OAAA"}
@@ -0,0 +1,154 @@
1
//#region src/stream/arrow-stream-processor.ts
// Base delay (ms) for exponential retry backoff: 2 ** attempt * BACKOFF_MULTIPLIER.
const BACKOFF_MULTIPLIER = 1e3;
/**
 * Downloads Arrow IPC chunks from external links and concatenates the raw
 * bytes without parsing them server-side ("zero-copy proxy").
 */
var ArrowStreamProcessor = class ArrowStreamProcessor {
  static {
    this.DEFAULT_MAX_CONCURRENT_DOWNLOADS = 5;
  }
  static {
    this.DEFAULT_TIMEOUT = 3e4;
  }
  static {
    this.DEFAULT_RETRIES = 3;
  }
  /**
   * @param options - download tuning; any field left undefined falls back to
   * the class defaults via the `??` normalization below.
   */
  constructor(options = {
    maxConcurrentDownloads: ArrowStreamProcessor.DEFAULT_MAX_CONCURRENT_DOWNLOADS,
    timeout: ArrowStreamProcessor.DEFAULT_TIMEOUT,
    retries: ArrowStreamProcessor.DEFAULT_RETRIES
  }) {
    this.options = options;
    // Re-assign with per-field fallbacks so a partial options object still
    // gets defaults for the fields it omitted.
    this.options = {
      maxConcurrentDownloads: options.maxConcurrentDownloads ?? ArrowStreamProcessor.DEFAULT_MAX_CONCURRENT_DOWNLOADS,
      timeout: options.timeout ?? ArrowStreamProcessor.DEFAULT_TIMEOUT,
      retries: options.retries ?? ArrowStreamProcessor.DEFAULT_RETRIES
    };
  }
  /**
   * Process Arrow chunks using zero-copy proxy pattern.
   *
   * Downloads raw IPC bytes from external links and concatenates them
   * without parsing into Arrow Tables on the server. This reduces:
   * - Memory usage by ~50% (no parsed Table representation)
   * - CPU usage (no tableFromIPC/tableToIPC calls)
   *
   * The client is responsible for parsing the IPC bytes.
   *
   * @param chunks - External links to Arrow IPC data
   * @param schema - Schema from Databricks manifest
   * @param signal - Optional abort signal
   * @returns Raw concatenated IPC bytes with schema
   * @throws Error when no chunks are provided or any chunk fails to download
   */
  async processChunks(chunks, schema, signal) {
    if (chunks.length === 0) throw new Error("No Arrow chunks provided");
    const buffers = await this.downloadChunksRaw(chunks, signal);
    return {
      data: this.concatenateBuffers(buffers),
      schema
    };
  }
  /**
   * Download all chunks as raw bytes with concurrency control.
   */
  async downloadChunksRaw(chunks, signal) {
    const semaphore = new Semaphore(this.options.maxConcurrentDownloads);
    const downloadPromises = chunks.map(async (chunk) => {
      await semaphore.acquire();
      try {
        return await this.downloadChunkRaw(chunk, signal);
      } finally {
        semaphore.release();
      }
    });
    // Promise.all is fail-fast: the first chunk failure rejects the batch.
    return Promise.all(downloadPromises);
  }
  /**
   * Download a single chunk as raw bytes with retry logic.
   * Each attempt gets its own timeout controller; the caller-supplied signal
   * (if any) is combined with it so either source can abort the fetch.
   */
  async downloadChunkRaw(chunk, signal) {
    let lastError = null;
    for (let attempt = 0; attempt < this.options.retries; attempt++) {
      const timeoutController = new AbortController();
      const timeoutId = setTimeout(() => {
        timeoutController.abort();
      }, this.options.timeout);
      const combinedSignal = signal ? this.combineAbortSignals(signal, timeoutController.signal) : timeoutController.signal;
      try {
        const externalLink = chunk.external_link;
        if (!externalLink) {
          console.error("External link is required", chunk);
          // FIX: record the failure before retrying. Previously lastError
          // stayed null on this path, so after exhausting retries the thrown
          // message ended with "attempts: undefined" instead of the cause.
          lastError = new Error(`External link is required for chunk ${chunk.chunk_index}`);
          continue;
        }
        const response = await fetch(externalLink, { signal: combinedSignal });
        if (!response.ok) throw new Error(`Failed to download chunk ${chunk.chunk_index}: ${response.status} ${response.statusText}`);
        const arrayBuffer = await response.arrayBuffer();
        return new Uint8Array(arrayBuffer);
      } catch (error) {
        lastError = error;
        // A timeout abort surfaces as a generic AbortError; replace it with a
        // message that names the chunk and the configured timeout.
        if (timeoutController.signal.aborted) lastError = /* @__PURE__ */ new Error(`Chunk ${chunk.chunk_index} download timed out after ${this.options.timeout}ms`);
        // Caller-initiated aborts are terminal: do not retry.
        if (signal?.aborted) throw new Error("Arrow stream processing was aborted");
        if (attempt < this.options.retries - 1) await this.delay(2 ** attempt * BACKOFF_MULTIPLIER);
      } finally {
        clearTimeout(timeoutId);
      }
    }
    throw new Error(`Failed to download chunk ${chunk.chunk_index} after ${this.options.retries} attempts: ${lastError?.message}`);
  }
  /**
   * Concatenate multiple Uint8Array buffers into a single buffer.
   * Pre-allocates the result array for efficiency.
   */
  concatenateBuffers(buffers) {
    if (buffers.length === 0) throw new Error("No buffers to concatenate");
    if (buffers.length === 1) return buffers[0];
    const totalLength = buffers.reduce((sum, buf) => sum + buf.length, 0);
    const result = new Uint8Array(totalLength);
    let offset = 0;
    for (const buffer of buffers) {
      result.set(buffer, offset);
      offset += buffer.length;
    }
    return result;
  }
  /**
   * Combines multiple AbortSignals into one.
   * The combined signal aborts when any of the input signals abort.
   */
  combineAbortSignals(...signals) {
    const controller = new AbortController();
    for (const signal of signals) {
      if (signal.aborted) {
        controller.abort();
        return controller.signal;
      }
      signal.addEventListener("abort", () => controller.abort(), { once: true });
    }
    return controller.signal;
  }
  /** Resolve after `ms` milliseconds (used for retry backoff). */
  delay(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }
};
130
// Minimal counting semaphore used to cap concurrent chunk downloads.
// Waiters are resumed in FIFO order.
var Semaphore = class {
  /** @param permits - number of callers allowed in concurrently */
  constructor(permits) {
    this.waiting = [];
    this.permits = permits;
  }
  /** Take a permit, or suspend until release() hands one over. */
  async acquire() {
    if (this.permits <= 0) {
      return new Promise((resolve) => {
        this.waiting.push(resolve);
      });
    }
    this.permits -= 1;
  }
  /** Wake the oldest waiter if any; otherwise return the permit to the pool. */
  release() {
    const next = this.waiting.shift();
    if (next !== undefined) {
      next();
      return;
    }
    this.permits += 1;
  }
};
151
+
152
+ //#endregion
153
+ export { ArrowStreamProcessor };
154
+ //# sourceMappingURL=arrow-stream-processor.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"arrow-stream-processor.js","names":["options: ArrowStreamOptions","lastError: Error | null"],"sources":["../../src/stream/arrow-stream-processor.ts"],"sourcesContent":["import type { sql } from \"@databricks/sdk-experimental\";\n\ntype ResultManifest = sql.ResultManifest;\ntype ExternalLink = sql.ExternalLink;\n\nexport interface ArrowStreamOptions {\n maxConcurrentDownloads: number;\n timeout: number;\n retries: number;\n}\n\n/**\n * Result from zero-copy Arrow chunk processing.\n * Contains raw IPC bytes without server-side parsing.\n */\nexport interface ArrowRawResult {\n /** Concatenated raw Arrow IPC bytes */\n data: Uint8Array;\n /** Schema from Databricks manifest (not parsed from Arrow) */\n schema: ResultManifest[\"schema\"];\n}\n\nconst BACKOFF_MULTIPLIER = 1000;\n\nexport class ArrowStreamProcessor {\n static readonly DEFAULT_MAX_CONCURRENT_DOWNLOADS = 5;\n static readonly DEFAULT_TIMEOUT = 30000;\n static readonly DEFAULT_RETRIES = 3;\n\n constructor(\n private options: ArrowStreamOptions = {\n maxConcurrentDownloads:\n ArrowStreamProcessor.DEFAULT_MAX_CONCURRENT_DOWNLOADS,\n timeout: ArrowStreamProcessor.DEFAULT_TIMEOUT,\n retries: ArrowStreamProcessor.DEFAULT_RETRIES,\n },\n ) {\n this.options = {\n maxConcurrentDownloads:\n options.maxConcurrentDownloads ??\n ArrowStreamProcessor.DEFAULT_MAX_CONCURRENT_DOWNLOADS,\n timeout: options.timeout ?? ArrowStreamProcessor.DEFAULT_TIMEOUT,\n retries: options.retries ?? ArrowStreamProcessor.DEFAULT_RETRIES,\n };\n }\n\n /**\n * Process Arrow chunks using zero-copy proxy pattern.\n *\n * Downloads raw IPC bytes from external links and concatenates them\n * without parsing into Arrow Tables on the server. 
This reduces:\n * - Memory usage by ~50% (no parsed Table representation)\n * - CPU usage (no tableFromIPC/tableToIPC calls)\n *\n * The client is responsible for parsing the IPC bytes.\n *\n * @param chunks - External links to Arrow IPC data\n * @param schema - Schema from Databricks manifest\n * @param signal - Optional abort signal\n * @returns Raw concatenated IPC bytes with schema\n */\n async processChunks(\n chunks: ExternalLink[],\n schema: ResultManifest[\"schema\"],\n signal?: AbortSignal,\n ): Promise<ArrowRawResult> {\n if (chunks.length === 0) {\n throw new Error(\"No Arrow chunks provided\");\n }\n\n const buffers = await this.downloadChunksRaw(chunks, signal);\n const data = this.concatenateBuffers(buffers);\n\n return { data, schema };\n }\n\n /**\n * Download all chunks as raw bytes with concurrency control.\n */\n private async downloadChunksRaw(\n chunks: ExternalLink[],\n signal?: AbortSignal,\n ): Promise<Uint8Array[]> {\n const semaphore = new Semaphore(this.options.maxConcurrentDownloads);\n\n const downloadPromises = chunks.map(async (chunk) => {\n await semaphore.acquire();\n try {\n return await this.downloadChunkRaw(chunk, signal);\n } finally {\n semaphore.release();\n }\n });\n\n return Promise.all(downloadPromises);\n }\n\n /**\n * Download a single chunk as raw bytes with retry logic.\n */\n private async downloadChunkRaw(\n chunk: ExternalLink,\n signal?: AbortSignal,\n ): Promise<Uint8Array> {\n let lastError: Error | null = null;\n\n for (let attempt = 0; attempt < this.options.retries; attempt++) {\n const timeoutController = new AbortController();\n const timeoutId = setTimeout(() => {\n timeoutController.abort();\n }, this.options.timeout);\n\n const combinedSignal = signal\n ? 
this.combineAbortSignals(signal, timeoutController.signal)\n : timeoutController.signal;\n\n try {\n const externalLink = chunk.external_link;\n if (!externalLink) {\n console.error(\"External link is required\", chunk);\n continue;\n }\n\n const response = await fetch(externalLink, {\n signal: combinedSignal,\n });\n\n if (!response.ok) {\n throw new Error(\n `Failed to download chunk ${chunk.chunk_index}: ${response.status} ${response.statusText}`,\n );\n }\n\n const arrayBuffer = await response.arrayBuffer();\n return new Uint8Array(arrayBuffer);\n } catch (error) {\n lastError = error as Error;\n\n if (timeoutController.signal.aborted) {\n lastError = new Error(\n `Chunk ${chunk.chunk_index} download timed out after ${this.options.timeout}ms`,\n );\n }\n\n if (signal?.aborted) {\n throw new Error(\"Arrow stream processing was aborted\");\n }\n\n if (attempt < this.options.retries - 1) {\n await this.delay(2 ** attempt * BACKOFF_MULTIPLIER);\n }\n } finally {\n clearTimeout(timeoutId);\n }\n }\n\n throw new Error(\n `Failed to download chunk ${chunk.chunk_index} after ${this.options.retries} attempts: ${lastError?.message}`,\n );\n }\n\n /**\n * Concatenate multiple Uint8Array buffers into a single buffer.\n * Pre-allocates the result array for efficiency.\n */\n private concatenateBuffers(buffers: Uint8Array[]): Uint8Array {\n if (buffers.length === 0) {\n throw new Error(\"No buffers to concatenate\");\n }\n\n if (buffers.length === 1) {\n return buffers[0];\n }\n\n const totalLength = buffers.reduce((sum, buf) => sum + buf.length, 0);\n const result = new Uint8Array(totalLength);\n\n let offset = 0;\n for (const buffer of buffers) {\n result.set(buffer, offset);\n offset += buffer.length;\n }\n\n return result;\n }\n\n /**\n * Combines multiple AbortSignals into one.\n * The combined signal aborts when any of the input signals abort.\n */\n private combineAbortSignals(...signals: AbortSignal[]): AbortSignal {\n const controller = new AbortController();\n\n 
for (const signal of signals) {\n if (signal.aborted) {\n controller.abort();\n return controller.signal;\n }\n signal.addEventListener(\"abort\", () => controller.abort(), {\n once: true,\n });\n }\n\n return controller.signal;\n }\n\n private delay(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n }\n}\n\nclass Semaphore {\n private permits: number;\n private waiting: (() => void)[] = [];\n\n constructor(permits: number) {\n this.permits = permits;\n }\n\n async acquire(): Promise<void> {\n if (this.permits > 0) {\n this.permits--;\n return;\n }\n\n return new Promise<void>((resolve) => {\n this.waiting.push(resolve);\n });\n }\n\n release(): void {\n if (this.waiting.length > 0) {\n const next = this.waiting.shift();\n\n if (next) {\n next();\n }\n } else {\n this.permits++;\n }\n }\n}\n"],"mappings":";AAsBA,MAAM,qBAAqB;AAE3B,IAAa,uBAAb,MAAa,qBAAqB;;0CACmB;;;yBACjB;;;yBACA;;CAElC,YACE,AAAQA,UAA8B;EACpC,wBACE,qBAAqB;EACvB,SAAS,qBAAqB;EAC9B,SAAS,qBAAqB;EAC/B,EACD;EANQ;AAOR,OAAK,UAAU;GACb,wBACE,QAAQ,0BACR,qBAAqB;GACvB,SAAS,QAAQ,WAAW,qBAAqB;GACjD,SAAS,QAAQ,WAAW,qBAAqB;GAClD;;;;;;;;;;;;;;;;;CAkBH,MAAM,cACJ,QACA,QACA,QACyB;AACzB,MAAI,OAAO,WAAW,EACpB,OAAM,IAAI,MAAM,2BAA2B;EAG7C,MAAM,UAAU,MAAM,KAAK,kBAAkB,QAAQ,OAAO;AAG5D,SAAO;GAAE,MAFI,KAAK,mBAAmB,QAAQ;GAE9B;GAAQ;;;;;CAMzB,MAAc,kBACZ,QACA,QACuB;EACvB,MAAM,YAAY,IAAI,UAAU,KAAK,QAAQ,uBAAuB;EAEpE,MAAM,mBAAmB,OAAO,IAAI,OAAO,UAAU;AACnD,SAAM,UAAU,SAAS;AACzB,OAAI;AACF,WAAO,MAAM,KAAK,iBAAiB,OAAO,OAAO;aACzC;AACR,cAAU,SAAS;;IAErB;AAEF,SAAO,QAAQ,IAAI,iBAAiB;;;;;CAMtC,MAAc,iBACZ,OACA,QACqB;EACrB,IAAIC,YAA0B;AAE9B,OAAK,IAAI,UAAU,GAAG,UAAU,KAAK,QAAQ,SAAS,WAAW;GAC/D,MAAM,oBAAoB,IAAI,iBAAiB;GAC/C,MAAM,YAAY,iBAAiB;AACjC,sBAAkB,OAAO;MACxB,KAAK,QAAQ,QAAQ;GAExB,MAAM,iBAAiB,SACnB,KAAK,oBAAoB,QAAQ,kBAAkB,OAAO,GAC1D,kBAAkB;AAEtB,OAAI;IACF,MAAM,eAAe,MAAM;AAC3B,QAAI,CAAC,cAAc;AACjB,aAAQ,MAAM,6BAA6B,MAAM;AACjD;;IAGF,MAAM,WAAW,MAAM,MAAM,cAAc,EACzC,QAAQ,gBACT,CAAC;AAEF,QAAI,CAAC,SAAS,GACZ,OAAM,IAAI,MACR,4BAA4
B,MAAM,YAAY,IAAI,SAAS,OAAO,GAAG,SAAS,aAC/E;IAGH,MAAM,cAAc,MAAM,SAAS,aAAa;AAChD,WAAO,IAAI,WAAW,YAAY;YAC3B,OAAO;AACd,gBAAY;AAEZ,QAAI,kBAAkB,OAAO,QAC3B,6BAAY,IAAI,MACd,SAAS,MAAM,YAAY,4BAA4B,KAAK,QAAQ,QAAQ,IAC7E;AAGH,QAAI,QAAQ,QACV,OAAM,IAAI,MAAM,sCAAsC;AAGxD,QAAI,UAAU,KAAK,QAAQ,UAAU,EACnC,OAAM,KAAK,MAAM,KAAK,UAAU,mBAAmB;aAE7C;AACR,iBAAa,UAAU;;;AAI3B,QAAM,IAAI,MACR,4BAA4B,MAAM,YAAY,SAAS,KAAK,QAAQ,QAAQ,aAAa,WAAW,UACrG;;;;;;CAOH,AAAQ,mBAAmB,SAAmC;AAC5D,MAAI,QAAQ,WAAW,EACrB,OAAM,IAAI,MAAM,4BAA4B;AAG9C,MAAI,QAAQ,WAAW,EACrB,QAAO,QAAQ;EAGjB,MAAM,cAAc,QAAQ,QAAQ,KAAK,QAAQ,MAAM,IAAI,QAAQ,EAAE;EACrE,MAAM,SAAS,IAAI,WAAW,YAAY;EAE1C,IAAI,SAAS;AACb,OAAK,MAAM,UAAU,SAAS;AAC5B,UAAO,IAAI,QAAQ,OAAO;AAC1B,aAAU,OAAO;;AAGnB,SAAO;;;;;;CAOT,AAAQ,oBAAoB,GAAG,SAAqC;EAClE,MAAM,aAAa,IAAI,iBAAiB;AAExC,OAAK,MAAM,UAAU,SAAS;AAC5B,OAAI,OAAO,SAAS;AAClB,eAAW,OAAO;AAClB,WAAO,WAAW;;AAEpB,UAAO,iBAAiB,eAAe,WAAW,OAAO,EAAE,EACzD,MAAM,MACP,CAAC;;AAGJ,SAAO,WAAW;;CAGpB,AAAQ,MAAM,IAA2B;AACvC,SAAO,IAAI,SAAS,YAAY,WAAW,SAAS,GAAG,CAAC;;;AAI5D,IAAM,YAAN,MAAgB;CAId,YAAY,SAAiB;iBAFK,EAAE;AAGlC,OAAK,UAAU;;CAGjB,MAAM,UAAyB;AAC7B,MAAI,KAAK,UAAU,GAAG;AACpB,QAAK;AACL;;AAGF,SAAO,IAAI,SAAe,YAAY;AACpC,QAAK,QAAQ,KAAK,QAAQ;IAC1B;;CAGJ,UAAgB;AACd,MAAI,KAAK,QAAQ,SAAS,GAAG;GAC3B,MAAM,OAAO,KAAK,QAAQ,OAAO;AAEjC,OAAI,KACF,OAAM;QAGR,MAAK"}
@@ -0,0 +1,88 @@
1
//#region src/stream/buffers.ts
// Generic fixed-capacity ring buffer with O(1) key lookup via a side Map.
// Adding past capacity evicts the item in the slot about to be written.
var RingBuffer = class {
  /**
   * @param capacity - maximum number of items held (must be > 0)
   * @param keyExtractor - derives the lookup key for an item
   * @throws Error when capacity is not positive
   */
  constructor(capacity, keyExtractor) {
    if (capacity <= 0) throw new Error("Capacity must be greater than 0");
    this.capacity = capacity;
    this.buffer = new Array(capacity).fill(null);
    this.writeIndex = 0;
    this.size = 0;
    this.keyExtractor = keyExtractor;
    this.keyIndex = /* @__PURE__ */ new Map();
  }
  // Insert an item: update in place when the key already exists, otherwise
  // write at writeIndex, evicting whatever occupied that slot.
  add(item) {
    const key = this.keyExtractor(item);
    const existingIndex = this.keyIndex.get(key);
    if (existingIndex !== void 0) {
      this.buffer[existingIndex] = item;
      return;
    }
    const evicted = this.buffer[this.writeIndex];
    if (evicted !== null) {
      const evictedKey = this.keyExtractor(evicted);
      this.keyIndex.delete(evictedKey);
    }
    this.buffer[this.writeIndex] = item;
    this.keyIndex.set(key, this.writeIndex);
    this.writeIndex = (this.writeIndex + 1) % this.capacity;
    this.size = Math.min(this.size + 1, this.capacity);
  }
  // Look up an item by key, or null when absent.
  get(key) {
    const index = this.keyIndex.get(key);
    if (index === void 0) return null;
    return this.buffer[index];
  }
  // Whether an item with this key is currently stored.
  has(key) {
    return this.keyIndex.has(key);
  }
  // Remove an item by key, leaving a null hole in its slot.
  remove(key) {
    const index = this.keyIndex.get(key);
    if (index === void 0) return;
    this.buffer[index] = null;
    this.keyIndex.delete(key);
    this.size = Math.max(this.size - 1, 0);
  }
  // Return all live items in insertion order.
  getAll() {
    const result = [];
    // BUG FIX: the previous window computation (writeIndex - size + i) went
    // stale once remove() shrank `size`, dropping the oldest live items from
    // the returned list. Scanning the full capacity starting at writeIndex
    // (the oldest slot) preserves insertion order and skips removed (null)
    // slots naturally.
    for (let i = 0; i < this.capacity; i++) {
      const index = (this.writeIndex + i) % this.capacity;
      const item = this.buffer[index];
      if (item !== null) result.push(item);
    }
    return result;
  }
  // Number of live items (capped at capacity).
  getSize() {
    return this.size;
  }
  // Reset the buffer to its empty state.
  clear() {
    this.buffer = new Array(this.capacity).fill(null);
    this.keyIndex.clear();
    this.writeIndex = 0;
    this.size = 0;
  }
};
63
// Ring buffer specialized for buffered SSE events, keyed by event id.
var EventRingBuffer = class {
  /** @param capacity - maximum number of events retained (default 100) */
  constructor(capacity = 100) {
    this.buffer = new RingBuffer(capacity, (event) => event.id);
  }
  /** Store an event; an event with the same id is replaced in place. */
  add(event) {
    this.buffer.add(event);
  }
  /** Whether an event with this id is currently buffered. */
  has(eventId) {
    return this.buffer.has(eventId);
  }
  /**
   * Return every buffered event after `lastEventId`, in order.
   * Returns an empty list when the id is not present in the buffer.
   */
  getEventsSince(lastEventId) {
    const events = this.buffer.getAll();
    const marker = events.findIndex((event) => event.id === lastEventId);
    return marker === -1 ? [] : events.slice(marker + 1);
  }
  /** Drop all buffered events. */
  clear() {
    this.buffer.clear();
  }
};
85
+
86
+ //#endregion
87
+ export { EventRingBuffer, RingBuffer };
88
+ //# sourceMappingURL=buffers.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"buffers.js","names":["result: T[]","result: BufferedEvent[]"],"sources":["../../src/stream/buffers.ts"],"sourcesContent":["import type { BufferedEvent } from \"./types\";\n\n// generic ring buffer implementation\nexport class RingBuffer<T> {\n public buffer: (T | null)[];\n public capacity: number;\n private writeIndex: number;\n private size: number;\n private keyExtractor: (item: T) => string;\n private keyIndex: Map<string, number>;\n\n constructor(capacity: number, keyExtractor: (item: T) => string) {\n if (capacity <= 0) {\n throw new Error(\"Capacity must be greater than 0\");\n }\n\n this.capacity = capacity;\n this.buffer = new Array(capacity).fill(null);\n this.writeIndex = 0;\n this.size = 0;\n this.keyExtractor = keyExtractor;\n this.keyIndex = new Map();\n }\n\n // add an item to the buffer\n add(item: T): void {\n const key = this.keyExtractor(item);\n\n // check if item already exists\n const existingIndex = this.keyIndex.get(key);\n if (existingIndex !== undefined) {\n // update existing item\n this.buffer[existingIndex] = item;\n return;\n }\n\n // evict least recently used item if at capacity\n const evicted = this.buffer[this.writeIndex];\n if (evicted !== null) {\n const evictedKey = this.keyExtractor(evicted);\n this.keyIndex.delete(evictedKey);\n }\n\n // add new item\n this.buffer[this.writeIndex] = item;\n this.keyIndex.set(key, this.writeIndex);\n\n // update write index and size\n this.writeIndex = (this.writeIndex + 1) % this.capacity;\n this.size = Math.min(this.size + 1, this.capacity);\n }\n\n // get an item from the buffer\n get(key: string): T | null {\n const index = this.keyIndex.get(key);\n if (index === undefined) return null;\n\n return this.buffer[index];\n }\n\n // check if an item exists in the buffer\n has(key: string): boolean {\n return this.keyIndex.has(key);\n }\n\n // remove an item from the buffer\n remove(key: string): void {\n const index = this.keyIndex.get(key);\n if (index === undefined) 
return;\n\n // remove item from buffer\n this.buffer[index] = null;\n this.keyIndex.delete(key);\n\n // update size\n this.size = Math.max(this.size - 1, 0);\n }\n\n // get all items from the buffer\n getAll(): T[] {\n const result: T[] = [];\n\n // iterate over buffer in order of insertion\n for (let i = 0; i < this.size; i++) {\n // calculate index of item in buffer\n const index =\n (this.writeIndex - this.size + i + this.capacity) % this.capacity;\n // add item to result if not null\n const item = this.buffer[index];\n if (item !== null) {\n result.push(item);\n }\n }\n return result;\n }\n\n // get the size of the buffer\n getSize(): number {\n return this.size;\n }\n\n // clear the buffer\n clear(): void {\n this.buffer = new Array(this.capacity).fill(null);\n this.keyIndex.clear();\n this.writeIndex = 0;\n this.size = 0;\n }\n}\n\n// event ring buffer implementation\nexport class EventRingBuffer {\n private buffer: RingBuffer<BufferedEvent>;\n\n constructor(capacity: number = 100) {\n this.buffer = new RingBuffer<BufferedEvent>(capacity, (event) => event.id);\n }\n\n // add an event to the buffer\n add(event: BufferedEvent): void {\n this.buffer.add(event);\n }\n\n // check if an event exists in the buffer\n has(eventId: string): boolean {\n return this.buffer.has(eventId);\n }\n\n // get all events since a given event id\n getEventsSince(lastEventId: string): BufferedEvent[] {\n const allEvents = this.buffer.getAll();\n const result: BufferedEvent[] = [];\n // flag to track if we've found the last event\n let foundLastEvent = false;\n\n // iterate over all events\n for (const event of allEvents) {\n // if found, add to result\n if (foundLastEvent) {\n result.push(event);\n // if not found, check if it's the last event\n } else if (event.id === lastEventId) {\n foundLastEvent = true;\n }\n }\n return result;\n }\n\n clear(): void {\n this.buffer.clear();\n 
}\n}\n"],"mappings":";AAGA,IAAa,aAAb,MAA2B;CAQzB,YAAY,UAAkB,cAAmC;AAC/D,MAAI,YAAY,EACd,OAAM,IAAI,MAAM,kCAAkC;AAGpD,OAAK,WAAW;AAChB,OAAK,SAAS,IAAI,MAAM,SAAS,CAAC,KAAK,KAAK;AAC5C,OAAK,aAAa;AAClB,OAAK,OAAO;AACZ,OAAK,eAAe;AACpB,OAAK,2BAAW,IAAI,KAAK;;CAI3B,IAAI,MAAe;EACjB,MAAM,MAAM,KAAK,aAAa,KAAK;EAGnC,MAAM,gBAAgB,KAAK,SAAS,IAAI,IAAI;AAC5C,MAAI,kBAAkB,QAAW;AAE/B,QAAK,OAAO,iBAAiB;AAC7B;;EAIF,MAAM,UAAU,KAAK,OAAO,KAAK;AACjC,MAAI,YAAY,MAAM;GACpB,MAAM,aAAa,KAAK,aAAa,QAAQ;AAC7C,QAAK,SAAS,OAAO,WAAW;;AAIlC,OAAK,OAAO,KAAK,cAAc;AAC/B,OAAK,SAAS,IAAI,KAAK,KAAK,WAAW;AAGvC,OAAK,cAAc,KAAK,aAAa,KAAK,KAAK;AAC/C,OAAK,OAAO,KAAK,IAAI,KAAK,OAAO,GAAG,KAAK,SAAS;;CAIpD,IAAI,KAAuB;EACzB,MAAM,QAAQ,KAAK,SAAS,IAAI,IAAI;AACpC,MAAI,UAAU,OAAW,QAAO;AAEhC,SAAO,KAAK,OAAO;;CAIrB,IAAI,KAAsB;AACxB,SAAO,KAAK,SAAS,IAAI,IAAI;;CAI/B,OAAO,KAAmB;EACxB,MAAM,QAAQ,KAAK,SAAS,IAAI,IAAI;AACpC,MAAI,UAAU,OAAW;AAGzB,OAAK,OAAO,SAAS;AACrB,OAAK,SAAS,OAAO,IAAI;AAGzB,OAAK,OAAO,KAAK,IAAI,KAAK,OAAO,GAAG,EAAE;;CAIxC,SAAc;EACZ,MAAMA,SAAc,EAAE;AAGtB,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,MAAM,KAAK;GAElC,MAAM,SACH,KAAK,aAAa,KAAK,OAAO,IAAI,KAAK,YAAY,KAAK;GAE3D,MAAM,OAAO,KAAK,OAAO;AACzB,OAAI,SAAS,KACX,QAAO,KAAK,KAAK;;AAGrB,SAAO;;CAIT,UAAkB;AAChB,SAAO,KAAK;;CAId,QAAc;AACZ,OAAK,SAAS,IAAI,MAAM,KAAK,SAAS,CAAC,KAAK,KAAK;AACjD,OAAK,SAAS,OAAO;AACrB,OAAK,aAAa;AAClB,OAAK,OAAO;;;AAKhB,IAAa,kBAAb,MAA6B;CAG3B,YAAY,WAAmB,KAAK;AAClC,OAAK,SAAS,IAAI,WAA0B,WAAW,UAAU,MAAM,GAAG;;CAI5E,IAAI,OAA4B;AAC9B,OAAK,OAAO,IAAI,MAAM;;CAIxB,IAAI,SAA0B;AAC5B,SAAO,KAAK,OAAO,IAAI,QAAQ;;CAIjC,eAAe,aAAsC;EACnD,MAAM,YAAY,KAAK,OAAO,QAAQ;EACtC,MAAMC,SAA0B,EAAE;EAElC,IAAI,iBAAiB;AAGrB,OAAK,MAAM,SAAS,UAElB,KAAI,eACF,QAAO,KAAK,MAAM;WAET,MAAM,OAAO,YACtB,kBAAiB;AAGrB,SAAO;;CAGT,QAAc;AACZ,OAAK,OAAO,OAAO"}
@@ -0,0 +1,14 @@
1
//#region src/stream/defaults.ts
// Default tuning values for the SSE streaming subsystem. Consumers override
// any of these via StreamConfig; unset fields fall back to the values below.
const streamDefaults = {
	// events retained per stream for Last-Event-ID replay
	bufferSize: 100,
	// largest serialized event accepted (1MB)
	maxEventSize: 1024 * 1024,
	// how long a completed stream's buffer is kept (10 minutes)
	bufferTTL: 10 * 60 * 1000,
	// sweep cadence for stale buffers (5 minutes)
	cleanupInterval: 5 * 60 * 1000,
	// upper bound on persistent buffers (10000 buffers)
	maxPersistentBuffers: 10000,
	// keep-alive comment frame cadence (10 seconds)
	heartbeatInterval: 10 * 1000,
	// cap on concurrently registered streams (1000 streams)
	maxActiveStreams: 1000
};

//#endregion
export { streamDefaults };
//# sourceMappingURL=defaults.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"defaults.js","names":[],"sources":["../../src/stream/defaults.ts"],"sourcesContent":["export const streamDefaults = {\n bufferSize: 100,\n maxEventSize: 1024 * 1024, // 1MB\n bufferTTL: 10 * 60 * 1000, // 10 minutes\n cleanupInterval: 5 * 60 * 1000, // 5 minutes\n maxPersistentBuffers: 10000, // 10000 buffers\n heartbeatInterval: 10 * 1000, // 10 seconds\n maxActiveStreams: 1000, // 1000 streams\n} as const;\n"],"mappings":";AAAA,MAAa,iBAAiB;CAC5B,YAAY;CACZ,cAAc,OAAO;CACrB,WAAW,MAAU;CACrB,iBAAiB,MAAS;CAC1B,sBAAsB;CACtB,mBAAmB,KAAK;CACxB,kBAAkB;CACnB"}
@@ -0,0 +1,6 @@
1
+ import { ArrowStreamProcessor } from "./arrow-stream-processor.js";
2
+ import { streamDefaults } from "./defaults.js";
3
+ import { SSEWarningCode } from "./types.js";
4
+ import { StreamManager } from "./stream-manager.js";
5
+
6
+ export { };
@@ -0,0 +1,61 @@
1
+ import { streamDefaults } from "./defaults.js";
2
+ import { SSEErrorCode, SSEWarningCode } from "./types.js";
3
+ import { StreamValidator } from "./validator.js";
4
+
5
//#region src/stream/sse-writer.ts
// Serializes events onto a Server-Sent Events response: headers, id/event/data
// frames, error frames, buffered-event replay and keep-alive heartbeats.
var SSEWriter = class {
	// Apply the standard SSE response headers and flush them if supported.
	setupHeaders(res) {
		const sseHeaders = [
			["Content-Type", "text/event-stream"],
			["Cache-Control", "no-cache"],
			["Connection", "keep-alive"],
			["Content-Encoding", "none"]
		];
		for (const [name, value] of sseHeaders) res.setHeader(name, value);
		res.flushHeaders?.();
	}
	// Serialize one event as an id/event/data frame. The event type is
	// sanitized via StreamValidator before being written.
	writeEvent(res, eventId, event) {
		if (res.writableEnded) return;
		const safeType = StreamValidator.sanitizeEventType(event.type);
		const payload = JSON.stringify(event);
		res.write(`id: ${eventId}\n`);
		res.write(`event: ${safeType}\n`);
		res.write(`data: ${payload}\n\n`);
	}
	// Emit an `error` event carrying { error, code }; code defaults to
	// SSEErrorCode.INTERNAL_ERROR.
	writeError(res, eventId, error, code = SSEErrorCode.INTERNAL_ERROR) {
		if (res.writableEnded) return;
		const payload = JSON.stringify({
			error,
			code
		});
		res.write(`id: ${eventId}\n`);
		res.write(`event: error\n`);
		res.write(`data: ${payload}\n\n`);
	}
	// Replay an already-serialized buffered event verbatim (data is a string).
	writeBufferedEvent(res, event) {
		if (res.writableEnded) return;
		res.write(`id: ${event.id}\n`);
		res.write(`event: ${event.type}\n`);
		res.write(`data: ${event.data}\n\n`);
	}
	// Warn a reconnecting client that its Last-Event-ID fell out of the ring
	// buffer and events were lost. Write failures are deliberately swallowed.
	writeBufferOverflowWarning(res, lastEventId) {
		if (res.writableEnded) return;
		try {
			res.write(`event: warning\n`);
			res.write(`data: ${JSON.stringify({
				warning: "Buffer overflow detected - some events were lost",
				code: SSEWarningCode.BUFFER_OVERFLOW_RESTART,
				lastEventId
			})}\n\n`);
		} catch (_error) {
			// ignore write errors - client will ignore this event
		}
	}
	// Start a keep-alive timer writing comment frames every `interval` ms
	// (default streamDefaults.heartbeatInterval); caller owns clearInterval.
	startHeartbeat(res, signal, interval) {
		const periodMs = interval ?? streamDefaults.heartbeatInterval;
		return setInterval(() => {
			if (signal.aborted || res.writableEnded) return;
			try {
				res.write(`: heartbeat\n\n`);
			} catch (_error) {
				// ignore write errors - client will ignore this event
			}
		}, periodMs);
	}
};

//#endregion
export { SSEWriter };
//# sourceMappingURL=sse-writer.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"sse-writer.js","names":["errorData: SSEError"],"sources":["../../src/stream/sse-writer.ts"],"sourcesContent":["import type { IAppResponse } from \"shared\";\nimport { streamDefaults } from \"./defaults\";\nimport {\n type BufferedEvent,\n type SSEError,\n SSEErrorCode,\n SSEWarningCode,\n} from \"./types\";\nimport { StreamValidator } from \"./validator\";\n\nexport class SSEWriter {\n // setup SSE headers\n setupHeaders(res: IAppResponse): void {\n res.setHeader(\"Content-Type\", \"text/event-stream\");\n res.setHeader(\"Cache-Control\", \"no-cache\");\n res.setHeader(\"Connection\", \"keep-alive\");\n res.setHeader(\"Content-Encoding\", \"none\");\n\n res.flushHeaders?.();\n }\n\n // write a single event to the response\n writeEvent(res: IAppResponse, eventId: string, event: any): void {\n if (res.writableEnded) return;\n\n const eventType = StreamValidator.sanitizeEventType(event.type);\n const eventData = JSON.stringify(event);\n\n res.write(`id: ${eventId}\\n`);\n res.write(`event: ${eventType}\\n`);\n res.write(`data: ${eventData}\\n\\n`);\n }\n writeError(\n res: IAppResponse,\n eventId: string,\n error: string,\n code: SSEErrorCode = SSEErrorCode.INTERNAL_ERROR,\n ): void {\n if (res.writableEnded) return;\n\n const errorData: SSEError = {\n error,\n code,\n };\n\n res.write(`id: ${eventId}\\n`);\n res.write(`event: error\\n`);\n res.write(`data: ${JSON.stringify(errorData)}\\n\\n`);\n }\n\n // write a buffered event for replay\n writeBufferedEvent(res: IAppResponse, event: BufferedEvent): void {\n if (res.writableEnded) return;\n\n res.write(`id: ${event.id}\\n`);\n res.write(`event: ${event.type}\\n`);\n res.write(`data: ${event.data}\\n\\n`);\n }\n\n // write a buffer overflow warning\n writeBufferOverflowWarning(res: IAppResponse, lastEventId: string): void {\n if (res.writableEnded) return;\n\n try {\n res.write(`event: warning\\n`);\n res.write(\n `data: ${JSON.stringify({\n warning: \"Buffer overflow detected - some events were 
lost\",\n code: SSEWarningCode.BUFFER_OVERFLOW_RESTART,\n lastEventId,\n })}\\n\\n`,\n );\n } catch (_error) {\n // ignore write errors - client will ignore this event\n }\n }\n\n // start the heartbeat interval\n startHeartbeat(\n res: IAppResponse,\n signal: AbortSignal,\n interval?: number,\n ): NodeJS.Timeout {\n const heartbeatInterval = interval ?? streamDefaults.heartbeatInterval;\n\n return setInterval(() => {\n if (!signal.aborted && !res.writableEnded) {\n try {\n res.write(`: heartbeat\\n\\n`);\n } catch (_error) {\n // ignore write errors - client will ignore this event\n }\n }\n }, heartbeatInterval);\n }\n}\n"],"mappings":";;;;;AAUA,IAAa,YAAb,MAAuB;CAErB,aAAa,KAAyB;AACpC,MAAI,UAAU,gBAAgB,oBAAoB;AAClD,MAAI,UAAU,iBAAiB,WAAW;AAC1C,MAAI,UAAU,cAAc,aAAa;AACzC,MAAI,UAAU,oBAAoB,OAAO;AAEzC,MAAI,gBAAgB;;CAItB,WAAW,KAAmB,SAAiB,OAAkB;AAC/D,MAAI,IAAI,cAAe;EAEvB,MAAM,YAAY,gBAAgB,kBAAkB,MAAM,KAAK;EAC/D,MAAM,YAAY,KAAK,UAAU,MAAM;AAEvC,MAAI,MAAM,OAAO,QAAQ,IAAI;AAC7B,MAAI,MAAM,UAAU,UAAU,IAAI;AAClC,MAAI,MAAM,SAAS,UAAU,MAAM;;CAErC,WACE,KACA,SACA,OACA,OAAqB,aAAa,gBAC5B;AACN,MAAI,IAAI,cAAe;EAEvB,MAAMA,YAAsB;GAC1B;GACA;GACD;AAED,MAAI,MAAM,OAAO,QAAQ,IAAI;AAC7B,MAAI,MAAM,iBAAiB;AAC3B,MAAI,MAAM,SAAS,KAAK,UAAU,UAAU,CAAC,MAAM;;CAIrD,mBAAmB,KAAmB,OAA4B;AAChE,MAAI,IAAI,cAAe;AAEvB,MAAI,MAAM,OAAO,MAAM,GAAG,IAAI;AAC9B,MAAI,MAAM,UAAU,MAAM,KAAK,IAAI;AACnC,MAAI,MAAM,SAAS,MAAM,KAAK,MAAM;;CAItC,2BAA2B,KAAmB,aAA2B;AACvE,MAAI,IAAI,cAAe;AAEvB,MAAI;AACF,OAAI,MAAM,mBAAmB;AAC7B,OAAI,MACF,SAAS,KAAK,UAAU;IACtB,SAAS;IACT,MAAM,eAAe;IACrB;IACD,CAAC,CAAC,MACJ;WACM,QAAQ;;CAMnB,eACE,KACA,QACA,UACgB;EAChB,MAAM,oBAAoB,YAAY,eAAe;AAErD,SAAO,kBAAkB;AACvB,OAAI,CAAC,OAAO,WAAW,CAAC,IAAI,cAC1B,KAAI;AACF,QAAI,MAAM,kBAAkB;YACrB,QAAQ;KAIlB,kBAAkB"}
@@ -0,0 +1,27 @@
1
+ import { IAppResponse } from "../shared/src/plugin.js";
2
+ import { StreamConfig } from "../shared/src/execute.js";
3
+
4
//#region src/stream/stream-manager.d.ts
/**
 * Manages Server-Sent Event streams: registers streams by id, fans generator
 * output out to every attached client, replays buffered events to
 * reconnecting clients, and aborts all active streams on shutdown.
 */
declare class StreamManager {
  // in-flight operations (abort controller + heartbeat timer), used by abortAll
  private activeOperations;
  // registry of live stream entries, keyed by streamId
  private streamRegistry;
  // serializes SSE frames onto client responses
  private sseWriter;
  // largest serialized event accepted for broadcast
  private maxEventSize;
  // how long a finished stream's buffer is retained for late reconnects
  private bufferTTL;
  constructor(options?: StreamConfig);
  /**
   * Serve `handler`'s async-generator output over SSE on `res`. If
   * `options.streamId` matches a registered stream, the client is attached to
   * that stream (with missed-event replay) instead of starting a new one.
   */
  stream(res: IAppResponse, handler: (signal: AbortSignal) => AsyncGenerator<any, void, unknown>, options?: StreamConfig): Promise<void>;
  /** Abort every active stream operation (e.g. server shutdown). */
  abortAll(): void;
  /** Number of currently active stream operations. */
  getActiveCount(): number;
  private _attachToExistingStream;
  private _createNewStream;
  private _processGeneratorInBackground;
  private _combineSignals;
  private _broadcastEventsToClients;
  private _broadcastErrorToClients;
  private _closeAllClients;
  private _cleanupStream;
  private _categorizeError;
}
//#endregion
export { StreamManager };
//# sourceMappingURL=stream-manager.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"stream-manager.d.ts","names":[],"sources":["../../src/stream/stream-manager.ts"],"sourcesContent":[],"mappings":";;;;cAUa,aAAA;;;EAAA,QAAA,SAAa;EAAA,QAAA,YAAA;UAOF,SAAA;aAYf,CAAA,OAAA,CAAA,EAZe,YAYf;QACa,CAAA,GAAA,EADb,YACa,EAAA,OAAA,EAAA,CAAA,MAAA,EAAA,WAAA,EAAA,GAAgB,cAAhB,CAAA,GAAA,EAAA,IAAA,EAAA,OAAA,CAAA,EAAA,OAAA,CAAA,EACR,YADQ,CAAA,EAEjB,OAFiB,CAAA,IAAA,CAAA;UAAgB,CAAA,CAAA,EAAA,IAAA;gBACxB,CAAA,CAAA,EAAA,MAAA;UACT,uBAAA;EAAO,QAAA,gBAAA"}
@@ -0,0 +1,191 @@
1
+ import { streamDefaults } from "./defaults.js";
2
+ import { EventRingBuffer } from "./buffers.js";
3
+ import { SSEErrorCode } from "./types.js";
4
+ import { StreamValidator } from "./validator.js";
5
+ import { SSEWriter } from "./sse-writer.js";
6
+ import { StreamRegistry } from "./stream-registry.js";
7
+ import { randomUUID } from "node:crypto";
8
+
9
//#region src/stream/stream-manager.ts
/**
 * Coordinates Server-Sent Event streams.
 *
 * Each stream is driven by an async generator; its events are serialized,
 * retained in an EventRingBuffer (for Last-Event-ID replay) and broadcast to
 * every attached client response. Clients presenting a known streamId are
 * attached to the existing stream instead of starting a new one.
 */
var StreamManager = class {
  // Unset option fields fall back to streamDefaults.
  constructor(options) {
    // registry of live streams, capped at maxActiveStreams entries
    this.streamRegistry = new StreamRegistry(options?.maxActiveStreams ?? streamDefaults.maxActiveStreams);
    this.sseWriter = new SSEWriter();
    // upper bound on a single serialized event
    this.maxEventSize = options?.maxEventSize ?? streamDefaults.maxEventSize;
    // delay before a completed, client-less stream is removed from the registry
    this.bufferTTL = options?.bufferTTL ?? streamDefaults.bufferTTL;
    // in-flight operations (controller + heartbeat timer), consumed by abortAll()
    this.activeOperations = /* @__PURE__ */ new Set();
  }
  // Entry point: attach to an existing stream when a valid streamId matches a
  // registered entry, otherwise start a new stream for this response.
  stream(res, handler, options) {
    const { streamId } = options || {};
    this.sseWriter.setupHeaders(res);
    if (streamId && StreamValidator.validateStreamId(streamId)) {
      const existingStream = this.streamRegistry.get(streamId);
      if (existingStream) return this._attachToExistingStream(res, existingStream, options);
    }
    return this._createNewStream(res, handler, options);
  }
  // Abort every active operation (server shutdown): stop heartbeats, abort
  // controllers with a "Server shutdown" reason, and drop all streams.
  abortAll() {
    this.activeOperations.forEach((operation) => {
      if (operation.heartbeat) clearInterval(operation.heartbeat);
      operation.controller.abort("Server shutdown");
    });
    this.activeOperations.clear();
    this.streamRegistry.clear();
  }
  // Number of currently active stream operations.
  getActiveCount() {
    return this.activeOperations.size;
  }
  // Attach a (re)connecting client to an already-registered stream, replaying
  // any events it missed since its Last-Event-ID request header.
  async _attachToExistingStream(res, streamEntry, options) {
    const lastEventId = res.req?.headers["last-event-id"];
    if (StreamValidator.validateEventId(lastEventId)) {
      const validEventId = lastEventId;
      if (streamEntry.eventBuffer.has(validEventId)) {
        // replay everything produced after the client's last seen event
        const missedEvents = streamEntry.eventBuffer.getEventsSince(validEventId);
        for (const event of missedEvents) {
          if (options?.userSignal?.aborted) break;
          this.sseWriter.writeBufferedEvent(res, event);
        }
      } // id fell out of the ring buffer: those events are unrecoverable
      else this.sseWriter.writeBufferOverflowWarning(res, validEventId);
    }
    streamEntry.clients.add(res);
    streamEntry.lastAccess = Date.now();
    // abort when either the stream itself or this client's own signal aborts
    const combinedSignal = this._combineSignals(streamEntry.abortController.signal, options?.userSignal);
    const heartbeat = this.sseWriter.startHeartbeat(res, combinedSignal);
    const streamOperation = {
      controller: streamEntry.abortController,
      type: "stream",
      heartbeat
    };
    this.activeOperations.add(streamOperation);
    res.on("close", () => {
      clearInterval(heartbeat);
      streamEntry.clients.delete(res);
      this.activeOperations.delete(streamOperation);
      // once the stream is done and the last client leaves, keep the entry
      // for bufferTTL ms (late reconnects), then drop it if still client-less
      if (streamEntry.isCompleted && streamEntry.clients.size === 0) setTimeout(() => {
        if (streamEntry.clients.size === 0) this.streamRegistry.remove(streamEntry.streamId);
      }, this.bufferTTL);
    });
    // stream already finished: the replay above was all there is, so close now
    if (streamEntry.isCompleted) {
      res.end();
      this.activeOperations.delete(streamOperation);
      clearInterval(heartbeat);
    }
  }
  // Register a new stream entry for this response and drive its generator to
  // completion (events are buffered and broadcast as they are produced).
  async _createNewStream(res, handler, options) {
    const streamId = options?.streamId ?? randomUUID();
    const abortController = new AbortController();
    const eventBuffer = new EventRingBuffer(options?.bufferSize ?? streamDefaults.bufferSize);
    const combinedSignal = this._combineSignals(abortController.signal, options?.userSignal);
    const heartbeat = this.sseWriter.startHeartbeat(res, combinedSignal);
    const streamEntry = {
      streamId,
      generator: handler(combinedSignal),
      eventBuffer,
      clients: new Set([res]),
      isCompleted: false,
      lastAccess: Date.now(),
      abortController
    };
    // register before consuming the generator so reconnects can find the entry
    this.streamRegistry.add(streamEntry);
    const streamOperation = {
      controller: abortController,
      type: "stream",
      heartbeat
    };
    this.activeOperations.add(streamOperation);
    res.on("close", () => {
      clearInterval(heartbeat);
      this.activeOperations.delete(streamOperation);
      streamEntry.clients.delete(res);
    });
    // despite the helper's name this is awaited, so stream() resolves only
    // after the generator finishes or throws
    await this._processGeneratorInBackground(streamEntry);
    clearInterval(heartbeat);
    this.activeOperations.delete(streamOperation);
  }
  // Consume the stream's generator: serialize, size-check, buffer and
  // broadcast each event; on generator error, buffer/broadcast an error event
  // and close all clients.
  async _processGeneratorInBackground(streamEntry) {
    try {
      for await (const event of streamEntry.generator) {
        if (streamEntry.abortController.signal.aborted) break;
        const eventId = randomUUID();
        const eventData = JSON.stringify(event);
        // NOTE(review): .length counts UTF-16 code units, not bytes, although
        // the message says "bytes" — multi-byte payloads can exceed the byte
        // limit; confirm whether Buffer.byteLength was intended upstream.
        if (eventData.length > this.maxEventSize) {
          const errorMsg = `Event exceeds max size of ${this.maxEventSize} bytes`;
          const errorCode = SSEErrorCode.INVALID_REQUEST;
          this._broadcastErrorToClients(streamEntry, eventId, errorMsg, errorCode);
          // oversized events are skipped; the stream keeps going
          continue;
        }
        // retain for Last-Event-ID replay, then fan out to live clients
        streamEntry.eventBuffer.add({
          id: eventId,
          type: event.type,
          data: eventData,
          timestamp: Date.now()
        });
        this._broadcastEventsToClients(streamEntry, eventId, event);
        streamEntry.lastAccess = Date.now();
      }
      streamEntry.isCompleted = true;
      this._closeAllClients(streamEntry);
      this._cleanupStream(streamEntry);
    } catch (error) {
      const errorMsg = error instanceof Error ? error.message : "Internal server error";
      const errorEventId = randomUUID();
      const errorCode = this._categorizeError(error);
      // buffer the error too so reconnecting clients can observe it
      streamEntry.eventBuffer.add({
        id: errorEventId,
        type: "error",
        data: JSON.stringify({
          error: errorMsg,
          code: errorCode
        }),
        timestamp: Date.now()
      });
      this._broadcastErrorToClients(streamEntry, errorEventId, errorMsg, errorCode, true);
      streamEntry.isCompleted = true;
      // NOTE(review): unlike the success path, this branch never calls
      // _cleanupStream, so registry removal relies on client close handlers
      // or external sweeping — confirm the entry cannot linger indefinitely.
    }
  }
  // Merge the stream's internal abort signal with an optional per-client one
  // into a single signal that aborts as soon as either does.
  _combineSignals(internalSignal, userSignal) {
    if (!userSignal) return internalSignal || new AbortController().signal;
    const signals = [internalSignal, userSignal].filter(Boolean);
    const controller = new AbortController();
    signals.forEach((signal) => {
      if (signal?.aborted) {
        // already aborted: propagate immediately, preserving the reason
        controller.abort(signal.reason);
        return;
      }
      signal?.addEventListener("abort", () => {
        controller.abort(signal.reason);
      }, { once: true });
    });
    return controller.signal;
  }
  // Write one event to every client that is still writable.
  _broadcastEventsToClients(streamEntry, eventId, event) {
    for (const client of streamEntry.clients) if (!client.writableEnded) this.sseWriter.writeEvent(client, eventId, event);
  }
  // Write an error event to every writable client; optionally end them.
  _broadcastErrorToClients(streamEntry, eventId, errorMessage, errorCode, closeClients = false) {
    for (const client of streamEntry.clients) if (!client.writableEnded) {
      this.sseWriter.writeError(client, eventId, errorMessage, errorCode);
      if (closeClients) client.end();
    }
  }
  // End every client response that has not already finished.
  _closeAllClients(streamEntry) {
    for (const client of streamEntry.clients) if (!client.writableEnded) client.end();
  }
  // If no clients remain, schedule registry removal after bufferTTL ms,
  // re-checking in case a client reconnected in the meantime.
  _cleanupStream(streamEntry) {
    if (streamEntry.clients.size === 0) setTimeout(() => {
      if (streamEntry.clients.size === 0) this.streamRegistry.remove(streamEntry.streamId);
    }, this.bufferTTL);
  }
  // Map a thrown error onto an SSEErrorCode by inspecting its message/name.
  _categorizeError(error) {
    if (error instanceof Error) {
      const message = error.message.toLowerCase();
      if (message.includes("timeout") || message.includes("timed out")) return SSEErrorCode.TIMEOUT;
      if (message.includes("unavailable") || message.includes("econnrefused")) return SSEErrorCode.TEMPORARY_UNAVAILABLE;
      if (error.name === "AbortError") return SSEErrorCode.STREAM_ABORTED;
    }
    return SSEErrorCode.INTERNAL_ERROR;
  }
};

//#endregion
export { StreamManager };
//# sourceMappingURL=stream-manager.js.map