@aikidosec/broker-client 1.0.10 → 1.0.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -14,11 +14,11 @@ export function pauseStreamIfOverLimit(requestId, stream) {
   const state = activeStreams.get(requestId);
   if (!state || state.paused) return;
 
-  const shouldPause = totalBufferSize() >= GLOBAL_MAX_BUFFER_SIZE || state.buffer.length >= PER_STREAM_MAX_BUFFER_SIZE;
+  const shouldPause = totalBufferSize() >= GLOBAL_MAX_BUFFER_SIZE || state.bufferedBytes >= PER_STREAM_MAX_BUFFER_SIZE;
   if (shouldPause) {
     state.paused = true;
     stream.pause();
-    log.info(`Stream paused for ${requestId}, stream buffer: ${(state.buffer.length / (1024 * 1024)).toFixed(2)} MB, global: ${(totalBufferSize() / (1024 * 1024)).toFixed(2)} MB`);
+    log.info(`Stream paused for ${requestId}, stream buffer: ${(state.bufferedBytes / (1024 * 1024)).toFixed(2)} MB, global: ${(totalBufferSize() / (1024 * 1024)).toFixed(2)} MB`);
   }
 }
 
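The pause predicate now reads `state.bufferedBytes`, a counter maintained on every push, instead of `state.buffer.length` on a contiguous buffer. A minimal standalone sketch of the same two-level backpressure check, with the stream map and limits stubbed locally (only the identifier names mirror the diff; the stubs are illustrative, and in the module itself totalBufferSize() presumably reads the running `_totalBufferSize` counter, so the check stays O(1)):

// Sketch of the two-level backpressure check; stubs, not the package's API
const PER_STREAM_MAX_BUFFER_SIZE = 15 * 1024 * 1024; // 15MB, per the new constants
const GLOBAL_MAX_BUFFER_SIZE = 300 * 1024 * 1024;    // 300MB across all streams

const activeStreams = new Map();
const totalBufferSize = () =>
  [...activeStreams.values()].reduce((sum, s) => sum + s.bufferedBytes, 0);

function shouldPause(state) {
  // Either limit tripping pauses the source stream until chunks are drained
  return totalBufferSize() >= GLOBAL_MAX_BUFFER_SIZE ||
         state.bufferedBytes >= PER_STREAM_MAX_BUFFER_SIZE;
}

activeStreams.set('req-1', { bufferedBytes: 16 * 1024 * 1024 });
console.log(shouldPause(activeStreams.get('req-1'))); // true: 16MB exceeds the 15MB per-stream cap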
@@ -30,10 +30,10 @@ export function resumePausedStreams() {
   if (totalBufferSize() >= GLOBAL_MAX_BUFFER_SIZE) return;
 
   for (const [requestId, state] of activeStreams.entries()) {
-    if (state.paused && !state.complete && state.buffer.length < PER_STREAM_MAX_BUFFER_SIZE) {
+    if (state.paused && !state.complete && state.bufferedBytes < PER_STREAM_MAX_BUFFER_SIZE) {
       state.paused = false;
       state.stream.resume();
-      log.info(`Stream resumed for ${requestId} after cleanup, stream buffer: ${(state.buffer.length / (1024 * 1024)).toFixed(2)} MB, global: ${(totalBufferSize() / (1024 * 1024)).toFixed(2)} MB`);
+      log.info(`Stream resumed for ${requestId} after cleanup, stream buffer: ${(state.bufferedBytes / (1024 * 1024)).toFixed(2)} MB, global: ${(totalBufferSize() / (1024 * 1024)).toFixed(2)} MB`);
     }
   }
 }
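resumePausedStreams is the mirror image: once the global total drops back below its cap, any paused, unfinished stream that is also under the per-stream cap is resumed. A toy walkthrough of that gate, with stream objects stubbed to a no-op resume (illustrative only):

// Toy walkthrough of the resume gate; stubbed streams, not the package's API
const PER_STREAM_MAX_BUFFER_SIZE = 15 * 1024 * 1024;
const streams = new Map([
  ['a', { paused: true, complete: false, bufferedBytes: 1 * 1024 * 1024, stream: { resume() {} } }],
  ['b', { paused: true, complete: false, bufferedBytes: 20 * 1024 * 1024, stream: { resume() {} } }],
]);

for (const [requestId, state] of streams.entries()) {
  if (state.paused && !state.complete && state.bufferedBytes < PER_STREAM_MAX_BUFFER_SIZE) {
    state.paused = false;
    state.stream.resume();
    console.log(`resumed ${requestId}`); // only 'a'; 'b' still holds 20MB, over the 15MB cap
  }
}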
@@ -78,7 +78,7 @@ async function waitForData(state) {
   const checkIntervalMs = 100;
   let waited = 0;
 
-  const hasEnoughData = () => state.buffer.length >= STREAM_CHUNK_SIZE;
+  const hasEnoughData = () => state.bufferedBytes >= STREAM_CHUNK_SIZE;
   const isStreamDone = () => state.complete || state.error;
   const hasTimedOut = () => waited >= maxWaitMs;
 
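Only the predicates appear in this hunk; the loop that polls them sits outside the diff context. A plausible shape for that loop, assuming a simple setTimeout-based poll (the loop body and the `maxWaitMs` default are assumptions, not the package's code):

// Hypothetical polling loop consistent with the predicates above
const STREAM_CHUNK_SIZE = 10 * 1024 * 1024; // mirrors the new constant
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function waitForDataSketch(state, maxWaitMs = 30_000) { // timeout value is assumed
  const checkIntervalMs = 100;
  let waited = 0;

  const hasEnoughData = () => state.bufferedBytes >= STREAM_CHUNK_SIZE;
  const isStreamDone = () => state.complete || state.error;
  const hasTimedOut = () => waited >= maxWaitMs;

  // Poll until a full 10MB chunk is buffered, the stream finishes/errors, or we time out
  while (!hasEnoughData() && !isStreamDone() && !hasTimedOut()) {
    await sleep(checkIntervalMs);
    waited += checkIntervalMs;
  }
}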
@@ -89,42 +89,77 @@ async function waitForData(state) {
 }
 
 /**
- * Extract a chunk from the buffer and manage buffer state
+ * Extract a chunk from the buffer queue and manage buffer state
  */
 function extractChunkFromBuffer(state, requestId) {
-  let chunkData;
   let isComplete = false;
+  let bytesToExtract;
 
-  if (state.buffer.length >= STREAM_CHUNK_SIZE) {
-    // Have enough data for a full chunk
-    chunkData = state.buffer.slice(0, STREAM_CHUNK_SIZE);
-    state.buffer = state.buffer.slice(STREAM_CHUNK_SIZE);
-    subtractFromTotalBuffer(chunkData.length);
-
-    // Resume stream if it was paused and both limits are now satisfied
-    resumeStreamIfBelowBufferLimits(state);
+  if (state.bufferedBytes >= STREAM_CHUNK_SIZE) {
+    bytesToExtract = STREAM_CHUNK_SIZE;
   } else if (state.complete) {
-    // Stream is done, send remaining buffer
-    chunkData = state.buffer;
-    subtractFromTotalBuffer(chunkData.length);
-    state.buffer = Buffer.alloc(0);
+    bytesToExtract = state.bufferedBytes;
     isComplete = true;
   } else {
     // Timeout waiting for data - send what we have
-    log.warn(`get_next_chunk: Timeout waiting for data for ${requestId}, sending ${state.buffer.length} bytes`);
-    chunkData = state.buffer;
-    subtractFromTotalBuffer(chunkData.length);
-    state.buffer = Buffer.alloc(0);
+    log.warn(`get_next_chunk: Timeout waiting for data for ${requestId}, sending ${state.bufferedBytes} bytes`);
+    bytesToExtract = state.bufferedBytes;
+  }
+
+  const chunkData = extractBytesFromQueue(state, bytesToExtract);
+  subtractFromTotalBuffer(chunkData.length);
+
+  // Resume stream if it was paused and both limits are now satisfied
+  if (!isComplete) {
+    resumeStreamIfBelowBufferLimits(state);
   }
 
   return { chunkData, isComplete };
 }
 
+/**
+ * Extract exact number of bytes from buffer queue.
+ *
+ * Efficiency: The queue contains many small buffers (~64KB each from internal resource).
+ * We take whole buffers by reference (no copy) and only slice the last partial buffer.
+ * Final concat is O(n) for extracted buffers only, not the entire queue.
+ *
+ * Example: To extract 10MB from [64KB, 64KB, 64KB, ...] (156 buffers):
+ * - shift() 155 whole buffers (just pointer moves, no copying)
+ * - slice() 1 partial buffer
+ * - concat() once at the end (single 10MB allocation)
+ */
+function extractBytesFromQueue(state, bytesToExtract) {
+  if (bytesToExtract === 0) return Buffer.alloc(0);
+
+  const extracted = [];
+  let extractedBytes = 0;
+
+  while (extractedBytes < bytesToExtract && state.buffers.length > 0) {
+    const buf = state.buffers[0];
+    const needed = bytesToExtract - extractedBytes;
+
+    if (buf.length <= needed) {
+      // Take entire buffer
+      extracted.push(state.buffers.shift());
+      extractedBytes += buf.length;
+    } else {
+      // Take partial buffer
+      extracted.push(buf.slice(0, needed));
+      state.buffers[0] = buf.slice(needed);
+      extractedBytes += needed;
+    }
+  }
+
+  state.bufferedBytes -= extractedBytes;
+  return Buffer.concat(extracted);
+}
+
 /**
  * Resume a paused stream if buffer limits allow
  */
 function resumeStreamIfBelowBufferLimits(state) {
-  const canResume = totalBufferSize() < GLOBAL_MAX_BUFFER_SIZE && state.buffer.length < PER_STREAM_MAX_BUFFER_SIZE;
+  const canResume = totalBufferSize() < GLOBAL_MAX_BUFFER_SIZE && state.bufferedBytes < PER_STREAM_MAX_BUFFER_SIZE;
   if (state.paused && canResume) {
     state.paused = false;
     state.stream.resume();
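extractBytesFromQueue is the core of the change, so it is worth exercising in isolation. Below, the function body is copied from the diff and driven by a toy state object; the checks show that whole buffers move by reference and only the last buffer is split (Node's Buffer.slice returns a view over the same memory, so even the split copies nothing until the final concat):

// Exercising the queue-extraction logic from the diff with a toy state object
function extractBytesFromQueue(state, bytesToExtract) {
  if (bytesToExtract === 0) return Buffer.alloc(0);

  const extracted = [];
  let extractedBytes = 0;

  while (extractedBytes < bytesToExtract && state.buffers.length > 0) {
    const buf = state.buffers[0];
    const needed = bytesToExtract - extractedBytes;

    if (buf.length <= needed) {
      extracted.push(state.buffers.shift()); // whole buffer, by reference
      extractedBytes += buf.length;
    } else {
      extracted.push(buf.slice(0, needed));  // view, no copy
      state.buffers[0] = buf.slice(needed);  // remainder stays queued
      extractedBytes += needed;
    }
  }

  state.bufferedBytes -= extractedBytes;
  return Buffer.concat(extracted); // the single allocation
}

const state = {
  buffers: [Buffer.alloc(4), Buffer.alloc(4), Buffer.alloc(4)],
  bufferedBytes: 12,
};
const chunk = extractBytesFromQueue(state, 10);
console.log(chunk.length);         // 10: two whole buffers plus 2 bytes of the third
console.log(state.buffers.length); // 1: the 2-byte remainder stays queued
console.log(state.bufferedBytes);  // 2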
@@ -22,7 +22,8 @@ export function initStreamingResponse({ requestId, statusCode, headers, stream,
   // Store stream state for pull-based retrieval
   const streamState = {
     stream,
-    buffer: Buffer.alloc(0),
+    buffers: [], // Queue of buffers - O(1) append
+    bufferedBytes: 0, // Total bytes across all buffers
     complete: false,
     error: null,
    totalBytesSent: 0,
@@ -34,7 +35,9 @@ export function initStreamingResponse({ requestId, statusCode, headers, stream,
   activeStreams.set(requestId, streamState);
 
   // Buffer data as it arrives from the internal resource
-  // Use bounded buffering with global and per-stream limits to control memory usage
+  // Incoming chunks are small (typically 16-64KB from OS/Node.js TCP buffers)
+  // We accumulate them in an array and only concat when sending 10MB chunks to server
+  // This avoids O(n²) memory copying that would occur with Buffer.concat on every chunk
   stream.on('data', (chunk) => {
     // Yield to event loop periodically to allow ping/pong processing
     // This prevents stream data from starving Socket.IO heartbeats
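The O(n²) claim in the new comment refers to re-running Buffer.concat on every incoming chunk: appending the k-th chunk copies all previously buffered bytes again, so n similar chunks cost on the order of n²/2 chunk-copies. For roughly 160 chunks of 64KB that is on the order of 800MB of copying to assemble a single 10MB chunk, versus one 10MB allocation with the queue. A minimal side-by-side sketch of the two accumulation strategies (illustrative, not the package's code):

// Old strategy: re-concat per chunk; each append copies everything buffered so far
let flat = Buffer.alloc(0);
function appendByConcat(chunk) {
  flat = Buffer.concat([flat, chunk]); // copies flat.length + chunk.length bytes
}

// New strategy: O(1) push per chunk; one O(n) concat when a full chunk is assembled
const queue = [];
let queuedBytes = 0;
function appendByQueue(chunk) {
  queue.push(chunk);           // stores a reference, no copying
  queuedBytes += chunk.length;
}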
@@ -45,7 +48,8 @@ export function initStreamingResponse({ requestId, statusCode, headers, stream,
       return;
     }
 
-    state.buffer = Buffer.concat([state.buffer, chunk]);
+    state.buffers.push(chunk);
+    state.bufferedBytes += chunk.length;
     addToTotalBuffer(chunk.length);
 
     // Pause stream if buffer limits exceeded (backpressure)
@@ -57,7 +61,7 @@ export function initStreamingResponse({ requestId, statusCode, headers, stream,
     const state = activeStreams.get(requestId);
     if (state) {
       state.complete = true;
-      log.info(`Stream ended for request ${requestId}, ${(state.buffer.length / (1024 * 1024)).toFixed(2)} MB remaining in buffer`);
+      log.info(`Stream ended for request ${requestId}, ${(state.bufferedBytes / (1024 * 1024)).toFixed(2)} MB remaining in buffer`);
     }
   });
 
@@ -3,19 +3,29 @@
  */
 
 // Chunk size for streaming responses (10MB before base64 encoding)
-export const STREAM_CHUNK_SIZE = 30 * 1024 * 1024;
+export const STREAM_CHUNK_SIZE = 10 * 1024 * 1024;
 
 // Global maximum buffer size across ALL streams
-export const GLOBAL_MAX_BUFFER_SIZE = STREAM_CHUNK_SIZE * 30; // 900MB
+export const GLOBAL_MAX_BUFFER_SIZE = STREAM_CHUNK_SIZE * 30; // 300MB
 
 // Per-stream maximum buffer size
-export const PER_STREAM_MAX_BUFFER_SIZE = STREAM_CHUNK_SIZE * 5; // 150MB
+export const PER_STREAM_MAX_BUFFER_SIZE = STREAM_CHUNK_SIZE * 1.5; // 15MB
 
 // Threshold for using streaming vs direct response
 export const STREAMING_THRESHOLD = STREAM_CHUNK_SIZE;
 
 // Active streams for pull-based streaming (server pulls chunks from client)
-// Key: request_id, Value: { stream, buffer, complete, error, totalBytesSent, chunkIndex, paused, lastActivity }
+// Key: request_id, Value: { stream, buffers, bufferedBytes, complete, error, totalBytesSent, chunkIndex, paused, lastActivity }
+//
+// Buffer structure:
+// - buffers[]: Array of small Buffer chunks received from the internal resource (e.g., API, database)
+//   Typical chunk size: 16-64KB (depends on OS TCP buffers and Node.js stream settings)
+// - bufferedBytes: Total bytes across all buffers (avoids O(n) length calculation)
+//
+// Data flow:
+//   Internal Resource → HTTP stream → small chunks (16-64KB) → buffers[]
+//   When server requests: extract from buffers[] → concat to 10MB chunk → send via Socket.IO
+//
 export const activeStreams = new Map();
 
 // Track total buffer size across all streams
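The size comments on both sides of the hunk follow from the chunk size in force at the time: the global multiplier is unchanged, so shrinking the chunk from 30MB to 10MB shrinks the global cap by 3x, and the per-stream multiplier additionally drops from 5 to 1.5. A quick arithmetic check of the new values:

// Arithmetic check of the new limits
const STREAM_CHUNK_SIZE = 10 * 1024 * 1024;                 // 10,485,760 bytes = 10MB
const GLOBAL_MAX_BUFFER_SIZE = STREAM_CHUNK_SIZE * 30;      // 314,572,800 bytes = 300MB
const PER_STREAM_MAX_BUFFER_SIZE = STREAM_CHUNK_SIZE * 1.5; // 15,728,640 bytes = 15MB
// Previous values: 30MB chunk, so 30MB * 30 = 900MB global and 30MB * 5 = 150MB per stream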
@@ -61,9 +71,7 @@ export function subtractFromTotalBuffer(bytes) {
 export function recalculateTotalBufferSize() {
   let actual = 0;
   for (const [, state] of activeStreams.entries()) {
-    if (state.buffer) {
-      actual += state.buffer.length;
-    }
+    actual += state.bufferedBytes || 0;
   }
 
   if (actual !== _totalBufferSize) {
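recalculateTotalBufferSize now sums the maintained per-stream counters instead of measuring buffer contents. A sketch of how such a reconciliation can guard the running total against bookkeeping drift (the `_totalBufferSize` variable name comes from the diff, but the correction step and toy state are stubbed assumptions; only the summing loop mirrors the hunk):

// Drift-reconciliation sketch; stubs for illustration
let _totalBufferSize = 0;
const activeStreams = new Map([
  ['req-1', { bufferedBytes: 1024 }],
  ['req-2', { bufferedBytes: 2048 }],
]);

function recalculateTotalBufferSize() {
  let actual = 0;
  for (const [, state] of activeStreams.entries()) {
    actual += state.bufferedBytes || 0;
  }
  if (actual !== _totalBufferSize) {
    _totalBufferSize = actual; // correct any accumulated add/subtract drift
  }
  return _totalBufferSize;
}

console.log(recalculateTotalBufferSize()); // 3072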
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aikidosec/broker-client",
-  "version": "1.0.10",
+  "version": "1.0.12",
   "description": "Aikido Broker Client - Runs in customer network to forward requests to internal resources",
   "main": "app/client.js",
   "type": "module",