open-sse 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +180 -0
- package/config/constants.js +206 -0
- package/config/defaultThinkingSignature.js +7 -0
- package/config/ollamaModels.js +19 -0
- package/config/providerModels.js +161 -0
- package/handlers/chatCore.js +277 -0
- package/handlers/responsesHandler.js +69 -0
- package/index.js +69 -0
- package/package.json +44 -0
- package/services/accountFallback.js +148 -0
- package/services/combo.js +69 -0
- package/services/compact.js +64 -0
- package/services/model.js +109 -0
- package/services/provider.js +237 -0
- package/services/tokenRefresh.js +542 -0
- package/services/usage.js +398 -0
- package/translator/formats.js +12 -0
- package/translator/from-openai/claude.js +341 -0
- package/translator/from-openai/gemini.js +469 -0
- package/translator/from-openai/openai-responses.js +361 -0
- package/translator/helpers/claudeHelper.js +179 -0
- package/translator/helpers/geminiHelper.js +131 -0
- package/translator/helpers/openaiHelper.js +80 -0
- package/translator/helpers/responsesApiHelper.js +103 -0
- package/translator/helpers/toolCallHelper.js +111 -0
- package/translator/index.js +167 -0
- package/translator/to-openai/claude.js +238 -0
- package/translator/to-openai/gemini.js +151 -0
- package/translator/to-openai/openai-responses.js +140 -0
- package/translator/to-openai/openai.js +371 -0
- package/utils/bypassHandler.js +258 -0
- package/utils/error.js +133 -0
- package/utils/ollamaTransform.js +82 -0
- package/utils/requestLogger.js +217 -0
- package/utils/stream.js +274 -0
- package/utils/streamHandler.js +131 -0
package/utils/stream.js
ADDED
@@ -0,0 +1,274 @@
import { translateResponse, initState } from "../translator/index.js";
import { FORMATS } from "../translator/formats.js";

// Get HH:MM timestamp
function getTimeString() {
  return new Date().toLocaleTimeString("en-US", { hour12: false, hour: "2-digit", minute: "2-digit" });
}

// Extract usage from any format (Claude, OpenAI, Gemini)
function extractUsage(chunk) {
  // Claude format (message_delta event)
  if (chunk.type === "message_delta" && chunk.usage) {
    return {
      prompt_tokens: chunk.usage.input_tokens || 0,
      completion_tokens: chunk.usage.output_tokens || 0,
      cache_read_input_tokens: chunk.usage.cache_read_input_tokens,
      cache_creation_input_tokens: chunk.usage.cache_creation_input_tokens
    };
  }
  // OpenAI format
  if (chunk.usage?.prompt_tokens !== undefined) {
    return {
      prompt_tokens: chunk.usage.prompt_tokens,
      completion_tokens: chunk.usage.completion_tokens || 0,
      cached_tokens: chunk.usage.prompt_tokens_details?.cached_tokens,
      reasoning_tokens: chunk.usage.completion_tokens_details?.reasoning_tokens
    };
  }
  // Gemini format
  if (chunk.usageMetadata) {
    return {
      prompt_tokens: chunk.usageMetadata.promptTokenCount || 0,
      completion_tokens: chunk.usageMetadata.candidatesTokenCount || 0,
      reasoning_tokens: chunk.usageMetadata.thoughtsTokenCount
    };
  }
  return null;
}
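For illustration (not part of the diff): extractUsage collapses the three provider-specific usage shapes into one object, so downstream logging never has to branch on provider. A minimal sketch of what it would return for a Claude-style message_delta chunk, with made-up token counts:

```js
// Hypothetical Claude usage chunk (values are illustrative)
const claudeChunk = {
  type: "message_delta",
  usage: { input_tokens: 1200, output_tokens: 85, cache_read_input_tokens: 900 }
};

extractUsage(claudeChunk);
// -> { prompt_tokens: 1200, completion_tokens: 85,
//      cache_read_input_tokens: 900, cache_creation_input_tokens: undefined }
```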

// ANSI color codes
export const COLORS = {
  reset: "\x1b[0m",
  red: "\x1b[31m",
  green: "\x1b[32m",
  yellow: "\x1b[33m",
  blue: "\x1b[34m",
  cyan: "\x1b[36m"
};

// Log usage with cache info (green color)
function logUsage(provider, usage) {
  if (!usage) return;

  const p = provider?.toUpperCase() || "UNKNOWN";
  const inTokens = usage.prompt_tokens || 0;
  const outTokens = usage.completion_tokens || 0;

  let msg = `[${getTimeString()}] 📊 [USAGE] ${p} | in=${inTokens} | out=${outTokens}`;

  if (usage.cache_creation_input_tokens) msg += ` | cache_write=${usage.cache_creation_input_tokens}`;
  if (usage.cache_read_input_tokens) msg += ` | cache_read=${usage.cache_read_input_tokens}`;
  if (usage.cached_tokens) msg += ` | cached=${usage.cached_tokens}`;
  if (usage.reasoning_tokens) msg += ` | reasoning=${usage.reasoning_tokens}`;

  console.log(`${COLORS.green}${msg}${COLORS.reset}`);
}

// Parse SSE data line
function parseSSELine(line) {
  if (!line || !line.startsWith("data:")) return null;

  const data = line.slice(5).trim();
  if (data === "[DONE]") return { done: true };

  try {
    return JSON.parse(data);
  } catch {
    return null;
  }
}

/**
 * Format output as SSE
 * @param {object} data - Data to format
 * @param {string} sourceFormat - Target format for client
 * @returns {string} SSE formatted string
 */
export function formatSSE(data, sourceFormat) {
  if (data.done) return "data: [DONE]\n\n";

  // OpenAI Responses API format: has event field
  if (data.event && data.data) {
    return `event: ${data.event}\ndata: ${JSON.stringify(data.data)}\n\n`;
  }

  // Claude format: include event prefix
  if (sourceFormat === FORMATS.CLAUDE && data.type) {
    return `event: ${data.type}\ndata: ${JSON.stringify(data)}\n\n`;
  }

  return `data: ${JSON.stringify(data)}\n\n`;
}
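For illustration (not part of the diff): a short sketch of how formatSSE frames a chunk for different clients. FORMATS.CLAUDE is the only format constant this file actually references; the "openai" string below is just a placeholder for any non-Claude client format:

```js
// Claude-style client: the SSE event name is taken from the chunk's own "type" field
formatSSE({ type: "message_stop" }, FORMATS.CLAUDE);
// -> "event: message_stop\ndata: {\"type\":\"message_stop\"}\n\n"

// Any other client format: a bare data line
formatSSE({ id: "chunk-1" }, "openai"); // "openai" is a placeholder value
// -> "data: {\"id\":\"chunk-1\"}\n\n"

// End-of-stream marker
formatSSE({ done: true }); // -> "data: [DONE]\n\n"
```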

/**
 * Stream modes
 */
const STREAM_MODE = {
  TRANSLATE: "translate",      // Full translation between formats
  PASSTHROUGH: "passthrough"   // No translation, normalize output, extract usage
};

/**
 * Create unified SSE transform stream
 * @param {object} options
 * @param {string} options.mode - Stream mode: translate, passthrough
 * @param {string} options.targetFormat - Provider format (for translate mode)
 * @param {string} options.sourceFormat - Client format (for translate mode)
 * @param {string} options.provider - Provider name
 * @param {object} options.reqLogger - Request logger instance
 */
export function createSSEStream(options = {}) {
  const {
    mode = STREAM_MODE.TRANSLATE,
    targetFormat,
    sourceFormat,
    provider = null,
    reqLogger = null
  } = options;

  const decoder = new TextDecoder();
  const encoder = new TextEncoder();
  let buffer = "";
  let usage = null;

  // State for translate mode
  const state = mode === STREAM_MODE.TRANSLATE ? { ...initState(sourceFormat), provider } : null;

  return new TransformStream({
    transform(chunk, controller) {
      const text = decoder.decode(chunk, { stream: true });
      buffer += text;
      reqLogger?.appendProviderChunk?.(text);

      const lines = buffer.split("\n");
      buffer = lines.pop() || "";

      for (const line of lines) {
        const trimmed = line.trim();

        // Passthrough mode: normalize and forward
        if (mode === STREAM_MODE.PASSTHROUGH) {
          if (trimmed.startsWith("data:") && trimmed.slice(5).trim() !== "[DONE]") {
            try {
              const parsed = JSON.parse(trimmed.slice(5).trim());
              const extracted = extractUsage(parsed);
              if (extracted) usage = extracted;
            } catch {}
          }
          // Normalize: ensure "data: " has space
          let output;
          if (line.startsWith("data:") && !line.startsWith("data: ")) {
            output = "data: " + line.slice(5) + "\n";
          } else {
            output = line + "\n";
          }
          reqLogger?.appendConvertedChunk?.(output);
          controller.enqueue(encoder.encode(output));
          continue;
        }

        // Translate mode
        if (!trimmed) continue;

        const parsed = parseSSELine(trimmed);
        if (!parsed) continue;

        if (parsed.done) {
          const output = "data: [DONE]\n\n";
          reqLogger?.appendConvertedChunk?.(output);
          controller.enqueue(encoder.encode(output));
          continue;
        }

        // Extract usage
        const extracted = extractUsage(parsed);
        if (extracted) state.usage = extracted;

        // Translate and emit
        const translated = translateResponse(targetFormat, sourceFormat, parsed, state);
        if (translated?.length > 0) {
          for (const item of translated) {
            const output = formatSSE(item, sourceFormat);
            reqLogger?.appendConvertedChunk?.(output);
            controller.enqueue(encoder.encode(output));
          }
        }
      }
    },

    flush(controller) {
      try {
        const remaining = decoder.decode();
        if (remaining) buffer += remaining;

        if (mode === STREAM_MODE.PASSTHROUGH) {
          if (buffer) {
            let output = buffer;
            if (buffer.startsWith("data:") && !buffer.startsWith("data: ")) {
              output = "data: " + buffer.slice(5);
            }
            reqLogger?.appendConvertedChunk?.(output);
            controller.enqueue(encoder.encode(output));
          }
          if (usage) logUsage(provider, usage);
          return;
        }

        // Translate mode: process remaining buffer
        if (buffer.trim()) {
          const parsed = parseSSELine(buffer.trim());
          if (parsed && !parsed.done) {
            const translated = translateResponse(targetFormat, sourceFormat, parsed, state);
            if (translated?.length > 0) {
              for (const item of translated) {
                const output = formatSSE(item, sourceFormat);
                reqLogger?.appendConvertedChunk?.(output);
                controller.enqueue(encoder.encode(output));
              }
            }
          }
        }

        // Flush remaining events (only once at stream end)
        const flushed = translateResponse(targetFormat, sourceFormat, null, state);
        if (flushed?.length > 0) {
          for (const item of flushed) {
            const output = formatSSE(item, sourceFormat);
            reqLogger?.appendConvertedChunk?.(output);
            controller.enqueue(encoder.encode(output));
          }
        }

        // Send [DONE] and log usage
        const doneOutput = "data: [DONE]\n\n";
        reqLogger?.appendConvertedChunk?.(doneOutput);
        controller.enqueue(encoder.encode(doneOutput));

        if (state?.usage) logUsage(state.provider || targetFormat, state.usage);
      } catch (error) {
        console.log("Error in flush:", error);
      }
    }
  });
}
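For illustration (not part of the diff): a minimal sketch of how a caller might sit this transform between a provider fetch and the client response. The URL, request body, and the "gemini" format id are assumptions made for the example; only FORMATS.CLAUDE is known from this file:

```js
// Hypothetical proxy handler (upstreamUrl, body and format ids are placeholders)
async function proxyTranslated(upstreamUrl, body) {
  const providerRes = await fetch(upstreamUrl, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body)
  });

  const sse = createSSEStream({
    mode: "translate",            // STREAM_MODE.TRANSLATE, i.e. the default
    targetFormat: "gemini",       // assumed provider format id
    sourceFormat: FORMATS.CLAUDE, // client speaks the Claude wire format
    provider: "gemini"
  });

  // Provider chunks are translated into Claude-format SSE lines for the client
  return new Response(providerRes.body.pipeThrough(sse), {
    headers: { "Content-Type": "text/event-stream" }
  });
}
```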

// Convenience functions for backward compatibility
export function createSSETransformStreamWithLogger(targetFormat, sourceFormat, provider = null, reqLogger = null) {
  return createSSEStream({
    mode: STREAM_MODE.TRANSLATE,
    targetFormat,
    sourceFormat,
    provider,
    reqLogger
  });
}

export function createPassthroughStreamWithLogger(provider = null, reqLogger = null) {
  return createSSEStream({
    mode: STREAM_MODE.PASSTHROUGH,
    provider,
    reqLogger
  });
}

package/utils/streamHandler.js
ADDED
@@ -0,0 +1,131 @@
// Stream handler with disconnect detection - shared for all providers

// Get HH:MM timestamp
function getTimeString() {
  return new Date().toLocaleTimeString("en-US", { hour12: false, hour: "2-digit", minute: "2-digit" });
}

/**
 * Create stream controller with abort and disconnect detection
 * @param {object} options
 * @param {function} options.onDisconnect - Callback when client disconnects
 * @param {object} options.log - Logger instance
 * @param {string} options.provider - Provider name
 * @param {string} options.model - Model name
 */
export function createStreamController({ onDisconnect, log, provider, model } = {}) {
  const abortController = new AbortController();
  const startTime = Date.now();
  let disconnected = false;
  let abortTimeout = null;

  const logStream = (status) => {
    const duration = Date.now() - startTime;
    const p = provider?.toUpperCase() || "UNKNOWN";
    console.log(`[${getTimeString()}] 🌊 [STREAM] ${p} | ${model || "unknown"} | ${duration}ms | ${status}`);
  };

  return {
    signal: abortController.signal,
    startTime,

    isConnected: () => !disconnected,

    // Call when client disconnects
    handleDisconnect: (reason = "client_closed") => {
      if (disconnected) return;
      disconnected = true;

      logStream(`disconnect: ${reason}`);

      // Delay abort to allow cleanup
      abortTimeout = setTimeout(() => {
        abortController.abort();
      }, 500);

      onDisconnect?.({ reason, duration: Date.now() - startTime });
    },

    // Call when stream completes normally
    handleComplete: () => {
      if (disconnected) return;
      disconnected = true;

      logStream("complete");

      if (abortTimeout) {
        clearTimeout(abortTimeout);
        abortTimeout = null;
      }
    },

    // Call on error
    handleError: (error) => {
      if (abortTimeout) {
        clearTimeout(abortTimeout);
        abortTimeout = null;
      }

      if (error.name === "AbortError") {
        logStream("aborted");
        return;
      }

      logStream(`error: ${error.message}`);
    },

    abort: () => abortController.abort()
  };
}
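For illustration (not part of the diff): a sketch of how a route handler might wire this controller, reporting client disconnects and letting the controller's signal cancel the upstream fetch roughly 500 ms later. The request/abort wiring, the payload, and the model name are assumptions about the surrounding server code:

```js
// Hypothetical wiring; "request", "upstreamUrl" and "payload" are placeholders
async function startProviderStream(request, upstreamUrl, payload) {
  const streamCtl = createStreamController({
    provider: "claude",
    model: "claude-sonnet",   // illustrative model name
    onDisconnect: ({ reason, duration }) =>
      console.log(`client left: ${reason} after ${duration}ms`)
  });

  // When the incoming request is aborted, mark the stream disconnected;
  // handleDisconnect aborts the controller's signal ~500 ms later
  request.signal?.addEventListener("abort", () => streamCtl.handleDisconnect("client_closed"));

  return fetch(upstreamUrl, {
    method: "POST",
    body: JSON.stringify(payload),
    signal: streamCtl.signal
  });
}
```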

/**
 * Create transform stream with disconnect detection
 * Wraps existing transform stream and adds abort capability
 */
export function createDisconnectAwareStream(transformStream, streamController) {
  const reader = transformStream.readable.getReader();
  const writer = transformStream.writable.getWriter();

  return new ReadableStream({
    async pull(controller) {
      if (!streamController.isConnected()) {
        controller.close();
        return;
      }

      try {
        const { done, value } = await reader.read();
        if (done) {
          streamController.handleComplete();
          controller.close();
          return;
        }
        controller.enqueue(value);
      } catch (error) {
        streamController.handleError(error);
        controller.error(error);
      }
    },

    cancel(reason) {
      streamController.handleDisconnect(reason || "cancelled");
      reader.cancel();
      writer.abort();
    }
  });
}

/**
 * Pipe provider response through transform with disconnect detection
 * @param {Response} providerResponse - Response from provider
 * @param {TransformStream} transformStream - Transform stream for SSE
 * @param {object} streamController - Stream controller from createStreamController
 */
export function pipeWithDisconnect(providerResponse, transformStream, streamController) {
  const transformedBody = providerResponse.body.pipeThrough(transformStream);
  return createDisconnectAwareStream(
    { readable: transformedBody, writable: { getWriter: () => ({ abort: () => {} }) } },
    streamController
  );
}
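
For illustration (not part of the diff): a sketch that ties the two files together for a provider that already speaks the client's wire format, so passthrough mode applies. The import paths assume both files sit side by side in utils/ as the file list above shows; the provider name, URL, and body are placeholders:

```js
import { createPassthroughStreamWithLogger } from "./stream.js";
import { createStreamController, pipeWithDisconnect } from "./streamHandler.js";

// Hypothetical passthrough proxy (upstreamUrl and body are placeholders)
async function proxyPassthrough(upstreamUrl, body) {
  const streamCtl = createStreamController({ provider: "openai", model: body.model });

  const providerRes = await fetch(upstreamUrl, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
    signal: streamCtl.signal
  });

  // Normalize SSE lines, capture usage, and close cleanly if the client disconnects
  const transform = createPassthroughStreamWithLogger("openai");
  const readable = pipeWithDisconnect(providerRes, transform, streamCtl);

  return new Response(readable, { headers: { "Content-Type": "text/event-stream" } });
}
```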