@ai-sdk-tool/proxy 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +172 -0
- package/dist/index.cjs +927 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +183 -0
- package/dist/index.d.ts +183 -0
- package/dist/index.js +882 -0
- package/dist/index.js.map +1 -0
- package/package.json +62 -0
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,927 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __export = (target, all) => {
|
|
9
|
+
for (var name in all)
|
|
10
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
11
|
+
};
|
|
12
|
+
var __copyProps = (to, from, except, desc) => {
|
|
13
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
+
for (let key of __getOwnPropNames(from))
|
|
15
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
16
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
17
|
+
}
|
|
18
|
+
return to;
|
|
19
|
+
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
28
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// src/index.ts
|
|
31
|
+
var src_exports = {};
|
|
32
|
+
__export(src_exports, {
|
|
33
|
+
OpenAIProxyServer: () => OpenAIProxyServer,
|
|
34
|
+
convertAISDKResultToOpenAI: () => convertAISDKResultToOpenAI,
|
|
35
|
+
convertAISDKStreamChunkToOpenAI: () => convertAISDKStreamChunkToOpenAI,
|
|
36
|
+
convertAISDKToolCallsToOpenAI: () => convertAISDKToolCallsToOpenAI,
|
|
37
|
+
convertOpenAIRequestToAISDK: () => convertOpenAIRequestToAISDK,
|
|
38
|
+
createOpenAIStreamConverter: () => createOpenAIStreamConverter,
|
|
39
|
+
createSSEResponse: () => createSSEResponse,
|
|
40
|
+
generateResponseId: () => generateResponseId,
|
|
41
|
+
getCurrentTimestamp: () => getCurrentTimestamp
|
|
42
|
+
});
|
|
43
|
+
module.exports = __toCommonJS(src_exports);
|
|
44
|
+
|
|
45
|
+
// src/openai-request-converter.ts
|
|
46
|
+
var import_zod = require("zod");
|
|
47
|
+
/**
 * Build a Zod schema for a single JSON Schema property definition.
 *
 * Improvements over the previous version:
 * - `integer` is accepted (JSON Schema's integer type previously fell
 *   through to `z.any()`).
 * - arrays recurse into `items` instead of always being `z.array(z.any())`.
 * - objects recurse into `properties` instead of always being `z.object({})`
 *   (which stripped every key during validation).
 *
 * @param {object} prop - JSON Schema property (expects a `type` field).
 * @returns a Zod schema; unknown/missing types fall back to `z.any()`.
 */
function createZodSchema(prop) {
  if (prop.type === "string") {
    // Enumerated strings become z.enum; plain strings z.string.
    return prop.enum ? import_zod.z.enum(prop.enum) : import_zod.z.string();
  }
  if (prop.type === "number" || prop.type === "integer") {
    return import_zod.z.number();
  }
  if (prop.type === "boolean") {
    return import_zod.z.boolean();
  }
  if (prop.type === "array") {
    // Recurse into item schema when present; otherwise accept any element.
    return import_zod.z.array(prop.items ? createZodSchema(prop.items) : import_zod.z.any());
  }
  if (prop.type === "object") {
    if (!prop.properties) {
      return import_zod.z.object({});
    }
    const shape = {};
    for (const [key, child] of Object.entries(prop.properties)) {
      shape[key] = createZodSchema(child);
    }
    return import_zod.z.object(shape);
  }
  // Unknown or missing `type`: accept anything.
  return import_zod.z.any();
}
|
|
65
|
+
/**
 * Convert an OpenAI tool `parameters` JSON Schema into a Zod object schema.
 * Missing parameters or a schema without `properties` yields an empty object schema.
 */
function convertOpenAIToolToZod(parameters) {
  const properties = parameters == null ? void 0 : parameters.properties;
  if (!properties) {
    return import_zod.z.object({});
  }
  const shape = {};
  for (const key of Object.keys(properties)) {
    shape[key] = createZodSchema(properties[key]);
  }
  return import_zod.z.object(shape);
}
|
|
79
|
+
/**
 * Map OpenAI tool definitions to the AI SDK tool map
 * ({ [name]: { description, inputSchema } }). Returns an empty map
 * when no tools are supplied.
 */
function convertOpenAITools(openaiTools) {
  if (!openaiTools) {
    return {};
  }
  const aisdkTools = {};
  for (const { function: fn } of openaiTools) {
    aisdkTools[fn.name] = {
      description: fn.description || "",
      inputSchema: convertOpenAIToolToZod(fn.parameters)
    };
  }
  return aisdkTools;
}
|
|
93
|
+
/**
 * Normalize OpenAI `stop` (string | string[] | falsy) to an AI SDK
 * `stopSequences` array, or undefined when no stop value was given.
 */
function convertStopToSequences(stop) {
  if (!stop) {
    return void 0;
  }
  if (Array.isArray(stop)) {
    return stop;
  }
  return [stop];
}
|
|
99
|
+
/**
 * Normalize arbitrary OpenAI message `content` into an array of
 * `{ type: "text", text }` parts.
 *
 * - string: single text part ("" yields []).
 * - array: each part is stringified (string parts kept as-is, objects with a
 *   string `text` use it, other `text` values are JSON-encoded, everything
 *   else is JSON-encoded whole); empty strings are dropped.
 * - null/undefined: [].
 * - other objects: one JSON-encoded text part.
 * - primitives: one String()-coerced text part.
 */
function normalizeMessageContent(content) {
  const partToText = (part) => {
    if (typeof part === "string") {
      return part;
    }
    if (part && typeof part === "object" && "text" in part) {
      const { text } = part;
      if (typeof text === "string") {
        return text;
      }
      if (text !== void 0) {
        return JSON.stringify(text);
      }
    }
    return JSON.stringify(part);
  };
  if (typeof content === "string") {
    return content === "" ? [] : [{ type: "text", text: content }];
  }
  if (Array.isArray(content)) {
    const normalized = [];
    for (const part of content) {
      const text = partToText(part);
      if (text) {
        normalized.push({ type: "text", text });
      }
    }
    return normalized;
  }
  if (content == null) {
    return [];
  }
  if (typeof content === "object") {
    return [{ type: "text", text: JSON.stringify(content) }];
  }
  return [{ type: "text", text: String(content) }];
}
|
|
129
|
+
/**
 * Convert OpenAI assistant `tool_calls` into AI SDK `tool-call` content parts.
 * String arguments are JSON-parsed when possible (empty string => {});
 * unparseable strings and non-string values are passed through untouched.
 */
function buildToolCallParts(toolCalls) {
  const parseArguments = (raw) => {
    if (typeof raw !== "string") {
      return raw;
    }
    try {
      return JSON.parse(raw || "{}");
    } catch (parseError) {
      // Keep the raw string so nothing is silently lost.
      return raw;
    }
  };
  return toolCalls.map(({ id, function: fn }) => ({
    type: "tool-call",
    toolCallId: id,
    toolName: fn.name,
    input: parseArguments(fn.arguments)
  }));
}
|
|
147
|
+
/**
 * Build AI SDK assistant-message content from an OpenAI assistant message.
 * With tool calls present: mixed array of text parts + tool-call parts.
 * Without: "" for no text, a plain string for one part, the part array otherwise.
 */
function buildAssistantContent(message) {
  const textParts = normalizeMessageContent(message.content);
  const calls = message.tool_calls;
  const toolCallParts = calls && calls.length ? buildToolCallParts(calls) : [];
  if (toolCallParts.length > 0) {
    return [...textParts, ...toolCallParts];
  }
  switch (textParts.length) {
    case 0:
      return "";
    case 1:
      // Collapse a single text part to a plain string.
      return textParts[0].text;
    default:
      return textParts;
  }
}
|
|
162
|
+
/**
 * Recursively check whether a value is representable as plain JSON
 * (null, string, number, boolean, or arrays/objects thereof).
 */
function isJsonValue(value) {
  if (value === null) {
    return true;
  }
  const kind = typeof value;
  if (kind === "string" || kind === "number" || kind === "boolean") {
    return true;
  }
  if (Array.isArray(value)) {
    return value.every(isJsonValue);
  }
  if (kind === "object") {
    return Object.values(value).every(isJsonValue);
  }
  // functions, symbols, undefined, bigint, ...
  return false;
}
|
|
174
|
+
/**
 * Wrap a raw tool result string as AI SDK tool output: parse it as JSON when
 * possible ({ type: "json", value }), otherwise keep it as text. Falsy input
 * becomes empty text.
 */
function buildToolOutput(rawValue) {
  if (!rawValue) {
    return { type: "text", value: "" };
  }
  let parsed;
  let parsedOk = false;
  try {
    parsed = JSON.parse(rawValue);
    parsedOk = true;
  } catch (parseError) {
    // Not JSON; fall through to a text result.
  }
  if (parsedOk && isJsonValue(parsed)) {
    return { type: "json", value: parsed };
  }
  return { type: "text", value: rawValue };
}
|
|
187
|
+
/**
 * Build AI SDK tool-message content from an OpenAI `tool` role message.
 * Text parts are joined with newlines; the tool name is recovered from
 * `toolNameLookup` (populated by assistant messages), falling back to the
 * tool_call_id itself.
 */
function buildToolContent(message, toolNameLookup) {
  const combined = normalizeMessageContent(message.content).map((part) => part.text).join("\n");
  const toolCallId = message.tool_call_id != null ? message.tool_call_id : "";
  let toolName = "";
  if (toolCallId) {
    const mapped = toolNameLookup.get(toolCallId);
    toolName = mapped != null ? mapped : toolCallId;
  }
  return [
    {
      type: "tool-result",
      toolCallId,
      toolName,
      output: buildToolOutput(combined)
    }
  ];
}
|
|
202
|
+
// Convert one OpenAI chat message into an AI SDK ModelMessage.
// Side effect: assistant tool_calls register id -> name in the shared
// `toolNameLookup` map so that later `tool` role messages can recover the
// tool name from their tool_call_id. Messages must therefore be converted
// in conversation order.
function convertMessageToModelMessage(message, toolNameLookup) {
  var _a;
  if (message.role === "assistant") {
    if ((_a = message.tool_calls) == null ? void 0 : _a.length) {
      for (const toolCall of message.tool_calls) {
        toolNameLookup.set(toolCall.id, toolCall.function.name);
      }
    }
    return {
      role: "assistant",
      content: buildAssistantContent(
        message
      )
    };
  }
  if (message.role === "tool") {
    return {
      role: "tool",
      content: buildToolContent(
        message,
        toolNameLookup
      )
    };
  }
  if (message.role === "system") {
    // System content is flattened to a single newline-joined string.
    const text = normalizeMessageContent(message.content).map((part) => part.text).join("\n");
    return {
      role: "system",
      content: text
    };
  }
  // Any other role (including "user") is treated as a user message.
  const userParts = normalizeMessageContent(message.content);
  if (userParts.length === 0) {
    return {
      role: "user",
      content: ""
    };
  }
  if (userParts.length === 1) {
    // Collapse a single text part to a plain string.
    return {
      role: "user",
      content: userParts[0].text
    };
  }
  return {
    role: "user",
    content: userParts
  };
}
|
|
251
|
+
// Translate a full OpenAI chat-completions request body into AI SDK
// generateText/streamText parameters. Messages, tools, temperature,
// max_tokens, stop and tool_choice are mapped; other OpenAI fields are
// ignored. When proxyConfig.parserDebug is set, it is forwarded as
// providerOptions.toolCallMiddleware for downstream middleware.
function convertOpenAIRequestToAISDK(openaiRequest, proxyConfig) {
  const {
    messages,
    tools: openaiTools,
    temperature,
    max_tokens,
    stop,
    tool_choice
  } = openaiRequest;
  // Shared across message conversion so tool messages can resolve the
  // name of the tool call they respond to.
  const toolNameLookup = /* @__PURE__ */ new Map();
  const aiMessages = messages.map(
    (message) => convertMessageToModelMessage(message, toolNameLookup)
  );
  const aisdkTools = convertOpenAITools(openaiTools);
  const providerOptions = (proxyConfig == null ? void 0 : proxyConfig.parserDebug) ? {
    toolCallMiddleware: {
      debugLevel: proxyConfig.parserDebug.level,
      logErrors: proxyConfig.parserDebug.logErrors,
      captureSummary: proxyConfig.parserDebug.captureSummary
    }
  } : void 0;
  return {
    messages: aiMessages,
    tools: aisdkTools,
    temperature,
    maxOutputTokens: max_tokens,
    stopSequences: convertStopToSequences(stop),
    toolChoice: mapOpenAIToolChoice(tool_choice),
    // Only attach providerOptions when parser debugging was requested.
    ...providerOptions ? { providerOptions } : {}
  };
}
|
|
282
|
+
/**
 * Map OpenAI `tool_choice` onto the AI SDK `toolChoice` value.
 *
 * "auto" | "none" | "required" pass through (previously "required" was
 * silently dropped, losing the caller's forced-tool-use intent);
 * { type: "function", function: { name } } becomes { type: "tool", toolName }.
 * Anything else (including undefined) yields undefined.
 *
 * @param choice - OpenAI tool_choice value.
 * @returns AI SDK toolChoice, or undefined.
 */
function mapOpenAIToolChoice(choice) {
  if (!choice) {
    return;
  }
  if (choice === "auto" || choice === "none" || choice === "required") {
    return choice;
  }
  if (typeof choice === "object" && choice.type === "function") {
    return { type: "tool", toolName: choice.function.name };
  }
  return;
}
|
|
294
|
+
/**
 * Convert AI SDK tool calls to OpenAI tool_call objects.
 *
 * Fix: AI SDK v5 exposes tool-call arguments as `input` (see the stream
 * handlers in this file), while this function previously read only the
 * legacy `args` field — producing `arguments: undefined` on v5 results.
 * Both field names are now accepted, and missing arguments serialize to "{}".
 *
 * @param toolCalls - AI SDK tool-call objects ({ toolName, input } or { toolName, args }).
 * @returns OpenAI-shaped tool calls with freshly generated ids.
 */
function convertAISDKToolCallsToOpenAI(toolCalls) {
  return toolCalls.map((call) => {
    const rawArgs = call.input !== void 0 ? call.input : call.args;
    const argumentsString = typeof rawArgs === "string" ? rawArgs : JSON.stringify(rawArgs != null ? rawArgs : {});
    return {
      // Note: Math.random is fine here — ids only need to be unique per response.
      id: `call_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`,
      type: "function",
      function: {
        name: call.toolName,
        arguments: argumentsString
      }
    };
  });
}
|
|
304
|
+
|
|
305
|
+
// src/response-utils.ts
|
|
306
|
+
/**
 * Generate a unique-enough OpenAI-style completion id
 * ("chatcmpl-<millis>-<random base36>"). Not cryptographically secure.
 */
function generateResponseId() {
  const randomSuffix = Math.random().toString(36).slice(2, 11);
  return `chatcmpl-${Date.now()}-${randomSuffix}`;
}
|
|
309
|
+
/**
 * Current Unix time in whole seconds (OpenAI's `created` field format).
 */
function getCurrentTimestamp() {
  const millis = Date.now();
  return Math.floor(millis / 1e3);
}
|
|
312
|
+
|
|
313
|
+
// src/response-converter.ts
|
|
314
|
+
/**
 * Convert a completed AI SDK generateText result into an OpenAI
 * chat.completion (or chat.completion.chunk when `stream` is true) response.
 *
 * Fixes:
 * - Tool-call arguments: AI SDK v5 uses `input` (legacy results used `args`);
 *   previously only `args` was read, yielding `arguments: undefined` on v5.
 * - Usage: AI SDK v5 reports `inputTokens`/`outputTokens`; previously only
 *   the legacy `promptTokens`/`completionTokens` names were read, so usage
 *   was always reported as 0. Both spellings are now accepted.
 * - Finish reason: hyphenated AI SDK reasons ("tool-calls", "content-filter")
 *   are normalized to the underscore form OpenAI clients expect, mirroring
 *   createFinishResponse.
 *
 * Note: when both text and tool calls are present, two choices are emitted,
 * both with index 0 — preserved from the original behavior.
 *
 * @param aisdkResult - result of generateText.
 * @param model - model name to echo back.
 * @param stream - emit delta-shaped choices when true.
 */
function convertAISDKResultToOpenAI(aisdkResult, model, stream = false) {
  const normalizeFinishReason = (reason) => {
    if (reason === "tool-calls") return "tool_calls";
    if (reason === "content-filter") return "content_filter";
    return reason || "stop";
  };
  // Build OpenAI-shaped tool calls, accepting both v5 `input` and legacy `args`.
  const toOpenAIToolCalls = (toolCalls) => toolCalls.map((call) => {
    const rawArgs = call.input !== void 0 ? call.input : call.args;
    return {
      id: `call_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`,
      type: "function",
      function: {
        name: call.toolName,
        arguments: typeof rawArgs === "string" ? rawArgs : JSON.stringify(rawArgs != null ? rawArgs : {})
      }
    };
  });
  const choices = [];
  if (aisdkResult.text) {
    const choice = {
      index: 0,
      finish_reason: normalizeFinishReason(aisdkResult.finishReason)
    };
    if (stream) {
      choice.delta = {
        role: "assistant",
        content: aisdkResult.text
      };
    } else {
      choice.message = {
        role: "assistant",
        content: aisdkResult.text
      };
    }
    choices.push(choice);
  }
  if (aisdkResult.toolCalls && aisdkResult.toolCalls.length > 0) {
    const choice = {
      index: 0,
      finish_reason: "tool_calls"
    };
    const openAIToolCalls = toOpenAIToolCalls(aisdkResult.toolCalls);
    if (stream) {
      choice.delta = {
        role: "assistant",
        tool_calls: openAIToolCalls
      };
    } else {
      choice.message = {
        role: "assistant",
        content: null,
        tool_calls: openAIToolCalls
      };
    }
    choices.push(choice);
  }
  const response = {
    id: generateResponseId(),
    object: stream ? "chat.completion.chunk" : "chat.completion",
    created: getCurrentTimestamp(),
    model,
    choices
  };
  if (aisdkResult.usage) {
    const usage = aisdkResult.usage;
    response.usage = {
      prompt_tokens: usage.promptTokens ?? usage.inputTokens ?? 0,
      completion_tokens: usage.completionTokens ?? usage.outputTokens ?? 0,
      total_tokens: usage.totalTokens ?? 0
    };
  }
  return response;
}
|
|
380
|
+
/**
 * Build the terminal streaming chunk carrying the finish_reason.
 * AI SDK's hyphenated "tool-calls" is mapped to OpenAI's "tool_calls";
 * unrecognized reasons fall back to "stop".
 */
function createFinishResponse(model, finishReason, responseId) {
  let validFinishReason;
  switch (finishReason) {
    case "tool_calls":
    case "tool-calls":
      validFinishReason = "tool_calls";
      break;
    case "length":
      validFinishReason = "length";
      break;
    case "content_filter":
      validFinishReason = "content_filter";
      break;
    default:
      validFinishReason = "stop";
      break;
  }
  return {
    id: responseId,
    object: "chat.completion.chunk",
    created: getCurrentTimestamp(),
    model,
    choices: [
      {
        index: 0,
        delta: {},
        finish_reason: validFinishReason
      }
    ]
  };
}
|
|
407
|
+
/**
 * Build a streaming chunk carrying assistant text. When `isReasoning`
 * is true the text is emitted as `reasoning_content` (the de-facto field
 * used by reasoning-capable OpenAI-compatible clients) instead of `content`.
 */
function createContentResponse(model, content, responseId, isReasoning = false) {
  const delta = isReasoning ? { role: "assistant", reasoning_content: content } : { role: "assistant", content };
  return {
    id: responseId,
    object: "chat.completion.chunk",
    created: getCurrentTimestamp(),
    model,
    choices: [
      {
        index: 0,
        delta
      }
    ]
  };
}
|
|
427
|
+
/**
 * Build a streaming chunk carrying a (possibly partial) tool call delta.
 * `includeRole` adds the assistant role to the delta (used for the first
 * chunk of a tool call).
 */
function createToolCallResponse(model, toolCall, responseId, includeRole = false) {
  const fn = toolCall.function;
  const toolCallEntry = {
    index: toolCall.index || 0,
    type: "function",
    function: {
      name: (fn && fn.name) || "",
      arguments: (fn && fn.arguments) || ""
    }
  };
  const delta = includeRole ? { role: "assistant", tool_calls: [toolCallEntry] } : { tool_calls: [toolCallEntry] };
  return {
    id: responseId,
    object: "chat.completion.chunk",
    created: getCurrentTimestamp(),
    model,
    choices: [
      {
        index: 0,
        delta
      }
    ]
  };
}
|
|
454
|
+
// Create a stateful converter that maps AI SDK fullStream chunks onto
// OpenAI SSE payloads ({ data } objects, one per outgoing SSE event).
// State tracked across calls: whether any tool call was seen (forces
// finish_reason "tool_calls"), whether a finish chunk was already emitted
// (dedupes finish-step vs finish), and the response id (reset on "start").
// options.logChunks === false disables per-chunk console logging.
function createOpenAIStreamConverter(model, options) {
  let streamHasToolCalls = false;
  let streamFinishSent = false;
  let streamResponseId = generateResponseId();
  // Logging is on by default; only an explicit `logChunks: false` disables it.
  const logChunk = (options == null ? void 0 : options.logChunks) === false ? void 0 : (chunk) => {
    const logType = process.env.USE_MIDDLEWARE === "true" ? "middleware" : "native";
    console.log(
      `\u{1F50D} AI SDK Chunk [${logType}]:`,
      JSON.stringify(chunk, null, 2)
    );
  };
  // One handler per AI SDK chunk type; each returns zero or more SSE payloads.
  const handlers = {
    start: () => [],
    "reasoning-delta": (chunk) => {
      if (!chunk.text) {
        return [];
      }
      return [
        {
          data: JSON.stringify(
            createContentResponse(model, chunk.text, streamResponseId, true)
          )
        }
      ];
    },
    "text-delta": (chunk) => {
      if (!chunk.text) {
        return [];
      }
      return [
        {
          data: JSON.stringify(
            createContentResponse(model, chunk.text, streamResponseId, false)
          )
        }
      ];
    },
    "tool-call": (chunk) => {
      var _a;
      const toolCallId = chunk.toolCallId || `call_${generateResponseId()}`;
      const toolName = chunk.toolName || "";
      // v5 tool-call chunks carry `input`; serialize unless already a string.
      const argsString = typeof chunk.input === "string" ? chunk.input : JSON.stringify((_a = chunk.input) != null ? _a : {});
      const toolCallDelta = {
        index: 0,
        id: toolCallId,
        type: "function",
        function: { name: toolName, arguments: argsString }
      };
      const response = createToolCallResponse(
        model,
        toolCallDelta,
        streamResponseId,
        true
      );
      return [{ data: JSON.stringify(response) }];
    },
    "reasoning-end": () => [],
    "text-end": () => [],
    "finish-step": (chunk) => {
      if (streamFinishSent) {
        return [];
      }
      const hadToolCalls = streamHasToolCalls;
      let finishReason = chunk.finishReason || "stop";
      if (finishReason === "tool_calls" || finishReason === "tool-calls") {
        finishReason = "tool_calls";
      }
      // Any tool call seen during the step wins over the reported reason.
      const resolvedReason = hadToolCalls ? "tool_calls" : finishReason;
      streamFinishSent = true;
      streamHasToolCalls = false;
      return [
        {
          data: JSON.stringify(
            createFinishResponse(model, resolvedReason, streamResponseId)
          )
        }
      ];
    },
    "tool-call-delta": (chunk) => {
      const toolCall = {
        // NOTE(review): Number(toolCallId) yields NaN for non-numeric ids
        // like "call_abc" — presumably ids here are numeric indices; verify.
        index: chunk.toolCallId ? Number(chunk.toolCallId) : 0,
        type: "function",
        function: {
          name: chunk.toolName || "",
          arguments: chunk.args || ""
        }
      };
      return [
        {
          data: JSON.stringify(
            createToolCallResponse(model, toolCall, streamResponseId)
          )
        }
      ];
    },
    "tool-result": (chunk) => {
      // Tool results are surfaced to the client as inline assistant text.
      const resultText = `
[Tool: ${chunk.toolName} returned ${JSON.stringify(chunk.output)}]
`;
      return [
        {
          data: JSON.stringify(
            createContentResponse(model, resultText, streamResponseId, false)
          )
        }
      ];
    },
    finish: (chunk) => {
      // Same dedupe/normalization logic as "finish-step".
      if (streamFinishSent) {
        return [];
      }
      const hadToolCalls = streamHasToolCalls;
      let finishReason = chunk.finishReason || "stop";
      if (finishReason === "tool_calls" || finishReason === "tool-calls") {
        finishReason = "tool_calls";
      }
      const resolvedReason = hadToolCalls ? "tool_calls" : finishReason;
      streamFinishSent = true;
      streamHasToolCalls = false;
      return [
        {
          data: JSON.stringify(
            createFinishResponse(model, resolvedReason, streamResponseId)
          )
        }
      ];
    }
  };
  return (chunk) => {
    var _a;
    const out = [];
    logChunk == null ? void 0 : logChunk(chunk);
    if (chunk.type === "start") {
      // New logical response: reset all per-response state.
      streamHasToolCalls = false;
      streamFinishSent = false;
      streamResponseId = (_a = chunk.id) != null ? _a : generateResponseId();
    }
    const handler = handlers[chunk.type];
    if (handler) {
      const result = handler(chunk, model);
      if (chunk.type === "tool-call" || chunk.type === "tool-call-delta") {
        streamHasToolCalls = true;
      }
      out.push(...result);
    } else {
      console.warn(`\u26A0\uFE0F Unknown AI SDK chunk type: ${chunk.type}`, chunk);
    }
    if (chunk.type === "finish-step" || chunk.type === "finish") {
      streamHasToolCalls = false;
    }
    // Drop empty payloads: keep only chunks whose delta carries something
    // (role/content/reasoning/tool calls) or that carry a finish_reason.
    return out.filter((resultChunk) => {
      var _a2, _b, _c, _d;
      try {
        const parsed = JSON.parse(resultChunk.data);
        const delta = (_b = (_a2 = parsed.choices) == null ? void 0 : _a2[0]) == null ? void 0 : _b.delta;
        return delta && (delta.role || delta.content || delta.reasoning_content || delta.tool_calls && delta.tool_calls.length > 0 || ((_d = (_c = parsed.choices) == null ? void 0 : _c[0]) == null ? void 0 : _d.finish_reason));
      } catch (e) {
        // Unparseable payloads are passed through untouched.
        return true;
      }
    });
  };
}
|
|
616
|
+
/**
 * One-shot convenience wrapper: convert a single AI SDK stream chunk using
 * a fresh converter. Note: state (tool-call tracking, response id) does not
 * carry across calls — use createOpenAIStreamConverter for real streams.
 */
function convertAISDKStreamChunkToOpenAI(chunk, model) {
  return createOpenAIStreamConverter(model)(chunk);
}
|
|
620
|
+
/**
 * Serialize a list of { data } payloads into a single SSE body
 * ("data: ...\n\n" per payload).
 */
function createSSEResponse(chunks) {
  let sse = "";
  for (const { data } of chunks) {
    sse += `data: ${data}\n\n`;
  }
  return sse;
}
|
|
625
|
+
|
|
626
|
+
// src/server.ts
|
|
627
|
+
var import_provider_utils = require("@ai-sdk/provider-utils");
|
|
628
|
+
var import_cors = __toESM(require("@fastify/cors"), 1);
|
|
629
|
+
var import_ai = require("ai");
|
|
630
|
+
var import_fastify = __toESM(require("fastify"), 1);
|
|
631
|
+
var import_zod2 = require("zod");
|
|
632
|
+
/**
 * Render a Zod schema as plain JSON Schema for logging, stripping the
 * `$schema` meta key. Returns null when no schema is given.
 */
function serializeZodSchema(schema) {
  if (!schema) {
    return null;
  }
  const jsonSchema = (0, import_zod2.toJSONSchema)(schema, {
    unrepresentable: "any"
  });
  if (typeof jsonSchema !== "object" || jsonSchema === null) {
    return jsonSchema;
  }
  const { $schema: _meta, ...withoutMeta } = jsonSchema;
  return withoutMeta;
}
|
|
645
|
+
/**
 * Flatten OpenAI request messages into compact log records
 * ({ index, role, content, toolCalls }).
 */
function serializeMessages(messages) {
  return messages.map((message, index) => {
    const { role, content, tool_calls: toolCalls } = message;
    return { index, role, content, toolCalls };
  });
}
|
|
653
|
+
/**
 * Log an incoming OpenAI request as pretty-printed JSON when request
 * logging is enabled; no-op otherwise.
 */
function logIncomingRequest(openaiRequest, enabled) {
  if (!enabled) {
    return;
  }
  const toolNames = [];
  for (const tool of openaiRequest.tools ?? []) {
    const name = "function" in tool ? tool.function?.name : void 0;
    if (name) {
      toolNames.push(name);
    }
  }
  const summary = {
    model: openaiRequest.model,
    stream: Boolean(openaiRequest.stream),
    temperature: openaiRequest.temperature,
    maxTokens: openaiRequest.max_tokens,
    toolNames,
    toolChoice: openaiRequest.tool_choice,
    messages: serializeMessages(openaiRequest.messages),
    tools: openaiRequest.tools
  };
  console.log("[proxy] Incoming OpenAI request", JSON.stringify(summary, null, 2));
}
|
|
680
|
+
/**
 * Flatten converted AI SDK messages into compact log records
 * ({ index, role, content }); passes through null/undefined input.
 */
function serializeAISDKMessages(messages) {
  return messages?.map(({ role, content }, index) => ({ index, role, content }));
}
|
|
687
|
+
/**
 * Log the converted AI SDK parameters (messages, tool schemas, sampling
 * settings) as pretty-printed JSON when conversion logging is enabled.
 */
function logRequestConversion(openaiRequest, aisdkParams, enabled) {
  if (!enabled) {
    return;
  }
  const messages = aisdkParams.messages ?? [];
  const tools = Object.entries(aisdkParams.tools ?? {}).map(([name, tool]) => ({
    name,
    description: tool.description,
    inputSchema: serializeZodSchema(tool.inputSchema)
  }));
  const summary = {
    model: openaiRequest.model,
    hasSystemMessage: messages.some((message) => message.role === "system"),
    messages: serializeAISDKMessages(messages),
    tools,
    temperature: aisdkParams.temperature,
    maxOutputTokens: aisdkParams.maxOutputTokens,
    stopSequences: aisdkParams.stopSequences
  };
  console.log("[proxy] Converted AI SDK params", JSON.stringify(summary, null, 2));
}
|
|
714
|
+
// Fastify-based HTTP server exposing an OpenAI-compatible
// /v1/chat/completions endpoint backed by an AI SDK model.
// Routes: GET /health, POST /v1/chat/completions (streaming + non-streaming),
// GET /v1/models (single placeholder entry).
var OpenAIProxyServer = class {
  // config: { model, port?, host?, cors?, tools?, logging?, parserDebug?, logger? }
  constructor(config) {
    var _a;
    // Defaults: port 3000, localhost, CORS enabled; caller config wins.
    this.config = {
      port: 3e3,
      host: "localhost",
      cors: true,
      ...config
    };
    this.logger = (_a = config.logger) != null ? _a : console;
    this.fastify = (0, import_fastify.default)();
    if (this.config.cors) {
      // Registration is resolved by fastify before listen().
      this.fastify.register(import_cors.default);
    }
    this.setupRoutes();
  }
  // Register all HTTP routes on the fastify instance.
  setupRoutes() {
    this.fastify.get(
      "/health",
      async (_request, _reply) => ({
        status: "ok",
        timestamp: (/* @__PURE__ */ new Date()).toISOString()
      })
    );
    this.fastify.post(
      "/v1/chat/completions",
      (request, reply) => {
        var _a, _b, _c, _d;
        try {
          const openaiRequest = request.body;
          // Minimal validation: a messages array is mandatory.
          if (!(openaiRequest.messages && Array.isArray(openaiRequest.messages))) {
            return reply.code(400).send({
              error: {
                message: "Messages array is required",
                type: "invalid_request_error"
              }
            });
          }
          // Request/conversion logging defaults to on unless disabled in config.
          logIncomingRequest(
            openaiRequest,
            (_b = (_a = this.config.logging) == null ? void 0 : _a.requests) != null ? _b : true
          );
          const aisdkParams = convertOpenAIRequestToAISDK(openaiRequest, {
            parserDebug: this.config.parserDebug
          });
          logRequestConversion(
            openaiRequest,
            aisdkParams,
            (_d = (_c = this.config.logging) == null ? void 0 : _c.conversions) != null ? _d : true
          );
          if (openaiRequest.stream) {
            return this.handleStreamingRequest(
              aisdkParams,
              openaiRequest,
              reply
            );
          }
          return this.handleNonStreamingRequest(
            aisdkParams,
            openaiRequest,
            reply
          );
        } catch (error) {
          // Synchronous failures only; async handler errors are caught inside
          // handleStreamingRequest/handleNonStreamingRequest.
          this.logger.error("Request handling error:", error);
          return reply.code(500).send({
            error: {
              message: "Internal server error",
              type: "server_error"
            }
          });
        }
      }
    );
    // Static model listing; the wrapped model is reported under a fixed id.
    this.fastify.get("/v1/models", async () => ({
      object: "list",
      data: [
        {
          id: "wrapped-model",
          object: "model",
          created: Math.floor(Date.now() / 1e3),
          owned_by: "ai-sdk-tool-proxy"
        }
      ]
    }));
  }
  // Merge server-defined tools (with execute) and request-defined tools (schema-only)
  // Server tools take precedence when names overlap.
  mergeTools(serverTools, requestTools) {
    // Adapt a tool record to the AI SDK provider tool shape; `execute` is
    // only attached when the tool defines one (request tools never do).
    const toProviderTool = (tool) => {
      if (!tool) {
        return;
      }
      return {
        description: tool.description,
        inputSchema: (0, import_provider_utils.zodSchema)(tool.inputSchema),
        ...tool.execute ? { execute: tool.execute } : {}
      };
    };
    const merged = {};
    for (const [name, t] of Object.entries(requestTools != null ? requestTools : {})) {
      const pt = toProviderTool(t);
      if (pt) {
        merged[name] = pt;
      }
    }
    // Server tools are written last so they overwrite same-named request tools.
    for (const [name, t] of Object.entries(serverTools != null ? serverTools : {})) {
      const pt = toProviderTool(t);
      if (pt) {
        merged[name] = pt;
      }
    }
    return Object.keys(merged).length > 0 ? merged : void 0;
  }
  // Stream the model response as OpenAI-style SSE over the raw socket.
  async handleStreamingRequest(aisdkParams, openaiRequest, reply) {
    var _a, _b;
    // Headers are written eagerly; errors after this point are reported
    // in-band as an SSE data payload rather than an HTTP status.
    reply.raw.writeHead(200, {
      "Content-Type": "text/event-stream",
      "Cache-Control": "no-cache",
      Connection: "keep-alive",
      "Access-Control-Allow-Origin": "*"
    });
    try {
      const mergedTools = this.mergeTools(this.config.tools, aisdkParams.tools);
      const result = await (0, import_ai.streamText)({
        model: this.config.model,
        ...aisdkParams,
        ...mergedTools ? { tools: mergedTools } : {}
      });
      const convert = createOpenAIStreamConverter(openaiRequest.model, {
        logChunks: (_b = (_a = this.config.logging) == null ? void 0 : _a.streamChunks) != null ? _b : true
      });
      for await (const chunk of result.fullStream) {
        const openaiChunks = convert(chunk);
        for (const openaiChunk of openaiChunks) {
          reply.raw.write(`data: ${openaiChunk.data}

`);
        }
      }
      // OpenAI streaming protocol terminator.
      reply.raw.write("data: [DONE]\n\n");
      reply.raw.end();
    } catch (error) {
      this.logger.error("Streaming error:", error);
      reply.raw.write('data: {"error": {"message": "Streaming error"}}\n\n');
      reply.raw.end();
    }
    return reply;
  }
  // Run a single generateText call and reply with a chat.completion body.
  async handleNonStreamingRequest(aisdkParams, openaiRequest, reply) {
    try {
      const mergedTools = this.mergeTools(this.config.tools, aisdkParams.tools);
      const result = await (0, import_ai.generateText)({
        model: this.config.model,
        ...aisdkParams,
        ...mergedTools ? { tools: mergedTools } : {}
      });
      const openaiResponse = convertAISDKResultToOpenAI(
        result,
        openaiRequest.model,
        false
      );
      reply.send(openaiResponse);
    } catch (error) {
      this.logger.error("Generation error:", error);
      return reply.code(500).send({
        error: {
          message: "Generation failed",
          type: "generation_error"
        }
      });
    }
  }
  // Start listening; exits the process on bind failure.
  async start() {
    try {
      await this.fastify.listen({
        port: this.config.port || 3e3,
        host: this.config.host || "localhost"
      });
      this.logger.info(
        `\u{1F680} OpenAI Proxy Server running on http://${this.config.host}:${this.config.port}`
      );
      this.logger.info(
        `\u{1F4E1} Endpoint: http://${this.config.host}:${this.config.port}/v1/chat/completions`
      );
      this.logger.info(
        `\u{1F3E5} Health: http://${this.config.host}:${this.config.port}/health`
      );
    } catch (error) {
      this.logger.error("Failed to start server:", error);
      process.exit(1);
    }
  }
  // Gracefully close the fastify instance; errors are logged, not rethrown.
  async stop() {
    try {
      await this.fastify.close();
      this.logger.info("\u{1F6D1} Server stopped");
    } catch (error) {
      this.logger.error("Error stopping server:", error);
    }
  }
};
|
|
915
|
+
// Annotate the CommonJS export names for ESM import in node:
// Dead code (`0 && ...`) that is never executed; it exists so Node's
// cjs-module-lexer can statically detect the named exports.
0 && (module.exports = {
  OpenAIProxyServer,
  convertAISDKResultToOpenAI,
  convertAISDKStreamChunkToOpenAI,
  convertAISDKToolCallsToOpenAI,
  convertOpenAIRequestToAISDK,
  createOpenAIStreamConverter,
  createSSEResponse,
  generateResponseId,
  getCurrentTimestamp
});
|
|
927
|
+
//# sourceMappingURL=index.cjs.map
|