@ai-sdk-tool/proxy 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +172 -0
- package/dist/index.cjs +927 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +183 -0
- package/dist/index.d.ts +183 -0
- package/dist/index.js +882 -0
- package/dist/index.js.map +1 -0
- package/package.json +62 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,882 @@
|
|
|
1
|
+
// src/openai-request-converter.ts
|
|
2
|
+
import { z } from "zod";
|
|
3
|
+
// Maps a single JSON Schema property definition onto an equivalent Zod schema.
// Supports string (with enum), number/integer, boolean, array, and object;
// anything unrecognized degrades to z.any() so unknown schemas never throw.
function createZodSchema(prop) {
  if (prop.type === "string") {
    return prop.enum ? z.enum(prop.enum) : z.string();
  }
  if (prop.type === "number" || prop.type === "integer") {
    // JSON Schema distinguishes "integer"; Zod models both as number.
    return z.number();
  }
  if (prop.type === "boolean") {
    return z.boolean();
  }
  if (prop.type === "array") {
    // Recurse into `items` when present so typed arrays keep their element schema.
    return z.array(prop.items ? createZodSchema(prop.items) : z.any());
  }
  if (prop.type === "object") {
    // Recurse into nested properties instead of collapsing to an empty object.
    const shape = {};
    for (const [key, nested] of Object.entries(prop.properties ?? {})) {
      shape[key] = createZodSchema(nested);
    }
    return z.object(shape);
  }
  return z.any();
}
|
|
21
|
+
// Converts an OpenAI tool `parameters` JSON Schema into a Zod object schema.
// Missing parameters or a schema without `properties` yields an empty object schema.
function convertOpenAIToolToZod(parameters) {
  const properties = parameters?.properties;
  if (!properties) {
    return z.object({});
  }
  const shape = Object.fromEntries(
    Object.entries(properties).map(([key, prop]) => [key, createZodSchema(prop)])
  );
  return z.object(shape);
}
|
|
35
|
+
// Converts an OpenAI tools array into the AI SDK tool map keyed by tool name.
// A nullish input yields an empty map.
function convertOpenAITools(openaiTools) {
  const aisdkTools = {};
  for (const openaiTool of openaiTools ?? []) {
    const { name, description, parameters } = openaiTool.function;
    aisdkTools[name] = {
      description: description || "",
      inputSchema: convertOpenAIToolToZod(parameters)
    };
  }
  return aisdkTools;
}
|
|
49
|
+
// Normalizes OpenAI's `stop` (string | string[] | nullish) into the AI SDK's
// `stopSequences` array; falsy input (including "") maps to undefined.
function convertStopToSequences(stop) {
  if (!stop) {
    return undefined;
  }
  if (Array.isArray(stop)) {
    return stop;
  }
  return [stop];
}
|
|
55
|
+
// Normalizes any OpenAI message `content` value into an array of
// { type: "text", text } parts. Empty string / nullish content -> [],
// array parts are flattened to strings, other objects are JSON-encoded,
// and primitives are stringified.
function normalizeMessageContent(content) {
  const toTextPart = (text) => ({ type: "text", text });
  if (typeof content === "string") {
    return content.length > 0 ? [toTextPart(content)] : [];
  }
  if (Array.isArray(content)) {
    const texts = [];
    for (const part of content) {
      let text;
      if (typeof part === "string") {
        text = part;
      } else if (part && typeof part === "object" && "text" in part) {
        const value = part.text;
        if (typeof value === "string") {
          text = value;
        } else if (value !== undefined) {
          text = JSON.stringify(value);
        } else {
          // `text` key exists but holds undefined: fall back to the whole part.
          text = JSON.stringify(part);
        }
      } else {
        text = JSON.stringify(part);
      }
      // Drop empty strings and undefined (e.g. JSON.stringify(undefined)).
      if (text) {
        texts.push(text);
      }
    }
    return texts.map(toTextPart);
  }
  if (content === null || content === undefined) {
    return [];
  }
  if (typeof content === "object") {
    return [toTextPart(JSON.stringify(content))];
  }
  return [toTextPart(String(content))];
}
|
|
85
|
+
// Converts OpenAI assistant tool_calls into AI SDK "tool-call" content parts.
// Stringified JSON arguments are parsed (empty string -> {}); unparsable
// strings are passed through verbatim as `input`.
function buildToolCallParts(toolCalls) {
  return toolCalls.map((toolCall) => {
    const rawArgs = toolCall.function.arguments;
    let input = rawArgs;
    if (typeof rawArgs === "string") {
      try {
        input = JSON.parse(rawArgs || "{}");
      } catch {
        // Keep the raw string so malformed arguments are not lost.
        input = rawArgs;
      }
    }
    return {
      type: "tool-call",
      toolCallId: toolCall.id,
      toolName: toolCall.function.name,
      input
    };
  });
}
|
|
103
|
+
// Builds the AI SDK `content` value for an assistant message.
// Returns a bare string when there are no tool calls ("" for empty, the text
// for a single part), an array of text parts for multiple parts, or an array
// of text parts followed by tool-call parts when tool_calls are present.
function buildAssistantContent(message) {
  var _a;
  const textParts = normalizeMessageContent(message.content);
  // Only materialize tool-call parts when tool_calls is a non-empty array.
  const toolCallParts = ((_a = message.tool_calls) == null ? void 0 : _a.length) ? buildToolCallParts(message.tool_calls) : [];
  if (toolCallParts.length === 0) {
    if (textParts.length === 0) {
      return "";
    }
    if (textParts.length === 1) {
      // A single text part collapses to the plain-string shorthand.
      return textParts[0].text;
    }
    return textParts;
  }
  // Text parts first, then tool-call parts, preserving order within each group.
  return [...textParts, ...toolCallParts];
}
|
|
118
|
+
// Recursively checks whether a value is representable as plain JSON:
// null, string, number, boolean, arrays/objects of the same.
// undefined, functions, and symbols (including nested) return false.
function isJsonValue(value) {
  if (value === null) {
    return true;
  }
  const kind = typeof value;
  if (kind === "string" || kind === "number" || kind === "boolean") {
    return true;
  }
  if (Array.isArray(value)) {
    return value.every(isJsonValue);
  }
  if (kind === "object") {
    return Object.values(value).every(isJsonValue);
  }
  return false;
}
|
|
130
|
+
// Wraps a raw tool-result string as an AI SDK tool output.
// Valid JSON text becomes { type: "json" }; everything else (including
// falsy input) becomes { type: "text" } with the raw string.
function buildToolOutput(rawValue) {
  if (!rawValue) {
    return { type: "text", value: "" };
  }
  let parsed;
  let didParse = false;
  try {
    parsed = JSON.parse(rawValue);
    didParse = true;
  } catch {
    // Not JSON; fall through to the text representation.
  }
  if (didParse && isJsonValue(parsed)) {
    return { type: "json", value: parsed };
  }
  return { type: "text", value: rawValue };
}
|
|
143
|
+
// Converts an OpenAI `role: "tool"` message into AI SDK tool-result content.
// `toolNameLookup` maps tool_call_id -> tool name (populated earlier from the
// assistant message that issued the call); unknown ids fall back to the raw id.
function buildToolContent(message, toolNameLookup) {
  var _a, _b;
  const textParts = normalizeMessageContent(message.content);
  // Multi-part tool output is flattened into one newline-joined string.
  const combined = textParts.map((part) => part.text).join("\n");
  const toolCallId = (_a = message.tool_call_id) != null ? _a : "";
  const toolName = toolCallId ? (_b = toolNameLookup.get(toolCallId)) != null ? _b : toolCallId : "";
  return [
    {
      type: "tool-result",
      toolCallId,
      toolName,
      // JSON output when the combined text parses as JSON, text otherwise.
      output: buildToolOutput(combined)
    }
  ];
}
|
|
158
|
+
// Converts one OpenAI chat message into an AI SDK ModelMessage.
// Side effect: assistant messages record their tool_calls into
// `toolNameLookup` (id -> name) so later `role: "tool"` messages can resolve
// the tool name — callers must process messages in order.
// Any role other than assistant/tool/system is treated as a user message.
function convertMessageToModelMessage(message, toolNameLookup) {
  var _a;
  if (message.role === "assistant") {
    if ((_a = message.tool_calls) == null ? void 0 : _a.length) {
      // Remember tool-call ids so subsequent tool messages resolve names.
      for (const toolCall of message.tool_calls) {
        toolNameLookup.set(toolCall.id, toolCall.function.name);
      }
    }
    return {
      role: "assistant",
      content: buildAssistantContent(
        message
      )
    };
  }
  if (message.role === "tool") {
    return {
      role: "tool",
      content: buildToolContent(
        message,
        toolNameLookup
      )
    };
  }
  if (message.role === "system") {
    // System content is always flattened to a single string.
    const text = normalizeMessageContent(message.content).map((part) => part.text).join("\n");
    return {
      role: "system",
      content: text
    };
  }
  // Default: user message. Mirror the assistant shorthand rules —
  // "" for empty, bare string for one part, array for many.
  const userParts = normalizeMessageContent(message.content);
  if (userParts.length === 0) {
    return {
      role: "user",
      content: ""
    };
  }
  if (userParts.length === 1) {
    return {
      role: "user",
      content: userParts[0].text
    };
  }
  return {
    role: "user",
    content: userParts
  };
}
|
|
207
|
+
// Converts an OpenAI /v1/chat/completions request body into AI SDK
// generateText/streamText parameters. `proxyConfig.parserDebug`, when set,
// is forwarded as toolCallMiddleware provider options.
function convertOpenAIRequestToAISDK(openaiRequest, proxyConfig) {
  const {
    messages,
    tools: openaiTools,
    temperature,
    max_tokens,
    stop,
    tool_choice
  } = openaiRequest;
  // Shared id->name map: populated by assistant messages, read by tool messages.
  const toolNameLookup = /* @__PURE__ */ new Map();
  const aiMessages = messages.map(
    (message) => convertMessageToModelMessage(message, toolNameLookup)
  );
  const aisdkTools = convertOpenAITools(openaiTools);
  // Debug settings ride along as provider options for the tool-call middleware.
  const providerOptions = (proxyConfig == null ? void 0 : proxyConfig.parserDebug) ? {
    toolCallMiddleware: {
      debugLevel: proxyConfig.parserDebug.level,
      logErrors: proxyConfig.parserDebug.logErrors,
      captureSummary: proxyConfig.parserDebug.captureSummary
    }
  } : void 0;
  return {
    messages: aiMessages,
    tools: aisdkTools,
    temperature,
    // OpenAI max_tokens maps to the AI SDK's maxOutputTokens.
    maxOutputTokens: max_tokens,
    stopSequences: convertStopToSequences(stop),
    toolChoice: mapOpenAIToolChoice(tool_choice),
    // Only include the key when debug options exist.
    ...providerOptions ? { providerOptions } : {}
  };
}
|
|
238
|
+
// Maps OpenAI's `tool_choice` onto the AI SDK `toolChoice` value.
// "auto" / "none" / "required" pass through; a { type: "function" } object
// becomes { type: "tool", toolName }; anything else maps to undefined.
// Fix: "required" was previously dropped (fell through to undefined) even
// though both OpenAI and the AI SDK support it.
function mapOpenAIToolChoice(choice) {
  if (!choice) {
    return undefined;
  }
  if (choice === "auto" || choice === "none" || choice === "required") {
    return choice;
  }
  if (typeof choice === "object" && choice.type === "function") {
    return { type: "tool", toolName: choice.function.name };
  }
  return undefined;
}
|
|
250
|
+
// Converts AI SDK tool calls into OpenAI-format tool_calls entries with
// freshly generated ids.
// Fix: AI SDK v5 exposes tool arguments as `input` (as the stream handlers in
// this file already assume); the old `call.args` read produced
// JSON.stringify(undefined) -> undefined for v5 results. `input` is preferred,
// `args` kept as a legacy fallback, and {} used when neither exists.
// Also replaces the deprecated String.prototype.substr with slice.
function convertAISDKToolCallsToOpenAI(toolCalls) {
  return toolCalls.map((call) => ({
    id: `call_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`,
    type: "function",
    function: {
      name: call.toolName,
      arguments: JSON.stringify(call.input ?? call.args ?? {})
    }
  }));
}
|
|
260
|
+
|
|
261
|
+
// src/response-utils.ts
|
|
262
|
+
// Generates a pseudo-unique OpenAI-style completion id
// ("chatcmpl-<millis>-<9 base36 chars>"). Math.random is fine here: the id is
// only a correlation token, not a security credential.
// Fix: String.prototype.substr is deprecated; slice(2, 11) is equivalent.
function generateResponseId() {
  return `chatcmpl-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
}
|
|
265
|
+
// Returns the current Unix timestamp in whole seconds, as used by the
// OpenAI `created` response field.
function getCurrentTimestamp() {
  const millis = Date.now();
  return Math.floor(millis / 1000);
}
|
|
268
|
+
|
|
269
|
+
// src/response-converter.ts
|
|
270
|
+
// Converts a completed AI SDK generateText result into an OpenAI
// chat.completion (or chat.completion.chunk when stream=true) response body.
// Emits up to two choices: one for text output, one for tool calls.
// NOTE(review): both choices use index 0 — confirm clients tolerate duplicate
// choice indices when text and tool calls coexist.
function convertAISDKResultToOpenAI(aisdkResult, model, stream = false) {
  var _a;
  const choices = [];
  if (aisdkResult.text) {
    const choice = {
      index: 0,
      finish_reason: aisdkResult.finishReason || "stop"
    };
    // Streamed chunks use `delta`; non-streamed responses use `message`.
    if (stream) {
      choice.delta = {
        role: "assistant",
        content: aisdkResult.text
      };
    } else {
      choice.message = {
        role: "assistant",
        content: aisdkResult.text
      };
    }
    choices.push(choice);
  }
  if (aisdkResult.toolCalls && aisdkResult.toolCalls.length > 0) {
    const choice = {
      index: 0,
      finish_reason: "tool_calls"
    };
    const openAIToolCalls = (_a = aisdkResult.toolCalls) == null ? void 0 : _a.map(
      (call) => ({
        // Fresh synthetic id; the AI SDK call id is not propagated here.
        id: `call_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`,
        type: "function",
        function: {
          name: call.toolName,
          // NOTE(review): AI SDK v5 exposes arguments as `input`, not `args`
          // (the stream handlers in this file read `chunk.input`) — confirm
          // which shape generateText returns here.
          arguments: JSON.stringify(call.args)
        }
      })
    );
    if (stream) {
      choice.delta = {
        role: "assistant",
        tool_calls: openAIToolCalls
      };
    } else {
      choice.message = {
        role: "assistant",
        // OpenAI convention: content is null on a pure tool-call message.
        content: null,
        tool_calls: openAIToolCalls
      };
    }
    choices.push(choice);
  }
  const response = {
    id: generateResponseId(),
    object: stream ? "chat.completion.chunk" : "chat.completion",
    created: getCurrentTimestamp(),
    model,
    choices
  };
  if (aisdkResult.usage) {
    // NOTE(review): AI SDK v5 usage fields are inputTokens/outputTokens;
    // promptTokens/completionTokens read as 0 there — verify against the
    // installed `ai` version.
    response.usage = {
      prompt_tokens: aisdkResult.usage.promptTokens || 0,
      completion_tokens: aisdkResult.usage.completionTokens || 0,
      total_tokens: aisdkResult.usage.totalTokens || 0
    };
  }
  return response;
}
|
|
336
|
+
// Builds the terminal streaming chunk carrying only a finish_reason.
// Normalizes the AI SDK's "tool-calls" spelling to OpenAI's "tool_calls";
// unrecognized reasons default to "stop".
function createFinishResponse(model, finishReason, responseId) {
  let validFinishReason;
  switch (finishReason) {
    case "tool_calls":
    case "tool-calls":
      validFinishReason = "tool_calls";
      break;
    case "stop":
    case "length":
    case "content_filter":
      validFinishReason = finishReason;
      break;
    default:
      validFinishReason = "stop";
  }
  return {
    id: responseId,
    object: "chat.completion.chunk",
    created: getCurrentTimestamp(),
    model,
    choices: [
      {
        index: 0,
        delta: {},
        finish_reason: validFinishReason
      }
    ]
  };
}
|
|
363
|
+
// Builds a streaming chunk carrying assistant text. When isReasoning is true
// the text is emitted as `reasoning_content` (DeepSeek-style extension)
// instead of `content`.
function createContentResponse(model, content, responseId, isReasoning = false) {
  const delta = { role: "assistant" };
  if (isReasoning) {
    delta.reasoning_content = content;
  } else {
    delta.content = content;
  }
  return {
    id: responseId,
    object: "chat.completion.chunk",
    created: getCurrentTimestamp(),
    model,
    choices: [{ index: 0, delta }]
  };
}
|
|
383
|
+
// Builds a streaming chunk carrying one tool-call delta.
// `includeRole` adds role: "assistant" on the first chunk of a message.
// Fix: the original dropped `toolCall.id`, so the id built by the "tool-call"
// stream handler never reached clients — OpenAI streaming clients need it to
// correlate tool results. The id is now forwarded when present (omitted
// otherwise, matching OpenAI's continuation-delta shape).
function createToolCallResponse(model, toolCall, responseId, includeRole = false) {
  var _a, _b;
  return {
    id: responseId,
    object: "chat.completion.chunk",
    created: getCurrentTimestamp(),
    model,
    choices: [
      {
        index: 0,
        delta: {
          ...includeRole ? { role: "assistant" } : {},
          tool_calls: [
            {
              index: toolCall.index || 0,
              ...toolCall.id ? { id: toolCall.id } : {},
              type: "function",
              function: {
                name: ((_a = toolCall.function) == null ? void 0 : _a.name) || "",
                arguments: ((_b = toolCall.function) == null ? void 0 : _b.arguments) || ""
              }
            }
          ]
        }
      }
    ]
  };
}
|
|
410
|
+
// Creates a stateful converter that maps AI SDK fullStream chunks to
// OpenAI-format SSE payloads ({ data: string } items). State lives in the
// closure: one converter instance per streamed response.
function createOpenAIStreamConverter(model, options) {
  // True once a tool-call/tool-call-delta was seen in the current step;
  // forces finish_reason "tool_calls" at finish time.
  let streamHasToolCalls = false;
  // Guards against emitting more than one finish chunk per stream.
  let streamFinishSent = false;
  let streamResponseId = generateResponseId();
  // Chunk logging is on by default; disabled only by explicit logChunks: false.
  const logChunk = (options == null ? void 0 : options.logChunks) === false ? void 0 : (chunk) => {
    const logType = process.env.USE_MIDDLEWARE === "true" ? "middleware" : "native";
    console.log(
      `\u{1F50D} AI SDK Chunk [${logType}]:`,
      JSON.stringify(chunk, null, 2)
    );
  };
  // One handler per AI SDK chunk type; each returns zero or more SSE payloads.
  const handlers = {
    start: () => [],
    // Reasoning text is surfaced as `reasoning_content` deltas.
    "reasoning-delta": (chunk) => {
      if (!chunk.text) {
        return [];
      }
      return [
        {
          data: JSON.stringify(
            createContentResponse(model, chunk.text, streamResponseId, true)
          )
        }
      ];
    },
    "text-delta": (chunk) => {
      if (!chunk.text) {
        return [];
      }
      return [
        {
          data: JSON.stringify(
            createContentResponse(model, chunk.text, streamResponseId, false)
          )
        }
      ];
    },
    // A complete tool call arriving as one chunk.
    "tool-call": (chunk) => {
      var _a;
      const toolCallId = chunk.toolCallId || `call_${generateResponseId()}`;
      const toolName = chunk.toolName || "";
      // `input` may already be a JSON string or a structured value.
      const argsString = typeof chunk.input === "string" ? chunk.input : JSON.stringify((_a = chunk.input) != null ? _a : {});
      // NOTE(review): createToolCallResponse does not forward this `id`
      // field to the emitted delta — confirm clients can correlate calls.
      const toolCallDelta = {
        index: 0,
        id: toolCallId,
        type: "function",
        function: { name: toolName, arguments: argsString }
      };
      const response = createToolCallResponse(
        model,
        toolCallDelta,
        streamResponseId,
        true
      );
      return [{ data: JSON.stringify(response) }];
    },
    "reasoning-end": () => [],
    "text-end": () => [],
    // End of a step: emit the finish chunk once, resolving the reason.
    "finish-step": (chunk) => {
      if (streamFinishSent) {
        return [];
      }
      const hadToolCalls = streamHasToolCalls;
      let finishReason = chunk.finishReason || "stop";
      // Normalize the AI SDK's "tool-calls" spelling.
      if (finishReason === "tool_calls" || finishReason === "tool-calls") {
        finishReason = "tool_calls";
      }
      const resolvedReason = hadToolCalls ? "tool_calls" : finishReason;
      streamFinishSent = true;
      streamHasToolCalls = false;
      return [
        {
          data: JSON.stringify(
            createFinishResponse(model, resolvedReason, streamResponseId)
          )
        }
      ];
    },
    // Incremental tool-call argument fragments.
    "tool-call-delta": (chunk) => {
      // NOTE(review): Number(toolCallId) yields NaN for non-numeric ids
      // (e.g. "call_abc"), producing index: NaN in the output — confirm
      // the intended id->index mapping.
      const toolCall = {
        index: chunk.toolCallId ? Number(chunk.toolCallId) : 0,
        type: "function",
        function: {
          name: chunk.toolName || "",
          arguments: chunk.args || ""
        }
      };
      return [
        {
          data: JSON.stringify(
            createToolCallResponse(model, toolCall, streamResponseId)
          )
        }
      ];
    },
    // Tool results (server-side executed tools) are surfaced as inline text.
    "tool-result": (chunk) => {
      const resultText = `
[Tool: ${chunk.toolName} returned ${JSON.stringify(chunk.output)}]
`;
      return [
        {
          data: JSON.stringify(
            createContentResponse(model, resultText, streamResponseId, false)
          )
        }
      ];
    },
    // Stream-level finish: same logic as finish-step, still single-shot.
    finish: (chunk) => {
      if (streamFinishSent) {
        return [];
      }
      const hadToolCalls = streamHasToolCalls;
      let finishReason = chunk.finishReason || "stop";
      if (finishReason === "tool_calls" || finishReason === "tool-calls") {
        finishReason = "tool_calls";
      }
      const resolvedReason = hadToolCalls ? "tool_calls" : finishReason;
      streamFinishSent = true;
      streamHasToolCalls = false;
      return [
        {
          data: JSON.stringify(
            createFinishResponse(model, resolvedReason, streamResponseId)
          )
        }
      ];
    }
  };
  // The returned function converts one chunk to its SSE payloads.
  return (chunk) => {
    var _a;
    const out = [];
    logChunk == null ? void 0 : logChunk(chunk);
    if (chunk.type === "start") {
      // New response: reset per-stream state and adopt the chunk id if given.
      streamHasToolCalls = false;
      streamFinishSent = false;
      streamResponseId = (_a = chunk.id) != null ? _a : generateResponseId();
    }
    const handler = handlers[chunk.type];
    if (handler) {
      const result = handler(chunk, model);
      // Flag set AFTER the handler runs so finish handlers see prior state.
      if (chunk.type === "tool-call" || chunk.type === "tool-call-delta") {
        streamHasToolCalls = true;
      }
      out.push(...result);
    } else {
      // Unknown chunk types are logged and dropped rather than failing.
      console.warn(`\u26A0\uFE0F Unknown AI SDK chunk type: ${chunk.type}`, chunk);
    }
    if (chunk.type === "finish-step" || chunk.type === "finish") {
      streamHasToolCalls = false;
    }
    // Suppress payloads that carry no client-visible delta; unparsable
    // payloads are passed through unchanged.
    return out.filter((resultChunk) => {
      var _a2, _b, _c, _d;
      try {
        const parsed = JSON.parse(resultChunk.data);
        const delta = (_b = (_a2 = parsed.choices) == null ? void 0 : _a2[0]) == null ? void 0 : _b.delta;
        return delta && (delta.role || delta.content || delta.reasoning_content || delta.tool_calls && delta.tool_calls.length > 0 || ((_d = (_c = parsed.choices) == null ? void 0 : _c[0]) == null ? void 0 : _d.finish_reason));
      } catch (e) {
        return true;
      }
    });
  };
}
|
|
572
|
+
// One-shot convenience wrapper: converts a single AI SDK stream chunk using a
// throwaway converter. Note that per-stream state (tool-call tracking,
// response id) does not persist across calls; use createOpenAIStreamConverter
// directly for a full stream.
function convertAISDKStreamChunkToOpenAI(chunk, model) {
  return createOpenAIStreamConverter(model)(chunk);
}
|
|
576
|
+
// Serializes converted chunks into one Server-Sent-Events body:
// each chunk becomes "data: <payload>\n\n".
function createSSEResponse(chunks) {
  let body = "";
  for (const chunk of chunks) {
    body += `data: ${chunk.data}\n\n`;
  }
  return body;
}
|
|
581
|
+
|
|
582
|
+
// src/server.ts
|
|
583
|
+
import { zodSchema } from "@ai-sdk/provider-utils";
|
|
584
|
+
import cors from "@fastify/cors";
|
|
585
|
+
import { generateText, streamText } from "ai";
|
|
586
|
+
import Fastify from "fastify";
|
|
587
|
+
import { toJSONSchema } from "zod";
|
|
588
|
+
// Renders a Zod schema as plain JSON Schema for logging, stripping the
// `$schema` marker. Returns null for a missing schema; non-object results
// from toJSONSchema are passed through as-is.
function serializeZodSchema(schema) {
  if (!schema) {
    return null;
  }
  const jsonSchema = toJSONSchema(schema, {
    unrepresentable: "any"
  });
  if (jsonSchema === null || typeof jsonSchema !== "object") {
    return jsonSchema;
  }
  const { $schema: _omitted, ...rest } = jsonSchema;
  return rest;
}
|
|
601
|
+
// Projects OpenAI request messages into a compact, log-friendly shape,
// keeping only role/content/tool_calls plus the message's position.
function serializeMessages(messages) {
  return messages.map((message, index) => {
    const { role, content, tool_calls: toolCalls } = message;
    return { index, role, content, toolCalls };
  });
}
|
|
609
|
+
// Logs a sanitized summary of an incoming OpenAI request when `enabled`.
// Pure logging; does not mutate the request.
function logIncomingRequest(openaiRequest, enabled) {
  var _a;
  if (!enabled) {
    return;
  }
  // Collect declared function-tool names, skipping malformed entries.
  const toolNames = ((_a = openaiRequest.tools) != null ? _a : []).map((tool) => {
    var _a2;
    return "function" in tool ? (_a2 = tool.function) == null ? void 0 : _a2.name : void 0;
  }).filter((name) => Boolean(name));
  console.log(
    "[proxy] Incoming OpenAI request",
    JSON.stringify(
      {
        model: openaiRequest.model,
        stream: Boolean(openaiRequest.stream),
        temperature: openaiRequest.temperature,
        maxTokens: openaiRequest.max_tokens,
        toolNames,
        toolChoice: openaiRequest.tool_choice,
        messages: serializeMessages(openaiRequest.messages),
        tools: openaiRequest.tools
      },
      null,
      2
    )
  );
}
|
|
636
|
+
// Projects AI SDK messages into a compact log shape (index/role/content).
// Nullish input yields undefined rather than throwing.
function serializeAISDKMessages(messages) {
  if (messages == null) {
    return undefined;
  }
  return messages.map(({ role, content }, index) => ({ index, role, content }));
}
|
|
643
|
+
// Logs the converted AI SDK parameters when `enabled`, with tool input
// schemas rendered as JSON Schema for readability. Pure logging.
function logRequestConversion(openaiRequest, aisdkParams, enabled) {
  var _a, _b;
  if (!enabled) {
    return;
  }
  const messages = (_a = aisdkParams.messages) != null ? _a : [];
  console.log(
    "[proxy] Converted AI SDK params",
    JSON.stringify(
      {
        model: openaiRequest.model,
        hasSystemMessage: messages.some((message) => message.role === "system"),
        messages: serializeAISDKMessages(messages),
        // Tool map -> array of { name, description, inputSchema } for the log.
        tools: Object.entries((_b = aisdkParams.tools) != null ? _b : {}).map(([name, tool]) => ({
          name,
          description: tool.description,
          inputSchema: serializeZodSchema(tool.inputSchema)
        })),
        temperature: aisdkParams.temperature,
        maxOutputTokens: aisdkParams.maxOutputTokens,
        stopSequences: aisdkParams.stopSequences
      },
      null,
      2
    )
  );
}
|
|
670
|
+
// Fastify server exposing an OpenAI-compatible /v1/chat/completions endpoint
// backed by an AI SDK model. Config: { model, port?, host?, cors?, tools?,
// logging?, parserDebug?, logger? }.
var OpenAIProxyServer = class {
  // Builds the Fastify app, applies defaults (port 3000, localhost, CORS on),
  // optionally registers CORS, and wires all routes.
  constructor(config) {
    var _a;
    this.config = {
      port: 3e3,
      host: "localhost",
      cors: true,
      ...config
    };
    this.logger = (_a = config.logger) != null ? _a : console;
    this.fastify = Fastify();
    if (this.config.cors) {
      this.fastify.register(cors);
    }
    this.setupRoutes();
  }
  // Registers /health, /v1/chat/completions, and /v1/models.
  setupRoutes() {
    this.fastify.get(
      "/health",
      async (_request, _reply) => ({
        status: "ok",
        timestamp: (/* @__PURE__ */ new Date()).toISOString()
      })
    );
    this.fastify.post(
      "/v1/chat/completions",
      (request, reply) => {
        var _a, _b, _c, _d;
        try {
          const openaiRequest = request.body;
          // Minimal request validation: messages must be an array.
          if (!(openaiRequest.messages && Array.isArray(openaiRequest.messages))) {
            return reply.code(400).send({
              error: {
                message: "Messages array is required",
                type: "invalid_request_error"
              }
            });
          }
          // Request/conversion logging defaults to on unless disabled in config.
          logIncomingRequest(
            openaiRequest,
            (_b = (_a = this.config.logging) == null ? void 0 : _a.requests) != null ? _b : true
          );
          const aisdkParams = convertOpenAIRequestToAISDK(openaiRequest, {
            parserDebug: this.config.parserDebug
          });
          logRequestConversion(
            openaiRequest,
            aisdkParams,
            (_d = (_c = this.config.logging) == null ? void 0 : _c.conversions) != null ? _d : true
          );
          // Dispatch on the OpenAI `stream` flag.
          if (openaiRequest.stream) {
            return this.handleStreamingRequest(
              aisdkParams,
              openaiRequest,
              reply
            );
          }
          return this.handleNonStreamingRequest(
            aisdkParams,
            openaiRequest,
            reply
          );
        } catch (error) {
          // NOTE(review): this catch only covers synchronous conversion
          // errors — the async handlers manage their own failures.
          this.logger.error("Request handling error:", error);
          return reply.code(500).send({
            error: {
              message: "Internal server error",
              type: "server_error"
            }
          });
        }
      }
    );
    // Static model listing so OpenAI clients that probe /v1/models work.
    this.fastify.get("/v1/models", async () => ({
      object: "list",
      data: [
        {
          id: "wrapped-model",
          object: "model",
          created: Math.floor(Date.now() / 1e3),
          owned_by: "ai-sdk-tool-proxy"
        }
      ]
    }));
  }
  // Merge server-defined tools (with execute) and request-defined tools (schema-only)
  // Server tools take precedence when names overlap.
  mergeTools(serverTools, requestTools) {
    // Adapts a tool to the provider shape; `execute` is only attached when present.
    const toProviderTool = (tool) => {
      if (!tool) {
        return;
      }
      return {
        description: tool.description,
        inputSchema: zodSchema(tool.inputSchema),
        ...tool.execute ? { execute: tool.execute } : {}
      };
    };
    const merged = {};
    // Request tools first, then server tools so server entries overwrite.
    for (const [name, t] of Object.entries(requestTools != null ? requestTools : {})) {
      const pt = toProviderTool(t);
      if (pt) {
        merged[name] = pt;
      }
    }
    for (const [name, t] of Object.entries(serverTools != null ? serverTools : {})) {
      const pt = toProviderTool(t);
      if (pt) {
        merged[name] = pt;
      }
    }
    // undefined (not {}) so callers can spread `tools` conditionally.
    return Object.keys(merged).length > 0 ? merged : void 0;
  }
  // Streams the model output as OpenAI-format SSE, ending with "[DONE]".
  // Writes directly to the raw socket, bypassing Fastify serialization.
  async handleStreamingRequest(aisdkParams, openaiRequest, reply) {
    var _a, _b;
    reply.raw.writeHead(200, {
      "Content-Type": "text/event-stream",
      "Cache-Control": "no-cache",
      Connection: "keep-alive",
      "Access-Control-Allow-Origin": "*"
    });
    try {
      const mergedTools = this.mergeTools(this.config.tools, aisdkParams.tools);
      const result = await streamText({
        model: this.config.model,
        ...aisdkParams,
        ...mergedTools ? { tools: mergedTools } : {}
      });
      const convert = createOpenAIStreamConverter(openaiRequest.model, {
        logChunks: (_b = (_a = this.config.logging) == null ? void 0 : _a.streamChunks) != null ? _b : true
      });
      for await (const chunk of result.fullStream) {
        const openaiChunks = convert(chunk);
        for (const openaiChunk of openaiChunks) {
          reply.raw.write(`data: ${openaiChunk.data}

`);
        }
      }
      reply.raw.write("data: [DONE]\n\n");
      reply.raw.end();
    } catch (error) {
      // Headers are already sent, so the error is reported in-band as SSE.
      this.logger.error("Streaming error:", error);
      reply.raw.write('data: {"error": {"message": "Streaming error"}}\n\n');
      reply.raw.end();
    }
    return reply;
  }
  // Runs a single generateText call and replies with one OpenAI completion.
  async handleNonStreamingRequest(aisdkParams, openaiRequest, reply) {
    try {
      const mergedTools = this.mergeTools(this.config.tools, aisdkParams.tools);
      const result = await generateText({
        model: this.config.model,
        ...aisdkParams,
        ...mergedTools ? { tools: mergedTools } : {}
      });
      const openaiResponse = convertAISDKResultToOpenAI(
        result,
        openaiRequest.model,
        false
      );
      reply.send(openaiResponse);
    } catch (error) {
      this.logger.error("Generation error:", error);
      return reply.code(500).send({
        error: {
          message: "Generation failed",
          type: "generation_error"
        }
      });
    }
  }
  // Binds the server to the configured host/port and logs the endpoints.
  // Exits the process on bind failure.
  async start() {
    try {
      await this.fastify.listen({
        port: this.config.port || 3e3,
        host: this.config.host || "localhost"
      });
      this.logger.info(
        `\u{1F680} OpenAI Proxy Server running on http://${this.config.host}:${this.config.port}`
      );
      this.logger.info(
        `\u{1F4E1} Endpoint: http://${this.config.host}:${this.config.port}/v1/chat/completions`
      );
      this.logger.info(
        `\u{1F3E5} Health: http://${this.config.host}:${this.config.port}/health`
      );
    } catch (error) {
      this.logger.error("Failed to start server:", error);
      process.exit(1);
    }
  }
  // Closes the underlying Fastify instance; shutdown errors are logged, not thrown.
  async stop() {
    try {
      await this.fastify.close();
      this.logger.info("\u{1F6D1} Server stopped");
    } catch (error) {
      this.logger.error("Error stopping server:", error);
    }
  }
};
|
|
871
|
+
export {
|
|
872
|
+
OpenAIProxyServer,
|
|
873
|
+
convertAISDKResultToOpenAI,
|
|
874
|
+
convertAISDKStreamChunkToOpenAI,
|
|
875
|
+
convertAISDKToolCallsToOpenAI,
|
|
876
|
+
convertOpenAIRequestToAISDK,
|
|
877
|
+
createOpenAIStreamConverter,
|
|
878
|
+
createSSEResponse,
|
|
879
|
+
generateResponseId,
|
|
880
|
+
getCurrentTimestamp
|
|
881
|
+
};
|
|
882
|
+
//# sourceMappingURL=index.js.map
|