@kenkaiiii/gg-ai 4.2.2 → 4.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1093 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +235 -0
- package/dist/index.d.ts +235 -5
- package/dist/index.js +1051 -5
- package/dist/index.js.map +1 -1
- package/package.json +7 -3
- package/dist/errors.d.ts +0 -12
- package/dist/errors.d.ts.map +0 -1
- package/dist/errors.js +0 -17
- package/dist/errors.js.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/providers/anthropic.d.ts +0 -4
- package/dist/providers/anthropic.d.ts.map +0 -1
- package/dist/providers/anthropic.js +0 -207
- package/dist/providers/anthropic.js.map +0 -1
- package/dist/providers/openai-codex.d.ts +0 -4
- package/dist/providers/openai-codex.d.ts.map +0 -1
- package/dist/providers/openai-codex.js +0 -338
- package/dist/providers/openai-codex.js.map +0 -1
- package/dist/providers/openai.d.ts +0 -4
- package/dist/providers/openai.d.ts.map +0 -1
- package/dist/providers/openai.js +0 -182
- package/dist/providers/openai.js.map +0 -1
- package/dist/providers/transform.d.ts +0 -30
- package/dist/providers/transform.d.ts.map +0 -1
- package/dist/providers/transform.js +0 -323
- package/dist/providers/transform.js.map +0 -1
- package/dist/stream.d.ts +0 -19
- package/dist/stream.d.ts.map +0 -1
- package/dist/stream.js +0 -44
- package/dist/stream.js.map +0 -1
- package/dist/types.d.ts +0 -167
- package/dist/types.d.ts.map +0 -1
- package/dist/types.js +0 -2
- package/dist/types.js.map +0 -1
- package/dist/utils/event-stream.d.ts +0 -38
- package/dist/utils/event-stream.d.ts.map +0 -1
- package/dist/utils/event-stream.js +0 -100
- package/dist/utils/event-stream.js.map +0 -1
- package/dist/utils/zod-to-json-schema.d.ts +0 -7
- package/dist/utils/zod-to-json-schema.d.ts.map +0 -1
- package/dist/utils/zod-to-json-schema.js +0 -12
- package/dist/utils/zod-to-json-schema.js.map +0 -1
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,1093 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __export = (target, all) => {
|
|
9
|
+
for (var name in all)
|
|
10
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
11
|
+
};
|
|
12
|
+
var __copyProps = (to, from, except, desc) => {
|
|
13
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
+
for (let key of __getOwnPropNames(from))
|
|
15
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
16
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
17
|
+
}
|
|
18
|
+
return to;
|
|
19
|
+
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
28
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// src/index.ts
|
|
31
|
+
// Public entry point (src/index.ts): declare the package's exports as lazy
// getters and expose them through CommonJS `module.exports`.
var index_exports = {};
__export(index_exports, {
  EventStream: () => EventStream,
  GGAIError: () => GGAIError,
  ProviderError: () => ProviderError,
  StreamResult: () => StreamResult,
  stream: () => stream
});
module.exports = __toCommonJS(index_exports);
|
|
40
|
+
|
|
41
|
+
// src/errors.ts
|
|
42
|
+
/**
 * Base error type for every failure raised by this package.
 * Forwards the standard `{ cause }` option through to Error.
 */
var GGAIError = class extends Error {
  // Own `name` property so `err.name` checks and stack traces identify
  // package errors without an instanceof test.
  name = "GGAIError";
  constructor(message, options) {
    super(message, options);
  }
};
|
|
48
|
+
/**
 * Error raised for failures reported by a specific model provider.
 * The message is prefixed with "[provider]"; the optional HTTP status
 * code and original cause are preserved for callers.
 */
var ProviderError = class extends GGAIError {
  name = "ProviderError";
  /** Provider identifier, e.g. "anthropic" or "openai". */
  provider;
  /** HTTP status of the failed request, when known. */
  statusCode;
  constructor(provider, message, options) {
    super(`[${provider}] ${message}`, { cause: options?.cause });
    this.provider = provider;
    this.statusCode = options?.statusCode;
  }
};
|
|
58
|
+
|
|
59
|
+
// src/providers/anthropic.ts
|
|
60
|
+
var import_sdk = __toESM(require("@anthropic-ai/sdk"), 1);
|
|
61
|
+
|
|
62
|
+
// src/utils/event-stream.ts
|
|
63
|
+
// Push-based async event queue: a producer calls push()/close()/abort(),
// a consumer iterates with `for await`. A single pending consumer is
// woken through the `resolve` latch.
var EventStream = class {
  queue = [];      // buffered, not-yet-consumed events
  resolve = null;  // wake-up callback for a consumer parked on new events
  done = false;    // set by close()/abort(); iteration ends once drained
  error = null;    // set by abort(); rethrown to the consumer
  // Enqueue one event and wake a waiting consumer.
  push(event) {
    // Bound unbounded growth when nothing is draining: past 50k buffered
    // events, drop the oldest so roughly the newest 25k remain.
    // NOTE(review): this front-splice shifts indices out from under an
    // active iterator's local `index`; presumably acceptable because the
    // cap only triggers with no (or a very slow) consumer — confirm.
    if (this.queue.length > 5e4) {
      this.queue.splice(0, this.queue.length - 25e3);
    }
    this.queue.push(event);
    this.resolve?.();
    this.resolve = null;
  }
  // Signal normal end-of-stream; wakes a waiting consumer.
  close() {
    this.done = true;
    this.resolve?.();
    this.resolve = null;
  }
  // Terminate with an error; the consumer's `for await` will throw it.
  abort(error) {
    this.error = error;
    this.done = true;
    this.resolve?.();
    this.resolve = null;
  }
  async *[Symbol.asyncIterator]() {
    let index = 0;
    while (true) {
      // Drain everything currently buffered.
      while (index < this.queue.length) {
        yield this.queue[index++];
      }
      // Release consumed entries, then check termination state: an abort
      // error wins over a plain close.
      this.queue.splice(0, index);
      index = 0;
      if (this.error) throw this.error;
      if (this.done) return;
      // Park until the producer pushes, closes, or aborts.
      await new Promise((r) => {
        this.resolve = r;
      });
    }
  }
};
|
|
103
|
+
// Couples a live event stream with a promise for the final response.
// The object is both async-iterable (yields incremental events) and
// thenable (`await result` resolves with the final response).
var StreamResult = class {
  events;              // underlying EventStream of incremental events
  response;            // settled by complete() / abort()
  resolveResponse;     // settlers captured from the response promise
  rejectResponse;
  hasConsumer = false; // true once someone iterates (or we auto-drain)
  constructor() {
    this.events = new EventStream();
    this.response = new Promise((resolve, reject) => {
      this.resolveResponse = resolve;
      this.rejectResponse = reject;
    });
  }
  // Forward one incremental event to consumers.
  push(event) {
    this.events.push(event);
  }
  // Finish successfully: end the event stream and resolve the response.
  complete(response) {
    this.events.close();
    this.resolveResponse(response);
  }
  // Finish with an error: fail both the stream and the response promise.
  abort(error) {
    this.events.abort(error);
    this.rejectResponse(error);
  }
  [Symbol.asyncIterator]() {
    this.hasConsumer = true;
    return this.events[Symbol.asyncIterator]();
  }
  // Thenable: awaiting the result waits on the final response. Events are
  // drained in the background so the producer never fills the queue; the
  // drain error is ignored here because the same error also rejects
  // `this.response`.
  then(onfulfilled, onrejected) {
    this.drainEvents().catch(() => {
    });
    return this.response.then(onfulfilled, onrejected);
  }
  // Consume and discard all events unless a real consumer already exists.
  async drainEvents() {
    if (this.hasConsumer) return;
    this.hasConsumer = true;
    for await (const _ of this.events) {
    }
  }
};
|
|
143
|
+
|
|
144
|
+
// src/utils/zod-to-json-schema.ts
|
|
145
|
+
var import_zod = require("zod");
|
|
146
|
+
/**
 * Convert a zod schema to plain JSON Schema via zod's built-in
 * serializer, stripping the top-level "$schema" declaration (provider
 * APIs do not expect it in tool definitions).
 */
function zodToJsonSchema(schema) {
  const { $schema: _omitted, ...converted } = import_zod.z.toJSONSchema(schema);
  return converted;
}
|
|
151
|
+
|
|
152
|
+
// src/providers/transform.ts
|
|
153
|
+
/**
 * Build the Anthropic cache_control block for a prompt-cache retention
 * setting ("none" | "short" | "long"; default "short").
 *
 * Returns undefined when caching is disabled. The 1h TTL is only valid on
 * the first-party API, so "long" falls back to the default TTL when a
 * custom (non-anthropic.com) base URL is in use.
 */
function toAnthropicCacheControl(retention, baseUrl) {
  const mode = retention ?? "short";
  if (mode === "none") return undefined;
  const firstParty = !baseUrl || baseUrl.includes("api.anthropic.com");
  if (mode === "long" && firstParty) {
    return { type: "ephemeral", ttl: "1h" };
  }
  return { type: "ephemeral" };
}
|
|
159
|
+
// Convert provider-neutral messages into the Anthropic Messages API shape.
// Returns { system, messages }: the system prompt is lifted out of the
// list because Anthropic takes it as a separate top-level field.
// NOTE(review): if several system messages appear, only the LAST one wins —
// presumably callers send at most one; confirm.
function toAnthropicMessages(messages, cacheControl) {
  let systemText;
  const out = [];
  for (const msg of messages) {
    if (msg.role === "system") {
      systemText = msg.content;
      continue;
    }
    if (msg.role === "user") {
      // String content passes through; structured parts map to Anthropic
      // text blocks / base64 image blocks.
      out.push({
        role: "user",
        content: typeof msg.content === "string" ? msg.content : msg.content.map((part) => {
          if (part.type === "text") return { type: "text", text: part.text };
          return {
            type: "image",
            source: {
              type: "base64",
              media_type: part.mediaType,
              data: part.data
            }
          };
        })
      });
      continue;
    }
    if (msg.role === "assistant") {
      // Drop thinking parts that carry no signature (presumably because
      // the API rejects unsigned thinking blocks on replay — confirm),
      // then map each remaining part to its API block type.
      const content = typeof msg.content === "string" ? msg.content : msg.content.filter((part) => {
        if (part.type === "thinking" && !part.signature) return false;
        return true;
      }).map((part) => {
        if (part.type === "text") return { type: "text", text: part.text };
        if (part.type === "thinking")
          return { type: "thinking", thinking: part.text, signature: part.signature };
        if (part.type === "tool_call")
          return {
            type: "tool_use",
            id: part.id,
            name: part.name,
            input: part.args
          };
        if (part.type === "server_tool_call")
          return {
            type: "server_tool_use",
            id: part.id,
            name: part.name,
            input: part.input
          };
        if (part.type === "server_tool_result")
          return part.data;
        if (part.type === "raw") return part.data;
        // Unknown part types degrade to an empty text block.
        return { type: "text", text: "" };
      });
      out.push({ role: "assistant", content });
      continue;
    }
    if (msg.role === "tool") {
      // Tool results go back to Anthropic as user-role tool_result blocks.
      out.push({
        role: "user",
        content: msg.content.map((result) => ({
          type: "tool_result",
          tool_use_id: result.toolCallId,
          content: result.content,
          is_error: result.isError
        }))
      });
    }
  }
  // Attach cache_control to the last user message so the conversation
  // prefix up to and including it is cacheable.
  if (cacheControl && out.length > 0) {
    for (let i = out.length - 1; i >= 0; i--) {
      if (out[i].role === "user") {
        const content = out[i].content;
        if (typeof content === "string") {
          // Promote plain-string content to a single text block so it can
          // carry the cache_control marker.
          out[i] = {
            role: "user",
            content: [
              {
                type: "text",
                text: content,
                cache_control: cacheControl
              }
            ]
          };
        } else if (Array.isArray(content) && content.length > 0) {
          // Mark only the final block of the message.
          const last = content[content.length - 1];
          content[content.length - 1] = {
            ...last,
            cache_control: cacheControl
          };
        }
        break;
      }
    }
  }
  let system;
  if (systemText) {
    // An explicit "<!-- uncached -->" marker splits the system prompt into
    // a cacheable prefix and an uncached suffix.
    const marker = "<!-- uncached -->";
    const markerIdx = systemText.indexOf(marker);
    if (markerIdx !== -1 && cacheControl) {
      const cachedPart = systemText.slice(0, markerIdx).trimEnd();
      const uncachedPart = systemText.slice(markerIdx + marker.length).trimStart();
      system = [
        { type: "text", text: cachedPart, cache_control: cacheControl },
        ...uncachedPart ? [{ type: "text", text: uncachedPart }] : []
      ];
    } else {
      system = [
        {
          type: "text",
          text: systemText,
          ...cacheControl && { cache_control: cacheControl }
        }
      ];
    }
  }
  return { system, messages: out };
}
|
|
275
|
+
/**
 * Map internal tool specs onto Anthropic tool declarations. A pre-built
 * JSON schema (rawInputSchema) takes precedence; otherwise the zod
 * `parameters` schema is converted on the fly.
 */
function toAnthropicTools(tools) {
  return tools.map(({ name, description, rawInputSchema, parameters }) => ({
    name,
    description,
    input_schema: rawInputSchema ?? zodToJsonSchema(parameters)
  }));
}
|
|
282
|
+
/**
 * Map the neutral tool-choice setting ("auto" | "none" | "required" |
 * { name }) onto Anthropic's tool_choice object.
 */
function toAnthropicToolChoice(choice) {
  switch (choice) {
    case "auto":
      return { type: "auto" };
    case "none":
      return { type: "none" };
    case "required":
      // Anthropic calls "must use some tool" mode "any".
      return { type: "any" };
    default:
      // Object form forces one specific tool.
      return { type: "tool", name: choice.name };
  }
}
|
|
288
|
+
/**
 * Whether `model` accepts Anthropic's adaptive thinking mode
 * (the opus-4-6 / sonnet-4-6 families).
 */
function supportsAdaptiveThinking(model) {
  return model.includes("opus-4-6") || model.includes("sonnet-4-6");
}
/**
 * Translate a thinking level ("low" | "medium" | "high" | "max") into
 * Anthropic request settings.
 *
 * Adaptive-capable models get `{ type: "adaptive" }` plus an effort hint
 * ("max" effort is only honored on Opus; other models clamp it to
 * "high"), leaving maxTokens untouched. Older models get an explicit
 * token budget, which is added on top of maxTokens so visible output is
 * not squeezed by the thinking allowance.
 */
function toAnthropicThinking(level, maxTokens, model) {
  if (supportsAdaptiveThinking(model)) {
    const effort = level === "max" && !model.includes("opus") ? "high" : level;
    return {
      thinking: { type: "adaptive" },
      maxTokens,
      outputConfig: { effort }
    };
  }
  // "max" has no dedicated budget tier; treat it as "high".
  const tier = level === "max" ? "high" : level;
  let budget;
  if (tier === "low") {
    budget = Math.max(1024, Math.floor(maxTokens * 0.25));
  } else if (tier === "medium") {
    budget = Math.max(2048, Math.floor(maxTokens * 0.5));
  } else {
    budget = Math.max(4096, maxTokens);
  }
  return {
    thinking: { type: "enabled", budget_tokens: budget },
    maxTokens: maxTokens + budget
  };
}
|
|
315
|
+
// Convert provider-neutral messages into OpenAI Chat Completions format.
function toOpenAIMessages(messages) {
  const out = [];
  for (const msg of messages) {
    if (msg.role === "system") {
      out.push({ role: "system", content: msg.content });
      continue;
    }
    if (msg.role === "user") {
      if (typeof msg.content === "string") {
        out.push({ role: "user", content: msg.content });
      } else {
        // Structured parts become text parts / data-URL image parts.
        out.push({
          role: "user",
          content: msg.content.map(
            (part) => {
              if (part.type === "text") return { type: "text", text: part.text };
              return {
                type: "image_url",
                image_url: {
                  url: `data:${part.mediaType};base64,${part.data}`
                }
              };
            }
          )
        });
      }
      continue;
    }
    if (msg.role === "assistant") {
      // String content is used verbatim; structured content is flattened:
      // text parts joined, tool calls re-encoded with JSON-string args.
      const parts = typeof msg.content === "string" ? msg.content : void 0;
      const toolCalls = typeof msg.content !== "string" ? msg.content.filter(
        (p) => p.type === "tool_call"
      ).map(
        (tc) => ({
          id: tc.id,
          type: "function",
          function: { name: tc.name, arguments: JSON.stringify(tc.args) }
        })
      ) : void 0;
      const textParts = typeof msg.content !== "string" ? msg.content.filter((p) => p.type === "text").map((p) => p.text).join("") : void 0;
      const thinkingParts = typeof msg.content !== "string" ? msg.content.filter((p) => p.type === "thinking").map((p) => p.text).join("") : void 0;
      const assistantMsg = {
        role: "assistant",
        content: parts ?? textParts ?? null,
        ...toolCalls?.length ? { tool_calls: toolCalls } : {}
      };
      // NOTE(review): reasoning_content is a non-standard field; a single
      // space placeholder is emitted when tool calls exist without
      // thinking text — presumably some OpenAI-compatible backends require
      // the field to be non-empty on replay; confirm.
      if (thinkingParts || toolCalls?.length) {
        assistantMsg.reasoning_content = thinkingParts || " ";
      }
      out.push(assistantMsg);
      continue;
    }
    if (msg.role === "tool") {
      // One OpenAI tool message per individual result.
      for (const result of msg.content) {
        out.push({
          role: "tool",
          tool_call_id: result.toolCallId,
          content: result.content
        });
      }
    }
  }
  return out;
}
|
|
379
|
+
/**
 * Map internal tool specs onto OpenAI function-tool declarations. A
 * pre-built JSON schema (rawInputSchema) wins over converting the zod
 * `parameters` schema.
 */
function toOpenAITools(tools) {
  return tools.map(({ name, description, rawInputSchema, parameters }) => ({
    type: "function",
    function: {
      name,
      description,
      parameters: rawInputSchema ?? zodToJsonSchema(parameters)
    }
  }));
}
|
|
389
|
+
/**
 * Map the neutral tool-choice setting onto OpenAI's tool_choice value.
 * The three string modes pass through unchanged; an object pins one
 * specific function.
 */
function toOpenAIToolChoice(choice) {
  switch (choice) {
    case "auto":
    case "none":
    case "required":
      return choice;
    default:
      return { type: "function", function: { name: choice.name } };
  }
}
|
|
395
|
+
/**
 * Map a thinking level onto OpenAI's reasoning_effort. OpenAI has no
 * "max" tier, so it clamps to "high"; everything else passes through.
 */
function toOpenAIReasoningEffort(level) {
  if (level === "max") {
    return "high";
  }
  return level;
}
|
|
398
|
+
/**
 * Normalize Anthropic's stop_reason to the package's stop-reason union.
 * Known reasons pass through unchanged; anything unknown (including
 * null) collapses to "end_turn".
 */
function normalizeAnthropicStopReason(reason) {
  const passthrough = ["tool_use", "max_tokens", "pause_turn", "stop_sequence", "refusal"];
  return passthrough.includes(reason) ? reason : "end_turn";
}
|
|
414
|
+
/**
 * Normalize an OpenAI finish_reason to the package's stop-reason union.
 * Unknown reasons (including null and "content_filter") collapse to
 * "end_turn".
 */
function normalizeOpenAIStopReason(reason) {
  if (reason === "tool_calls") return "tool_use";
  if (reason === "length") return "max_tokens";
  if (reason === "stop") return "stop_sequence";
  return "end_turn";
}
|
|
426
|
+
|
|
427
|
+
// src/providers/anthropic.ts
|
|
428
|
+
/**
 * Start an Anthropic streaming completion. Returns immediately with a
 * StreamResult; the request runs in the background and any failure is
 * routed into the result (never an unhandled rejection).
 */
function streamAnthropic(options) {
  const out = new StreamResult();
  runStream(options, out).catch((err) => {
    out.abort(toError(err));
  });
  return out;
}
|
|
433
|
+
// Drive one Anthropic streaming request, forwarding SDK events into
// `result` and settling it with the final normalized response.
async function runStream(options, result) {
  // OAuth access tokens (sk-ant-oat...) must be sent as authToken, not apiKey.
  const isOAuth = options.apiKey?.startsWith("sk-ant-oat");
  const client = new import_sdk.default({
    ...isOAuth ? { apiKey: null, authToken: options.apiKey } : { apiKey: options.apiKey },
    ...options.baseUrl ? { baseURL: options.baseUrl } : {}
  });
  const cacheControl = toAnthropicCacheControl(options.cacheRetention, options.baseUrl);
  const { system, messages } = toAnthropicMessages(options.messages, cacheControl);
  let maxTokens = options.maxTokens ?? 4096;
  let thinking;
  let outputConfig;
  if (options.thinking) {
    // Thinking may raise max_tokens (the budget is added on top) and, for
    // adaptive-capable models, supplies an output_config effort hint.
    const t = toAnthropicThinking(options.thinking, maxTokens, options.model);
    thinking = t.thinking;
    maxTokens = t.maxTokens;
    if (t.outputConfig) {
      outputConfig = t.outputConfig;
    }
  }
  const params = {
    model: options.model,
    max_tokens: maxTokens,
    messages,
    ...system ? { system } : {},
    ...thinking ? { thinking } : {},
    ...outputConfig ? { output_config: outputConfig } : {},
    // temperature is deliberately dropped whenever thinking is enabled.
    ...options.temperature != null && !thinking ? { temperature: options.temperature } : {},
    ...options.topP != null ? { top_p: options.topP } : {},
    ...options.stop ? { stop_sequences: options.stop } : {},
    // Client tools, provider-defined server tools, and the builtin web
    // search tool all share the same `tools` array.
    ...options.tools?.length || options.serverTools?.length || options.webSearch ? {
      tools: [
        ...options.tools?.length ? toAnthropicTools(options.tools) : [],
        ...options.serverTools ?? [],
        ...options.webSearch ? [{ type: "web_search_20250305", name: "web_search" }] : []
      ]
    } : {},
    ...options.toolChoice && options.tools?.length ? { tool_choice: toAnthropicToolChoice(options.toolChoice) } : {},
    ...options.compaction ? { context_management: { edits: [{ type: "compact_20260112" }] } } : {},
    stream: true
  };
  // Beta features are opted into via the anthropic-beta header.
  const betaHeaders = [
    ...isOAuth ? ["oauth-2025-04-20"] : [],
    ...options.compaction ? ["compact-2026-01-12"] : []
  ];
  const stream2 = client.messages.stream(params, {
    signal: options.signal ?? void 0,
    ...betaHeaders.length ? { headers: { "anthropic-beta": betaHeaders.join(",") } } : {}
  });
  // Completed blocks, accumulated for the final assistant message.
  const contentParts = [];
  // The tool block currently being streamed, so inputJson deltas can be
  // attributed to it (content blocks arrive sequentially).
  let currentToolId = "";
  let currentToolName = "";
  stream2.on("text", (text) => {
    result.push({ type: "text_delta", text });
  });
  stream2.on("thinking", (thinkingDelta) => {
    result.push({ type: "thinking_delta", text: thinkingDelta });
  });
  stream2.on("streamEvent", (event) => {
    if (event.type === "content_block_start") {
      if (event.content_block.type === "tool_use") {
        currentToolId = event.content_block.id;
        currentToolName = event.content_block.name;
      }
      if (event.content_block.type === "server_tool_use") {
        currentToolId = event.content_block.id;
        currentToolName = event.content_block.name;
      }
    }
  });
  stream2.on("inputJson", (delta) => {
    result.push({
      type: "toolcall_delta",
      id: currentToolId,
      name: currentToolName,
      argsJson: delta
    });
  });
  // Each completed content block is recorded for the final message and,
  // where meaningful, emitted as a coarse-grained event.
  stream2.on("contentBlock", (block) => {
    if (block.type === "text") {
      contentParts.push({ type: "text", text: block.text });
    } else if (block.type === "thinking") {
      contentParts.push({ type: "thinking", text: block.thinking, signature: block.signature });
    } else if (block.type === "tool_use") {
      const tc = {
        type: "tool_call",
        id: block.id,
        name: block.name,
        args: block.input
      };
      contentParts.push(tc);
      result.push({
        type: "toolcall_done",
        id: tc.id,
        name: tc.name,
        args: tc.args
      });
    } else if (block.type === "server_tool_use") {
      const stc = {
        type: "server_tool_call",
        id: block.id,
        name: block.name,
        input: block.input
      };
      contentParts.push(stc);
      result.push({
        type: "server_toolcall",
        id: stc.id,
        name: stc.name,
        input: stc.input
      });
    } else {
      // Unrecognized block types: web search results get a dedicated
      // event; everything else is preserved verbatim as a raw part.
      const raw = block;
      const blockType = raw.type;
      if (blockType === "web_search_tool_result") {
        const str = {
          type: "server_tool_result",
          toolUseId: raw.tool_use_id,
          resultType: blockType,
          data: raw
        };
        contentParts.push(str);
        result.push({
          type: "server_toolresult",
          toolUseId: str.toolUseId,
          resultType: str.resultType,
          data: str.data
        });
      } else {
        contentParts.push({ type: "raw", data: raw });
      }
    }
  });
  try {
    const finalMessage = await stream2.finalMessage();
    const stopReason = normalizeAnthropicStopReason(finalMessage.stop_reason);
    const response = {
      message: {
        role: "assistant",
        content: contentParts.length > 0 ? contentParts : ""
      },
      stopReason,
      usage: {
        inputTokens: finalMessage.usage.input_tokens,
        outputTokens: finalMessage.usage.output_tokens,
        ...finalMessage.usage.cache_read_input_tokens != null && {
          cacheRead: finalMessage.usage.cache_read_input_tokens
        },
        ...finalMessage.usage.cache_creation_input_tokens != null && {
          cacheWrite: finalMessage.usage.cache_creation_input_tokens
        }
      }
    };
    result.push({ type: "done", stopReason });
    result.complete(response);
  } catch (err) {
    // Surface the failure both as a terminal stream event and as the
    // rejected response promise.
    const error = toError(err);
    result.push({ type: "error", error });
    result.abort(error);
  }
}
|
|
593
|
+
/**
 * Normalize any thrown value from the Anthropic client into a
 * ProviderError, preserving the HTTP status and original cause when
 * available.
 */
function toError(err) {
  if (err instanceof import_sdk.default.APIError) {
    return new ProviderError("anthropic", err.message, {
      statusCode: err.status,
      cause: err
    });
  }
  return err instanceof Error
    ? new ProviderError("anthropic", err.message, { cause: err })
    : new ProviderError("anthropic", String(err));
}
|
|
605
|
+
|
|
606
|
+
// src/providers/openai.ts
|
|
607
|
+
var import_openai = __toESM(require("openai"), 1);
|
|
608
|
+
/**
 * Start an OpenAI-compatible streaming completion. Returns immediately
 * with a StreamResult; the request runs in the background and failures
 * settle the result instead of escaping as unhandled rejections.
 */
function streamOpenAI(options) {
  const out = new StreamResult();
  runStream2(options, out).catch((err) => {
    out.abort(toError2(err));
  });
  return out;
}
|
|
613
|
+
// Drive one OpenAI-compatible chat.completions streaming request and
// settle `result` with the normalized response.
async function runStream2(options, result) {
  const client = new import_openai.default({
    apiKey: options.apiKey,
    ...options.baseUrl ? { baseURL: options.baseUrl } : {}
  });
  // GLM and Moonshot take a `thinking` request parameter instead of
  // OpenAI's reasoning_effort.
  const usesThinkingParam = options.provider === "glm" || options.provider === "moonshot";
  const messages = toOpenAIMessages(options.messages);
  const params = {
    model: options.model,
    messages,
    stream: true,
    ...options.maxTokens ? { max_tokens: options.maxTokens } : {},
    // temperature is deliberately dropped when thinking is requested.
    ...options.temperature != null && !options.thinking ? { temperature: options.temperature } : {},
    ...options.topP != null ? { top_p: options.topP } : {},
    ...options.stop ? { stop: options.stop } : {},
    ...options.thinking && !usesThinkingParam ? { reasoning_effort: toOpenAIReasoningEffort(options.thinking) } : {},
    ...options.tools?.length ? { tools: toOpenAITools(options.tools) } : {},
    ...options.toolChoice && options.tools?.length ? { tool_choice: toOpenAIToolChoice(options.toolChoice) } : {},
    // Ask for a trailing usage chunk so token accounting is available.
    stream_options: { include_usage: true }
  };
  if (options.webSearch) {
    // Only Moonshot's builtin $web_search tool is wired up here.
    if (options.provider === "moonshot") {
      const raw = params;
      const tools = (raw.tools ?? []).slice();
      tools.push({ type: "builtin_function", function: { name: "$web_search" } });
      raw.tools = tools;
    }
  }
  if (usesThinkingParam) {
    params.thinking = options.thinking ? { type: "enabled" } : { type: "disabled" };
  }
  const stream2 = await client.chat.completions.create(params, {
    signal: options.signal ?? void 0
  });
  const contentParts = [];
  // Tool-call fragments arrive keyed by index; accumulate until the
  // stream ends, then parse the concatenated JSON arguments.
  const toolCallAccum = /* @__PURE__ */ new Map();
  let textAccum = "";
  let thinkingAccum = "";
  let inputTokens = 0;
  let outputTokens = 0;
  let cacheRead = 0;
  let finishReason = null;
  for await (const chunk of stream2) {
    const choice = chunk.choices?.[0];
    // Usage arrives on a trailing chunk (include_usage above) that may
    // carry no choices.
    if (chunk.usage) {
      inputTokens = chunk.usage.prompt_tokens;
      outputTokens = chunk.usage.completion_tokens;
      const details = chunk.usage.prompt_tokens_details;
      if (details?.cached_tokens) {
        cacheRead = details.cached_tokens;
      }
    }
    if (!choice) continue;
    if (choice.finish_reason) {
      finishReason = choice.finish_reason;
    }
    const delta = choice.delta;
    // Non-standard delta field used by reasoning-capable backends.
    const reasoningContent = delta.reasoning_content;
    if (typeof reasoningContent === "string" && reasoningContent) {
      thinkingAccum += reasoningContent;
      result.push({ type: "thinking_delta", text: reasoningContent });
    }
    if (delta.content) {
      textAccum += delta.content;
      result.push({ type: "text_delta", text: delta.content });
    }
    if (delta.tool_calls) {
      for (const tc of delta.tool_calls) {
        let accum = toolCallAccum.get(tc.index);
        if (!accum) {
          accum = {
            id: tc.id ?? "",
            name: tc.function?.name ?? "",
            argsJson: ""
          };
          toolCallAccum.set(tc.index, accum);
        }
        // id/name may arrive only on the first fragment; keep the latest.
        if (tc.id) accum.id = tc.id;
        if (tc.function?.name) accum.name = tc.function.name;
        if (tc.function?.arguments) {
          accum.argsJson += tc.function.arguments;
          result.push({
            type: "toolcall_delta",
            id: accum.id,
            name: accum.name,
            argsJson: tc.function.arguments
          });
        }
      }
    }
  }
  // Assemble final message parts in thinking -> text -> tool-call order.
  if (thinkingAccum) {
    contentParts.push({ type: "thinking", text: thinkingAccum });
  }
  if (textAccum) {
    contentParts.push({ type: "text", text: textAccum });
  }
  for (const [, tc] of toolCallAccum) {
    let args = {};
    try {
      args = JSON.parse(tc.argsJson);
    } catch {
      // Malformed/truncated JSON arguments fall back to an empty object.
    }
    const toolCall = {
      type: "tool_call",
      id: tc.id,
      name: tc.name,
      args
    };
    contentParts.push(toolCall);
    result.push({
      type: "toolcall_done",
      id: tc.id,
      name: tc.name,
      args
    });
  }
  const stopReason = normalizeOpenAIStopReason(finishReason);
  const response = {
    message: {
      role: "assistant",
      content: contentParts.length > 0 ? contentParts : textAccum || ""
    },
    stopReason,
    usage: { inputTokens, outputTokens, ...cacheRead > 0 && { cacheRead } }
  };
  result.push({ type: "done", stopReason });
  result.complete(response);
}
|
|
742
|
+
/**
 * Normalize any thrown value from the OpenAI client into a
 * ProviderError, preserving the HTTP status, the raw response body, and
 * the original cause when available.
 */
function toError2(err) {
  if (err instanceof import_openai.default.APIError) {
    const body = err.error;
    // Append the raw error body — upstream messages are often too terse
    // to diagnose on their own.
    const detail = body ? `${err.message} | body: ${JSON.stringify(body)}` : err.message;
    return new ProviderError("openai", detail, {
      statusCode: err.status,
      cause: err
    });
  }
  if (err instanceof Error) {
    return new ProviderError("openai", err.message, { cause: err });
  }
  return new ProviderError("openai", String(err));
}
|
|
759
|
+
|
|
760
|
+
// src/providers/openai-codex.ts
|
|
761
|
+
var import_node_os = __toESM(require("os"), 1);
|
|
762
|
+
var DEFAULT_BASE_URL = "https://chatgpt.com/backend-api";
|
|
763
|
+
/**
 * Start an OpenAI Codex (ChatGPT backend) streaming completion. Returns
 * immediately with a StreamResult; the request runs in the background
 * and failures settle the result instead of escaping as unhandled
 * rejections.
 */
function streamOpenAICodex(options) {
  const out = new StreamResult();
  runStream3(options, out).catch((err) => {
    out.abort(toError3(err));
  });
  return out;
}
|
|
768
|
+
/**
 * Pump a single Codex (ChatGPT backend) streaming request into `result`.
 *
 * Builds a Responses-API-style request, POSTs it with fetch, parses the SSE
 * reply, and emits text / thinking / tool-call deltas on `result` as they
 * arrive. On normal completion pushes a final "done" event and resolves
 * `result` with the assembled response. Throws ProviderError on HTTP or
 * stream-level failures (callers route that through toError3).
 */
async function runStream3(options, result) {
  // Strip any trailing slashes so the path join below stays clean.
  const baseUrl = (options.baseUrl || DEFAULT_BASE_URL).replace(/\/+$/, "");
  const url = `${baseUrl}/codex/responses`;
  // Split messages into the `instructions` string and the ordered input items.
  const { system, input } = toCodexInput(options.messages);
  const body = {
    model: options.model,
    store: false,
    stream: true,
    instructions: system,
    input,
    tool_choice: "auto",
    parallel_tool_calls: true,
    // Ask the backend to return encrypted reasoning content with the stream.
    include: ["reasoning.encrypted_content"]
  };
  if (options.tools?.length) {
    body.tools = toCodexTools(options.tools);
  }
  // Temperature and reasoning effort are mutually exclusive here: temperature
  // is only sent when thinking is off.
  if (options.temperature != null && !options.thinking) {
    body.temperature = options.temperature;
  }
  if (options.thinking) {
    body.reasoning = {
      effort: options.thinking,
      summary: "auto"
    };
  }
  const headers = {
    "Content-Type": "application/json",
    Accept: "text/event-stream",
    Authorization: `Bearer ${options.apiKey}`,
    "OpenAI-Beta": "responses=experimental",
    originator: "ggcoder",
    "User-Agent": `ggcoder (${import_node_os.default.platform()} ${import_node_os.default.release()}; ${import_node_os.default.arch()})`
  };
  // Presumably required to select the ChatGPT account on multi-account
  // tokens — set only when the caller supplies one.
  if (options.accountId) {
    headers["chatgpt-account-id"] = options.accountId;
  }
  const response = await fetch(url, {
    method: "POST",
    headers,
    body: JSON.stringify(body),
    signal: options.signal
  });
  if (!response.ok) {
    // Best-effort body read; an unreadable body still produces an error.
    const text = await response.text().catch(() => "");
    let message = `Codex API error (${response.status}): ${text}`;
    if (response.status === 400 && text.includes("not supported")) {
      message += `

Hint: Codex models require a ChatGPT Plus ($20/mo) or Pro ($200/mo) subscription. The "codex-spark" variants require ChatGPT Pro. Ensure your account has an active subscription at https://chatgpt.com/settings`;
    }
    throw new ProviderError("openai", message, {
      statusCode: response.status
    });
  }
  if (!response.body) {
    throw new ProviderError("openai", "No response body from Codex API");
  }
  // Accumulators for the final assembled message.
  const contentParts = [];
  let textAccum = "";
  // Tool calls are keyed "callId|itemId" so both ids survive round-trips.
  const toolCalls = /* @__PURE__ */ new Map();
  let inputTokens = 0;
  let outputTokens = 0;
  for await (const event of parseSSE(response.body)) {
    const type = event.type;
    if (!type) continue;
    if (type === "error") {
      const msg = event.message || JSON.stringify(event);
      throw new ProviderError("openai", `Codex error: ${msg}`);
    }
    if (type === "response.failed") {
      const msg = event.error?.message || "Codex response failed";
      throw new ProviderError("openai", msg);
    }
    // Plain assistant text: accumulate and forward the delta.
    if (type === "response.output_text.delta") {
      const delta = event.delta;
      textAccum += delta;
      result.push({ type: "text_delta", text: delta });
    }
    // Reasoning summaries are forwarded but not accumulated into the message.
    if (type === "response.reasoning_summary_text.delta") {
      const delta = event.delta;
      result.push({ type: "thinking_delta", text: delta });
    }
    // A new function call item: register it so argument deltas can attach.
    if (type === "response.output_item.added") {
      const item = event.item;
      if (item?.type === "function_call") {
        const callId = item.call_id;
        const itemId = item.id;
        const id = `${callId}|${itemId}`;
        const name = item.name;
        toolCalls.set(id, { id, name, argsJson: item.arguments || "" });
      }
    }
    // Argument deltas only carry item_id, so match on the key's suffix.
    if (type === "response.function_call_arguments.delta") {
      const delta = event.delta;
      const itemId = event.item_id;
      for (const [key, tc] of toolCalls) {
        if (key.endsWith(`|${itemId}`)) {
          tc.argsJson += delta;
          result.push({
            type: "toolcall_delta",
            id: tc.id,
            name: tc.name,
            argsJson: delta
          });
          break;
        }
      }
    }
    // The "done" arguments event carries the full JSON; it replaces whatever
    // was accumulated from deltas.
    if (type === "response.function_call_arguments.done") {
      const itemId = event.item_id;
      const argsStr = event.arguments;
      for (const [key, tc] of toolCalls) {
        if (key.endsWith(`|${itemId}`)) {
          tc.argsJson = argsStr;
          break;
        }
      }
    }
    // Item completed: emit toolcall_done with the parsed arguments.
    if (type === "response.output_item.done") {
      const item = event.item;
      if (item?.type === "function_call") {
        const callId = item.call_id;
        const itemId = item.id;
        const id = `${callId}|${itemId}`;
        const tc = toolCalls.get(id);
        if (tc) {
          let args = {};
          try {
            args = JSON.parse(tc.argsJson);
          } catch {
            // Malformed arguments JSON: deliberately fall back to {}.
          }
          result.push({
            type: "toolcall_done",
            id: tc.id,
            name: tc.name,
            args
          });
        }
      }
    }
    // Token usage arrives on the terminal response event.
    if (type === "response.completed" || type === "response.done") {
      const resp = event.response;
      const usage = resp?.usage;
      if (usage) {
        inputTokens = usage.input_tokens ?? 0;
        outputTokens = usage.output_tokens ?? 0;
      }
    }
  }
  // Assemble the final message: accumulated text first, then tool calls in
  // insertion order.
  if (textAccum) {
    contentParts.push({ type: "text", text: textAccum });
  }
  for (const [, tc] of toolCalls) {
    let args = {};
    try {
      args = JSON.parse(tc.argsJson);
    } catch {
      // Malformed arguments JSON: deliberately fall back to {}.
    }
    const toolCall = {
      type: "tool_call",
      id: tc.id,
      name: tc.name,
      args
    };
    contentParts.push(toolCall);
  }
  // Codex does not report a stop reason, so infer it from the output shape.
  const hasToolCalls = contentParts.some((p) => p.type === "tool_call");
  const stopReason = hasToolCalls ? "tool_use" : "end_turn";
  const streamResponse = {
    message: {
      role: "assistant",
      content: contentParts.length > 0 ? contentParts : textAccum || ""
    },
    stopReason,
    usage: { inputTokens, outputTokens }
  };
  result.push({ type: "done", stopReason });
  result.complete(streamResponse);
}
|
|
948
|
+
/**
 * Minimal Server-Sent-Events parser over a ReadableStream of bytes.
 *
 * Yields each JSON-decoded `data:` payload as it completes. Comment lines,
 * keep-alives, the "[DONE]" sentinel, and unparseable frames are skipped.
 * The reader lock is always released, even on early exit or error.
 *
 * Fix: the SSE spec permits CRLF line terminators, but the previous version
 * only recognized "\n\n" event boundaries and "\n" line splits, so CRLF
 * streams produced no events at all. Boundaries and lines now accept \r?\n.
 */
async function* parseSSE(body) {
  const reader = body.getReader();
  const decoder = new TextDecoder();
  let buffer = "";
  // Event boundary: a blank line, with either LF or CRLF terminators.
  const boundary = /\r?\n\r?\n/;
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      buffer += decoder.decode(value, { stream: true });
      let match = boundary.exec(buffer);
      while (match) {
        const chunk = buffer.slice(0, match.index);
        buffer = buffer.slice(match.index + match[0].length);
        const dataLines = chunk
          .split(/\r?\n/)
          .filter((l) => l.startsWith("data:"))
          .map((l) => l.slice(5).trim());
        if (dataLines.length > 0) {
          const data = dataLines.join("\n").trim();
          if (data && data !== "[DONE]") {
            try {
              yield JSON.parse(data);
            } catch {
              // Malformed JSON frame: skip it and keep streaming.
            }
          }
        }
        match = boundary.exec(buffer);
      }
    }
  } finally {
    reader.releaseLock();
  }
}
|
|
978
|
+
/**
 * Convert the provider-agnostic message list into the Codex request shape:
 * a single `system` instructions string (the last system message wins) plus
 * an ordered `input` array of user messages, assistant messages, function
 * calls, and function call outputs.
 *
 * Tool-call ids of the form "callId|itemId" are split back into their two
 * halves; plain ids are used for both.
 */
function toCodexInput(messages) {
  let system;
  const input = [];
  // Build an assistant text item in the Codex output_text shape.
  const assistantText = (text) => ({
    type: "message",
    role: "assistant",
    content: [{ type: "output_text", text, annotations: [] }],
    status: "completed"
  });
  // Map a structured user part: text stays text, anything else is treated
  // as an inline base64 image.
  const userPart = (part) =>
    part.type === "text"
      ? { type: "input_text", text: part.text }
      : {
          type: "input_image",
          detail: "auto",
          image_url: `data:${part.mediaType};base64,${part.data}`
        };
  for (const msg of messages) {
    switch (msg.role) {
      case "system":
        system = msg.content;
        break;
      case "user": {
        const content =
          typeof msg.content === "string"
            ? [{ type: "input_text", text: msg.content }]
            : msg.content.map(userPart);
        input.push({ role: "user", content });
        break;
      }
      case "assistant": {
        if (typeof msg.content === "string") {
          input.push(assistantText(msg.content));
          break;
        }
        for (const part of msg.content) {
          if (part.type === "text") {
            input.push(assistantText(part.text));
          } else if (part.type === "tool_call") {
            const [callId, itemId] = part.id.includes("|")
              ? part.id.split("|", 2)
              : [part.id, part.id];
            input.push({
              type: "function_call",
              id: itemId,
              call_id: callId,
              name: part.name,
              arguments: JSON.stringify(part.args)
            });
          }
        }
        break;
      }
      case "tool": {
        for (const res of msg.content) {
          const callId = res.toolCallId.includes("|")
            ? res.toolCallId.split("|", 2)[0]
            : res.toolCallId;
          input.push({
            type: "function_call_output",
            call_id: callId,
            output: res.content
          });
        }
        break;
      }
    }
  }
  return { system, input };
}
|
|
1042
|
+
// Translate internal tool definitions into Codex "function" declarations.
// A pre-built JSON schema (tool.rawInputSchema) takes priority; otherwise
// the zod schema in tool.parameters is converted lazily via zodToJsonSchema.
// `strict: null` leaves strict-mode up to the backend default.
function toCodexTools(tools) {
  const declarations = [];
  for (const tool of tools) {
    const parameters = tool.rawInputSchema ?? zodToJsonSchema(tool.parameters);
    declarations.push({
      type: "function",
      name: tool.name,
      description: tool.description,
      parameters,
      strict: null
    });
  }
  return declarations;
}
|
|
1051
|
+
// Coerce any thrown value from the Codex path into a ProviderError tagged
// "openai". Already-wrapped ProviderErrors pass through untouched; plain
// Errors keep the original as `cause`; anything else is stringified.
function toError3(err) {
  if (err instanceof ProviderError) {
    return err;
  }
  const wrapped =
    err instanceof Error
      ? new ProviderError("openai", err.message, { cause: err })
      : new ProviderError("openai", String(err));
  return wrapped;
}
|
|
1058
|
+
|
|
1059
|
+
// src/stream.ts
|
|
1060
|
+
/**
 * Provider dispatcher: route a stream request to the matching backend.
 * - "anthropic": Anthropic Messages API.
 * - "openai": Codex (ChatGPT backend) when an accountId is present,
 *   otherwise the standard OpenAI endpoint.
 * - "glm" / "moonshot": OpenAI-compatible endpoints with provider-specific
 *   default base URLs (only applied when the caller gives no baseUrl).
 * Throws GGAIError for any other provider value.
 */
function stream(options) {
  if (options.provider === "anthropic") {
    return streamAnthropic(options);
  }
  if (options.provider === "openai") {
    // A ChatGPT account id selects the Codex transport.
    return options.accountId ? streamOpenAICodex(options) : streamOpenAI(options);
  }
  if (options.provider === "glm") {
    return streamOpenAI({
      ...options,
      baseUrl: options.baseUrl ?? "https://api.z.ai/api/paas/v4"
    });
  }
  if (options.provider === "moonshot") {
    return streamOpenAI({
      ...options,
      baseUrl: options.baseUrl ?? "https://api.moonshot.ai/v1"
    });
  }
  throw new GGAIError(
    `Unknown provider: ${options.provider}. Supported: "anthropic", "openai", "glm", "moonshot"`
  );
}
|
|
1085
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
1086
|
+
// Bundler-generated annotation: the `0 &&` guard keeps this assignment dead
// at runtime while letting Node's ESM loader statically detect these named
// exports from the CJS build. Do not remove or "simplify" it.
0 && (module.exports = {
  EventStream,
  GGAIError,
  ProviderError,
  StreamResult,
  stream
});
//# sourceMappingURL=index.cjs.map
|