aemeathcli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +607 -0
- package/dist/App-P4MYD4QY.js +2719 -0
- package/dist/App-P4MYD4QY.js.map +1 -0
- package/dist/api-key-fallback-YQQBOQIL.js +11 -0
- package/dist/api-key-fallback-YQQBOQIL.js.map +1 -0
- package/dist/chunk-4IJD72YB.js +184 -0
- package/dist/chunk-4IJD72YB.js.map +1 -0
- package/dist/chunk-6PDJ45T4.js +325 -0
- package/dist/chunk-6PDJ45T4.js.map +1 -0
- package/dist/chunk-CARHU3DO.js +562 -0
- package/dist/chunk-CARHU3DO.js.map +1 -0
- package/dist/chunk-CGEV3ARR.js +80 -0
- package/dist/chunk-CGEV3ARR.js.map +1 -0
- package/dist/chunk-CS5X3BWX.js +27 -0
- package/dist/chunk-CS5X3BWX.js.map +1 -0
- package/dist/chunk-CYQNBB25.js +44 -0
- package/dist/chunk-CYQNBB25.js.map +1 -0
- package/dist/chunk-DAHGLHNR.js +657 -0
- package/dist/chunk-DAHGLHNR.js.map +1 -0
- package/dist/chunk-H66O5Z2V.js +305 -0
- package/dist/chunk-H66O5Z2V.js.map +1 -0
- package/dist/chunk-HCIHOHLX.js +322 -0
- package/dist/chunk-HCIHOHLX.js.map +1 -0
- package/dist/chunk-HMJRPNPZ.js +1031 -0
- package/dist/chunk-HMJRPNPZ.js.map +1 -0
- package/dist/chunk-I5PZ4JTS.js +119 -0
- package/dist/chunk-I5PZ4JTS.js.map +1 -0
- package/dist/chunk-IYW62KKR.js +255 -0
- package/dist/chunk-IYW62KKR.js.map +1 -0
- package/dist/chunk-JAXXTYID.js +51 -0
- package/dist/chunk-JAXXTYID.js.map +1 -0
- package/dist/chunk-LSOYPSAT.js +183 -0
- package/dist/chunk-LSOYPSAT.js.map +1 -0
- package/dist/chunk-MFBHNWGV.js +416 -0
- package/dist/chunk-MFBHNWGV.js.map +1 -0
- package/dist/chunk-MXZSI3AY.js +311 -0
- package/dist/chunk-MXZSI3AY.js.map +1 -0
- package/dist/chunk-NBR3GHMT.js +72 -0
- package/dist/chunk-NBR3GHMT.js.map +1 -0
- package/dist/chunk-O3ZF22SW.js +246 -0
- package/dist/chunk-O3ZF22SW.js.map +1 -0
- package/dist/chunk-SUSJPZU2.js +181 -0
- package/dist/chunk-SUSJPZU2.js.map +1 -0
- package/dist/chunk-TEVZS4FA.js +310 -0
- package/dist/chunk-TEVZS4FA.js.map +1 -0
- package/dist/chunk-UY2SYSEZ.js +211 -0
- package/dist/chunk-UY2SYSEZ.js.map +1 -0
- package/dist/chunk-WAHVZH7V.js +260 -0
- package/dist/chunk-WAHVZH7V.js.map +1 -0
- package/dist/chunk-WPP3PEDE.js +234 -0
- package/dist/chunk-WPP3PEDE.js.map +1 -0
- package/dist/chunk-Y5XVD2CD.js +1610 -0
- package/dist/chunk-Y5XVD2CD.js.map +1 -0
- package/dist/chunk-YL5XFHR3.js +56 -0
- package/dist/chunk-YL5XFHR3.js.map +1 -0
- package/dist/chunk-ZGOHARPV.js +122 -0
- package/dist/chunk-ZGOHARPV.js.map +1 -0
- package/dist/claude-adapter-QMLFMSP3.js +6 -0
- package/dist/claude-adapter-QMLFMSP3.js.map +1 -0
- package/dist/claude-login-5WELXPKT.js +324 -0
- package/dist/claude-login-5WELXPKT.js.map +1 -0
- package/dist/cli.d.ts +1 -0
- package/dist/cli.js +703 -0
- package/dist/cli.js.map +1 -0
- package/dist/codex-login-7HHLJHBF.js +164 -0
- package/dist/codex-login-7HHLJHBF.js.map +1 -0
- package/dist/config-store-W6FBCQAQ.js +6 -0
- package/dist/config-store-W6FBCQAQ.js.map +1 -0
- package/dist/executor-6RIKIGXK.js +4 -0
- package/dist/executor-6RIKIGXK.js.map +1 -0
- package/dist/gemini-adapter-6JIHZ7WI.js +6 -0
- package/dist/gemini-adapter-6JIHZ7WI.js.map +1 -0
- package/dist/gemini-login-ZZLYC3J6.js +346 -0
- package/dist/gemini-login-ZZLYC3J6.js.map +1 -0
- package/dist/index.d.ts +2210 -0
- package/dist/index.js +1419 -0
- package/dist/index.js.map +1 -0
- package/dist/kimi-adapter-JN4HFFHU.js +6 -0
- package/dist/kimi-adapter-JN4HFFHU.js.map +1 -0
- package/dist/kimi-login-CZPS63NK.js +149 -0
- package/dist/kimi-login-CZPS63NK.js.map +1 -0
- package/dist/native-cli-adapters-OLW3XX57.js +6 -0
- package/dist/native-cli-adapters-OLW3XX57.js.map +1 -0
- package/dist/ollama-adapter-OJQ3FKWK.js +6 -0
- package/dist/ollama-adapter-OJQ3FKWK.js.map +1 -0
- package/dist/openai-adapter-XU46EN7B.js +6 -0
- package/dist/openai-adapter-XU46EN7B.js.map +1 -0
- package/dist/registry-4KD24ZC3.js +6 -0
- package/dist/registry-4KD24ZC3.js.map +1 -0
- package/dist/registry-H7B3AHPQ.js +5 -0
- package/dist/registry-H7B3AHPQ.js.map +1 -0
- package/dist/server-manager-PTGBHCLS.js +5 -0
- package/dist/server-manager-PTGBHCLS.js.map +1 -0
- package/dist/session-manager-ECEEACGY.js +12 -0
- package/dist/session-manager-ECEEACGY.js.map +1 -0
- package/dist/team-manager-HC4XGCFY.js +11 -0
- package/dist/team-manager-HC4XGCFY.js.map +1 -0
- package/dist/tmux-manager-GPYZ3WQH.js +6 -0
- package/dist/tmux-manager-GPYZ3WQH.js.map +1 -0
- package/dist/tools-TSMXMHIF.js +6 -0
- package/dist/tools-TSMXMHIF.js.map +1 -0
- package/package.json +89 -0
|
@@ -0,0 +1,260 @@
|
|
|
1
|
+
import { SUPPORTED_MODELS } from './chunk-HCIHOHLX.js';
|
|
2
|
+
import { ModelNotFoundError, AuthenticationError, RateLimitError } from './chunk-ZGOHARPV.js';
|
|
3
|
+
import { logger } from './chunk-JAXXTYID.js';
|
|
4
|
+
import { generateText, streamText } from 'ai';
|
|
5
|
+
import { createAnthropic } from '@ai-sdk/anthropic';
|
|
6
|
+
|
|
7
|
+
// Provider identifier attached to responses and raised errors.
var PROVIDER_NAME = "anthropic";
// Model ids this adapter advertises via `supportedModels`.
// getModelInfo() still validates each request against SUPPORTED_MODELS.
var CLAUDE_MODELS = [
  "claude-opus-4-6",
  "claude-opus-4-6-1m",
  "claude-sonnet-4-6",
  "claude-sonnet-4-6-1m",
  "claude-haiku-4-5"
];
// Rough heuristic used by countTokens(): ~4 characters per token.
var CHARS_PER_TOKEN_ESTIMATE = 4;
|
|
16
|
+
// Normalize an arbitrary role string to one of the four chat roles.
// Anything unrecognized is coerced to "user" so the request stays valid.
function mapRole(role) {
  const knownRoles = ["user", "assistant", "system", "tool"];
  return knownRoles.includes(role) ? role : "user";
}
|
|
30
|
+
// Convert the project's tool definitions into the AI SDK's map shape:
// { [toolName]: { description, parameters: JSON-schema object } }.
// Returns undefined when there are no tools, so the key is simply omitted.
function convertTools(tools) {
  if (tools === void 0 || tools.length === 0) {
    return void 0;
  }
  const toolMap = {};
  for (const def of tools) {
    const schemaProps = {};
    const requiredNames = [];
    for (const p of def.parameters) {
      const schema = {
        type: p.type,
        description: p.description
      };
      // Optional JSON-schema keywords are only emitted when present.
      if (p.enum !== void 0) {
        schema["enum"] = p.enum;
      }
      if (p.default !== void 0) {
        schema["default"] = p.default;
      }
      schemaProps[p.name] = schema;
      if (p.required) {
        requiredNames.push(p.name);
      }
    }
    toolMap[def.name] = {
      description: def.description,
      parameters: {
        type: "object",
        properties: schemaProps,
        required: requiredNames
      }
    };
  }
  return toolMap;
}
|
|
65
|
+
// Translate the project's chat messages into AI-SDK CoreMessage shapes.
// Assistant turns with tool calls and tool-result turns get multi-part
// content; everything else is a plain { role, content } pair.
function buildMessages(messages) {
  const validRoles = ["user", "assistant", "system", "tool"];
  return messages.map((msg) => {
    // Assistant turn that invoked tools -> text part (if any) + tool-call parts.
    if (msg.role === "assistant" && msg.toolCalls !== void 0 && msg.toolCalls.length > 0) {
      const content = [];
      if (msg.content.length > 0) {
        content.push({ type: "text", text: msg.content });
      }
      for (const call of msg.toolCalls) {
        content.push({
          type: "tool-call",
          toolCallId: call.id,
          toolName: call.name,
          args: call.arguments
        });
      }
      return { role: "assistant", content };
    }
    // Tool-result turn: the first recorded call carries the metadata.
    if (msg.role === "tool" && msg.toolCalls !== void 0 && msg.toolCalls.length > 0) {
      const firstCall = msg.toolCalls[0];
      if (firstCall !== void 0) {
        return {
          role: "tool",
          content: [{
            type: "tool-result",
            toolCallId: firstCall.id,
            toolName: firstCall.name,
            result: msg.content
          }]
        };
      }
    }
    // Plain text turn; unknown roles degrade to "user" (role mapping inlined).
    return {
      role: validRoles.includes(msg.role) ? msg.role : "user",
      content: msg.content
    };
  });
}
|
|
102
|
+
// Dollar cost for a call; model prices are quoted per million tokens.
function computeCost(modelInfo, inputTokens, outputTokens) {
  const inputCost = inputTokens / 1e6 * modelInfo.inputPricePerMToken;
  const outputCost = outputTokens / 1e6 * modelInfo.outputPricePerMToken;
  return inputCost + outputCost;
}
|
|
105
|
+
// Map a raw provider failure onto the project's error hierarchy.
// Always throws (auth / rate-limit / unknown-model / passthrough).
function classifyError(error, model) {
  const message = error instanceof Error ? error.message : String(error);
  const normalized = message.toLowerCase();
  const containsAny = (...needles) => needles.some((n) => normalized.includes(n));
  if (containsAny("401", "unauthorized", "invalid api key")) {
    throw new AuthenticationError(PROVIDER_NAME, message);
  }
  if (containsAny("429", "rate limit", "too many requests")) {
    // Honor a "retry in <N>s" hint when the message carries one; else 60s.
    const hint = /(\d+)\s*s/i.exec(message);
    const retryMs = hint?.[1] !== void 0 ? parseInt(hint[1], 10) * 1e3 : 6e4;
    throw new RateLimitError(PROVIDER_NAME, retryMs);
  }
  if (normalized.includes("model") && normalized.includes("not found")) {
    throw new ModelNotFoundError(model);
  }
  // Unrecognized failure: rethrow as-is (wrapping non-Error values).
  throw error instanceof Error ? error : new Error(message);
}
|
|
121
|
+
// Anthropic (Claude) provider adapter. Bridges the project's chat/stream
// request shapes onto the Vercel AI SDK's generateText/streamText.
var ClaudeAdapter = class {
  // Provider identifier ("anthropic").
  name = PROVIDER_NAME;
  // Model ids this adapter advertises.
  supportedModels = CLAUDE_MODELS;
  // Anthropic model factory produced by createAnthropic().
  anthropic;
  // options?.apiKey wins over the ANTHROPIC_API_KEY env var; when neither
  // is set, the key entry is omitted and the SDK's own resolution applies.
  constructor(options) {
    const apiKey = options?.apiKey ?? process.env["ANTHROPIC_API_KEY"];
    this.anthropic = createAnthropic({
      ...apiKey !== void 0 ? { apiKey } : {},
      ...options?.baseUrl !== void 0 ? { baseURL: options.baseUrl } : {}
    });
  }
  // Single-shot (non-streaming) completion. Throws ModelNotFoundError for
  // unknown models (via getModelInfo) and classified provider errors on
  // API failure (via classifyError, which always throws).
  async chat(request) {
    const modelInfo = this.getModelInfo(request.model);
    const messages = buildMessages(request.messages);
    const tools = convertTools(request.tools);
    try {
      const result = await generateText({
        model: this.anthropic(request.model),
        messages,
        // Conditional spreads keep optional keys absent rather than undefined.
        ...request.system !== void 0 ? { system: request.system } : {},
        tools,
        maxTokens: request.maxTokens ?? modelInfo.maxOutputTokens,
        ...request.temperature !== void 0 ? { temperature: request.temperature } : {}
      });
      const toolCalls = extractToolCalls(result);
      const inputTokens = result.usage?.promptTokens ?? 0;
      const outputTokens = result.usage?.completionTokens ?? 0;
      const usage = {
        inputTokens,
        outputTokens,
        totalTokens: inputTokens + outputTokens,
        costUsd: computeCost(modelInfo, inputTokens, outputTokens)
      };
      const responseMessage = {
        // Fall back to a random id when the SDK response carries none.
        id: result.response?.id ?? crypto.randomUUID(),
        role: "assistant",
        content: result.text,
        model: request.model,
        provider: PROVIDER_NAME,
        toolCalls: toolCalls.length > 0 ? toolCalls : void 0,
        tokenUsage: usage,
        createdAt: /* @__PURE__ */ new Date()
      };
      return {
        // NOTE(review): when response.id is missing, this second
        // randomUUID() call produces an id different from
        // responseMessage.id — confirm whether they should match.
        id: result.response?.id ?? crypto.randomUUID(),
        model: request.model,
        provider: PROVIDER_NAME,
        message: responseMessage,
        usage,
        finishReason: mapFinishReason(result.finishReason)
      };
    } catch (error) {
      // classifyError never returns; it rethrows a classified error.
      classifyError(error, request.model);
    }
  }
  // Streaming completion. Yields chunks tagged "text" / "tool_call" /
  // "usage" / "error" and always terminates with a "done" chunk —
  // errors are reported in-band rather than thrown to the consumer.
  async *stream(request) {
    const modelInfo = this.getModelInfo(request.model);
    const messages = buildMessages(request.messages);
    const tools = convertTools(request.tools);
    try {
      const result = streamText({
        model: this.anthropic(request.model),
        messages,
        ...request.system !== void 0 ? { system: request.system } : {},
        tools,
        maxTokens: request.maxTokens ?? modelInfo.maxOutputTokens,
        ...request.temperature !== void 0 ? { temperature: request.temperature } : {}
      });
      for await (const part of result.fullStream) {
        if (part.type === "text-delta") {
          yield { type: "text", content: part.textDelta };
        } else if (part.type === "tool-call") {
          const toolCall = {
            id: part.toolCallId,
            name: part.toolName,
            arguments: part.args
          };
          yield { type: "tool_call", toolCall };
        } else if (part.type === "finish") {
          // Final usage accounting arrives with the "finish" part.
          const inputTokens = part.usage?.promptTokens ?? 0;
          const outputTokens = part.usage?.completionTokens ?? 0;
          const usage = {
            inputTokens,
            outputTokens,
            totalTokens: inputTokens + outputTokens,
            costUsd: computeCost(modelInfo, inputTokens, outputTokens)
          };
          yield { type: "usage", usage };
        } else if (part.type === "error") {
          const errMsg = part.error instanceof Error ? part.error.message : String(part.error);
          yield { type: "error", error: errMsg };
        }
      }
      yield { type: "done" };
    } catch (error) {
      // Log, surface the error as a chunk, and still close with "done".
      const errMsg = error instanceof Error ? error.message : String(error);
      logger.error({ error: errMsg, model: request.model }, "Claude stream error");
      yield { type: "error", error: errMsg };
      yield { type: "done" };
    }
  }
  // Heuristic token count (~4 chars/token); the model argument is ignored.
  async countTokens(text, _model) {
    return Math.ceil(text.length / CHARS_PER_TOKEN_ESTIMATE);
  }
  // Look up model metadata (pricing, limits). Throws ModelNotFoundError
  // when the model is unknown or belongs to a different provider.
  getModelInfo(model) {
    const info = SUPPORTED_MODELS[model];
    if (info === void 0 || info.provider !== PROVIDER_NAME) {
      throw new ModelNotFoundError(model);
    }
    return info;
  }
};
|
|
233
|
+
// Pull tool calls off a generateText result and rename the SDK's
// fields (toolCallId/toolName/args) to the project's (id/name/arguments).
function extractToolCalls(result) {
  const raw = result.toolCalls;
  if (raw === void 0 || raw.length === 0) {
    return [];
  }
  const calls = [];
  for (const entry of raw) {
    calls.push({
      id: entry.toolCallId,
      name: entry.toolName,
      arguments: entry.args
    });
  }
  return calls;
}
|
|
243
|
+
// Map the AI SDK's finish reason onto the project's vocabulary.
// Unknown or missing reasons default to "stop".
function mapFinishReason(reason) {
  const table = new Map([
    ["stop", "stop"],
    ["end-turn", "stop"],
    ["tool-calls", "tool_calls"],
    ["length", "max_tokens"],
    ["max-tokens", "max_tokens"]
  ]);
  return table.get(reason) ?? "stop";
}
|
|
257
|
+
|
|
258
|
+
export { ClaudeAdapter };
|
|
259
|
+
//# sourceMappingURL=chunk-WAHVZH7V.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/providers/claude-adapter.ts"],"names":[],"mappings":";;;;;;AA0BA,IAAM,aAAA,GAA8B,WAAA;AAEpC,IAAM,aAAA,GAAmC;AAAA,EACvC,iBAAA;AAAA,EACA,oBAAA;AAAA,EACA,mBAAA;AAAA,EACA,sBAAA;AAAA,EACA;AACF,CAAA;AAEA,IAAM,wBAAA,GAA2B,CAAA;AAEjC,SAAS,QAAQ,IAAA,EAAwD;AACvE,EAAA,QAAQ,IAAA;AAAM,IACZ,KAAK,MAAA;AACH,MAAA,OAAO,MAAA;AAAA,IACT,KAAK,WAAA;AACH,MAAA,OAAO,WAAA;AAAA,IACT,KAAK,QAAA;AACH,MAAA,OAAO,QAAA;AAAA,IACT,KAAK,MAAA;AACH,MAAA,OAAO,MAAA;AAAA,IACT;AACE,MAAA,OAAO,MAAA;AAAA;AAEb;AAEA,SAAS,aACP,KAAA,EAC0F;AAC1F,EAAA,IAAI,KAAA,KAAU,MAAA,IAAa,KAAA,CAAM,MAAA,KAAW,CAAA,EAAG;AAC7C,IAAA,OAAO,MAAA;AAAA,EACT;AAEA,EAAA,MAAM,SAAuF,EAAC;AAE9F,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,IAAA,MAAM,aAAsC,EAAC;AAC7C,IAAA,MAAM,WAAqB,EAAC;AAE5B,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,UAAA,EAAY;AACnC,MAAA,MAAM,IAAA,GAAgC;AAAA,QACpC,MAAM,KAAA,CAAM,IAAA;AAAA,QACZ,aAAa,KAAA,CAAM;AAAA,OACrB;AACA,MAAA,IAAI,KAAA,CAAM,SAAS,MAAA,EAAW;AAC5B,QAAA,IAAA,CAAK,MAAM,IAAI,KAAA,CAAM,IAAA;AAAA,MACvB;AACA,MAAA,IAAI,KAAA,CAAM,YAAY,MAAA,EAAW;AAC/B,QAAA,IAAA,CAAK,SAAS,IAAI,KAAA,CAAM,OAAA;AAAA,MAC1B;AACA,MAAA,UAAA,CAAW,KAAA,CAAM,IAAI,CAAA,GAAI,IAAA;AACzB,MAAA,IAAI,MAAM,QAAA,EAAU;AAClB,QAAA,QAAA,CAAS,IAAA,CAAK,MAAM,IAAI,CAAA;AAAA,MAC1B;AAAA,IACF;AAEA,IAAA,MAAA,CAAO,IAAA,CAAK,IAAI,CAAA,GAAI;AAAA,MAClB,aAAa,IAAA,CAAK,WAAA;AAAA,MAClB,UAAA,EAAY;AAAA,QACV,IAAA,EAAM,QAAA;AAAA,QACN,UAAA;AAAA,QACA;AAAA;AACF,KACF;AAAA,EACF;AAEA,EAAA,OAAO,MAAA;AACT;AAEA,SAAS,cACP,QAAA,EACe;AACf,EAAA,OAAO,QAAA,CAAS,GAAA,CAAI,CAAC,GAAA,KAAQ;AAE3B,IAAA,IAAI,GAAA,CAAI,SAAS,WAAA,IAAe,GAAA,CAAI,cAAc,MAAA,IAAa,GAAA,CAAI,SAAA,CAAU,MAAA,GAAS,CAAA,EAAG;AACvF,MAAA,MAAM,QAAmB,EAAC;AAC1B,MAAA,IAAI,GAAA,CAAI,OAAA,CAAQ,MAAA,GAAS,CAAA,EAAG;AAC1B,QAAA,KAAA,CAAM,KAAK,EAAE,IAAA,EAAM,QAAQ,IAAA,EAAM,GAAA,CAAI,SAAS,CAAA;AAAA,MAChD;AACA,MAAA,KAAA,MAAW,EAAA,IAAM,IAAI,SAAA,EAAW;AAC9B,QAAA,KAAA,CAAM,IAAA,CAAK;AAAA,UACT,IAAA,EAAM,WAAA;AAAA,UACN,YAAY,EAAA,CAAG,EAAA;AAAA,UACf,UAAU,EAAA,CAAG,IAAA;AAAA,UACb,MAAM,EAAA,CAAG;AAAA,SACV,CAAA;AAAA,MACH;AACA,MAAA,OAAO,EAAE,
IAAA,EAAM,WAAA,EAAsB,OAAA,EAAS,KAAA,EAAM;AAAA,IACtD;AAGA,IAAA,IAAI,GAAA,CAAI,SAAS,MAAA,IAAU,GAAA,CAAI,cAAc,MAAA,IAAa,GAAA,CAAI,SAAA,CAAU,MAAA,GAAS,CAAA,EAAG;AAClF,MAAA,MAAM,SAAA,GAAY,GAAA,CAAI,SAAA,CAAU,CAAC,CAAA;AACjC,MAAA,IAAI,cAAc,MAAA,EAAW;AAC3B,QAAA,OAAO;AAAA,UACL,IAAA,EAAM,MAAA;AAAA,UACN,SAAS,CAAC;AAAA,YACR,IAAA,EAAM,aAAA;AAAA,YACN,YAAY,SAAA,CAAU,EAAA;AAAA,YACtB,UAAU,SAAA,CAAU,IAAA;AAAA,YACpB,QAAQ,GAAA,CAAI;AAAA,WACb;AAAA,SACH;AAAA,MACF;AAAA,IACF;AAGA,IAAA,OAAO;AAAA,MACL,IAAA,EAAM,OAAA,CAAQ,GAAA,CAAI,IAAI,CAAA;AAAA,MACtB,SAAS,GAAA,CAAI;AAAA,KACf;AAAA,EACF,CAAC,CAAA;AACH;AAEA,SAAS,WAAA,CAAY,SAAA,EAAuB,WAAA,EAAqB,YAAA,EAA8B;AAC7F,EAAA,OACG,cAAc,GAAA,GAAa,SAAA,CAAU,mBAAA,GACrC,YAAA,GAAe,MAAa,SAAA,CAAU,oBAAA;AAE3C;AAEA,SAAS,aAAA,CAAc,OAAgB,KAAA,EAAsB;AAC3D,EAAA,MAAM,UAAU,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK,CAAA;AACrE,EAAA,MAAM,KAAA,GAAQ,QAAQ,WAAA,EAAY;AAElC,EAAA,IAAI,KAAA,CAAM,QAAA,CAAS,KAAK,CAAA,IAAK,KAAA,CAAM,QAAA,CAAS,cAAc,CAAA,IAAK,KAAA,CAAM,QAAA,CAAS,iBAAiB,CAAA,EAAG;AAChG,IAAA,MAAM,IAAI,mBAAA,CAAoB,aAAA,EAAe,OAAO,CAAA;AAAA,EACtD;AACA,EAAA,IAAI,KAAA,CAAM,QAAA,CAAS,KAAK,CAAA,IAAK,KAAA,CAAM,QAAA,CAAS,YAAY,CAAA,IAAK,KAAA,CAAM,QAAA,CAAS,mBAAmB,CAAA,EAAG;AAChG,IAAA,MAAM,KAAA,GAAQ,YAAA,CAAa,IAAA,CAAK,OAAO,CAAA;AACvC,IAAA,MAAM,OAAA,GAAU,KAAA,GAAQ,CAAC,CAAA,KAAM,MAAA,GAAY,QAAA,CAAS,KAAA,CAAM,CAAC,CAAA,EAAG,EAAE,CAAA,GAAI,GAAA,GAAO,GAAA;AAC3E,IAAA,MAAM,IAAI,cAAA,CAAe,aAAA,EAAe,OAAO,CAAA;AAAA,EACjD;AACA,EAAA,IAAI,MAAM,QAAA,CAAS,OAAO,KAAK,KAAA,CAAM,QAAA,CAAS,WAAW,CAAA,EAAG;AAC1D,IAAA,MAAM,IAAI,mBAAmB,KAAK,CAAA;AAAA,EACpC;AAEA,EAAA,MAAM,KAAA,YAAiB,KAAA,GAAQ,KAAA,GAAQ,IAAI,MAAM,OAAO,CAAA;AAC1D;AAEO,IAAM,gBAAN,MAA8C;AAAA,EAC1C,IAAA,GAAO,aAAA;AAAA,EACP,eAAA,GAAkB,aAAA;AAAA,EAEV,SAAA;AAAA,EAEjB,YAAY,OAAA,EAA4B;AACtC,IAAA,MAAM,MAAA,GAAS,OAAA,EAAS,MAAA,IAAU,OAAA,CAAQ,IAAI,mBAAmB,CAAA;AACjE,IAAA,IAAA,CAAK,YAAY,eAAA,CAAgB;AAAA,MAC/B,GAAI,MAAA,KAAW,MAAA,GAAY,EAAE,MAAA,KAAW,EAAC;AAAA,MACzC,GAAI,SAAS,OAAA,KAAY,MAAA,GAAY,EAAE,OAAA,EAAS,OAAA,CAAQ,OAAA,EAAQ,GAAI;AAAC,KA
CtE,CAAA;AAAA,EACH;AAAA,EAEA,MAAM,KAAK,OAAA,EAA+C;AACxD,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,YAAA,CAAa,OAAA,CAAQ,KAAK,CAAA;AACjD,IAAA,MAAM,QAAA,GAAW,aAAA,CAAc,OAAA,CAAQ,QAAQ,CAAA;AAC/C,IAAA,MAAM,KAAA,GAAQ,YAAA,CAAa,OAAA,CAAQ,KAAK,CAAA;AAExC,IAAA,IAAI;AACF,MAAA,MAAM,MAAA,GAAS,MAAM,YAAA,CAAa;AAAA,QAChC,KAAA,EAAO,IAAA,CAAK,SAAA,CAAU,OAAA,CAAQ,KAAK,CAAA;AAAA,QACnC,QAAA;AAAA,QACA,GAAI,QAAQ,MAAA,KAAW,KAAA,CAAA,GAAY,EAAE,MAAA,EAAQ,OAAA,CAAQ,MAAA,EAAO,GAAI,EAAC;AAAA,QACjE,KAAA;AAAA,QACA,SAAA,EAAW,OAAA,CAAQ,SAAA,IAAa,SAAA,CAAU,eAAA;AAAA,QAC1C,GAAI,QAAQ,WAAA,KAAgB,KAAA,CAAA,GAAY,EAAE,WAAA,EAAa,OAAA,CAAQ,WAAA,EAAY,GAAI;AAAC,OACjF,CAAA;AAED,MAAA,MAAM,SAAA,GAAY,iBAAiB,MAAM,CAAA;AACzC,MAAA,MAAM,WAAA,GAAc,MAAA,CAAO,KAAA,EAAO,YAAA,IAAgB,CAAA;AAClD,MAAA,MAAM,YAAA,GAAe,MAAA,CAAO,KAAA,EAAO,gBAAA,IAAoB,CAAA;AAEvD,MAAA,MAAM,KAAA,GAAqB;AAAA,QACzB,WAAA;AAAA,QACA,YAAA;AAAA,QACA,aAAa,WAAA,GAAc,YAAA;AAAA,QAC3B,OAAA,EAAS,WAAA,CAAY,SAAA,EAAW,WAAA,EAAa,YAAY;AAAA,OAC3D;AAEA,MAAA,MAAM,eAAA,GAAgC;AAAA,QACpC,EAAA,EAAI,MAAA,CAAO,QAAA,EAAU,EAAA,IAAM,OAAO,UAAA,EAAW;AAAA,QAC7C,IAAA,EAAM,WAAA;AAAA,QACN,SAAS,MAAA,CAAO,IAAA;AAAA,QAChB,OAAO,OAAA,CAAQ,KAAA;AAAA,QACf,QAAA,EAAU,aAAA;AAAA,QACV,SAAA,EAAW,SAAA,CAAU,MAAA,GAAS,CAAA,GAAI,SAAA,GAAY,KAAA,CAAA;AAAA,QAC9C,UAAA,EAAY,KAAA;AAAA,QACZ,SAAA,sBAAe,IAAA;AAAK,OACtB;AAEA,MAAA,OAAO;AAAA,QACL,EAAA,EAAI,MAAA,CAAO,QAAA,EAAU,EAAA,IAAM,OAAO,UAAA,EAAW;AAAA,QAC7C,OAAO,OAAA,CAAQ,KAAA;AAAA,QACf,QAAA,EAAU,aAAA;AAAA,QACV,OAAA,EAAS,eAAA;AAAA,QACT,KAAA;AAAA,QACA,YAAA,EAAc,eAAA,CAAgB,MAAA,CAAO,YAAY;AAAA,OACnD;AAAA,IACF,SAAS,KAAA,EAAgB;AACvB,MAAA,aAAA,CAAc,KAAA,EAAO,QAAQ,KAAK,CAAA;AAAA,IACpC;AAAA,EACF;AAAA,EAEA,OAAO,OAAO,OAAA,EAAoD;AAChE,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,YAAA,CAAa,OAAA,CAAQ,KAAK,CAAA;AACjD,IAAA,MAAM,QAAA,GAAW,aAAA,CAAc,OAAA,CAAQ,QAAQ,CAAA;AAC/C,IAAA,MAAM,KAAA,GAAQ,YAAA,CAAa,OAAA,CAAQ,KAAK,CAAA;AAExC,IAAA,IAAI;AACF,MAAA,MAAM,SAAS,UAAA,CAAW;AAAA,QACxB,KAAA,EAAO,IAAA,CAAK,SAAA,CAAU,OAAA,CAAQ,KAAK,CAAA;AAAA,QACnC,QAAA;AAAA,QACA,GAAI,QAAQ,MAAA,KAAW,KAAA,CAAA,GAAY,EAAE,MAAA,EAA
Q,OAAA,CAAQ,MAAA,EAAO,GAAI,EAAC;AAAA,QACjE,KAAA;AAAA,QACA,SAAA,EAAW,OAAA,CAAQ,SAAA,IAAa,SAAA,CAAU,eAAA;AAAA,QAC1C,GAAI,QAAQ,WAAA,KAAgB,KAAA,CAAA,GAAY,EAAE,WAAA,EAAa,OAAA,CAAQ,WAAA,EAAY,GAAI;AAAC,OACjF,CAAA;AAED,MAAA,WAAA,MAAiB,IAAA,IAAQ,OAAO,UAAA,EAAY;AAC1C,QAAA,IAAI,IAAA,CAAK,SAAS,YAAA,EAAc;AAC9B,UAAA,MAAM,EAAE,IAAA,EAAM,MAAA,EAAQ,OAAA,EAAS,KAAK,SAAA,EAAU;AAAA,QAChD,CAAA,MAAA,IAAW,IAAA,CAAK,IAAA,KAAS,WAAA,EAAa;AACpC,UAAA,MAAM,QAAA,GAAsB;AAAA,YAC1B,IAAI,IAAA,CAAK,UAAA;AAAA,YACT,MAAM,IAAA,CAAK,QAAA;AAAA,YACX,WAAW,IAAA,CAAK;AAAA,WAClB;AACA,UAAA,MAAM,EAAE,IAAA,EAAM,WAAA,EAAa,QAAA,EAAS;AAAA,QACtC,CAAA,MAAA,IAAW,IAAA,CAAK,IAAA,KAAS,QAAA,EAAU;AACjC,UAAA,MAAM,WAAA,GAAc,IAAA,CAAK,KAAA,EAAO,YAAA,IAAgB,CAAA;AAChD,UAAA,MAAM,YAAA,GAAe,IAAA,CAAK,KAAA,EAAO,gBAAA,IAAoB,CAAA;AACrD,UAAA,MAAM,KAAA,GAAqB;AAAA,YACzB,WAAA;AAAA,YACA,YAAA;AAAA,YACA,aAAa,WAAA,GAAc,YAAA;AAAA,YAC3B,OAAA,EAAS,WAAA,CAAY,SAAA,EAAW,WAAA,EAAa,YAAY;AAAA,WAC3D;AACA,UAAA,MAAM,EAAE,IAAA,EAAM,OAAA,EAAS,KAAA,EAAM;AAAA,QAC/B,CAAA,MAAA,IAAW,IAAA,CAAK,IAAA,KAAS,OAAA,EAAS;AAChC,UAAA,MAAM,MAAA,GAAS,KAAK,KAAA,YAAiB,KAAA,GAAQ,KAAK,KAAA,CAAM,OAAA,GAAU,MAAA,CAAO,IAAA,CAAK,KAAK,CAAA;AACnF,UAAA,MAAM,EAAE,IAAA,EAAM,OAAA,EAAS,KAAA,EAAO,MAAA,EAAO;AAAA,QACvC;AAAA,MACF;AAEA,MAAA,MAAM,EAAE,MAAM,MAAA,EAAO;AAAA,IACvB,SAAS,KAAA,EAAgB;AACvB,MAAA,MAAM,SAAS,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK,CAAA;AACpE,MAAA,MAAA,CAAO,KAAA,CAAM,EAAE,KAAA,EAAO,MAAA,EAAQ,OAAO,OAAA,CAAQ,KAAA,IAAS,qBAAqB,CAAA;AAC3E,MAAA,MAAM,EAAE,IAAA,EAAM,OAAA,EAAS,KAAA,EAAO,MAAA,EAAO;AACrC,MAAA,MAAM,EAAE,MAAM,MAAA,EAAO;AAAA,IACvB;AAAA,EACF;AAAA,EAEA,MAAM,WAAA,CAAY,IAAA,EAAc,MAAA,EAAiC;AAC/D,IAAA,OAAO,IAAA,CAAK,IAAA,CAAK,IAAA,CAAK,MAAA,GAAS,wBAAwB,CAAA;AAAA,EACzD;AAAA,EAEA,aAAa,KAAA,EAA2B;AACtC,IAAA,MAAM,IAAA,GAAO,iBAAiB,KAAK,CAAA;AACnC,IAAA,IAAI,IAAA,KAAS,MAAA,IAAa,IAAA,CAAK,QAAA,KAAa,aAAA,EAAe;AACzD,MAAA,MAAM,IAAI,mBAAmB,KAAK,CAAA;AAAA,IACpC;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AACF;AAEA,SAAS,iBACP,MAAA,EACa;AACb,EAAA,IAAI,OAAO,SAAA,KAAc,MAAA,IAAa,MAAA,CAAO,SAAA,CAAU,
WAAW,CAAA,EAAG;AACnE,IAAA,OAAO,EAAC;AAAA,EACV;AACA,EAAA,OAAO,MAAA,CAAO,SAAA,CAAU,GAAA,CAAI,CAAC,EAAA,MAAQ;AAAA,IACnC,IAAI,EAAA,CAAG,UAAA;AAAA,IACP,MAAM,EAAA,CAAG,QAAA;AAAA,IACT,WAAW,EAAA,CAAG;AAAA,GAChB,CAAE,CAAA;AACJ;AAEA,SAAS,gBACP,MAAA,EACgD;AAChD,EAAA,QAAQ,MAAA;AAAQ,IACd,KAAK,MAAA;AAAA,IACL,KAAK,UAAA;AACH,MAAA,OAAO,MAAA;AAAA,IACT,KAAK,YAAA;AACH,MAAA,OAAO,YAAA;AAAA,IACT,KAAK,QAAA;AAAA,IACL,KAAK,YAAA;AACH,MAAA,OAAO,YAAA;AAAA,IACT;AACE,MAAA,OAAO,MAAA;AAAA;AAEb","file":"chunk-WAHVZH7V.js","sourcesContent":["/**\n * Claude (Anthropic) adapter via Vercel AI SDK per PRD section 7.1\n * Supports Claude Opus 4.6, Sonnet 4.6, Haiku 4.5\n */\n\nimport { generateText, streamText, type CoreMessage } from \"ai\";\nimport { createAnthropic } from \"@ai-sdk/anthropic\";\nimport { logger } from \"../utils/logger.js\";\nimport {\n AuthenticationError,\n RateLimitError,\n ModelNotFoundError,\n} from \"../types/errors.js\";\nimport { SUPPORTED_MODELS } from \"../types/model.js\";\nimport type { IModelInfo, ProviderName } from \"../types/model.js\";\nimport type {\n IChatRequest,\n IChatResponse,\n IChatMessage,\n IStreamChunk,\n IToolCall,\n IToolDefinition,\n ITokenUsage,\n} from \"../types/message.js\";\nimport type { IModelProvider, IProviderOptions } from \"./types.js\";\n\nconst PROVIDER_NAME: ProviderName = \"anthropic\";\n\nconst CLAUDE_MODELS: readonly string[] = [\n \"claude-opus-4-6\",\n \"claude-opus-4-6-1m\",\n \"claude-sonnet-4-6\",\n \"claude-sonnet-4-6-1m\",\n \"claude-haiku-4-5\",\n] as const;\n\nconst CHARS_PER_TOKEN_ESTIMATE = 4;\n\nfunction mapRole(role: string): \"user\" | \"assistant\" | \"system\" | \"tool\" {\n switch (role) {\n case \"user\":\n return \"user\";\n case \"assistant\":\n return \"assistant\";\n case \"system\":\n return \"system\";\n case \"tool\":\n return \"tool\";\n default:\n return \"user\";\n }\n}\n\nfunction convertTools(\n tools: readonly IToolDefinition[] | undefined,\n): Record<string, { description: string; parameters: 
Record<string, unknown> }> | undefined {\n if (tools === undefined || tools.length === 0) {\n return undefined;\n }\n\n const result: Record<string, { description: string; parameters: Record<string, unknown> }> = {};\n\n for (const tool of tools) {\n const properties: Record<string, unknown> = {};\n const required: string[] = [];\n\n for (const param of tool.parameters) {\n const prop: Record<string, unknown> = {\n type: param.type,\n description: param.description,\n };\n if (param.enum !== undefined) {\n prop[\"enum\"] = param.enum;\n }\n if (param.default !== undefined) {\n prop[\"default\"] = param.default;\n }\n properties[param.name] = prop;\n if (param.required) {\n required.push(param.name);\n }\n }\n\n result[tool.name] = {\n description: tool.description,\n parameters: {\n type: \"object\",\n properties,\n required,\n },\n };\n }\n\n return result;\n}\n\nfunction buildMessages(\n messages: readonly IChatMessage[],\n): CoreMessage[] {\n return messages.map((msg) => {\n // Assistant message with tool calls → multi-part content\n if (msg.role === \"assistant\" && msg.toolCalls !== undefined && msg.toolCalls.length > 0) {\n const parts: unknown[] = [];\n if (msg.content.length > 0) {\n parts.push({ type: \"text\", text: msg.content });\n }\n for (const tc of msg.toolCalls) {\n parts.push({\n type: \"tool-call\",\n toolCallId: tc.id,\n toolName: tc.name,\n args: tc.arguments,\n });\n }\n return { role: \"assistant\" as const, content: parts };\n }\n\n // Tool result message — toolCalls[0] carries the call metadata\n if (msg.role === \"tool\" && msg.toolCalls !== undefined && msg.toolCalls.length > 0) {\n const firstCall = msg.toolCalls[0];\n if (firstCall !== undefined) {\n return {\n role: \"tool\" as const,\n content: [{\n type: \"tool-result\" as const,\n toolCallId: firstCall.id,\n toolName: firstCall.name,\n result: msg.content,\n }],\n };\n }\n }\n\n // Standard text message\n return {\n role: mapRole(msg.role),\n content: msg.content,\n };\n }) as 
CoreMessage[];\n}\n\nfunction computeCost(modelInfo: IModelInfo, inputTokens: number, outputTokens: number): number {\n return (\n (inputTokens / 1_000_000) * modelInfo.inputPricePerMToken +\n (outputTokens / 1_000_000) * modelInfo.outputPricePerMToken\n );\n}\n\nfunction classifyError(error: unknown, model: string): never {\n const message = error instanceof Error ? error.message : String(error);\n const lower = message.toLowerCase();\n\n if (lower.includes(\"401\") || lower.includes(\"unauthorized\") || lower.includes(\"invalid api key\")) {\n throw new AuthenticationError(PROVIDER_NAME, message);\n }\n if (lower.includes(\"429\") || lower.includes(\"rate limit\") || lower.includes(\"too many requests\")) {\n const match = /(\\d+)\\s*s/i.exec(message);\n const retryMs = match?.[1] !== undefined ? parseInt(match[1], 10) * 1000 : 60_000;\n throw new RateLimitError(PROVIDER_NAME, retryMs);\n }\n if (lower.includes(\"model\") && lower.includes(\"not found\")) {\n throw new ModelNotFoundError(model);\n }\n\n throw error instanceof Error ? error : new Error(message);\n}\n\nexport class ClaudeAdapter implements IModelProvider {\n readonly name = PROVIDER_NAME;\n readonly supportedModels = CLAUDE_MODELS;\n\n private readonly anthropic: ReturnType<typeof createAnthropic>;\n\n constructor(options?: IProviderOptions) {\n const apiKey = options?.apiKey ?? process.env[\"ANTHROPIC_API_KEY\"];\n this.anthropic = createAnthropic({\n ...(apiKey !== undefined ? { apiKey } : {}),\n ...(options?.baseUrl !== undefined ? { baseURL: options.baseUrl } : {}),\n });\n }\n\n async chat(request: IChatRequest): Promise<IChatResponse> {\n const modelInfo = this.getModelInfo(request.model);\n const messages = buildMessages(request.messages);\n const tools = convertTools(request.tools);\n\n try {\n const result = await generateText({\n model: this.anthropic(request.model),\n messages,\n ...(request.system !== undefined ? 
{ system: request.system } : {}),\n tools: tools as Record<string, never>,\n maxTokens: request.maxTokens ?? modelInfo.maxOutputTokens,\n ...(request.temperature !== undefined ? { temperature: request.temperature } : {}),\n });\n\n const toolCalls = extractToolCalls(result);\n const inputTokens = result.usage?.promptTokens ?? 0;\n const outputTokens = result.usage?.completionTokens ?? 0;\n\n const usage: ITokenUsage = {\n inputTokens,\n outputTokens,\n totalTokens: inputTokens + outputTokens,\n costUsd: computeCost(modelInfo, inputTokens, outputTokens),\n };\n\n const responseMessage: IChatMessage = {\n id: result.response?.id ?? crypto.randomUUID(),\n role: \"assistant\",\n content: result.text,\n model: request.model,\n provider: PROVIDER_NAME,\n toolCalls: toolCalls.length > 0 ? toolCalls : undefined,\n tokenUsage: usage,\n createdAt: new Date(),\n };\n\n return {\n id: result.response?.id ?? crypto.randomUUID(),\n model: request.model,\n provider: PROVIDER_NAME,\n message: responseMessage,\n usage,\n finishReason: mapFinishReason(result.finishReason),\n };\n } catch (error: unknown) {\n classifyError(error, request.model);\n }\n }\n\n async *stream(request: IChatRequest): AsyncIterable<IStreamChunk> {\n const modelInfo = this.getModelInfo(request.model);\n const messages = buildMessages(request.messages);\n const tools = convertTools(request.tools);\n\n try {\n const result = streamText({\n model: this.anthropic(request.model),\n messages,\n ...(request.system !== undefined ? { system: request.system } : {}),\n tools: tools as Record<string, never>,\n maxTokens: request.maxTokens ?? modelInfo.maxOutputTokens,\n ...(request.temperature !== undefined ? 
{ temperature: request.temperature } : {}),\n });\n\n for await (const part of result.fullStream) {\n if (part.type === \"text-delta\") {\n yield { type: \"text\", content: part.textDelta };\n } else if (part.type === \"tool-call\") {\n const toolCall: IToolCall = {\n id: part.toolCallId,\n name: part.toolName,\n arguments: part.args as Record<string, unknown>,\n };\n yield { type: \"tool_call\", toolCall };\n } else if (part.type === \"finish\") {\n const inputTokens = part.usage?.promptTokens ?? 0;\n const outputTokens = part.usage?.completionTokens ?? 0;\n const usage: ITokenUsage = {\n inputTokens,\n outputTokens,\n totalTokens: inputTokens + outputTokens,\n costUsd: computeCost(modelInfo, inputTokens, outputTokens),\n };\n yield { type: \"usage\", usage };\n } else if (part.type === \"error\") {\n const errMsg = part.error instanceof Error ? part.error.message : String(part.error);\n yield { type: \"error\", error: errMsg };\n }\n }\n\n yield { type: \"done\" };\n } catch (error: unknown) {\n const errMsg = error instanceof Error ? 
error.message : String(error);\n logger.error({ error: errMsg, model: request.model }, \"Claude stream error\");\n yield { type: \"error\", error: errMsg };\n yield { type: \"done\" };\n }\n }\n\n async countTokens(text: string, _model: string): Promise<number> {\n return Math.ceil(text.length / CHARS_PER_TOKEN_ESTIMATE);\n }\n\n getModelInfo(model: string): IModelInfo {\n const info = SUPPORTED_MODELS[model];\n if (info === undefined || info.provider !== PROVIDER_NAME) {\n throw new ModelNotFoundError(model);\n }\n return info;\n }\n}\n\nfunction extractToolCalls(\n result: { toolCalls?: ReadonlyArray<{ toolCallId: string; toolName: string; args: unknown }> },\n): IToolCall[] {\n if (result.toolCalls === undefined || result.toolCalls.length === 0) {\n return [];\n }\n return result.toolCalls.map((tc) => ({\n id: tc.toolCallId,\n name: tc.toolName,\n arguments: tc.args as Record<string, unknown>,\n }));\n}\n\nfunction mapFinishReason(\n reason: string | undefined,\n): \"stop\" | \"tool_calls\" | \"max_tokens\" | \"error\" {\n switch (reason) {\n case \"stop\":\n case \"end-turn\":\n return \"stop\";\n case \"tool-calls\":\n return \"tool_calls\";\n case \"length\":\n case \"max-tokens\":\n return \"max_tokens\";\n default:\n return \"stop\";\n }\n}\n"]}
|
|
@@ -0,0 +1,234 @@
|
|
|
1
|
+
import { SUPPORTED_MODELS } from './chunk-HCIHOHLX.js';
|
|
2
|
+
import { ModelNotFoundError, AuthenticationError, RateLimitError } from './chunk-ZGOHARPV.js';
|
|
3
|
+
import { logger } from './chunk-JAXXTYID.js';
|
|
4
|
+
import { generateText, streamText } from 'ai';
|
|
5
|
+
import { createOpenAI } from '@ai-sdk/openai';
|
|
6
|
+
|
|
7
|
+
// Provider identifier; used for model lookups and in the typed errors thrown
// by classifyError / getModelInfo.
var PROVIDER_NAME = "openai";
// Static model list this adapter advertises; also the fallback returned by
// listAvailableModels() when the live /models endpoint cannot be reached.
var OPENAI_MODELS = [
  "gpt-5.3-codex",
  "gpt-5.3-codex-spark",
  "gpt-5.2-codex",
  "gpt-5.1-codex-max",
  "gpt-5.2",
  "gpt-5.1-codex-mini"
];
// Heuristic for countTokens(): roughly 4 characters per token.
var CHARS_PER_TOKEN_ESTIMATE = 4;
|
|
17
|
+
/**
 * Convert internal tool definitions into the name-keyed JSON-schema map the
 * AI SDK expects ({ description, parameters: { type: "object", ... } }).
 * Returns undefined when no tools were supplied so the field can be omitted.
 */
function convertTools(tools) {
  if (tools === void 0 || tools.length === 0) {
    return void 0;
  }
  const converted = {};
  for (const definition of tools) {
    const schemaProperties = {};
    const requiredNames = [];
    for (const parameter of definition.parameters) {
      const entry = {
        type: parameter.type,
        description: parameter.description
      };
      // Optional schema facets are only attached when explicitly provided.
      if (parameter.enum !== void 0) {
        entry["enum"] = parameter.enum;
      }
      if (parameter.default !== void 0) {
        entry["default"] = parameter.default;
      }
      schemaProperties[parameter.name] = entry;
      if (parameter.required) {
        requiredNames.push(parameter.name);
      }
    }
    converted[definition.name] = {
      description: definition.description,
      parameters: {
        type: "object",
        properties: schemaProperties,
        required: requiredNames
      }
    };
  }
  return converted;
}
|
|
52
|
+
/**
 * Project the chat history down to the plain { role, content } pairs the
 * AI SDK consumes, dropping any extra fields carried on the messages.
 */
function buildMessages(messages) {
  const converted = [];
  for (const message of messages) {
    converted.push({
      role: message.role,
      content: message.content
    });
  }
  return converted;
}
|
|
58
|
+
/**
 * USD cost of a request, given the model's per-million-token prices
 * (inputPricePerMToken / outputPricePerMToken on the model info record).
 */
function computeCost(modelInfo, inputTokens, outputTokens) {
  const TOKENS_PER_PRICE_UNIT = 1e6;
  const inputCost = inputTokens / TOKENS_PER_PRICE_UNIT * modelInfo.inputPricePerMToken;
  const outputCost = outputTokens / TOKENS_PER_PRICE_UNIT * modelInfo.outputPricePerMToken;
  return inputCost + outputCost;
}
|
|
61
|
+
/**
 * Map a raw SDK/network failure onto the project's typed error hierarchy by
 * sniffing the error message. Always throws; never returns normally.
 */
function classifyError(error, model) {
  const message = error instanceof Error ? error.message : String(error);
  const lower = message.toLowerCase();
  const mentionsAny = (...needles) => needles.some((needle) => lower.includes(needle));
  if (mentionsAny("401", "unauthorized", "invalid api key")) {
    throw new AuthenticationError(PROVIDER_NAME, message);
  }
  if (mentionsAny("429", "rate limit", "too many requests")) {
    // Honour an advertised "retry in <N>s" hint when present; default 60s.
    const secondsMatch = /(\d+)\s*s/i.exec(message);
    const retryMs = secondsMatch?.[1] !== void 0 ? parseInt(secondsMatch[1], 10) * 1e3 : 6e4;
    throw new RateLimitError(PROVIDER_NAME, retryMs);
  }
  if (mentionsAny("model") && mentionsAny("not found")) {
    throw new ModelNotFoundError(model);
  }
  // Unrecognised failures are rethrown unchanged (wrapped if not an Error).
  throw error instanceof Error ? error : new Error(message);
}
|
|
77
|
+
// OpenAI provider adapter built on the Vercel AI SDK (generateText/streamText)
// and the @ai-sdk/openai client factory.
var OpenAIAdapter = class {
  name = PROVIDER_NAME;
  supportedModels = OPENAI_MODELS;
  // Configured @ai-sdk/openai client factory; called as this.openai(model).
  openai;
  // Remembered so listAvailableModels() can hit the REST endpoint directly.
  apiKey;
  baseUrl;
  // options is an optional bag with apiKey/baseUrl; the key falls back to the
  // OPENAI_API_KEY environment variable. Only defined values are forwarded to
  // createOpenAI so the SDK's own defaults apply otherwise.
  constructor(options) {
    this.apiKey = options?.apiKey ?? process.env["OPENAI_API_KEY"];
    this.baseUrl = options?.baseUrl;
    this.openai = createOpenAI({
      ...this.apiKey !== void 0 ? { apiKey: this.apiKey } : {},
      ...this.baseUrl !== void 0 ? { baseURL: this.baseUrl } : {}
    });
  }
  // Single-shot (non-streaming) chat completion. Returns a response object
  // with the assistant message, token usage/cost, and mapped finish reason.
  async chat(request) {
    const modelInfo = this.getModelInfo(request.model);
    const messages = buildMessages(request.messages);
    const tools = convertTools(request.tools);
    try {
      const result = await generateText({
        model: this.openai(request.model),
        messages,
        // Spread-conditionals keep optional fields absent rather than undefined.
        ...request.system !== void 0 ? { system: request.system } : {},
        tools,
        maxTokens: request.maxTokens ?? modelInfo.maxOutputTokens,
        ...request.temperature !== void 0 ? { temperature: request.temperature } : {}
      });
      const toolCalls = extractToolCalls(result);
      const inputTokens = result.usage?.promptTokens ?? 0;
      const outputTokens = result.usage?.completionTokens ?? 0;
      const usage = {
        inputTokens,
        outputTokens,
        totalTokens: inputTokens + outputTokens,
        costUsd: computeCost(modelInfo, inputTokens, outputTokens)
      };
      const responseMessage = {
        // NOTE(review): when result.response.id is missing, this and the outer
        // response id come from two separate randomUUID() calls and may differ.
        id: result.response?.id ?? crypto.randomUUID(),
        role: "assistant",
        content: result.text,
        model: request.model,
        provider: PROVIDER_NAME,
        toolCalls: toolCalls.length > 0 ? toolCalls : void 0,
        tokenUsage: usage,
        createdAt: /* @__PURE__ */ new Date()
      };
      return {
        id: result.response?.id ?? crypto.randomUUID(),
        model: request.model,
        provider: PROVIDER_NAME,
        message: responseMessage,
        usage,
        finishReason: mapFinishReason(result.finishReason)
      };
    } catch (error) {
      // classifyError always throws (typed where recognisable), so this
      // catch never falls through to an implicit undefined return.
      classifyError(error, request.model);
    }
  }
  // Streaming chat completion. Yields "text", "tool_call", "usage" and
  // "error" chunks as the SDK stream produces them, and always terminates
  // with a "done" chunk — including on failure.
  async *stream(request) {
    const modelInfo = this.getModelInfo(request.model);
    const messages = buildMessages(request.messages);
    const tools = convertTools(request.tools);
    try {
      const result = streamText({
        model: this.openai(request.model),
        messages,
        ...request.system !== void 0 ? { system: request.system } : {},
        tools,
        maxTokens: request.maxTokens ?? modelInfo.maxOutputTokens,
        ...request.temperature !== void 0 ? { temperature: request.temperature } : {}
      });
      for await (const part of result.fullStream) {
        if (part.type === "text-delta") {
          yield { type: "text", content: part.textDelta };
        } else if (part.type === "tool-call") {
          const toolCall = {
            id: part.toolCallId,
            name: part.toolName,
            arguments: part.args
          };
          yield { type: "tool_call", toolCall };
        } else if (part.type === "finish") {
          // Final part carries the usage totals; derive cost the same way
          // chat() does so both paths report consistently.
          const inputTokens = part.usage?.promptTokens ?? 0;
          const outputTokens = part.usage?.completionTokens ?? 0;
          const usage = {
            inputTokens,
            outputTokens,
            totalTokens: inputTokens + outputTokens,
            costUsd: computeCost(modelInfo, inputTokens, outputTokens)
          };
          yield { type: "usage", usage };
        } else if (part.type === "error") {
          const errMsg = part.error instanceof Error ? part.error.message : String(part.error);
          yield { type: "error", error: errMsg };
        }
      }
      yield { type: "done" };
    } catch (error) {
      // Unlike chat(), stream errors are reported as chunks rather than
      // rethrown, so consumers can finish draining the generator.
      const errMsg = error instanceof Error ? error.message : String(error);
      logger.error({ error: errMsg, model: request.model }, "OpenAI stream error");
      yield { type: "error", error: errMsg };
      yield { type: "done" };
    }
  }
  // Cheap length-based token estimate; the model argument is ignored.
  async countTokens(text, _model) {
    return Math.ceil(text.length / CHARS_PER_TOKEN_ESTIMATE);
  }
  // Look up model metadata, rejecting unknown models and models that belong
  // to a different provider in the shared SUPPORTED_MODELS table.
  getModelInfo(model) {
    const info = SUPPORTED_MODELS[model];
    if (info === void 0 || info.provider !== PROVIDER_NAME) {
      throw new ModelNotFoundError(model);
    }
    return info;
  }
  // Best-effort live listing via GET {base}/models with a 5s timeout,
  // filtered to chat-capable id prefixes. Falls back to the static
  // supportedModels list when there is no key, on HTTP failure, on an
  // empty filtered result, or on any thrown error.
  async listAvailableModels() {
    if (!this.apiKey) return [...this.supportedModels];
    try {
      const base = this.baseUrl ?? "https://api.openai.com/v1";
      const response = await fetch(`${base}/models`, {
        headers: { Authorization: `Bearer ${this.apiKey}` },
        signal: AbortSignal.timeout(5e3)
      });
      if (!response.ok) return [...this.supportedModels];
      const data = await response.json();
      const chatPrefixes = ["gpt-", "o1", "o3", "o4", "chatgpt-"];
      const models = data.data.map((m) => m.id).filter((id) => chatPrefixes.some((p) => id.startsWith(p))).sort();
      return models.length > 0 ? models : [...this.supportedModels];
    } catch {
      return [...this.supportedModels];
    }
  }
};
|
|
209
|
+
/**
 * Pull the AI SDK tool calls off a generateText result, normalised to
 * { id, name, arguments } records. Returns an empty array when none exist.
 */
function extractToolCalls(result) {
  const rawCalls = result.toolCalls;
  if (rawCalls === void 0 || rawCalls.length === 0) {
    return [];
  }
  const normalized = [];
  for (const call of rawCalls) {
    normalized.push({
      id: call.toolCallId,
      name: call.toolName,
      arguments: call.args
    });
  }
  return normalized;
}
|
|
219
|
+
/**
 * Normalise the AI SDK finish reason onto this project's vocabulary
 * ("stop" | "tool_calls" | "max_tokens"). Anything unrecognised —
 * including undefined — is reported as a normal "stop".
 */
function mapFinishReason(reason) {
  if (reason === "tool-calls") {
    return "tool_calls";
  }
  if (reason === "length") {
    return "max_tokens";
  }
  return "stop";
}
|
|
231
|
+
|
|
232
|
+
export { OpenAIAdapter };
|
|
233
|
+
//# sourceMappingURL=chunk-WPP3PEDE.js.map
|
|
234
|
+
//# sourceMappingURL=chunk-WPP3PEDE.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/providers/openai-adapter.ts"],"names":[],"mappings":";;;;;;AA0BA,IAAM,aAAA,GAA8B,QAAA;AAEpC,IAAM,aAAA,GAAmC;AAAA,EACvC,eAAA;AAAA,EACA,qBAAA;AAAA,EACA,eAAA;AAAA,EACA,mBAAA;AAAA,EACA,SAAA;AAAA,EACA;AACF,CAAA;AAEA,IAAM,wBAAA,GAA2B,CAAA;AAEjC,SAAS,aACP,KAAA,EAC0F;AAC1F,EAAA,IAAI,KAAA,KAAU,MAAA,IAAa,KAAA,CAAM,MAAA,KAAW,CAAA,EAAG;AAC7C,IAAA,OAAO,MAAA;AAAA,EACT;AAEA,EAAA,MAAM,SAAuF,EAAC;AAE9F,EAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACxB,IAAA,MAAM,aAAsC,EAAC;AAC7C,IAAA,MAAM,WAAqB,EAAC;AAE5B,IAAA,KAAA,MAAW,KAAA,IAAS,KAAK,UAAA,EAAY;AACnC,MAAA,MAAM,IAAA,GAAgC;AAAA,QACpC,MAAM,KAAA,CAAM,IAAA;AAAA,QACZ,aAAa,KAAA,CAAM;AAAA,OACrB;AACA,MAAA,IAAI,KAAA,CAAM,SAAS,MAAA,EAAW;AAC5B,QAAA,IAAA,CAAK,MAAM,IAAI,KAAA,CAAM,IAAA;AAAA,MACvB;AACA,MAAA,IAAI,KAAA,CAAM,YAAY,MAAA,EAAW;AAC/B,QAAA,IAAA,CAAK,SAAS,IAAI,KAAA,CAAM,OAAA;AAAA,MAC1B;AACA,MAAA,UAAA,CAAW,KAAA,CAAM,IAAI,CAAA,GAAI,IAAA;AACzB,MAAA,IAAI,MAAM,QAAA,EAAU;AAClB,QAAA,QAAA,CAAS,IAAA,CAAK,MAAM,IAAI,CAAA;AAAA,MAC1B;AAAA,IACF;AAEA,IAAA,MAAA,CAAO,IAAA,CAAK,IAAI,CAAA,GAAI;AAAA,MAClB,aAAa,IAAA,CAAK,WAAA;AAAA,MAClB,UAAA,EAAY;AAAA,QACV,IAAA,EAAM,QAAA;AAAA,QACN,UAAA;AAAA,QACA;AAAA;AACF,KACF;AAAA,EACF;AAEA,EAAA,OAAO,MAAA;AACT;AAEA,SAAS,cACP,QAAA,EACe;AACf,EAAA,OAAO,QAAA,CAAS,GAAA,CAAI,CAAC,GAAA,MAAS;AAAA,IAC5B,MAAM,GAAA,CAAI,IAAA;AAAA,IACV,SAAS,GAAA,CAAI;AAAA,GACf,CAAE,CAAA;AACJ;AAEA,SAAS,WAAA,CAAY,SAAA,EAAuB,WAAA,EAAqB,YAAA,EAA8B;AAC7F,EAAA,OACG,cAAc,GAAA,GAAa,SAAA,CAAU,mBAAA,GACrC,YAAA,GAAe,MAAa,SAAA,CAAU,oBAAA;AAE3C;AAEA,SAAS,aAAA,CAAc,OAAgB,KAAA,EAAsB;AAC3D,EAAA,MAAM,UAAU,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK,CAAA;AACrE,EAAA,MAAM,KAAA,GAAQ,QAAQ,WAAA,EAAY;AAElC,EAAA,IAAI,KAAA,CAAM,QAAA,CAAS,KAAK,CAAA,IAAK,KAAA,CAAM,QAAA,CAAS,cAAc,CAAA,IAAK,KAAA,CAAM,QAAA,CAAS,iBAAiB,CAAA,EAAG;AAChG,IAAA,MAAM,IAAI,mBAAA,CAAoB,aAAA,EAAe,OAAO,CAAA;AAAA,EACtD;AACA,EAAA,IAAI,KAAA,CAAM,QAAA,CAAS,KAAK,CAAA,IAAK,KAAA,CAAM,QAAA,CAAS,YAAY,CAAA,IAAK,KAAA,CAAM,QAAA,CAAS,mBAAmB,CAAA,EAAG;AAChG,IAAA,MAAM,KAAA,GAAQ,YAAA,CAAa,IAAA,CAAK,OAAO,CA
AA;AACvC,IAAA,MAAM,OAAA,GAAU,KAAA,GAAQ,CAAC,CAAA,KAAM,MAAA,GAAY,QAAA,CAAS,KAAA,CAAM,CAAC,CAAA,EAAG,EAAE,CAAA,GAAI,GAAA,GAAO,GAAA;AAC3E,IAAA,MAAM,IAAI,cAAA,CAAe,aAAA,EAAe,OAAO,CAAA;AAAA,EACjD;AACA,EAAA,IAAI,MAAM,QAAA,CAAS,OAAO,KAAK,KAAA,CAAM,QAAA,CAAS,WAAW,CAAA,EAAG;AAC1D,IAAA,MAAM,IAAI,mBAAmB,KAAK,CAAA;AAAA,EACpC;AAEA,EAAA,MAAM,KAAA,YAAiB,KAAA,GAAQ,KAAA,GAAQ,IAAI,MAAM,OAAO,CAAA;AAC1D;AAEO,IAAM,gBAAN,MAA8C;AAAA,EAC1C,IAAA,GAAO,aAAA;AAAA,EACP,eAAA,GAAkB,aAAA;AAAA,EAEV,MAAA;AAAA,EACA,MAAA;AAAA,EACA,OAAA;AAAA,EAEjB,YAAY,OAAA,EAA4B;AACtC,IAAA,IAAA,CAAK,MAAA,GAAS,OAAA,EAAS,MAAA,IAAU,OAAA,CAAQ,IAAI,gBAAgB,CAAA;AAC7D,IAAA,IAAA,CAAK,UAAU,OAAA,EAAS,OAAA;AACxB,IAAA,IAAA,CAAK,SAAS,YAAA,CAAa;AAAA,MACzB,GAAI,KAAK,MAAA,KAAW,MAAA,GAAY,EAAE,MAAA,EAAQ,IAAA,CAAK,MAAA,EAAO,GAAI,EAAC;AAAA,MAC3D,GAAI,KAAK,OAAA,KAAY,MAAA,GAAY,EAAE,OAAA,EAAS,IAAA,CAAK,OAAA,EAAQ,GAAI;AAAC,KAC/D,CAAA;AAAA,EACH;AAAA,EAEA,MAAM,KAAK,OAAA,EAA+C;AACxD,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,YAAA,CAAa,OAAA,CAAQ,KAAK,CAAA;AACjD,IAAA,MAAM,QAAA,GAAW,aAAA,CAAc,OAAA,CAAQ,QAAQ,CAAA;AAC/C,IAAA,MAAM,KAAA,GAAQ,YAAA,CAAa,OAAA,CAAQ,KAAK,CAAA;AAExC,IAAA,IAAI;AACF,MAAA,MAAM,MAAA,GAAS,MAAM,YAAA,CAAa;AAAA,QAChC,KAAA,EAAO,IAAA,CAAK,MAAA,CAAO,OAAA,CAAQ,KAAK,CAAA;AAAA,QAChC,QAAA;AAAA,QACA,GAAI,QAAQ,MAAA,KAAW,KAAA,CAAA,GAAY,EAAE,MAAA,EAAQ,OAAA,CAAQ,MAAA,EAAO,GAAI,EAAC;AAAA,QACjE,KAAA;AAAA,QACA,SAAA,EAAW,OAAA,CAAQ,SAAA,IAAa,SAAA,CAAU,eAAA;AAAA,QAC1C,GAAI,QAAQ,WAAA,KAAgB,KAAA,CAAA,GAAY,EAAE,WAAA,EAAa,OAAA,CAAQ,WAAA,EAAY,GAAI;AAAC,OACjF,CAAA;AAED,MAAA,MAAM,SAAA,GAAY,iBAAiB,MAAM,CAAA;AACzC,MAAA,MAAM,WAAA,GAAc,MAAA,CAAO,KAAA,EAAO,YAAA,IAAgB,CAAA;AAClD,MAAA,MAAM,YAAA,GAAe,MAAA,CAAO,KAAA,EAAO,gBAAA,IAAoB,CAAA;AAEvD,MAAA,MAAM,KAAA,GAAqB;AAAA,QACzB,WAAA;AAAA,QACA,YAAA;AAAA,QACA,aAAa,WAAA,GAAc,YAAA;AAAA,QAC3B,OAAA,EAAS,WAAA,CAAY,SAAA,EAAW,WAAA,EAAa,YAAY;AAAA,OAC3D;AAEA,MAAA,MAAM,eAAA,GAAgC;AAAA,QACpC,EAAA,EAAI,MAAA,CAAO,QAAA,EAAU,EAAA,IAAM,OAAO,UAAA,EAAW;AAAA,QAC7C,IAAA,EAAM,WAAA;AAAA,QACN,SAAS,MAAA,CAAO,IAAA;AAAA,QAChB,OAAO,OAAA,CAAQ,KAAA;AAAA,QACf,QA
AA,EAAU,aAAA;AAAA,QACV,SAAA,EAAW,SAAA,CAAU,MAAA,GAAS,CAAA,GAAI,SAAA,GAAY,KAAA,CAAA;AAAA,QAC9C,UAAA,EAAY,KAAA;AAAA,QACZ,SAAA,sBAAe,IAAA;AAAK,OACtB;AAEA,MAAA,OAAO;AAAA,QACL,EAAA,EAAI,MAAA,CAAO,QAAA,EAAU,EAAA,IAAM,OAAO,UAAA,EAAW;AAAA,QAC7C,OAAO,OAAA,CAAQ,KAAA;AAAA,QACf,QAAA,EAAU,aAAA;AAAA,QACV,OAAA,EAAS,eAAA;AAAA,QACT,KAAA;AAAA,QACA,YAAA,EAAc,eAAA,CAAgB,MAAA,CAAO,YAAY;AAAA,OACnD;AAAA,IACF,SAAS,KAAA,EAAgB;AACvB,MAAA,aAAA,CAAc,KAAA,EAAO,QAAQ,KAAK,CAAA;AAAA,IACpC;AAAA,EACF;AAAA,EAEA,OAAO,OAAO,OAAA,EAAoD;AAChE,IAAA,MAAM,SAAA,GAAY,IAAA,CAAK,YAAA,CAAa,OAAA,CAAQ,KAAK,CAAA;AACjD,IAAA,MAAM,QAAA,GAAW,aAAA,CAAc,OAAA,CAAQ,QAAQ,CAAA;AAC/C,IAAA,MAAM,KAAA,GAAQ,YAAA,CAAa,OAAA,CAAQ,KAAK,CAAA;AAExC,IAAA,IAAI;AACF,MAAA,MAAM,SAAS,UAAA,CAAW;AAAA,QACxB,KAAA,EAAO,IAAA,CAAK,MAAA,CAAO,OAAA,CAAQ,KAAK,CAAA;AAAA,QAChC,QAAA;AAAA,QACA,GAAI,QAAQ,MAAA,KAAW,KAAA,CAAA,GAAY,EAAE,MAAA,EAAQ,OAAA,CAAQ,MAAA,EAAO,GAAI,EAAC;AAAA,QACjE,KAAA;AAAA,QACA,SAAA,EAAW,OAAA,CAAQ,SAAA,IAAa,SAAA,CAAU,eAAA;AAAA,QAC1C,GAAI,QAAQ,WAAA,KAAgB,KAAA,CAAA,GAAY,EAAE,WAAA,EAAa,OAAA,CAAQ,WAAA,EAAY,GAAI;AAAC,OACjF,CAAA;AAED,MAAA,WAAA,MAAiB,IAAA,IAAQ,OAAO,UAAA,EAAY;AAC1C,QAAA,IAAI,IAAA,CAAK,SAAS,YAAA,EAAc;AAC9B,UAAA,MAAM,EAAE,IAAA,EAAM,MAAA,EAAQ,OAAA,EAAS,KAAK,SAAA,EAAU;AAAA,QAChD,CAAA,MAAA,IAAW,IAAA,CAAK,IAAA,KAAS,WAAA,EAAa;AACpC,UAAA,MAAM,QAAA,GAAsB;AAAA,YAC1B,IAAI,IAAA,CAAK,UAAA;AAAA,YACT,MAAM,IAAA,CAAK,QAAA;AAAA,YACX,WAAW,IAAA,CAAK;AAAA,WAClB;AACA,UAAA,MAAM,EAAE,IAAA,EAAM,WAAA,EAAa,QAAA,EAAS;AAAA,QACtC,CAAA,MAAA,IAAW,IAAA,CAAK,IAAA,KAAS,QAAA,EAAU;AACjC,UAAA,MAAM,WAAA,GAAc,IAAA,CAAK,KAAA,EAAO,YAAA,IAAgB,CAAA;AAChD,UAAA,MAAM,YAAA,GAAe,IAAA,CAAK,KAAA,EAAO,gBAAA,IAAoB,CAAA;AACrD,UAAA,MAAM,KAAA,GAAqB;AAAA,YACzB,WAAA;AAAA,YACA,YAAA;AAAA,YACA,aAAa,WAAA,GAAc,YAAA;AAAA,YAC3B,OAAA,EAAS,WAAA,CAAY,SAAA,EAAW,WAAA,EAAa,YAAY;AAAA,WAC3D;AACA,UAAA,MAAM,EAAE,IAAA,EAAM,OAAA,EAAS,KAAA,EAAM;AAAA,QAC/B,CAAA,MAAA,IAAW,IAAA,CAAK,IAAA,KAAS,OAAA,EAAS;AAChC,UAAA,MAAM,MAAA,GAAS,KAAK,KAAA,YAAiB,KAAA,GAAQ,KAAK,KAAA,CAAM,OAAA,GAAU,MAAA,CAAO,IAAA,CAAK,KAAK,CAAA;
AACnF,UAAA,MAAM,EAAE,IAAA,EAAM,OAAA,EAAS,KAAA,EAAO,MAAA,EAAO;AAAA,QACvC;AAAA,MACF;AAEA,MAAA,MAAM,EAAE,MAAM,MAAA,EAAO;AAAA,IACvB,SAAS,KAAA,EAAgB;AACvB,MAAA,MAAM,SAAS,KAAA,YAAiB,KAAA,GAAQ,KAAA,CAAM,OAAA,GAAU,OAAO,KAAK,CAAA;AACpE,MAAA,MAAA,CAAO,KAAA,CAAM,EAAE,KAAA,EAAO,MAAA,EAAQ,OAAO,OAAA,CAAQ,KAAA,IAAS,qBAAqB,CAAA;AAC3E,MAAA,MAAM,EAAE,IAAA,EAAM,OAAA,EAAS,KAAA,EAAO,MAAA,EAAO;AACrC,MAAA,MAAM,EAAE,MAAM,MAAA,EAAO;AAAA,IACvB;AAAA,EACF;AAAA,EAEA,MAAM,WAAA,CAAY,IAAA,EAAc,MAAA,EAAiC;AAC/D,IAAA,OAAO,IAAA,CAAK,IAAA,CAAK,IAAA,CAAK,MAAA,GAAS,wBAAwB,CAAA;AAAA,EACzD;AAAA,EAEA,aAAa,KAAA,EAA2B;AACtC,IAAA,MAAM,IAAA,GAAO,iBAAiB,KAAK,CAAA;AACnC,IAAA,IAAI,IAAA,KAAS,MAAA,IAAa,IAAA,CAAK,QAAA,KAAa,aAAA,EAAe;AACzD,MAAA,MAAM,IAAI,mBAAmB,KAAK,CAAA;AAAA,IACpC;AACA,IAAA,OAAO,IAAA;AAAA,EACT;AAAA,EAEA,MAAM,mBAAA,GAAkD;AACtD,IAAA,IAAI,CAAC,IAAA,CAAK,MAAA,SAAe,CAAC,GAAG,KAAK,eAAe,CAAA;AAEjD,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,GAAO,KAAK,OAAA,IAAW,2BAAA;AAC7B,MAAA,MAAM,QAAA,GAAW,MAAM,KAAA,CAAM,CAAA,EAAG,IAAI,CAAA,OAAA,CAAA,EAAW;AAAA,QAC7C,SAAS,EAAE,aAAA,EAAe,CAAA,OAAA,EAAU,IAAA,CAAK,MAAM,CAAA,CAAA,EAAG;AAAA,QAClD,MAAA,EAAQ,WAAA,CAAY,OAAA,CAAQ,GAAI;AAAA,OACjC,CAAA;AACD,MAAA,IAAI,CAAC,QAAA,CAAS,EAAA,SAAW,CAAC,GAAG,KAAK,eAAe,CAAA;AAEjD,MAAA,MAAM,IAAA,GAAQ,MAAM,QAAA,CAAS,IAAA,EAAK;AAClC,MAAA,MAAM,eAAe,CAAC,MAAA,EAAQ,IAAA,EAAM,IAAA,EAAM,MAAM,UAAU,CAAA;AAC1D,MAAA,MAAM,MAAA,GAAS,KAAK,IAAA,CACjB,GAAA,CAAI,CAAC,CAAA,KAAM,CAAA,CAAE,EAAE,CAAA,CACf,MAAA,CAAO,CAAC,OAAO,YAAA,CAAa,IAAA,CAAK,CAAC,CAAA,KAAM,EAAA,CAAG,WAAW,CAAC,CAAC,CAAC,CAAA,CACzD,IAAA,EAAK;AACR,MAAA,OAAO,OAAO,MAAA,GAAS,CAAA,GAAI,SAAS,CAAC,GAAG,KAAK,eAAe,CAAA;AAAA,IAC9D,CAAA,CAAA,MAAQ;AACN,MAAA,OAAO,CAAC,GAAG,IAAA,CAAK,eAAe,CAAA;AAAA,IACjC;AAAA,EACF;AACF;AAEA,SAAS,iBACP,MAAA,EACa;AACb,EAAA,IAAI,OAAO,SAAA,KAAc,MAAA,IAAa,MAAA,CAAO,SAAA,CAAU,WAAW,CAAA,EAAG;AACnE,IAAA,OAAO,EAAC;AAAA,EACV;AACA,EAAA,OAAO,MAAA,CAAO,SAAA,CAAU,GAAA,CAAI,CAAC,EAAA,MAAQ;AAAA,IACnC,IAAI,EAAA,CAAG,UAAA;AAAA,IACP,MAAM,EAAA,CAAG,QAAA;AAAA,IACT,WAAW,EAAA,CAAG;AAAA,GAChB,CAAE,CAAA;AACJ;AAEA
,SAAS,gBACP,MAAA,EACgD;AAChD,EAAA,QAAQ,MAAA;AAAQ,IACd,KAAK,MAAA;AACH,MAAA,OAAO,MAAA;AAAA,IACT,KAAK,YAAA;AACH,MAAA,OAAO,YAAA;AAAA,IACT,KAAK,QAAA;AACH,MAAA,OAAO,YAAA;AAAA,IACT;AACE,MAAA,OAAO,MAAA;AAAA;AAEb","file":"chunk-WPP3PEDE.js","sourcesContent":["/**\n * OpenAI adapter via Vercel AI SDK per PRD section 7.1\n * Supports GPT-5.3 Codex, GPT-5.2, GPT-5.1 Codex series\n */\n\nimport { generateText, streamText, type CoreMessage } from \"ai\";\nimport { createOpenAI } from \"@ai-sdk/openai\";\nimport { logger } from \"../utils/logger.js\";\nimport {\n AuthenticationError,\n RateLimitError,\n ModelNotFoundError,\n} from \"../types/errors.js\";\nimport { SUPPORTED_MODELS } from \"../types/model.js\";\nimport type { IModelInfo, ProviderName } from \"../types/model.js\";\nimport type {\n IChatRequest,\n IChatResponse,\n IChatMessage,\n IStreamChunk,\n IToolCall,\n IToolDefinition,\n ITokenUsage,\n} from \"../types/message.js\";\nimport type { IModelProvider, IProviderOptions } from \"./types.js\";\n\nconst PROVIDER_NAME: ProviderName = \"openai\";\n\nconst OPENAI_MODELS: readonly string[] = [\n \"gpt-5.3-codex\",\n \"gpt-5.3-codex-spark\",\n \"gpt-5.2-codex\",\n \"gpt-5.1-codex-max\",\n \"gpt-5.2\",\n \"gpt-5.1-codex-mini\",\n] as const;\n\nconst CHARS_PER_TOKEN_ESTIMATE = 4;\n\nfunction convertTools(\n tools: readonly IToolDefinition[] | undefined,\n): Record<string, { description: string; parameters: Record<string, unknown> }> | undefined {\n if (tools === undefined || tools.length === 0) {\n return undefined;\n }\n\n const result: Record<string, { description: string; parameters: Record<string, unknown> }> = {};\n\n for (const tool of tools) {\n const properties: Record<string, unknown> = {};\n const required: string[] = [];\n\n for (const param of tool.parameters) {\n const prop: Record<string, unknown> = {\n type: param.type,\n description: param.description,\n };\n if (param.enum !== undefined) {\n prop[\"enum\"] = param.enum;\n }\n if (param.default !== undefined) 
{\n prop[\"default\"] = param.default;\n }\n properties[param.name] = prop;\n if (param.required) {\n required.push(param.name);\n }\n }\n\n result[tool.name] = {\n description: tool.description,\n parameters: {\n type: \"object\",\n properties,\n required,\n },\n };\n }\n\n return result;\n}\n\nfunction buildMessages(\n messages: readonly IChatMessage[],\n): CoreMessage[] {\n return messages.map((msg) => ({\n role: msg.role as \"user\" | \"assistant\" | \"system\" | \"tool\",\n content: msg.content,\n })) as CoreMessage[];\n}\n\nfunction computeCost(modelInfo: IModelInfo, inputTokens: number, outputTokens: number): number {\n return (\n (inputTokens / 1_000_000) * modelInfo.inputPricePerMToken +\n (outputTokens / 1_000_000) * modelInfo.outputPricePerMToken\n );\n}\n\nfunction classifyError(error: unknown, model: string): never {\n const message = error instanceof Error ? error.message : String(error);\n const lower = message.toLowerCase();\n\n if (lower.includes(\"401\") || lower.includes(\"unauthorized\") || lower.includes(\"invalid api key\")) {\n throw new AuthenticationError(PROVIDER_NAME, message);\n }\n if (lower.includes(\"429\") || lower.includes(\"rate limit\") || lower.includes(\"too many requests\")) {\n const match = /(\\d+)\\s*s/i.exec(message);\n const retryMs = match?.[1] !== undefined ? parseInt(match[1], 10) * 1000 : 60_000;\n throw new RateLimitError(PROVIDER_NAME, retryMs);\n }\n if (lower.includes(\"model\") && lower.includes(\"not found\")) {\n throw new ModelNotFoundError(model);\n }\n\n throw error instanceof Error ? error : new Error(message);\n}\n\nexport class OpenAIAdapter implements IModelProvider {\n readonly name = PROVIDER_NAME;\n readonly supportedModels = OPENAI_MODELS;\n\n private readonly openai: ReturnType<typeof createOpenAI>;\n private readonly apiKey: string | undefined;\n private readonly baseUrl: string | undefined;\n\n constructor(options?: IProviderOptions) {\n this.apiKey = options?.apiKey ?? 
process.env[\"OPENAI_API_KEY\"];\n this.baseUrl = options?.baseUrl;\n this.openai = createOpenAI({\n ...(this.apiKey !== undefined ? { apiKey: this.apiKey } : {}),\n ...(this.baseUrl !== undefined ? { baseURL: this.baseUrl } : {}),\n });\n }\n\n async chat(request: IChatRequest): Promise<IChatResponse> {\n const modelInfo = this.getModelInfo(request.model);\n const messages = buildMessages(request.messages);\n const tools = convertTools(request.tools);\n\n try {\n const result = await generateText({\n model: this.openai(request.model),\n messages,\n ...(request.system !== undefined ? { system: request.system } : {}),\n tools: tools as Record<string, never>,\n maxTokens: request.maxTokens ?? modelInfo.maxOutputTokens,\n ...(request.temperature !== undefined ? { temperature: request.temperature } : {}),\n });\n\n const toolCalls = extractToolCalls(result);\n const inputTokens = result.usage?.promptTokens ?? 0;\n const outputTokens = result.usage?.completionTokens ?? 0;\n\n const usage: ITokenUsage = {\n inputTokens,\n outputTokens,\n totalTokens: inputTokens + outputTokens,\n costUsd: computeCost(modelInfo, inputTokens, outputTokens),\n };\n\n const responseMessage: IChatMessage = {\n id: result.response?.id ?? crypto.randomUUID(),\n role: \"assistant\",\n content: result.text,\n model: request.model,\n provider: PROVIDER_NAME,\n toolCalls: toolCalls.length > 0 ? toolCalls : undefined,\n tokenUsage: usage,\n createdAt: new Date(),\n };\n\n return {\n id: result.response?.id ?? 
crypto.randomUUID(),\n model: request.model,\n provider: PROVIDER_NAME,\n message: responseMessage,\n usage,\n finishReason: mapFinishReason(result.finishReason),\n };\n } catch (error: unknown) {\n classifyError(error, request.model);\n }\n }\n\n async *stream(request: IChatRequest): AsyncIterable<IStreamChunk> {\n const modelInfo = this.getModelInfo(request.model);\n const messages = buildMessages(request.messages);\n const tools = convertTools(request.tools);\n\n try {\n const result = streamText({\n model: this.openai(request.model),\n messages,\n ...(request.system !== undefined ? { system: request.system } : {}),\n tools: tools as Record<string, never>,\n maxTokens: request.maxTokens ?? modelInfo.maxOutputTokens,\n ...(request.temperature !== undefined ? { temperature: request.temperature } : {}),\n });\n\n for await (const part of result.fullStream) {\n if (part.type === \"text-delta\") {\n yield { type: \"text\", content: part.textDelta };\n } else if (part.type === \"tool-call\") {\n const toolCall: IToolCall = {\n id: part.toolCallId,\n name: part.toolName,\n arguments: part.args as Record<string, unknown>,\n };\n yield { type: \"tool_call\", toolCall };\n } else if (part.type === \"finish\") {\n const inputTokens = part.usage?.promptTokens ?? 0;\n const outputTokens = part.usage?.completionTokens ?? 0;\n const usage: ITokenUsage = {\n inputTokens,\n outputTokens,\n totalTokens: inputTokens + outputTokens,\n costUsd: computeCost(modelInfo, inputTokens, outputTokens),\n };\n yield { type: \"usage\", usage };\n } else if (part.type === \"error\") {\n const errMsg = part.error instanceof Error ? part.error.message : String(part.error);\n yield { type: \"error\", error: errMsg };\n }\n }\n\n yield { type: \"done\" };\n } catch (error: unknown) {\n const errMsg = error instanceof Error ? 
error.message : String(error);\n logger.error({ error: errMsg, model: request.model }, \"OpenAI stream error\");\n yield { type: \"error\", error: errMsg };\n yield { type: \"done\" };\n }\n }\n\n async countTokens(text: string, _model: string): Promise<number> {\n return Math.ceil(text.length / CHARS_PER_TOKEN_ESTIMATE);\n }\n\n getModelInfo(model: string): IModelInfo {\n const info = SUPPORTED_MODELS[model];\n if (info === undefined || info.provider !== PROVIDER_NAME) {\n throw new ModelNotFoundError(model);\n }\n return info;\n }\n\n async listAvailableModels(): Promise<readonly string[]> {\n if (!this.apiKey) return [...this.supportedModels];\n\n try {\n const base = this.baseUrl ?? \"https://api.openai.com/v1\";\n const response = await fetch(`${base}/models`, {\n headers: { Authorization: `Bearer ${this.apiKey}` },\n signal: AbortSignal.timeout(5000),\n });\n if (!response.ok) return [...this.supportedModels];\n\n const data = (await response.json()) as { data: Array<{ id: string }> };\n const chatPrefixes = [\"gpt-\", \"o1\", \"o3\", \"o4\", \"chatgpt-\"];\n const models = data.data\n .map((m) => m.id)\n .filter((id) => chatPrefixes.some((p) => id.startsWith(p)))\n .sort();\n return models.length > 0 ? 
models : [...this.supportedModels];\n } catch {\n return [...this.supportedModels];\n }\n }\n}\n\nfunction extractToolCalls(\n result: { toolCalls?: ReadonlyArray<{ toolCallId: string; toolName: string; args: unknown }> },\n): IToolCall[] {\n if (result.toolCalls === undefined || result.toolCalls.length === 0) {\n return [];\n }\n return result.toolCalls.map((tc) => ({\n id: tc.toolCallId,\n name: tc.toolName,\n arguments: tc.args as Record<string, unknown>,\n }));\n}\n\nfunction mapFinishReason(\n reason: string | undefined,\n): \"stop\" | \"tool_calls\" | \"max_tokens\" | \"error\" {\n switch (reason) {\n case \"stop\":\n return \"stop\";\n case \"tool-calls\":\n return \"tool_calls\";\n case \"length\":\n return \"max_tokens\";\n default:\n return \"stop\";\n }\n}\n"]}
|