copilot-gateway 0.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,2257 @@
1
+ import { HTTPError, PATHS, cacheModels, constantTimeEqual, copilotBaseUrl, copilotHeaders, forwardError, getCopilotUsage, getExtraPromptForModel, getReasoningEffortForModel, getSmallModel, isNullish, sleep, state } from "./config-CcK4TXHc.js";
2
+ import consola from "consola";
3
+ import path from "node:path";
4
+ import { randomUUID } from "node:crypto";
5
+ import fs from "node:fs";
6
+ import { Hono } from "hono";
7
+ import { cors } from "hono/cors";
8
+ import { logger } from "hono/logger";
9
+ import { HTTPException } from "hono/http-exception";
10
+ import { streamSSE } from "hono/streaming";
11
+ import util from "node:util";
12
+ import { events } from "fetch-event-stream";
13
+
14
+ //#region src/lib/api-key-auth.ts
15
/**
 * Pull the caller's API key out of the request headers.
 * Checks the OpenAI convention first (`Authorization: Bearer <key>`),
 * then the Anthropic convention (`x-api-key: <key>`).
 * Returns undefined when neither header carries a key.
 */
function extractApiKey(c) {
	const BEARER_PREFIX = "Bearer ";
	const bearer = c.req.header("authorization");
	if (bearer != null && bearer.startsWith(BEARER_PREFIX)) {
		return bearer.slice(BEARER_PREFIX.length);
	}
	const apiKeyHeader = c.req.header("x-api-key");
	return apiKeyHeader ? apiKeyHeader : undefined;
}
25
/**
 * API key authentication middleware
 * Validates that the request contains a valid API key if API keys are configured
 */
// Hono-style middleware: (context, next) => Promise<void>.
const apiKeyAuthMiddleware = async (c, next) => {
	// No keys configured => authentication is disabled; pass straight through.
	if (!state.apiKeys || state.apiKeys.length === 0) {
		await next();
		return;
	}
	const providedKey = extractApiKey(c);
	if (!providedKey) throw new HTTPException(401, { message: "Missing API key" });
	// constantTimeEqual avoids leaking key prefixes through timing differences.
	if (!state.apiKeys.some((key) => constantTimeEqual(key, providedKey))) throw new HTTPException(401, { message: "Invalid API key" });
	await next();
};
39
+
40
+ //#endregion
41
+ //#region src/lib/approval.ts
42
// Interactively ask the operator (via consola's confirm prompt) to approve
// the incoming request; a negative answer aborts it with HTTP 403.
const awaitApproval = async () => {
	if (!await consola.prompt(`Accept incoming request?`, { type: "confirm" })) throw new HTTPError("Request rejected", Response.json({ message: "Request rejected" }, { status: 403 }));
};
45
+
46
+ //#endregion
47
+ //#region src/lib/logger.ts
48
// Log files older than 7 days (10080 minutes) are deleted.
const LOG_RETENTION_MS = 10080 * 60 * 1e3;
// Old-log cleanup runs at most once per 24 hours (1440 minutes).
const CLEANUP_INTERVAL_MS = 1440 * 60 * 1e3;
const LOG_DIR = path.join(PATHS.APP_DIR, "logs");
// Buffered log lines are flushed to disk every second...
const FLUSH_INTERVAL_MS = 1e3;
// ...or immediately once a buffer reaches this many lines.
const MAX_BUFFER_SIZE = 100;
// file path -> open append stream
const logStreams = /* @__PURE__ */ new Map();
// file path -> pending (not yet written) log lines
const logBuffers = /* @__PURE__ */ new Map();
55
// Create the log directory (and any parents) on first use; no-op when it
// already exists.
const ensureLogDirectory = () => {
	if (fs.existsSync(LOG_DIR)) return;
	fs.mkdirSync(LOG_DIR, { recursive: true });
};
58
// Delete regular files under LOG_DIR whose mtime is older than
// LOG_RETENTION_MS. Unreadable entries and failed deletions are skipped
// silently — cleanup is strictly best-effort.
const cleanupOldLogs = () => {
	if (!fs.existsSync(LOG_DIR)) return;
	const cutoff = Date.now() - LOG_RETENTION_MS;
	for (const entry of fs.readdirSync(LOG_DIR)) {
		const filePath = path.join(LOG_DIR, entry);
		let stats;
		try {
			stats = fs.statSync(filePath);
		} catch {
			continue;
		}
		const expired = stats.isFile() && stats.mtimeMs < cutoff;
		if (!expired) continue;
		try {
			fs.rmSync(filePath);
		} catch {
			// ignore deletion failures
		}
	}
};
77
// Render a consola args array as a single line: strings verbatim, everything
// else via util.inspect (unbounded depth, no ANSI colors).
const formatArgs = (args) => {
	const rendered = [];
	for (const arg of args) {
		if (typeof arg === "string") rendered.push(arg);
		else rendered.push(util.inspect(arg, { depth: null, colors: false }));
	}
	return rendered.join(" ");
};
81
// Normalize a handler name into a safe file-name fragment: lower-case, runs
// of non-alphanumerics collapsed to "-", edge dashes trimmed. Falls back to
// "handler" when nothing survives.
const sanitizeName = (name) => {
	const slug = name
		.toLowerCase()
		.replace(/[^a-z0-9]+/g, "-")
		.replace(/^-+|-+$/g, "");
	return slug || "handler";
};
85
// Return the append stream for filePath, (re)creating it when missing or
// already destroyed. Streams live in the module-level `logStreams` cache.
const getLogStream = (filePath) => {
	let stream = logStreams.get(filePath);
	if (!stream || stream.destroyed) {
		stream = fs.createWriteStream(filePath, { flags: "a" });
		logStreams.set(filePath, stream);
		// On error, drop the cached entry so the next call reopens the file.
		stream.on("error", (error) => {
			console.warn("Log stream error", error);
			logStreams.delete(filePath);
		});
	}
	return stream;
};
97
// Write the buffered lines for filePath to its log stream and reset the
// buffer. Write errors are logged but never thrown — logging must not break
// request handling.
const flushBuffer = (filePath) => {
	const buffer = logBuffers.get(filePath);
	if (!buffer || buffer.length === 0) return;
	const stream = getLogStream(filePath);
	const content = buffer.join("\n") + "\n";
	stream.write(content, (error) => {
		if (error) console.warn("Failed to write handler log", error);
	});
	// Replace (not truncate) so any in-flight reference keeps the old lines.
	logBuffers.set(filePath, []);
};
107
// Flush every pending per-file buffer to disk.
const flushAllBuffers = () => {
	for (const filePath of [...logBuffers.keys()]) flushBuffer(filePath);
};
110
// Queue one formatted line for filePath, creating the buffer on demand.
// A full buffer (MAX_BUFFER_SIZE lines) is flushed immediately instead of
// waiting for the timed flush.
const appendLine = (filePath, line) => {
	const existing = logBuffers.get(filePath);
	const buffer = existing ?? [];
	if (!existing) logBuffers.set(filePath, buffer);
	buffer.push(line);
	if (buffer.length >= MAX_BUFFER_SIZE) flushBuffer(filePath);
};
119
// Timed flush keeps log latency bounded at roughly FLUSH_INTERVAL_MS.
setInterval(flushAllBuffers, FLUSH_INTERVAL_MS);
// Flush pending lines and close every stream; used on process shutdown.
const cleanup = () => {
	flushAllBuffers();
	for (const stream of logStreams.values()) stream.end();
	logStreams.clear();
	logBuffers.clear();
};
process.on("exit", cleanup);
process.on("SIGINT", () => {
	cleanup();
	process.exit(0);
});
process.on("SIGTERM", () => {
	cleanup();
	process.exit(0);
});
// Timestamp of the last old-log sweep; throttles cleanupOldLogs in the
// reporter installed by createHandlerLogger.
let lastCleanup = 0;
136
// Build a consola logger that writes exclusively to a per-handler, per-day
// file under LOG_DIR (all default console reporters are removed). `name`
// tags each line; its sanitized form becomes the file-name prefix.
const createHandlerLogger = (name) => {
	ensureLogDirectory();
	const sanitizedName = sanitizeName(name);
	const instance = consola.withTag(name);
	// Level 5 enables debug output; only on when the gateway runs verbose.
	if (state.verbose) instance.level = 5;
	instance.setReporters([]);
	instance.addReporter({ log(logObj) {
		ensureLogDirectory();
		// Opportunistically sweep expired logs, at most once per CLEANUP_INTERVAL_MS.
		if (Date.now() - lastCleanup > CLEANUP_INTERVAL_MS) {
			cleanupOldLogs();
			lastCleanup = Date.now();
		}
		const date = logObj.date;
		// The "sv-SE" locale yields ISO-like formatting (YYYY-MM-DD), local time.
		const dateKey = date.toLocaleDateString("sv-SE");
		const timestamp = date.toLocaleString("sv-SE", { hour12: false });
		const filePath = path.join(LOG_DIR, `${sanitizedName}-${dateKey}.log`);
		const message = formatArgs(logObj.args);
		const line = `[${timestamp}] [${logObj.type}] [${logObj.tag || name}]${message ? ` ${message}` : ""}`;
		appendLine(filePath, line);
	} });
	return instance;
};
158
+
159
+ //#endregion
160
+ //#region src/lib/rate-limit.ts
161
// Enforce a minimum interval (state.rateLimitSeconds) between requests.
// Depending on state.rateLimitWait this either sleeps until the interval has
// elapsed or rejects immediately with HTTP 429.
async function checkRateLimit(state$1) {
	// Rate limiting disabled.
	if (state$1.rateLimitSeconds === void 0) return;
	const now = Date.now();
	// First request ever: just record the timestamp and pass.
	if (!state$1.lastRequestTimestamp) {
		state$1.lastRequestTimestamp = now;
		return;
	}
	const elapsedSeconds = (now - state$1.lastRequestTimestamp) / 1e3;
	if (elapsedSeconds > state$1.rateLimitSeconds) {
		state$1.lastRequestTimestamp = now;
		return;
	}
	const waitTimeSeconds = Math.ceil(state$1.rateLimitSeconds - elapsedSeconds);
	if (!state$1.rateLimitWait) {
		consola.warn(`Rate limit exceeded. Need to wait ${waitTimeSeconds} more seconds.`);
		throw new HTTPError("Rate limit exceeded", Response.json({ message: "Rate limit exceeded" }, { status: 429 }));
	}
	const waitTimeMs = waitTimeSeconds * 1e3;
	consola.warn(`Rate limit reached. Waiting ${waitTimeSeconds} seconds before proceeding...`);
	await sleep(waitTimeMs);
	// NOTE(review): records the pre-sleep timestamp, so the next request's
	// elapsed time includes this wait — confirm this is intended.
	state$1.lastRequestTimestamp = now;
	consola.info("Rate limit wait completed, proceeding with request");
}
184
+
185
+ //#endregion
186
+ //#region src/lib/tokenizer.ts
187
// Lazy loaders for each supported gpt-tokenizer encoding; keeps unused
// encodings out of the startup path.
const ENCODING_MAP = {
	o200k_base: () => import("gpt-tokenizer/encoding/o200k_base"),
	cl100k_base: () => import("gpt-tokenizer/encoding/cl100k_base"),
	p50k_base: () => import("gpt-tokenizer/encoding/p50k_base"),
	p50k_edit: () => import("gpt-tokenizer/encoding/p50k_edit"),
	r50k_base: () => import("gpt-tokenizer/encoding/r50k_base")
};
// encoding name -> loaded encoder module (memoized by getEncodeChatFunction)
const encodingCache = /* @__PURE__ */ new Map();
195
/**
 * Calculate tokens for tool calls
 *
 * Each call costs a fixed framing overhead (funcInit) plus the encoded id,
 * function name and raw argument string; one shared closing cost (funcEnd)
 * is added once at the end.
 */
const calculateToolCallsTokens = (toolCalls, encoder, constants) => {
	const perCall = (toolCall) =>
		constants.funcInit +
		encoder.encode(toolCall.id).length +
		encoder.encode(toolCall.function.name).length +
		encoder.encode(toolCall.function.arguments).length;
	return toolCalls.reduce((sum, call) => sum + perCall(call), 0) + constants.funcEnd;
};
209
/**
 * Calculate tokens for content parts
 *
 * Images cost their encoded URL plus a flat 85-token overhead; text parts
 * cost their encoded text; other parts (or empty text) cost nothing.
 */
const calculateContentPartsTokens = (contentParts, encoder) => contentParts.reduce((tokens, part) => {
	if (part.type === "image_url") return tokens + encoder.encode(part.image_url.url).length + 85;
	return part.text ? tokens + encoder.encode(part.text).length : tokens;
}, 0);
218
/**
 * Calculate tokens for a single message
 *
 * Starts from a fixed 3-token per-message overhead, then adds the encoded
 * length of every string-valued field, +1 for a "name" field, tool-call
 * costs for "tool_calls", and per-part costs for array-valued "content".
 * "reasoning_opaque" is deliberately excluded from the count.
 */
const calculateMessageTokens = (message, encoder, constants) => {
	const MESSAGE_OVERHEAD = 3;
	const NAME_OVERHEAD = 1;
	let tokens = MESSAGE_OVERHEAD;
	for (const [key, value] of Object.entries(message)) {
		if (key === "reasoning_opaque") continue;
		if (typeof value === "string") tokens += encoder.encode(value).length;
		switch (key) {
			case "name":
				tokens += NAME_OVERHEAD;
				break;
			case "tool_calls":
				tokens += calculateToolCallsTokens(value, encoder, constants);
				break;
			case "content":
				if (Array.isArray(value)) tokens += calculateContentPartsTokens(value, encoder);
				break;
		}
	}
	return tokens;
};
234
/**
 * Calculate tokens using custom algorithm
 *
 * Sums per-message costs and adds the fixed 3-token reply-priming overhead.
 * An empty conversation costs nothing.
 */
const calculateTokens = (messages, encoder, constants) => {
	if (messages.length === 0) return 0;
	const REPLY_PRIMING = 3;
	const total = messages.reduce(
		(sum, message) => sum + calculateMessageTokens(message, encoder, constants),
		0
	);
	return total + REPLY_PRIMING;
};
244
/**
 * Get the corresponding encoder module based on encoding type
 *
 * Resolves (and memoizes) the gpt-tokenizer module for `encoding`. Unknown
 * encodings fall back to o200k_base; the fallback is cached under the
 * requested name so the lookup only misses once.
 */
const getEncodeChatFunction = async (encoding) => {
	if (encodingCache.has(encoding)) {
		const cached = encodingCache.get(encoding);
		if (cached) return cached;
	}
	const supportedEncoding = encoding;
	if (!(supportedEncoding in ENCODING_MAP)) {
		const fallbackModule = await ENCODING_MAP.o200k_base();
		encodingCache.set(encoding, fallbackModule);
		return fallbackModule;
	}
	const encodingModule = await ENCODING_MAP[supportedEncoding]();
	encodingCache.set(encoding, encodingModule);
	return encodingModule;
};
262
/**
 * Get tokenizer type from model information
 *
 * Defaults to "o200k_base" when the model metadata does not name one.
 */
const getTokenizerFromModel = (model) => model.capabilities.tokenizer || "o200k_base";
268
/**
 * Get model-specific constants for token calculation
 *
 * gpt-3.5-turbo and gpt-4 use legacy function-framing costs (funcInit 10);
 * every other model uses the newer cost (funcInit 7). `isGpt` selects the
 * structured tool-token algorithm in numTokensForTools.
 */
const getModelConstants = (model) => {
	const shared = {
		propInit: 3,
		propKey: 3,
		enumInit: -3,
		enumItem: 3,
		funcEnd: 12
	};
	const isLegacyGpt = model.id === "gpt-3.5-turbo" || model.id === "gpt-4";
	if (isLegacyGpt) {
		return { funcInit: 10, ...shared, isGpt: true };
	}
	return { funcInit: 7, ...shared, isGpt: model.id.startsWith("gpt-") };
};
290
/**
 * Calculate tokens for a single parameter
 *
 * Cost = fixed per-key overhead, plus enum-item costs, plus the encoded
 * "name:type:description" line (one trailing period stripped), plus nested
 * array item schemas, plus any extra schema fields encoded as "key:value".
 * Non-object schemas cost only the key overhead.
 */
const calculateParameterTokens = (key, prop, context) => {
	const { encoder, constants } = context;
	let tokens = constants.propKey;
	if (typeof prop !== "object" || prop === null) return tokens;
	const param = prop;
	// Enum values each carry a per-item cost on top of their encoded text.
	if (param.enum && Array.isArray(param.enum)) {
		tokens += constants.enumInit;
		for (const item of param.enum) {
			tokens += constants.enumItem + encoder.encode(String(item)).length;
		}
	}
	const type = param.type || "string";
	const description = (param.description || "").replace(/\.$/, "");
	tokens += encoder.encode(`${key}:${type}:${description}`).length;
	// Array parameters recurse into their item schema.
	if (param.type === "array" && param["items"]) tokens += calculateParametersTokens(param["items"], encoder, constants);
	// Any remaining schema fields are charged as "key:value" text.
	const handled = new Set(["type", "description", "enum", "items"]);
	for (const [propertyName, propertyValue] of Object.entries(param)) {
		if (handled.has(propertyName)) continue;
		const text = typeof propertyValue === "string" ? propertyValue : JSON.stringify(propertyValue);
		tokens += encoder.encode(`${propertyName}:${text}`).length;
	}
	return tokens;
};
325
/**
 * Calculate tokens for properties object
 *
 * Adds a one-time propInit cost when at least one property exists, then
 * charges each property via calculateParameterTokens.
 */
const calculatePropertiesTokens = (properties, encoder, constants) => {
	const keys = Object.keys(properties);
	if (keys.length === 0) return 0;
	let tokens = constants.propInit;
	for (const propKey of keys) {
		tokens += calculateParameterTokens(propKey, properties[propKey], { encoder, constants });
	}
	return tokens;
};
339
/**
 * Calculate tokens for function parameters
 *
 * Walks a JSON-schema-like object: "properties" is costed per property,
 * "$schema"/"additionalProperties" are free, and every other field is
 * charged as encoded "key:value" text. Non-objects cost nothing.
 */
const calculateParametersTokens = (parameters, encoder, constants) => {
	if (!parameters || typeof parameters !== "object") return 0;
	const ignored = new Set(["$schema", "additionalProperties"]);
	let tokens = 0;
	for (const [key, value] of Object.entries(parameters)) {
		if (ignored.has(key)) continue;
		if (key === "properties") {
			tokens += calculatePropertiesTokens(value, encoder, constants);
		} else {
			const text = typeof value === "string" ? value : JSON.stringify(value);
			tokens += encoder.encode(`${key}:${text}`).length;
		}
	}
	return tokens;
};
357
/**
 * Calculate tokens for a single tool
 *
 * Fixed funcInit overhead plus the encoded "name:description" line (one
 * trailing period stripped) plus the cost of the parameter schema, if any.
 */
const calculateToolTokens = (tool, encoder, constants) => {
	const { name, description, parameters } = tool.function;
	const desc = (description || "").replace(/\.$/, "");
	let tokens = constants.funcInit + encoder.encode(`${name}:${desc}`).length;
	if (typeof parameters === "object" && parameters !== null) {
		tokens += calculateParametersTokens(parameters, encoder, constants);
	}
	return tokens;
};
371
/**
 * Calculate token count for tools based on model
 *
 * GPT-family models use the structured per-tool costing plus one closing
 * funcEnd cost; other models are charged the encoded JSON of each tool.
 */
const numTokensForTools = (tools, encoder, constants) => {
	if (!constants.isGpt) {
		return tools.reduce((sum, tool) => sum + encoder.encode(JSON.stringify(tool)).length, 0);
	}
	let funcTokenCount = 0;
	for (const tool of tools) funcTokenCount += calculateToolTokens(tool, encoder, constants);
	return funcTokenCount + constants.funcEnd;
};
382
/**
 * Calculate the token count of messages, supporting multiple GPT encoders
 *
 * Splits the conversation into input (non-assistant) and output (assistant)
 * messages, costs each side with the model's tokenizer, and adds tool-schema
 * tokens to the input side.
 */
const getTokenCount = async (payload, model) => {
	const tokenizer = getTokenizerFromModel(model);
	const encoder = await getEncodeChatFunction(tokenizer);
	const simplifiedMessages = payload.messages;
	const inputMessages = simplifiedMessages.filter((msg) => msg.role !== "assistant");
	const outputMessages = simplifiedMessages.filter((msg) => msg.role === "assistant");
	const constants = getModelConstants(model);
	let inputTokens = calculateTokens(inputMessages, encoder, constants);
	if (payload.tools && payload.tools.length > 0) inputTokens += numTokensForTools(payload.tools, encoder, constants);
	const outputTokens = calculateTokens(outputMessages, encoder, constants);
	return {
		input: inputTokens,
		output: outputTokens
	};
};
400
+
401
+ //#endregion
402
+ //#region src/services/copilot/create-chat-completions.ts
403
// POST the chat-completion payload to the Copilot API. Vision headers are
// enabled when any message carries an image part; X-Initiator distinguishes
// agent-driven turns (assistant/tool messages present) from plain user
// turns. Returns an SSE event iterator when payload.stream is set, otherwise
// the parsed JSON body.
const createChatCompletions = async (payload) => {
	if (!state.copilotToken) throw new Error("Copilot token not found");
	const enableVision = payload.messages.some((x) => typeof x.content !== "string" && x.content?.some((x$1) => x$1.type === "image_url"));
	const isAgentCall = payload.messages.some((msg) => ["assistant", "tool"].includes(msg.role));
	const headers = {
		...copilotHeaders(state, enableVision),
		"X-Initiator": isAgentCall ? "agent" : "user"
	};
	const response = await fetch(`${copilotBaseUrl(state)}/chat/completions`, {
		method: "POST",
		headers,
		body: JSON.stringify(payload)
	});
	if (!response.ok) {
		consola.error("Failed to create chat completions", response);
		throw new HTTPError("Failed to create chat completions", response);
	}
	if (payload.stream) return events(response);
	return await response.json();
};
423
+
424
+ //#endregion
425
+ //#region src/routes/chat-completions/handler.ts
426
const logger$3 = createHandlerLogger("chat-completions-handler");
// OpenAI-compatible /chat/completions handler: rate-limits, logs a token
// estimate, optionally waits for manual approval, fills in max_tokens from
// the model's limits, then proxies to Copilot (streaming or not).
async function handleCompletion$1(c) {
	await checkRateLimit(state);
	let payload = await c.req.json();
	// Only the payload tail is logged, to bound log-line size.
	logger$3.debug("Request payload:", JSON.stringify(payload).slice(-400));
	const selectedModel = state.models?.data.find((model) => model.id === payload.model);
	// Token counting is best-effort diagnostics; never fail the request on it.
	try {
		if (selectedModel) {
			const tokenCount = await getTokenCount(payload, selectedModel);
			logger$3.info("Current token count:", tokenCount);
		} else logger$3.warn("No model selected, skipping token count calculation");
	} catch (error) {
		logger$3.warn("Failed to calculate token count:", error);
	}
	if (state.manualApprove) await awaitApproval();
	// Default max_tokens to the model's maximum output size when unset.
	if (isNullish(payload.max_tokens)) {
		payload = {
			...payload,
			max_tokens: selectedModel?.capabilities.limits.max_output_tokens
		};
		logger$3.debug("Set max_tokens to:", JSON.stringify(payload.max_tokens));
	}
	const response = await createChatCompletions(payload);
	if (isNonStreaming$1(response)) {
		response.object = "chat.completion";
		logger$3.debug("Non-streaming response:", JSON.stringify(response));
		return c.json(response);
	}
	logger$3.debug("Streaming response");
	// Relay upstream SSE chunks to the client unchanged.
	return streamSSE(c, async (stream) => {
		for await (const chunk of response) {
			logger$3.debug("Streaming chunk:", JSON.stringify(chunk));
			await stream.writeSSE(chunk);
		}
	});
}
// Non-streaming responses are plain completion objects (have "choices");
// streaming responses are async iterators of SSE events.
const isNonStreaming$1 = (response) => Object.hasOwn(response, "choices");
463
+
464
+ //#endregion
465
+ //#region src/routes/chat-completions/route.ts
466
// Mounts POST / for chat completions; errors are translated into
// client-appropriate responses by forwardError.
const completionRoutes = new Hono();
completionRoutes.post("/", async (c) => {
	try {
		return await handleCompletion$1(c);
	} catch (error) {
		return await forwardError(c, error);
	}
});
474
+
475
+ //#endregion
476
+ //#region src/services/copilot/create-embeddings.ts
477
// POST an embeddings payload to the Copilot API and return the parsed body.
const createEmbeddings = async (payload) => {
	if (!state.copilotToken) throw new Error("Copilot token not found");
	const response = await fetch(`${copilotBaseUrl(state)}/embeddings`, {
		method: "POST",
		headers: copilotHeaders(state),
		body: JSON.stringify(payload)
	});
	if (!response.ok) throw new HTTPError("Failed to create embeddings", response);
	return await response.json();
};
487
+
488
+ //#endregion
489
+ //#region src/routes/embeddings/route.ts
490
// Mounts POST / for embeddings; upstream failures are forwarded via
// forwardError so clients see the original status and message.
const embeddingRoutes = new Hono();
embeddingRoutes.post("/", async (c) => {
	try {
		const payload = await c.req.json();
		return c.json(await createEmbeddings(payload));
	} catch (error) {
		return await forwardError(c, error);
	}
});
500
+
501
+ //#endregion
502
+ //#region src/routes/event-logging/route.ts
503
// Telemetry sink: acknowledge client event batches without storing them.
const eventLoggingRoutes = new Hono();
eventLoggingRoutes.post("/batch", (c) => {
	return c.text("OK", 200);
});
507
+
508
+ //#endregion
509
+ //#region src/routes/messages/utils.ts
510
// Map an OpenAI finish_reason onto the Anthropic stop_reason vocabulary.
// null stays null (still streaming); unknown reasons map to undefined.
function mapOpenAIStopReasonToAnthropic(finishReason) {
	if (finishReason === null) return null;
	switch (finishReason) {
		case "stop": return "end_turn";
		case "length": return "max_tokens";
		case "tool_calls": return "tool_use";
		case "content_filter": return "end_turn";
		default: return undefined;
	}
}
519
+
520
+ //#endregion
521
+ //#region src/routes/messages/non-stream-translation.ts
522
// Placeholder used when a thinking block's text is synthesized rather than
// model-produced; such blocks are filtered out on round-trips.
const THINKING_TEXT = "Thinking...";
// Translate an Anthropic /v1/messages payload into the OpenAI
// chat-completions shape expected by the Copilot backend.
function translateToOpenAI(payload) {
	const modelId = translateModelName(payload.model);
	const model = state.models?.data.find((m) => m.id === modelId);
	const thinkingBudget = getThinkingBudget(payload, model);
	return {
		model: modelId,
		messages: translateAnthropicMessagesToOpenAI(payload.messages, payload.system, modelId),
		max_tokens: payload.max_tokens,
		stop: payload.stop_sequences,
		stream: payload.stream,
		temperature: payload.temperature,
		top_p: payload.top_p,
		user: payload.metadata?.user_id,
		tools: translateAnthropicToolsToOpenAI(payload.tools),
		tool_choice: translateAnthropicToolChoiceToOpenAI(payload.tool_choice),
		thinking_budget: thinkingBudget
	};
}
541
// Clamp the requested Anthropic thinking budget to what the model supports.
// Returns undefined when the model is unknown, no budget was requested, or
// the model has no usable thinking headroom.
function getThinkingBudget(payload, model) {
	const requested = payload.thinking?.budget_tokens;
	if (!model || !payload.thinking || requested === undefined) return undefined;
	const supports = model.capabilities.supports;
	const limits = model.capabilities.limits;
	// Leave at least one output token outside the thinking budget.
	const ceiling = Math.min(supports.max_thinking_budget ?? 0, (limits.max_output_tokens ?? 0) - 1);
	if (ceiling <= 0) return undefined;
	const clamped = Math.min(requested, ceiling);
	return Math.max(clamped, supports.min_thinking_budget ?? 1024);
}
551
// Normalize Anthropic-style dated model ids (e.g. claude-sonnet-4-5-20250929)
// to the dotted/bare ids Copilot expects. Unrecognized ids pass through.
function translateModelName(model) {
	const datedPatterns = [
		[/^claude-opus-4-5-\d{8}$/, "claude-opus-4.5"],
		[/^claude-sonnet-4-5-\d{8}$/, "claude-sonnet-4.5"],
		[/^claude-haiku-4-5-\d{8}$/, "claude-haiku-4.5"]
	];
	for (const [pattern, canonical] of datedPatterns) {
		if (pattern.test(model)) return canonical;
	}
	// Any other suffixed 4.x id collapses to the bare family name.
	if (model.startsWith("claude-sonnet-4-")) return "claude-sonnet-4";
	if (model.startsWith("claude-opus-4-")) return "claude-opus-4";
	return model;
}
559
// Convert the Anthropic system prompt + message list into an OpenAI-style
// message array (system message first, then user/assistant turns in order).
function translateAnthropicMessagesToOpenAI(anthropicMessages, system, modelId) {
	const result = [...handleSystemPrompt(system)];
	for (const message of anthropicMessages) {
		const translated = message.role === "user" ? handleUserMessage(message) : handleAssistantMessage(message, modelId);
		result.push(...translated);
	}
	return result;
}
564
// Header-like keywords that must never reach the upstream model; any line
// containing one is dropped from system text.
const ANTHROPIC_RESERVED_KEYWORDS = ["x-anthropic-billing-header", "x-anthropic-billing"];
// Remove every line that mentions a reserved Anthropic keyword.
function filterAnthropicReservedContent(text) {
	return text
		.split("\n")
		.filter((line) => !ANTHROPIC_RESERVED_KEYWORDS.some((keyword) => line.includes(keyword)))
		.join("\n");
}
570
// Turn the Anthropic system prompt (string or block list) into at most one
// OpenAI system message. Reserved content is stripped first; an
// effectively-empty prompt yields no message at all.
function handleSystemPrompt(system) {
	if (!system) return [];
	const rawText = typeof system === "string" ? system : system.map((block) => block.text).join("\n\n");
	const content = filterAnthropicReservedContent(rawText);
	if (content.trim().length === 0) return [];
	return [{ role: "system", content }];
}
581
// Convert one Anthropic user message. tool_result blocks become individual
// OpenAI "tool" messages; all remaining blocks are folded into a single
// "user" message. Plain-string content maps straight through.
function handleUserMessage(message) {
	if (!Array.isArray(message.content)) {
		return [{ role: "user", content: mapContent(message.content) }];
	}
	const results = [];
	const toolResults = message.content.filter((block) => block.type === "tool_result");
	const rest = message.content.filter((block) => block.type !== "tool_result");
	for (const block of toolResults) {
		results.push({
			role: "tool",
			tool_call_id: block.tool_use_id,
			content: mapContent(block.content)
		});
	}
	if (rest.length > 0) {
		results.push({ role: "user", content: mapContent(rest) });
	}
	return results;
}
601
// Convert one Anthropic assistant message into OpenAI form. Thinking blocks
// are folded into reasoning_text/reasoning_opaque; tool_use blocks become
// OpenAI tool_calls.
function handleAssistantMessage(message, modelId) {
	if (!Array.isArray(message.content)) return [{
		role: "assistant",
		content: mapContent(message.content)
	}];
	const toolUseBlocks = message.content.filter((block) => block.type === "tool_use");
	let thinkingBlocks = message.content.filter((block) => block.type === "thinking");
	// For Claude targets keep only genuine, signed thinking blocks: real text
	// (not the placeholder) with a signature that looks like an opaque token
	// (non-empty, no "@").
	if (modelId.startsWith("claude")) thinkingBlocks = thinkingBlocks.filter((b) => b.thinking && b.thinking !== THINKING_TEXT && b.signature && b.signature.length > 0 && !b.signature.includes("@"));
	const thinkingContents = thinkingBlocks.filter((b) => b.thinking && b.thinking !== THINKING_TEXT).map((b) => b.thinking);
	const allThinkingContent = thinkingContents.length > 0 ? thinkingContents.join("\n\n") : void 0;
	// First non-empty signature wins.
	const signature = thinkingBlocks.find((b) => b.signature && b.signature.length > 0)?.signature;
	return toolUseBlocks.length > 0 ? [{
		role: "assistant",
		content: mapContent(message.content),
		reasoning_text: allThinkingContent,
		reasoning_opaque: signature,
		tool_calls: toolUseBlocks.map((toolUse) => ({
			id: toolUse.id,
			type: "function",
			function: {
				name: toolUse.name,
				arguments: JSON.stringify(toolUse.input)
			}
		}))
	}] : [{
		role: "assistant",
		content: mapContent(message.content),
		reasoning_text: allThinkingContent,
		reasoning_opaque: signature
	}];
}
632
// Normalize Anthropic content into OpenAI form: strings pass through,
// non-arrays become null, image-free arrays collapse to one joined string of
// their text blocks, and mixed text/image arrays become OpenAI content-part
// objects (other block types are dropped).
function mapContent(content) {
	if (typeof content === "string") return content;
	if (!Array.isArray(content)) return null;
	const hasImage = content.some((block) => block.type === "image");
	if (!hasImage) {
		const texts = [];
		for (const block of content) if (block.type === "text") texts.push(block.text);
		return texts.join("\n\n");
	}
	const parts = [];
	for (const block of content) {
		if (block.type === "text") {
			parts.push({ type: "text", text: block.text });
		} else if (block.type === "image") {
			parts.push({
				type: "image_url",
				image_url: { url: `data:${block.source.media_type};base64,${block.source.data}` }
			});
		}
	}
	return parts;
}
653
// Map Anthropic tool definitions to OpenAI function tools; input_schema is
// normalized so an object schema always carries a properties map.
function translateAnthropicToolsToOpenAI(anthropicTools) {
	if (!anthropicTools) return undefined;
	return anthropicTools.map(({ name, description, input_schema }) => ({
		type: "function",
		function: {
			name,
			description,
			parameters: normalizeToolParameters(input_schema)
		}
	}));
}
664
// Ensure a tool schema is a usable JSON-schema object: missing or
// non-object schemas become an empty object schema, and an object schema
// without "properties" gets an empty properties map added.
function normalizeToolParameters(inputSchema) {
	if (!inputSchema || typeof inputSchema !== "object") {
		return { type: "object", properties: {} };
	}
	const schema = { ...inputSchema };
	const needsProperties = schema.type === "object" && schema.properties === undefined;
	return needsProperties ? { ...schema, properties: {} } : schema;
}
676
// Map Anthropic tool_choice to the OpenAI equivalent. "any" means "a tool
// must be used" (OpenAI "required"); a named tool becomes a function
// selector; unknown, absent, or nameless-tool choices yield undefined.
function translateAnthropicToolChoiceToOpenAI(anthropicToolChoice) {
	if (!anthropicToolChoice) return undefined;
	const { type, name } = anthropicToolChoice;
	if (type === "auto") return "auto";
	if (type === "any") return "required";
	if (type === "none") return "none";
	if (type === "tool" && name) {
		return { type: "function", function: { name } };
	}
	return undefined;
}
691
// Convert a non-streaming OpenAI completion into an Anthropic message:
// thinking blocks first, then text, then tool_use, with usage translated
// (input_tokens excludes cached prompt tokens, which are reported separately
// as cache_read_input_tokens).
function translateToAnthropic(response) {
	const assistantContentBlocks = [];
	let stopReason = response.choices[0]?.finish_reason ?? null;
	for (const choice of response.choices) {
		const textBlocks = getAnthropicTextBlocks(choice.message.content);
		const thinkBlocks = getAnthropicThinkBlocks(choice.message.reasoning_text, choice.message.reasoning_opaque);
		const toolUseBlocks = getAnthropicToolUseBlocks(choice.message.tool_calls);
		assistantContentBlocks.push(...thinkBlocks, ...textBlocks, ...toolUseBlocks);
		// A tool_calls finish on any choice takes precedence over a plain stop.
		if (choice.finish_reason === "tool_calls" || stopReason === "stop") stopReason = choice.finish_reason;
	}
	return {
		id: response.id,
		type: "message",
		role: "assistant",
		model: response.model,
		content: assistantContentBlocks,
		stop_reason: mapOpenAIStopReasonToAnthropic(stopReason),
		stop_sequence: null,
		usage: {
			input_tokens: (response.usage?.prompt_tokens ?? 0) - (response.usage?.prompt_tokens_details?.cached_tokens ?? 0),
			output_tokens: response.usage?.completion_tokens ?? 0,
			...response.usage?.prompt_tokens_details?.cached_tokens !== void 0 && { cache_read_input_tokens: response.usage.prompt_tokens_details.cached_tokens }
		}
	};
}
716
// Extract Anthropic text blocks from OpenAI message content. Empty strings,
// non-text parts, and non-string/non-array content yield nothing.
function getAnthropicTextBlocks(messageContent) {
	if (typeof messageContent === "string") {
		return messageContent.length > 0 ? [{ type: "text", text: messageContent }] : [];
	}
	if (!Array.isArray(messageContent)) return [];
	return messageContent
		.filter((part) => part.type === "text")
		.map((part) => ({ type: "text", text: part.text }));
}
727
// Build Anthropic thinking blocks from OpenAI reasoning fields. Real
// reasoning text wins; an opaque signature alone yields a placeholder
// THINKING_TEXT block so the signature survives the round-trip.
function getAnthropicThinkBlocks(reasoningText, reasoningOpaque) {
	const hasText = Boolean(reasoningText) && reasoningText.length > 0;
	if (hasText) {
		return [{ type: "thinking", thinking: reasoningText, signature: reasoningOpaque || "" }];
	}
	const hasSignature = Boolean(reasoningOpaque) && reasoningOpaque.length > 0;
	if (hasSignature) {
		return [{ type: "thinking", thinking: THINKING_TEXT, signature: reasoningOpaque }];
	}
	return [];
}
740
// Convert OpenAI tool calls into Anthropic tool_use blocks.
// NOTE(review): arguments are JSON.parse'd without a guard — a malformed
// argument string from upstream would throw here; confirm that is acceptable.
function getAnthropicToolUseBlocks(toolCalls) {
	if (!toolCalls) return [];
	const blocks = [];
	for (const toolCall of toolCalls) {
		blocks.push({
			type: "tool_use",
			id: toolCall.id,
			name: toolCall.function.name,
			input: JSON.parse(toolCall.function.arguments)
		});
	}
	return blocks;
}
749
+
750
+ //#endregion
751
+ //#region src/routes/messages/count-tokens-handler.ts
752
/**
 * Handles token counting for Anthropic messages
 *
 * Translates the payload to OpenAI form, estimates tokens with the model's
 * tokenizer, then applies empirical corrections: a fixed tool-system-prompt
 * cost (when the anthropic-beta header indicates the client adds one) and a
 * 1.15x factor for Claude models. Always answers 200 with a count; errors
 * and unknown models degrade to a count of 1.
 */
async function handleCountTokens(c) {
	try {
		const anthropicBeta = c.req.header("anthropic-beta");
		const anthropicPayload = await c.req.json();
		const openAIPayload = translateToOpenAI(anthropicPayload);
		const selectedModel = state.models?.data.find((model) => model.id === anthropicPayload.model);
		if (!selectedModel) {
			consola.warn("Model not found, returning default token count");
			return c.json({ input_tokens: 1 });
		}
		const tokenCount = await getTokenCount(openAIPayload, selectedModel);
		if (anthropicPayload.tools && anthropicPayload.tools.length > 0) {
			let addToolSystemPromptCount = false;
			if (anthropicBeta) {
				const toolsLength = anthropicPayload.tools.length;
				// Skip the correction for pure MCP tool sets or a lone "Skill" tool.
				addToolSystemPromptCount = !anthropicPayload.tools.some((tool) => tool.name.startsWith("mcp__") || tool.name === "Skill" && toolsLength === 1);
			}
			if (addToolSystemPromptCount) {
				// Model-family-specific constants for the injected tool system prompt.
				if (anthropicPayload.model.startsWith("claude")) tokenCount.input = tokenCount.input + 346;
				else if (anthropicPayload.model.startsWith("grok")) tokenCount.input = tokenCount.input + 120;
			}
		}
		let finalTokenCount = tokenCount.input + tokenCount.output;
		if (anthropicPayload.model.startsWith("claude")) finalTokenCount = Math.round(finalTokenCount * 1.15);
		consola.info("Token count:", finalTokenCount);
		return c.json({ input_tokens: finalTokenCount });
	} catch (error) {
		consola.error("Error counting tokens:", error);
		return c.json({ input_tokens: 1 });
	}
}
786
+
787
+ //#endregion
788
+ //#region src/services/copilot/create-responses.ts
789
// POST a payload to the Copilot /responses endpoint. service_tier is forced
// to null before sending. Returns an SSE event iterator when payload.stream
// is set, otherwise the parsed JSON body.
const createResponses = async (payload, { vision, initiator }) => {
	if (!state.copilotToken) throw new Error("Copilot token not found");
	const headers = {
		...copilotHeaders(state, vision),
		"X-Initiator": initiator
	};
	payload.service_tier = null;
	const response = await fetch(`${copilotBaseUrl(state)}/responses`, {
		method: "POST",
		headers,
		body: JSON.stringify(payload)
	});
	if (!response.ok) {
		consola.error("Failed to create responses", response);
		throw new HTTPError("Failed to create responses", response);
	}
	if (payload.stream) return events(response);
	return await response.json();
};

//#endregion
//#region src/routes/messages/responses-translation.ts
// Shared type tag for translated conversation items.
const MESSAGE_TYPE = "message";
/**
 * Converts an Anthropic Messages API request into an OpenAI Responses API payload.
 *
 * Messages become Responses input items, tools and tool_choice are converted
 * to their function-calling equivalents, and the Anthropic metadata user_id is
 * split into safety_identifier / prompt_cache_key.
 */
const translateAnthropicMessagesToResponsesPayload = (payload) => {
	const input = [];
	for (const message of payload.messages) input.push(...translateMessage(message));
	const translatedTools = convertAnthropicTools(payload.tools);
	const toolChoice = convertAnthropicToolChoice(payload.tool_choice);
	const { safetyIdentifier, promptCacheKey } = parseUserId(payload.metadata?.user_id);
	return {
		model: payload.model,
		input,
		instructions: translateSystemPrompt(payload.system, payload.model),
		temperature: 1,
		top_p: payload.top_p ?? null,
		max_output_tokens: Math.max(payload.max_tokens, 12800),
		tools: translatedTools,
		tool_choice: toolChoice,
		metadata: payload.metadata ? { ...payload.metadata } : null,
		safety_identifier: safetyIdentifier,
		prompt_cache_key: promptCacheKey,
		stream: payload.stream ?? null,
		store: false,
		parallel_tool_calls: true,
		reasoning: {
			effort: getReasoningEffortForModel(payload.model),
			summary: "detailed"
		},
		include: ["reasoning.encrypted_content"]
	};
};
// Route a message to the translator for its role.
const translateMessage = (message) =>
	message.role === "user" ? translateUserMessage(message) : translateAssistantMessage(message);
/**
 * Translates a user message into Responses input items. Consecutive
 * text/image blocks are buffered into a single message item; each
 * tool_result block becomes its own function_call_output item.
 */
const translateUserMessage = (message) => {
	const { content } = message;
	if (typeof content === "string") return [createMessage("user", content)];
	if (!Array.isArray(content)) return [];
	const result = [];
	const buffered = [];
	for (const block of content) {
		if (block.type === "tool_result") {
			// A tool result interrupts the buffered run of ordinary content.
			flushPendingContent("user", buffered, result);
			result.push(createFunctionCallOutput(block));
		} else {
			const translated = translateUserContentBlock(block);
			if (translated) buffered.push(translated);
		}
	}
	flushPendingContent("user", buffered, result);
	return result;
};
/**
 * Translates an assistant message into Responses input items. tool_use blocks
 * become function_call items; thinking blocks carrying an "<encrypted>@<id>"
 * signature round-trip as reasoning items; plain text is buffered into
 * message items.
 */
const translateAssistantMessage = (message) => {
	const { content } = message;
	if (typeof content === "string") return [createMessage("assistant", content)];
	if (!Array.isArray(content)) return [];
	const result = [];
	const buffered = [];
	for (const block of content) {
		if (block.type === "tool_use") {
			flushPendingContent("assistant", buffered, result);
			result.push(createFunctionToolCall(block));
		} else if (block.type === "thinking" && block.signature && block.signature.includes("@")) {
			// Only signatures in our packed "<encrypted>@<id>" form can be
			// restored into reasoning items.
			flushPendingContent("assistant", buffered, result);
			result.push(createReasoningContent(block));
		} else {
			const translated = translateAssistantContentBlock(block);
			if (translated) buffered.push(translated);
		}
	}
	flushPendingContent("assistant", buffered, result);
	return result;
};
// Map a user content block to its Responses input equivalent; unsupported
// block types yield undefined and are dropped by the caller.
const translateUserContentBlock = (block) => {
	if (block.type === "text") return createTextContent(block.text);
	if (block.type === "image") return createImageContent(block);
	return undefined;
};
// Assistant messages only carry text through this path; everything else is
// handled (or dropped) by translateAssistantMessage.
const translateAssistantContentBlock = (block) => {
	return block.type === "text" ? createOutPutTextContent(block.text) : undefined;
};
/**
 * Moves any buffered content into `target` as a single message item and
 * empties the buffer in place. No-op when the buffer is empty.
 */
const flushPendingContent = (role, pendingContent, target) => {
	if (pendingContent.length > 0) {
		target.push(createMessage(role, pendingContent.slice()));
		pendingContent.length = 0;
	}
};
// Factory helpers for Responses input/output content items.
const createMessage = (role, content) => ({ type: MESSAGE_TYPE, role, content });
const createTextContent = (text) => ({ type: "input_text", text });
const createOutPutTextContent = (text) => ({ type: "output_text", text });
// Anthropic image blocks become data-URL input images with automatic detail.
const createImageContent = (block) => {
	const { media_type: mediaType, data } = block.source;
	return {
		type: "input_image",
		image_url: `data:${mediaType};base64,${data}`,
		detail: "auto"
	};
};
/**
 * Rebuilds a Responses reasoning item from a thinking block whose signature
 * was packed as "<encrypted>@<item id>" by the result translator.
 */
const createReasoningContent = (block) => {
	const [encrypted, itemId] = block.signature.split("@");
	return {
		id: itemId,
		type: "reasoning",
		summary: [{
			type: "summary_text",
			text: block.thinking
		}],
		encrypted_content: encrypted
	};
};
// Anthropic tool_use -> Responses function_call (arguments serialized to JSON).
const createFunctionToolCall = (block) => ({
	type: "function_call",
	call_id: block.id,
	name: block.name,
	arguments: JSON.stringify(block.input),
	status: "completed"
});
// Anthropic tool_result -> Responses function_call_output; errored results
// are marked incomplete.
const createFunctionCallOutput = (block) => ({
	type: "function_call_output",
	call_id: block.tool_use_id,
	output: convertToolResultContent(block.content),
	status: block.is_error ? "incomplete" : "completed"
});
/**
 * Flattens the Anthropic system prompt (string or block array) into a single
 * instructions string, appending the model-specific extra prompt to the first
 * segment. Returns null when there is nothing to send.
 */
const translateSystemPrompt = (system, model) => {
	if (!system) return null;
	const extraPrompt = getExtraPromptForModel(model);
	if (typeof system === "string") return system + extraPrompt;
	const joined = system
		.map((block, index) => (index === 0 ? block.text + extraPrompt : block.text))
		.join(" ");
	return joined.length > 0 ? joined : null;
};
// Anthropic tool definitions -> Responses function tools (description only
// included when present).
const convertAnthropicTools = (tools) => {
	if (!tools || tools.length === 0) return null;
	return tools.map((tool) => ({
		type: "function",
		name: tool.name,
		parameters: tool.input_schema,
		strict: false,
		...(tool.description ? { description: tool.description } : {})
	}));
};
// Anthropic tool_choice -> Responses tool_choice; unknown/absent choices
// default to "auto".
const convertAnthropicToolChoice = (choice) => {
	if (!choice) return "auto";
	if (choice.type === "any") return "required";
	if (choice.type === "none") return "none";
	if (choice.type === "tool" && choice.name) return { type: "function", name: choice.name };
	return "auto";
};
/**
 * Converts a non-streaming Responses API result into an Anthropic message.
 * Falls back to the flat output_text when no structured content blocks were
 * produced.
 */
const translateResponsesResultToAnthropic = (response) => {
	const mappedBlocks = mapOutputToAnthropicContent(response.output);
	const content = mappedBlocks.length > 0 ? mappedBlocks : fallbackContentBlocks(response.output_text);
	return {
		id: response.id,
		type: "message",
		role: "assistant",
		content,
		model: response.model,
		stop_reason: mapResponsesStopReason(response),
		stop_sequence: null,
		usage: mapResponsesUsage(response)
	};
};
/**
 * Maps Responses output items to Anthropic content blocks:
 * reasoning -> thinking (with packed "<encrypted>@<id>" signature),
 * function_call -> tool_use, everything else -> text via shared extraction.
 * Empty extractions are skipped.
 */
const mapOutputToAnthropicContent = (output) => {
	const blocks = [];
	for (const item of output) {
		if (item.type === "reasoning") {
			const thinking = extractReasoningText(item);
			if (thinking.length > 0) {
				blocks.push({
					type: "thinking",
					thinking,
					// Pack encrypted payload and item id so the request
					// translator can reconstruct the reasoning item later.
					signature: `${item.encrypted_content ?? ""}@${item.id}`
				});
			}
		} else if (item.type === "function_call") {
			const toolUse = createToolUseContentBlock(item);
			if (toolUse) blocks.push(toolUse);
		} else {
			// "message" items and unknown types share the same text extraction.
			const text = combineMessageTextContent(item.content);
			if (text.length > 0) blocks.push({ type: "text", text });
		}
	}
	return blocks;
};
/**
 * Concatenates all textual content from a message item's content array:
 * output_text and refusal blocks via their type guards, otherwise any
 * string-valued `text` or `reasoning` field. Non-arrays yield "".
 */
const combineMessageTextContent = (content) => {
	if (!Array.isArray(content)) return "";
	let combined = "";
	for (const block of content) {
		if (isResponseOutputText(block)) combined += block.text;
		else if (isResponseOutputRefusal(block)) combined += block.refusal;
		else if (typeof block.text === "string") combined += block.text;
		else if (typeof block.reasoning === "string") combined += block.reasoning;
	}
	return combined;
};
/**
 * Joins the string `text` fields of a reasoning item's summary blocks and
 * trims the result; tolerates a missing or non-array summary.
 */
const extractReasoningText = (item) => {
	const parts = [];
	if (Array.isArray(item.summary)) {
		for (const block of item.summary) {
			if (typeof block.text === "string") parts.push(block.text);
		}
	}
	return parts.join("").trim();
};
/**
 * Builds an Anthropic tool_use block from a Responses function_call item.
 * Returns null when the call lacks a name or call_id.
 */
const createToolUseContentBlock = (call) => {
	if (!call.name || !call.call_id) return null;
	return {
		type: "tool_use",
		id: call.call_id,
		name: call.name,
		input: parseFunctionCallArguments(call.arguments)
	};
};
/**
 * Best-effort JSON parse of raw function-call arguments:
 * objects pass through, arrays are wrapped as { arguments }, anything else
 * (scalars, parse failures) is preserved under { raw_arguments }.
 */
const parseFunctionCallArguments = (rawArguments) => {
	if (typeof rawArguments !== "string" || rawArguments.trim().length === 0) return {};
	try {
		const parsed = JSON.parse(rawArguments);
		if (Array.isArray(parsed)) return { arguments: parsed };
		if (parsed && typeof parsed === "object") return parsed;
	} catch (error) {
		consola.warn("Failed to parse function call arguments", {
			error,
			rawArguments
		});
	}
	return { raw_arguments: rawArguments };
};
// When no structured blocks exist, wrap the flat output text (if any) as a
// single text block.
const fallbackContentBlocks = (outputText) =>
	outputText ? [{ type: "text", text: outputText }] : [];
/**
 * Maps a Responses result status to an Anthropic stop_reason:
 * completed -> tool_use (if any function_call in output) or end_turn;
 * incomplete -> max_tokens / end_turn depending on the reason; else null.
 */
const mapResponsesStopReason = (response) => {
	if (response.status === "completed") {
		const hasToolCall = response.output.some((item) => item.type === "function_call");
		return hasToolCall ? "tool_use" : "end_turn";
	}
	if (response.status === "incomplete") {
		const reason = response.incomplete_details?.reason;
		if (reason === "max_output_tokens") return "max_tokens";
		if (reason === "content_filter") return "end_turn";
	}
	return null;
};
/**
 * Maps Responses usage to Anthropic usage. Cached input tokens are subtracted
 * from input_tokens and reported separately as cache_read_input_tokens.
 */
const mapResponsesUsage = (response) => {
	const usage = response.usage;
	const cached = usage?.input_tokens_details?.cached_tokens;
	return {
		input_tokens: (usage?.input_tokens ?? 0) - (cached ?? 0),
		output_tokens: usage?.output_tokens ?? 0,
		...(cached !== void 0 ? { cache_read_input_tokens: cached } : {})
	};
};
// Narrowing guards for message content blocks.
const isRecord$2 = (value) => typeof value === "object" && value !== null;
const isResponseOutputText = (block) => isRecord$2(block) && "type" in block && block.type === "output_text";
const isResponseOutputRefusal = (block) => isRecord$2(block) && "type" in block && block.type === "refusal";
/**
 * Splits an Anthropic metadata user_id of the form
 * "user_<id>_account..._session_<key>" into a safety identifier and a prompt
 * cache key; either part is null when its pattern is absent.
 */
const parseUserId = (userId) => {
	if (!userId || typeof userId !== "string") {
		return { safetyIdentifier: null, promptCacheKey: null };
	}
	const userMatch = userId.match(/user_([^_]+)_account/);
	const sessionMatch = userId.match(/_session_(.+)$/);
	return {
		safetyIdentifier: userMatch ? userMatch[1] : null,
		promptCacheKey: sessionMatch ? sessionMatch[1] : null
	};
};
/**
 * Converts tool_result content to a function_call_output payload: strings
 * pass through, arrays are mapped block-by-block (text/image only, others
 * dropped), anything else becomes "".
 */
const convertToolResultContent = (content) => {
	if (typeof content === "string") return content;
	if (!Array.isArray(content)) return "";
	const converted = [];
	for (const block of content) {
		if (block.type === "text") converted.push(createTextContent(block.text));
		else if (block.type === "image") converted.push(createImageContent(block));
	}
	return converted;
};

//#endregion
//#region src/routes/messages/responses-stream-translation.ts
// Upper bound on consecutive whitespace characters tolerated inside streamed
// function-call argument deltas; longer runs indicate a malformed stream.
const MAX_CONSECUTIVE_FUNCTION_CALL_WHITESPACE = 20;
/** Raised when streamed function-call arguments fail validation. */
var FunctionCallArgumentsValidationError = class extends Error {
	constructor(message) {
		super(message);
		this.name = "FunctionCallArgumentsValidationError";
	}
};
/**
 * Advances the running count of consecutive whitespace characters (\r, \n, or
 * space) across `chunk`, carrying `previousCount` in from earlier chunks.
 * Returns early with `exceeded: true` as soon as the run passes the limit.
 * Note: tabs are NOT counted as whitespace here; any other character resets
 * the run to zero.
 *
 * Fix: the original reset branch was `if (char !== " ") count = 0;` — dead
 * code, since a space is always consumed by the whitespace branch above and
 * can never reach it. The reset is now unconditional, which is behaviorally
 * identical.
 */
const updateWhitespaceRunState = (previousCount, chunk) => {
	let count = previousCount;
	for (const char of chunk) {
		if (char === "\r" || char === "\n" || char === " ") {
			count += 1;
			if (count > MAX_CONSECUTIVE_FUNCTION_CALL_WHITESPACE) {
				return { nextCount: count, exceeded: true };
			}
		} else {
			count = 0;
		}
	}
	return { nextCount: count, exceeded: false };
};
/**
 * Fresh per-message translation state for a Responses SSE stream:
 * tracks which Anthropic content blocks are open, which have emitted deltas,
 * and per-output-index tool-call bookkeeping.
 */
const createResponsesStreamState = () => {
	return {
		messageStartSent: false,
		messageCompleted: false,
		nextContentBlockIndex: 0,
		blockIndexByKey: new Map(),
		openBlocks: new Set(),
		blockHasDelta: new Set(),
		functionCallStateByOutputIndex: new Map()
	};
};
/**
 * Translates one raw Responses SSE event into zero or more Anthropic stream
 * events, mutating the shared stream state. Unknown event types yield [].
 */
const translateResponsesStreamEvent = (rawEvent, state$1) => {
	const eventType = rawEvent.type;
	if (eventType === "response.created") return handleResponseCreated(rawEvent, state$1);
	if (eventType === "response.output_item.added") return handleOutputItemAdded$1(rawEvent, state$1);
	if (eventType === "response.reasoning_summary_text.delta") return handleReasoningSummaryTextDelta(rawEvent, state$1);
	if (eventType === "response.output_text.delta") return handleOutputTextDelta(rawEvent, state$1);
	if (eventType === "response.reasoning_summary_text.done") return handleReasoningSummaryTextDone(rawEvent, state$1);
	if (eventType === "response.output_text.done") return handleOutputTextDone(rawEvent, state$1);
	if (eventType === "response.output_item.done") return handleOutputItemDone$1(rawEvent, state$1);
	if (eventType === "response.function_call_arguments.delta") return handleFunctionCallArgumentsDelta(rawEvent, state$1);
	if (eventType === "response.function_call_arguments.done") return handleFunctionCallArgumentsDone(rawEvent, state$1);
	// Both terminal success states share the completion handler.
	if (eventType === "response.completed" || eventType === "response.incomplete") return handleResponseCompleted(rawEvent, state$1);
	if (eventType === "response.failed") return handleResponseFailed(rawEvent, state$1);
	if (eventType === "error") return handleErrorEvent(rawEvent, state$1);
	return [];
};
// response.created opens the Anthropic message.
const handleResponseCreated = (rawEvent, state$1) => messageStart(state$1, rawEvent.response);
/**
 * response.output_item.added: for function_call items, opens a tool_use block
 * and forwards any arguments already present on the item as an initial
 * input_json_delta. Other item types produce no events.
 */
const handleOutputItemAdded$1 = (rawEvent, state$1) => {
	const out = [];
	const details = extractFunctionCallDetails(rawEvent);
	if (!details) return out;
	const blockIndex = openFunctionCallBlock(state$1, {
		outputIndex: details.outputIndex,
		toolCallId: details.toolCallId,
		name: details.name,
		events: out
	});
	// Non-empty initial arguments are streamed immediately.
	if (details.initialArguments) {
		out.push({
			type: "content_block_delta",
			index: blockIndex,
			delta: {
				type: "input_json_delta",
				partial_json: details.initialArguments
			}
		});
		state$1.blockHasDelta.add(blockIndex);
	}
	return out;
};
/**
 * response.output_item.done: for reasoning items, emits the packed signature
 * ("<encrypted>@<id>") as a signature_delta on the thinking block so clients
 * can replay the reasoning later. Other item types produce no events.
 *
 * Fix: the original guarded the push with `if (signature)`, but the
 * concatenation always contains "@" and is therefore always truthy — the
 * guard was dead code and has been removed (behavior unchanged).
 */
const handleOutputItemDone$1 = (rawEvent, state$1) => {
	const events$1 = [];
	const item = rawEvent.item;
	if (item.type !== "reasoning") return events$1;
	const blockIndex = openThinkingBlockIfNeeded(state$1, rawEvent.output_index, events$1);
	// Pack the encrypted reasoning payload and the item id into a single
	// string; createReasoningContent splits it back apart on the next request.
	const signature = (item.encrypted_content ?? "") + "@" + item.id;
	events$1.push({
		type: "content_block_delta",
		index: blockIndex,
		delta: {
			type: "signature_delta",
			signature
		}
	});
	state$1.blockHasDelta.add(blockIndex);
	return events$1;
};
/**
 * Streams incremental tool-call argument JSON into the current tool_use
 * block. Validates the running whitespace count across chunks and aborts the
 * message with an error event when a delta arrives without an open tool-call
 * state or when more than 20 consecutive whitespace characters are seen
 * (a malformed-stream heuristic).
 */
const handleFunctionCallArgumentsDelta = (rawEvent, state$1) => {
	const events$1 = new Array();
	const outputIndex = rawEvent.output_index;
	const deltaText = rawEvent.delta;
	// Empty deltas carry no information.
	if (!deltaText) return events$1;
	const blockIndex = openFunctionCallBlock(state$1, {
		outputIndex,
		events: events$1
	});
	const functionCallState = state$1.functionCallStateByOutputIndex.get(outputIndex);
	if (!functionCallState) return handleFunctionCallArgumentsValidationError(new FunctionCallArgumentsValidationError("Received function call arguments delta without an open tool call block."), state$1, events$1);
	// Whitespace run tracking spans deltas via consecutiveWhitespaceCount.
	const { nextCount, exceeded } = updateWhitespaceRunState(functionCallState.consecutiveWhitespaceCount, deltaText);
	if (exceeded) return handleFunctionCallArgumentsValidationError(new FunctionCallArgumentsValidationError("Received function call arguments delta containing more than 20 consecutive whitespace characters."), state$1, events$1);
	functionCallState.consecutiveWhitespaceCount = nextCount;
	events$1.push({
		type: "content_block_delta",
		index: blockIndex,
		delta: {
			type: "input_json_delta",
			partial_json: deltaText
		}
	});
	state$1.blockHasDelta.add(blockIndex);
	return events$1;
};
/**
 * Finalizes a streamed tool call. If no argument deltas were emitted for the
 * block (e.g. arguments arrived only in this terminal event), the complete
 * argument string is emitted as a single input_json_delta. Always clears the
 * per-output-index tool-call state.
 */
const handleFunctionCallArgumentsDone = (rawEvent, state$1) => {
	const events$1 = new Array();
	const outputIndex = rawEvent.output_index;
	const blockIndex = openFunctionCallBlock(state$1, {
		outputIndex,
		events: events$1
	});
	const finalArguments = typeof rawEvent.arguments === "string" ? rawEvent.arguments : void 0;
	// Only emit the full arguments when nothing was streamed incrementally,
	// to avoid duplicating JSON the client already received.
	if (!state$1.blockHasDelta.has(blockIndex) && finalArguments) {
		events$1.push({
			type: "content_block_delta",
			index: blockIndex,
			delta: {
				type: "input_json_delta",
				partial_json: finalArguments
			}
		});
		state$1.blockHasDelta.add(blockIndex);
	}
	state$1.functionCallStateByOutputIndex.delete(outputIndex);
	return events$1;
};
/**
 * response.output_text.delta: ensures a text block is open for this
 * output/content index and forwards the delta as a text_delta. Empty deltas
 * are dropped.
 */
const handleOutputTextDelta = (rawEvent, state$1) => {
	const out = [];
	const deltaText = rawEvent.delta;
	if (!deltaText) return out;
	const blockIndex = openTextBlockIfNeeded(state$1, {
		outputIndex: rawEvent.output_index,
		contentIndex: rawEvent.content_index,
		events: out
	});
	out.push({
		type: "content_block_delta",
		index: blockIndex,
		delta: {
			type: "text_delta",
			text: deltaText
		}
	});
	state$1.blockHasDelta.add(blockIndex);
	return out;
};
/**
 * response.reasoning_summary_text.delta: ensures a thinking block is open for
 * the output index and forwards the delta as a thinking_delta.
 */
const handleReasoningSummaryTextDelta = (rawEvent, state$1) => {
	const out = [];
	const blockIndex = openThinkingBlockIfNeeded(state$1, rawEvent.output_index, out);
	out.push({
		type: "content_block_delta",
		index: blockIndex,
		delta: {
			type: "thinking_delta",
			thinking: rawEvent.delta
		}
	});
	state$1.blockHasDelta.add(blockIndex);
	return out;
};
/**
 * response.reasoning_summary_text.done: emits the complete summary text as a
 * single thinking_delta, but only when no incremental deltas were already
 * streamed for the block.
 */
const handleReasoningSummaryTextDone = (rawEvent, state$1) => {
	const out = [];
	const blockIndex = openThinkingBlockIfNeeded(state$1, rawEvent.output_index, out);
	if (rawEvent.text && !state$1.blockHasDelta.has(blockIndex)) {
		out.push({
			type: "content_block_delta",
			index: blockIndex,
			delta: {
				type: "thinking_delta",
				thinking: rawEvent.text
			}
		});
	}
	return out;
};
/**
 * response.output_text.done: emits the full text as a single text_delta only
 * when no incremental deltas were streamed for this block (avoids duplicating
 * already-delivered text).
 */
const handleOutputTextDone = (rawEvent, state$1) => {
	const out = [];
	const blockIndex = openTextBlockIfNeeded(state$1, {
		outputIndex: rawEvent.output_index,
		contentIndex: rawEvent.content_index,
		events: out
	});
	if (rawEvent.text && !state$1.blockHasDelta.has(blockIndex)) {
		out.push({
			type: "content_block_delta",
			index: blockIndex,
			delta: {
				type: "text_delta",
				text: rawEvent.text
			}
		});
	}
	return out;
};
/**
 * Handles `response.completed` / `response.incomplete`: closes all open
 * content blocks, then emits the terminal `message_delta` (stop reason +
 * usage, derived via the non-streaming translator) followed by
 * `message_stop`.
 */
const handleResponseCompleted = (rawEvent, state$1) => {
	const response = rawEvent.response;
	const events$1 = new Array();
	closeAllOpenBlocks(state$1, events$1);
	// Reuse the non-streaming translation to compute stop_reason and usage.
	const anthropic = translateResponsesResultToAnthropic(response);
	events$1.push({
		type: "message_delta",
		delta: {
			stop_reason: anthropic.stop_reason,
			stop_sequence: anthropic.stop_sequence
		},
		usage: anthropic.usage
	}, { type: "message_stop" });
	state$1.messageCompleted = true;
	return events$1;
};
/**
 * Handles `response.failed`: closes open blocks and surfaces the upstream
 * error message (or a generic fallback) as an Anthropic error event.
 */
const handleResponseFailed = (rawEvent, state$1) => {
	const response = rawEvent.response;
	const events$1 = new Array();
	closeAllOpenBlocks(state$1, events$1);
	const message = response.error?.message ?? "The response failed due to an unknown error.";
	events$1.push(buildErrorEvent(message));
	state$1.messageCompleted = true;
	return events$1;
};
/** Handles a bare `error` stream event by forwarding its message. */
const handleErrorEvent = (rawEvent, state$1) => {
	const message = typeof rawEvent.message === "string" ? rawEvent.message : "An unexpected error occurred during streaming.";
	state$1.messageCompleted = true;
	return [buildErrorEvent(message)];
};
/**
 * Aborts the stream after invalid function-call arguments: closes all open
 * blocks, marks the message complete, and appends an error event. Appends to
 * (and returns) the caller-provided events array when given.
 */
const handleFunctionCallArgumentsValidationError = (error, state$1, events$1 = []) => {
	const reason = error.message;
	closeAllOpenBlocks(state$1, events$1);
	state$1.messageCompleted = true;
	events$1.push(buildErrorEvent(reason));
	return events$1;
};
1413
+ const messageStart = (state$1, response) => {
1414
+ state$1.messageStartSent = true;
1415
+ const inputCachedTokens = response.usage?.input_tokens_details?.cached_tokens;
1416
+ const inputTokens = (response.usage?.input_tokens ?? 0) - (inputCachedTokens ?? 0);
1417
+ return [{
1418
+ type: "message_start",
1419
+ message: {
1420
+ id: response.id,
1421
+ type: "message",
1422
+ role: "assistant",
1423
+ content: [],
1424
+ model: response.model,
1425
+ stop_reason: null,
1426
+ stop_sequence: null,
1427
+ usage: {
1428
+ input_tokens: inputTokens,
1429
+ output_tokens: 0,
1430
+ cache_read_input_tokens: inputCachedTokens ?? 0
1431
+ }
1432
+ }
1433
+ }];
1434
+ };
/**
 * Shared open-or-reuse logic for keyed content blocks: allocates a stable
 * block index for `key` on first sight, and when the block is not currently
 * open, closes all other open blocks (Anthropic streams one block at a time)
 * and emits content_block_start with the given initial content_block shape.
 * Returns the block index.
 *
 * Refactor note: openTextBlockIfNeeded and openThinkingBlockIfNeeded were
 * two near-identical copies of this logic; they now delegate here. Behavior
 * is unchanged.
 */
const openContentBlockIfNeeded = (state$1, key, contentBlock, events$1) => {
	let blockIndex = state$1.blockIndexByKey.get(key);
	if (blockIndex === void 0) {
		blockIndex = state$1.nextContentBlockIndex;
		state$1.nextContentBlockIndex += 1;
		state$1.blockIndexByKey.set(key, blockIndex);
	}
	if (!state$1.openBlocks.has(blockIndex)) {
		closeOpenBlocks(state$1, events$1);
		events$1.push({
			type: "content_block_start",
			index: blockIndex,
			content_block: contentBlock
		});
		state$1.openBlocks.add(blockIndex);
	}
	return blockIndex;
};
/** Opens (or reuses) the text block for an output/content index pair. */
const openTextBlockIfNeeded = (state$1, params) => {
	const key = getBlockKey(params.outputIndex, params.contentIndex);
	return openContentBlockIfNeeded(state$1, key, { type: "text", text: "" }, params.events);
};
/** Opens (or reuses) the thinking block for an output index (content index 0). */
const openThinkingBlockIfNeeded = (state$1, outputIndex, events$1) => {
	const key = getBlockKey(outputIndex, 0);
	return openContentBlockIfNeeded(state$1, key, { type: "thinking", thinking: "" }, events$1);
};
1480
+ const closeBlockIfOpen = (state$1, blockIndex, events$1) => {
1481
+ if (!state$1.openBlocks.has(blockIndex)) return;
1482
+ events$1.push({
1483
+ type: "content_block_stop",
1484
+ index: blockIndex
1485
+ });
1486
+ state$1.openBlocks.delete(blockIndex);
1487
+ state$1.blockHasDelta.delete(blockIndex);
1488
+ };
1489
+ const closeOpenBlocks = (state$1, events$1) => {
1490
+ for (const blockIndex of state$1.openBlocks) closeBlockIfOpen(state$1, blockIndex, events$1);
1491
+ };
1492
+ const closeAllOpenBlocks = (state$1, events$1) => {
1493
+ closeOpenBlocks(state$1, events$1);
1494
+ state$1.functionCallStateByOutputIndex.clear();
1495
+ };
1496
+ const buildErrorEvent = (message) => ({
1497
+ type: "error",
1498
+ error: {
1499
+ type: "api_error",
1500
+ message
1501
+ }
1502
+ });
1503
+ const getBlockKey = (outputIndex, contentIndex) => `${outputIndex}:${contentIndex}`;
/**
 * Opens (or reuses) the tool_use block for a function call at `outputIndex`.
 * Allocates per-call state on first sight (synthesizing a tool-call id and
 * defaulting the name to "function" when absent), closes any other open
 * blocks, and emits content_block_start. Returns the block index.
 */
const openFunctionCallBlock = (state$1, params) => {
	const { outputIndex, toolCallId, name, events: events$1 } = params;
	let callState = state$1.functionCallStateByOutputIndex.get(outputIndex);
	if (!callState) {
		const newIndex = state$1.nextContentBlockIndex;
		state$1.nextContentBlockIndex += 1;
		callState = {
			blockIndex: newIndex,
			toolCallId: toolCallId ?? `tool_call_${newIndex}`,
			name: name ?? "function",
			consecutiveWhitespaceCount: 0
		};
		state$1.functionCallStateByOutputIndex.set(outputIndex, callState);
	}
	if (!state$1.openBlocks.has(callState.blockIndex)) {
		closeOpenBlocks(state$1, events$1);
		events$1.push({
			type: "content_block_start",
			index: callState.blockIndex,
			content_block: {
				type: "tool_use",
				id: callState.toolCallId,
				name: callState.name,
				input: {}
			}
		});
		state$1.openBlocks.add(callState.blockIndex);
	}
	return callState.blockIndex;
};
/**
 * Pulls the function-call fields out of a response.output_item.added event,
 * or undefined when the item is not a function call.
 */
const extractFunctionCallDetails = (rawEvent) => {
	const { item } = rawEvent;
	if (item.type !== "function_call") return undefined;
	return {
		outputIndex: rawEvent.output_index,
		toolCallId: item.call_id,
		name: item.name,
		initialArguments: item.arguments
	};
};
1550
+
1551
+ //#endregion
1552
+ //#region src/routes/responses/utils.ts
1553
+ const getResponsesRequestOptions = (payload) => {
1554
+ const vision = hasVisionInput(payload);
1555
+ const initiator = hasAgentInitiator(payload) ? "agent" : "user";
1556
+ return {
1557
+ vision,
1558
+ initiator
1559
+ };
1560
+ };
1561
+ const hasAgentInitiator = (payload) => {
1562
+ const lastItem = getPayloadItems(payload).at(-1);
1563
+ if (!lastItem) return false;
1564
+ if (!("role" in lastItem) || !lastItem.role) return true;
1565
+ return (typeof lastItem.role === "string" ? lastItem.role.toLowerCase() : "") === "assistant";
1566
+ };
1567
+ const hasVisionInput = (payload) => {
1568
+ return getPayloadItems(payload).some((item) => containsVisionContent(item));
1569
+ };
1570
+ const getPayloadItems = (payload) => {
1571
+ const result = [];
1572
+ const { input } = payload;
1573
+ if (Array.isArray(input)) result.push(...input);
1574
+ return result;
1575
+ };
1576
+ const containsVisionContent = (value) => {
1577
+ if (!value) return false;
1578
+ if (Array.isArray(value)) return value.some((entry) => containsVisionContent(entry));
1579
+ if (typeof value !== "object") return false;
1580
+ const record = value;
1581
+ if ((typeof record.type === "string" ? record.type.toLowerCase() : void 0) === "input_image") return true;
1582
+ if (Array.isArray(record.content)) return record.content.some((entry) => containsVisionContent(entry));
1583
+ return false;
1584
+ };
1585
+
1586
+ //#endregion
1587
+ //#region src/services/copilot/create-messages.ts
1588
+ const hasImageInput = (payload) => payload.messages.some((message) => Array.isArray(message.content) && message.content.some((block) => block.type === "image"));
1589
+ const isInitiatorUser = (payload) => {
1590
+ const lastMessage = payload.messages.at(-1);
1591
+ if (lastMessage?.role !== "user") return false;
1592
+ if (!Array.isArray(lastMessage.content)) return true;
1593
+ return lastMessage.content.some((block) => block.type !== "tool_result");
1594
+ };
1595
+ const filterAnthropicBetaHeader = (anthropicBetaHeader) => anthropicBetaHeader.split(",").map((value) => value.trim()).filter((value) => value.length > 0 && value !== "claude-code-20250219").join(",");
1596
+ const createMessages = async (payload, anthropicBetaHeader) => {
1597
+ if (!state.copilotToken) throw new Error("Copilot token not found");
1598
+ const headers = {
1599
+ ...copilotHeaders(state, hasImageInput(payload)),
1600
+ "X-Initiator": isInitiatorUser(payload) ? "user" : "agent"
1601
+ };
1602
+ if (anthropicBetaHeader) {
1603
+ const filtered = filterAnthropicBetaHeader(anthropicBetaHeader);
1604
+ if (filtered) headers["anthropic-beta"] = filtered;
1605
+ } else if (payload.thinking?.budget_tokens !== void 0) headers["anthropic-beta"] = "interleaved-thinking-2025-05-14";
1606
+ const response = await fetch(`${copilotBaseUrl(state)}/v1/messages`, {
1607
+ method: "POST",
1608
+ headers,
1609
+ body: JSON.stringify(payload)
1610
+ });
1611
+ if (!response.ok) {
1612
+ consola.error("Failed to create messages", response);
1613
+ throw new HTTPError("Failed to create messages", response);
1614
+ }
1615
+ if (payload.stream) return events(response);
1616
+ return await response.json();
1617
+ };
1618
+
1619
+ //#endregion
1620
+ //#region src/routes/messages/stream-translation.ts
1621
+ function isToolBlockOpen(state$1) {
1622
+ if (!state$1.contentBlockOpen) return false;
1623
+ return Object.values(state$1.toolCalls).some((tc) => tc.anthropicBlockIndex === state$1.contentBlockIndex);
1624
+ }
1625
+ function translateChunkToAnthropicEvents(chunk, state$1) {
1626
+ const events$1 = [];
1627
+ if (chunk.choices.length === 0) return events$1;
1628
+ const choice = chunk.choices[0];
1629
+ const { delta } = choice;
1630
+ handleMessageStart(state$1, events$1, chunk);
1631
+ handleThinkingText(delta, state$1, events$1);
1632
+ handleContent(delta, state$1, events$1);
1633
+ handleToolCalls(delta, state$1, events$1);
1634
+ handleFinish(choice, state$1, {
1635
+ events: events$1,
1636
+ chunk
1637
+ });
1638
+ return events$1;
1639
+ }
/**
 * Emits the Anthropic end-of-message sequence once the chunk reports a
 * finish_reason: closes the open content block (if any), then pushes
 * `message_delta` with the mapped stop reason and usage, followed by
 * `message_stop`.
 */
function handleFinish(choice, state$1, context) {
	const { events: events$1, chunk } = context;
	if (choice.finish_reason && choice.finish_reason.length > 0) {
		if (state$1.contentBlockOpen) {
			// Capture whether the closing block is a tool_use block before
			// mutating state — reasoning-opaque data is only flushed for
			// non-tool blocks.
			const toolBlockOpen = isToolBlockOpen(state$1);
			context.events.push({
				type: "content_block_stop",
				index: state$1.contentBlockIndex
			});
			state$1.contentBlockOpen = false;
			state$1.contentBlockIndex++;
			if (!toolBlockOpen) handleReasoningOpaque(choice.delta, events$1, state$1);
		}
		events$1.push({
			type: "message_delta",
			delta: {
				stop_reason: mapOpenAIStopReasonToAnthropic(choice.finish_reason),
				stop_sequence: null
			},
			usage: {
				// Report uncached input tokens; cached tokens are surfaced
				// separately as cache_read_input_tokens when present.
				input_tokens: (chunk.usage?.prompt_tokens ?? 0) - (chunk.usage?.prompt_tokens_details?.cached_tokens ?? 0),
				output_tokens: chunk.usage?.completion_tokens ?? 0,
				...chunk.usage?.prompt_tokens_details?.cached_tokens !== void 0 && { cache_read_input_tokens: chunk.usage.prompt_tokens_details.cached_tokens }
			}
		}, { type: "message_stop" });
	}
}
1667
/**
 * Translate OpenAI tool_call deltas into Anthropic tool_use blocks.
 * A delta carrying an id and function name opens a new tool_use content
 * block (closing whatever block was open); argument fragments stream as
 * input_json_delta events on the block assigned to that tool call index.
 */
function handleToolCalls(delta, state$1, events$1) {
  if (!delta.tool_calls || delta.tool_calls.length === 0) return;
  closeThinkingBlockIfOpen(state$1, events$1);
  handleReasoningOpaqueInToolCalls(state$1, events$1, delta);
  for (const toolCall of delta.tool_calls) {
    const fn = toolCall.function;
    if (toolCall.id && fn?.name) {
      if (state$1.contentBlockOpen) {
        events$1.push({ type: "content_block_stop", index: state$1.contentBlockIndex });
        state$1.contentBlockIndex++;
        state$1.contentBlockOpen = false;
      }
      const anthropicBlockIndex = state$1.contentBlockIndex;
      state$1.toolCalls[toolCall.index] = {
        id: toolCall.id,
        name: fn.name,
        anthropicBlockIndex
      };
      events$1.push({
        type: "content_block_start",
        index: anthropicBlockIndex,
        content_block: {
          type: "tool_use",
          id: toolCall.id,
          name: fn.name,
          input: {}
        }
      });
      state$1.contentBlockOpen = true;
    }
    if (fn?.arguments) {
      const tracked = state$1.toolCalls[toolCall.index];
      if (tracked) events$1.push({
        type: "content_block_delta",
        index: tracked.anthropicBlockIndex,
        delta: {
          type: "input_json_delta",
          partial_json: fn.arguments
        }
      });
    }
  }
}
/**
 * Before tool blocks start: close a non-tool block if one is open, then
 * flush any opaque reasoning signature as a self-contained thinking block.
 */
function handleReasoningOpaqueInToolCalls(state$1, events$1, delta) {
  const openNonToolBlock = state$1.contentBlockOpen && !isToolBlockOpen(state$1);
  if (openNonToolBlock) {
    events$1.push({ type: "content_block_stop", index: state$1.contentBlockIndex });
    state$1.contentBlockIndex++;
    state$1.contentBlockOpen = false;
  }
  handleReasoningOpaque(delta, events$1, state$1);
}
/**
 * Stream assistant text: closes any thinking/tool block first, opens a
 * text block on demand, and emits text_delta fragments. An empty content
 * string paired with reasoning_opaque instead seals the current block
 * with a signature_delta and advances the block index.
 */
function handleContent(delta, state$1, events$1) {
  if (delta.content && delta.content.length > 0) {
    closeThinkingBlockIfOpen(state$1, events$1);
    if (isToolBlockOpen(state$1)) {
      events$1.push({ type: "content_block_stop", index: state$1.contentBlockIndex });
      state$1.contentBlockIndex++;
      state$1.contentBlockOpen = false;
    }
    if (!state$1.contentBlockOpen) {
      events$1.push({
        type: "content_block_start",
        index: state$1.contentBlockIndex,
        content_block: { type: "text", text: "" }
      });
      state$1.contentBlockOpen = true;
    }
    events$1.push({
      type: "content_block_delta",
      index: state$1.contentBlockIndex,
      delta: { type: "text_delta", text: delta.content }
    });
  }
  const hasOpaque = delta.reasoning_opaque && delta.reasoning_opaque.length > 0;
  if (delta.content === "" && hasOpaque) {
    events$1.push({
      type: "content_block_delta",
      index: state$1.contentBlockIndex,
      delta: { type: "signature_delta", signature: delta.reasoning_opaque }
    }, {
      type: "content_block_stop",
      index: state$1.contentBlockIndex
    });
    state$1.contentBlockIndex++;
    state$1.thinkingBlockOpen = false;
  }
}
/**
 * Emit the initial message_start event exactly once per stream, carrying
 * the chunk's id/model and prompt-token usage. Cache reads are subtracted
 * from input_tokens and reported separately when present.
 */
function handleMessageStart(state$1, events$1, chunk) {
  if (state$1.messageStartSent) return;
  const cachedTokens = chunk.usage?.prompt_tokens_details?.cached_tokens;
  const usage = {
    input_tokens: (chunk.usage?.prompt_tokens ?? 0) - (cachedTokens ?? 0),
    output_tokens: 0
  };
  if (cachedTokens !== void 0) usage.cache_read_input_tokens = cachedTokens;
  events$1.push({
    type: "message_start",
    message: {
      id: chunk.id,
      type: "message",
      role: "assistant",
      content: [],
      model: chunk.model,
      stop_reason: null,
      stop_sequence: null,
      usage
    }
  });
  state$1.messageStartSent = true;
}
/**
 * Emit a complete, empty-text thinking block whose signature_delta carries
 * the opaque reasoning blob, then advance the content block index.
 * No-op when the delta has no reasoning_opaque payload.
 */
function handleReasoningOpaque(delta, events$1, state$1) {
  if (!delta.reasoning_opaque || delta.reasoning_opaque.length === 0) return;
  const index = state$1.contentBlockIndex;
  events$1.push({
    type: "content_block_start",
    index,
    content_block: { type: "thinking", thinking: "" }
  }, {
    type: "content_block_delta",
    index,
    delta: { type: "thinking_delta", thinking: "" }
  }, {
    type: "content_block_delta",
    index,
    delta: { type: "signature_delta", signature: delta.reasoning_opaque }
  }, {
    type: "content_block_stop",
    index
  });
  state$1.contentBlockIndex++;
}
/**
 * Stream reasoning text: opens a thinking block on the first fragment and
 * emits a thinking_delta event for each reasoning_text chunk thereafter.
 */
function handleThinkingText(delta, state$1, events$1) {
  if (!delta.reasoning_text || delta.reasoning_text.length === 0) return;
  if (!state$1.thinkingBlockOpen) {
    events$1.push({
      type: "content_block_start",
      index: state$1.contentBlockIndex,
      content_block: { type: "thinking", thinking: "" }
    });
    state$1.thinkingBlockOpen = true;
  }
  events$1.push({
    type: "content_block_delta",
    index: state$1.contentBlockIndex,
    delta: { type: "thinking_delta", thinking: delta.reasoning_text }
  });
}
/**
 * If a thinking block is open, seal it with an empty signature_delta plus
 * content_block_stop and advance to the next block index. No-op otherwise.
 */
function closeThinkingBlockIfOpen(state$1, events$1) {
  if (!state$1.thinkingBlockOpen) return;
  events$1.push({
    type: "content_block_delta",
    index: state$1.contentBlockIndex,
    delta: { type: "signature_delta", signature: "" }
  }, {
    type: "content_block_stop",
    index: state$1.contentBlockIndex
  });
  state$1.contentBlockIndex++;
  state$1.thinkingBlockOpen = false;
}
+
1864
+ //#endregion
1865
+ //#region src/routes/messages/handler.ts
1866
const logger$2 = createHandlerLogger("messages-handler");
/**
 * Entry point for the Anthropic-compatible messages endpoint. Picks the
 * upstream API (native Messages, Responses, or Chat Completions) based on
 * the selected model's supported endpoints and dispatches accordingly.
 */
async function handleCompletion(c) {
  await checkRateLimit(state);
  const anthropicPayload = await c.req.json();
  logger$2.debug("Anthropic request payload:", JSON.stringify(anthropicPayload));
  const anthropicBeta = c.req.header("anthropic-beta");
  const hasNoTools = !anthropicPayload.tools || anthropicPayload.tools.length === 0;
  // Beta-flagged requests without tools are routed to the small model.
  if (anthropicBeta && hasNoTools) anthropicPayload.model = getSmallModel();
  const selectedModel = state.models?.data.find((model) => model.id === anthropicPayload.model);
  if (state.manualApprove) await awaitApproval();
  if (shouldUseMessagesApi(selectedModel)) return await handleWithMessagesApi(c, anthropicPayload, anthropicBeta);
  if (shouldUseResponsesApi(selectedModel)) return await handleWithResponsesApi(c, anthropicPayload);
  return await handleWithChatCompletions(c, anthropicPayload);
}
const RESPONSES_ENDPOINT$1 = "/responses";
const MESSAGES_ENDPOINT = "/v1/messages";
/**
 * Serve an Anthropic request through the OpenAI chat-completions API,
 * translating the payload on the way in and translating the (streamed or
 * whole) response back into Anthropic shape on the way out.
 */
const handleWithChatCompletions = async (c, anthropicPayload) => {
  const openAIPayload = translateToOpenAI(anthropicPayload);
  logger$2.debug("Translated OpenAI request payload:", JSON.stringify(openAIPayload));
  const response = await createChatCompletions(openAIPayload);
  if (isNonStreaming(response)) {
    logger$2.debug("Non-streaming response from Copilot:", JSON.stringify(response).slice(-400));
    const anthropicResponse = translateToAnthropic(response);
    logger$2.debug("Translated Anthropic response:", JSON.stringify(anthropicResponse));
    return c.json(anthropicResponse);
  }
  logger$2.debug("Streaming response from Copilot");
  return streamSSE(c, async (stream) => {
    // Per-stream translation state shared across all incoming chunks.
    const streamState = {
      messageStartSent: false,
      contentBlockIndex: 0,
      contentBlockOpen: false,
      toolCalls: {},
      thinkingBlockOpen: false
    };
    for await (const rawEvent of response) {
      logger$2.debug("Copilot raw stream event:", JSON.stringify(rawEvent));
      if (rawEvent.data === "[DONE]") break;
      if (!rawEvent.data) continue;
      const translatedEvents = translateChunkToAnthropicEvents(JSON.parse(rawEvent.data), streamState);
      for (const event of translatedEvents) {
        logger$2.debug("Translated Anthropic event:", JSON.stringify(event));
        await stream.writeSSE({
          event: event.type,
          data: JSON.stringify(event)
        });
      }
    }
  });
};
/**
 * Serve an Anthropic request through the Copilot Responses API. Streaming
 * responses are translated event-by-event; if the upstream stream ends
 * without a completed message, an Anthropic error event is emitted so the
 * client is not left hanging.
 */
const handleWithResponsesApi = async (c, anthropicPayload) => {
  const responsesPayload = translateAnthropicMessagesToResponsesPayload(anthropicPayload);
  logger$2.debug("Translated Responses payload:", JSON.stringify(responsesPayload));
  const { vision, initiator } = getResponsesRequestOptions(responsesPayload);
  const response = await createResponses(responsesPayload, { vision, initiator });
  if (responsesPayload.stream && isAsyncIterable$1(response)) {
    logger$2.debug("Streaming response from Copilot (Responses API)");
    return streamSSE(c, async (stream) => {
      const streamState = createResponsesStreamState();
      for await (const chunk of response) {
        // Upstream keep-alive pings are passed through untouched.
        if (chunk.event === "ping") {
          await stream.writeSSE({ event: "ping", data: "{\"type\":\"ping\"}" });
          continue;
        }
        const data = chunk.data;
        if (!data) continue;
        logger$2.debug("Responses raw stream event:", data);
        const translatedEvents = translateResponsesStreamEvent(JSON.parse(data), streamState);
        for (const event of translatedEvents) {
          const eventData = JSON.stringify(event);
          logger$2.debug("Translated Anthropic event:", eventData);
          await stream.writeSSE({ event: event.type, data: eventData });
        }
        if (streamState.messageCompleted) {
          logger$2.debug("Message completed, ending stream");
          break;
        }
      }
      if (!streamState.messageCompleted) {
        logger$2.warn("Responses stream ended without completion; sending error event");
        const errorEvent = buildErrorEvent("Responses stream ended without completion");
        await stream.writeSSE({
          event: errorEvent.type,
          data: JSON.stringify(errorEvent)
        });
      }
    });
  }
  logger$2.debug("Non-streaming Responses result:", JSON.stringify(response).slice(-400));
  const anthropicResponse = translateResponsesResultToAnthropic(response);
  logger$2.debug("Translated Anthropic response:", JSON.stringify(anthropicResponse));
  return c.json(anthropicResponse);
};
/**
 * Serve an Anthropic request through the native Copilot Messages API.
 * Assistant thinking blocks with empty text/signature — or a signature
 * containing "@" — are stripped before forwarding; the stream or JSON
 * result is then relayed to the client as-is.
 */
const handleWithMessagesApi = async (c, anthropicPayload, anthropicBetaHeader) => {
  for (const message of anthropicPayload.messages) {
    const isAssistantArrayContent = message.role === "assistant" && Array.isArray(message.content);
    if (!isAssistantArrayContent) continue;
    message.content = message.content.filter((block) => {
      if (block.type !== "thinking") return true;
      return block.thinking.length > 0 && block.signature.length > 0 && !block.signature.includes("@");
    });
  }
  logger$2.debug("Translated Messages payload:", JSON.stringify(anthropicPayload));
  const response = await createMessages(anthropicPayload, anthropicBetaHeader);
  if (isAsyncIterable$1(response)) {
    logger$2.debug("Streaming response from Copilot (Messages API)");
    return streamSSE(c, async (stream) => {
      for await (const event of response) {
        await stream.writeSSE({
          id: event.id === void 0 ? void 0 : String(event.id),
          event: event.event,
          data: event.data ?? ""
        });
      }
    });
  }
  logger$2.debug("Non-streaming Messages result:", JSON.stringify(response).slice(-400));
  return c.json(response);
};
// A model is served by the Responses API when it advertises that endpoint.
const shouldUseResponsesApi = (selectedModel) => selectedModel?.supported_endpoints?.includes(RESPONSES_ENDPOINT$1) ?? false;
// A model is served by the native Messages API when it advertises that endpoint.
const shouldUseMessagesApi = (selectedModel) => {
  return selectedModel?.supported_endpoints?.includes(MESSAGES_ENDPOINT) ?? false;
};
// Non-streaming chat results carry a top-level "choices" array; streams do not.
const isNonStreaming = (response) => Object.hasOwn(response, "choices");
// Treat any value exposing Symbol.asyncIterator as a stream.
const isAsyncIterable$1 = (value) => {
  if (!value) return false;
  return typeof value[Symbol.asyncIterator] === "function";
};
+
2002
+ //#endregion
2003
+ //#region src/routes/messages/route.ts
2004
const messageRoutes = new Hono();
/** Wrap a handler so thrown errors are forwarded in upstream-error shape. */
const withMessageErrorForwarding = (handler) => async (c) => {
  try {
    return await handler(c);
  } catch (error) {
    return await forwardError(c, error);
  }
};
// POST / — Anthropic-compatible message completion.
messageRoutes.post("/", withMessageErrorForwarding(handleCompletion));
// POST /count_tokens — token counting for an Anthropic payload.
messageRoutes.post("/count_tokens", withMessageErrorForwarding(handleCountTokens));
+
2020
+ //#endregion
2021
+ //#region src/routes/models/route.ts
2022
const modelRoutes = new Hono();
// GET / — list cached Copilot models in an OpenAI-style "list" envelope.
modelRoutes.get("/", async (c) => {
  try {
    // Populate the model cache lazily on first request.
    if (!state.models) await cacheModels();
    const toModelEntry = (model) => ({
      id: model.id,
      object: "model",
      type: "model",
      created: 0,
      created_at: new Date(0).toISOString(),
      owned_by: model.vendor,
      display_name: model.name
    });
    return c.json({
      object: "list",
      data: state.models?.data.map(toModelEntry),
      has_more: false
    });
  } catch (error) {
    return await forwardError(c, error);
  }
});
+
2045
+ //#endregion
2046
+ //#region src/routes/responses/stream-id-sync.ts
2047
// True for any non-null object value.
const isRecord$1 = (value) => typeof value === "object" && value !== null;
// Numeric output_index of a parsed event, when present.
const getOutputIndex = (value) => {
  return typeof value.output_index === "number" ? value.output_index : void 0;
};
// Event type: the SSE "event" field wins; otherwise the JSON "type" field.
const getEventType = (parsed, fallbackEvent) => {
  if (fallbackEvent) return fallbackEvent;
  return typeof parsed.type === "string" ? parsed.type : void 0;
};
// Synthesize an item id like "oi_<index>_<16 random hex chars>".
const buildGeneratedItemId = (outputIndex) => `oi_${outputIndex}_${randomUUID().replaceAll("-", "").slice(0, 16)}`;
// Tracks the item id assigned to each output_index across a stream.
const createStreamIdTracker = () => ({ outputItems: /* @__PURE__ */ new Map() });
/**
 * Normalize item ids in a Responses SSE data payload so every event that
 * refers to the same output_index carries the same item id. Non-JSON or
 * non-object payloads are passed through untouched.
 */
const fixStreamIds = (data, event, tracker) => {
  if (!data) return data;
  let parsed;
  try {
    parsed = JSON.parse(data);
  } catch {
    return data;
  }
  if (!isRecord$1(parsed)) return data;
  const eventType = getEventType(parsed, event);
  if (eventType === "response.output_item.added") return handleOutputItemAdded(parsed, tracker);
  if (eventType === "response.output_item.done") return handleOutputItemDone(parsed, tracker);
  return syncItemIdByOutputIndex(parsed, tracker);
};
// Record (or generate) the item id announced by an output_item.added event.
const handleOutputItemAdded = (parsed, tracker) => {
  const outputIndex = getOutputIndex(parsed);
  if (outputIndex !== void 0 && isRecord$1(parsed.item)) {
    const currentId = parsed.item.id;
    const hasUsableId = typeof currentId === "string" && currentId.length > 0;
    const itemId = hasUsableId ? currentId : buildGeneratedItemId(outputIndex);
    parsed.item.id = itemId;
    tracker.outputItems.set(outputIndex, itemId);
  }
  return JSON.stringify(parsed);
};
// Stamp the tracked id onto the matching output_item.done event.
const handleOutputItemDone = (parsed, tracker) => {
  const outputIndex = getOutputIndex(parsed);
  if (outputIndex !== void 0 && isRecord$1(parsed.item)) {
    const trackedId = tracker.outputItems.get(outputIndex);
    if (trackedId) parsed.item.id = trackedId;
  }
  return JSON.stringify(parsed);
};
// For other events, align item_id with the id tracked for that output_index.
const syncItemIdByOutputIndex = (parsed, tracker) => {
  const outputIndex = getOutputIndex(parsed);
  if (outputIndex !== void 0) {
    const trackedId = tracker.outputItems.get(outputIndex);
    if (trackedId) parsed.item_id = trackedId;
  }
  return JSON.stringify(parsed);
};
+
2098
+ //#endregion
2099
+ //#region src/routes/responses/tool-normalization.ts
2100
const APPLY_PATCH_DESCRIPTION = "Use the `apply_patch` tool to edit files";
// True for any non-null object value.
const isRecord = (value) => typeof value === "object" && value !== null;
// Read a property only when it is a string; undefined otherwise.
const getStringField = (value, key) => {
  const field = value[key];
  return typeof field === "string" ? field : void 0;
};
// web_search tools are dropped — not supported downstream.
const isWebSearchTool = (tool) => isRecord(tool) && getStringField(tool, "type") === "web_search";
// Detect the "custom" flavour of apply_patch that must become a function tool.
const isCustomApplyPatchTool = (tool) => {
  if (!isRecord(tool)) return false;
  return getStringField(tool, "type") === "custom" && getStringField(tool, "name") === "apply_patch";
};
// Function-tool replacement for a custom apply_patch definition.
const createFunctionApplyPatchTool = () => ({
  type: "function",
  name: "apply_patch",
  description: APPLY_PATCH_DESCRIPTION,
  parameters: {
    type: "object",
    properties: { input: {
      type: "string",
      description: "The entire contents of the apply_patch command"
    } },
    required: ["input"]
  },
  strict: false
});
/**
 * Rewrite payload.tools in place: web_search tools are removed and (by
 * default) custom apply_patch tools are converted to the equivalent
 * function tool. No-op when the payload has no tools.
 */
const normalizeResponsesTools = (payload, options = {}) => {
  if (!Array.isArray(payload.tools) || payload.tools.length === 0) return;
  const { convertCustomApplyPatch = true } = options;
  payload.tools = payload.tools.flatMap((tool) => {
    if (isWebSearchTool(tool)) return [];
    if (convertCustomApplyPatch && isCustomApplyPatchTool(tool)) return [createFunctionApplyPatchTool()];
    return [tool];
  });
};
+
2145
+ //#endregion
2146
+ //#region src/routes/responses/handler.ts
2147
const logger$1 = createHandlerLogger("responses-handler");
const RESPONSES_ENDPOINT = "/responses";
/**
 * Entry point for the native Responses endpoint: normalizes tools,
 * rejects models that do not advertise /responses support, then proxies
 * the call — forwarding the SSE stream (with item-id fixups) or the JSON
 * result unchanged.
 */
const handleResponses = async (c) => {
  await checkRateLimit(state);
  const payload = await c.req.json();
  logger$1.debug("Responses request payload:", JSON.stringify(payload));
  normalizeResponsesTools(payload);
  const selectedModel = state.models?.data.find((model) => model.id === payload.model);
  const supportsResponses = selectedModel?.supported_endpoints?.includes(RESPONSES_ENDPOINT) ?? false;
  if (!supportsResponses) return c.json({ error: {
    message: "This model does not support the responses endpoint. Please choose a different model.",
    type: "invalid_request_error"
  } }, 400);
  const { vision, initiator } = getResponsesRequestOptions(payload);
  if (state.manualApprove) await awaitApproval();
  const response = await createResponses(payload, { vision, initiator });
  if (isStreamingRequested(payload) && isAsyncIterable(response)) {
    logger$1.debug("Forwarding native Responses stream");
    return streamSSE(c, async (stream) => {
      const idTracker = createStreamIdTracker();
      for await (const chunk of response) {
        logger$1.debug("Responses stream chunk:", JSON.stringify(chunk));
        await stream.writeSSE({
          id: chunk.id,
          event: chunk.event,
          data: fixStreamIds(chunk.data ?? "", chunk.event, idTracker)
        });
      }
    });
  }
  logger$1.debug("Forwarding native Responses result:", JSON.stringify(response).slice(-400));
  return c.json(response);
};
// Treat any value exposing Symbol.asyncIterator as a stream.
const isAsyncIterable = (value) => {
  if (!value) return false;
  return typeof value[Symbol.asyncIterator] === "function";
};
// The client asked for SSE when payload.stream is truthy.
const isStreamingRequested = (payload) => Boolean(payload.stream);
+
2185
+ //#endregion
2186
+ //#region src/routes/responses/route.ts
2187
const responsesRoutes = new Hono();
// POST / — native Responses API proxy; failures are forwarded upstream-style.
responsesRoutes.post("/", (c) => handleResponses(c).catch((error) => forwardError(c, error)));
+
2196
+ //#endregion
2197
+ //#region src/routes/token/route.ts
2198
const tokenRoute = new Hono();
// GET / — expose the current Copilot token; null token with a 500 on failure.
tokenRoute.get("/", (c) => {
  try {
    return c.json({ token: state.copilotToken });
  } catch (error) {
    console.error("Error fetching token:", error);
    const failureBody = {
      error: "Failed to fetch token",
      token: null
    };
    return c.json(failureBody, 500);
  }
});
+
2211
+ //#endregion
2212
+ //#region src/routes/usage/route.ts
2213
const usageRoute = new Hono();
// GET / — fetch and return the account's Copilot usage summary.
usageRoute.get("/", async (c) => {
  try {
    return c.json(await getCopilotUsage());
  } catch (error) {
    console.error("Error fetching Copilot usage:", error);
    return c.json({ error: "Failed to fetch Copilot usage" }, 500);
  }
});
+
2224
+ //#endregion
2225
+ //#region src/server.ts
2226
const server = new Hono();
server.use(logger());
server.use(cors());
// Paths gated by API-key auth (a no-op when no keys are configured).
const authenticatedPaths = [
  "/chat/completions",
  "/models",
  "/embeddings",
  "/responses",
  "/usage",
  "/token",
  "/v1/chat/completions",
  "/v1/models",
  "/v1/embeddings",
  "/v1/responses",
  "/v1/messages",
  "/api/event_logging"
];
for (const routePath of authenticatedPaths) server.use(routePath, apiKeyAuthMiddleware);
server.get("/", (c) => c.text("Server running"));
// Mount routes; most are available both bare and under the /v1 prefix.
server.route("/chat/completions", completionRoutes);
server.route("/models", modelRoutes);
server.route("/embeddings", embeddingRoutes);
server.route("/usage", usageRoute);
server.route("/token", tokenRoute);
server.route("/responses", responsesRoutes);
server.route("/v1/chat/completions", completionRoutes);
server.route("/v1/models", modelRoutes);
server.route("/v1/embeddings", embeddingRoutes);
server.route("/v1/responses", responsesRoutes);
server.route("/v1/messages", messageRoutes);
server.route("/api/event_logging", eventLoggingRoutes);

//#endregion
export { server };
//# sourceMappingURL=server-CKh3Pzh6.js.map