@ashsec/copilot-api 0.7.13 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/main.js CHANGED
@@ -9,6 +9,7 @@ import { events } from "fetch-event-stream";
9
9
  import clipboard from "clipboardy";
10
10
  import { serve } from "srvx";
11
11
  import invariant from "tiny-invariant";
12
+ import fs$1 from "node:fs";
12
13
  import { getProxyForUrl } from "proxy-from-env";
13
14
  import { Agent, ProxyAgent, setGlobalDispatcher } from "undici";
14
15
  import { execSync } from "node:child_process";
@@ -16,84 +17,10 @@ import process$1 from "node:process";
16
17
  import { Hono } from "hono";
17
18
  import { cors } from "hono/cors";
18
19
  import { streamSSE } from "hono/streaming";
20
+ import util from "node:util";
19
21
 
20
22
  //#region package.json
21
- var name = "@ashsec/copilot-api";
22
- var version = "0.7.13";
23
- var description = "Turn GitHub Copilot into OpenAI/Anthropic API compatible server. Usable with Claude Code!";
24
- var keywords = [
25
- "proxy",
26
- "github-copilot",
27
- "openai-compatible"
28
- ];
29
- var homepage = "https://github.com/ericc-ch/copilot-api";
30
- var bugs = "https://github.com/ericc-ch/copilot-api/issues";
31
- var repository = {
32
- "type": "git",
33
- "url": "git+https://github.com/ericc-ch/copilot-api.git"
34
- };
35
- var author = "Erick Christian <erickchristian48@gmail.com>";
36
- var type = "module";
37
- var bin = { "copilot-api": "./dist/main.js" };
38
- var files = ["dist"];
39
- var scripts = {
40
- "build": "tsdown",
41
- "dev": "bun run --watch ./src/main.ts",
42
- "knip": "knip-bun",
43
- "lint": "eslint --cache",
44
- "lint:all": "eslint --cache .",
45
- "prepack": "bun run build",
46
- "prepare": "simple-git-hooks",
47
- "release": "bumpp && bun publish --access public",
48
- "start": "NODE_ENV=production bun run ./src/main.ts",
49
- "typecheck": "tsc"
50
- };
51
- var simple_git_hooks = { "pre-commit": "bunx lint-staged" };
52
- var lint_staged = { "*": "bun run lint --fix" };
53
- var dependencies = {
54
- "citty": "^0.1.6",
55
- "clipboardy": "^5.0.0",
56
- "consola": "^3.4.2",
57
- "fetch-event-stream": "^0.1.5",
58
- "gpt-tokenizer": "^3.0.1",
59
- "hono": "^4.9.9",
60
- "proxy-from-env": "^1.1.0",
61
- "srvx": "^0.8.9",
62
- "tiny-invariant": "^1.3.3",
63
- "undici": "^7.16.0",
64
- "zod": "^4.1.11"
65
- };
66
- var devDependencies = {
67
- "@echristian/eslint-config": "^0.0.54",
68
- "@types/bun": "^1.2.23",
69
- "@types/proxy-from-env": "^1.0.4",
70
- "bumpp": "^10.2.3",
71
- "eslint": "^9.37.0",
72
- "knip": "^5.64.1",
73
- "lint-staged": "^16.2.3",
74
- "prettier-plugin-packagejson": "^2.5.19",
75
- "simple-git-hooks": "^2.13.1",
76
- "tsdown": "^0.15.6",
77
- "typescript": "^5.9.3"
78
- };
79
- var package_default = {
80
- name,
81
- version,
82
- description,
83
- keywords,
84
- homepage,
85
- bugs,
86
- repository,
87
- author,
88
- type,
89
- bin,
90
- files,
91
- scripts,
92
- "simple-git-hooks": simple_git_hooks,
93
- "lint-staged": lint_staged,
94
- dependencies,
95
- devDependencies
96
- };
23
+ var version = "0.8.0";
97
24
 
98
25
  //#endregion
99
26
  //#region src/lib/paths.ts
@@ -103,6 +30,7 @@ const AZURE_OPENAI_CONFIG_PATH = path.join(APP_DIR, "azure_openai_config");
103
30
  const REPLACEMENTS_CONFIG_PATH = path.join(APP_DIR, "replacements.json");
104
31
  const PATHS = {
105
32
  APP_DIR,
33
+ CONFIG_PATH: path.join(APP_DIR, "config.json"),
106
34
  GITHUB_TOKEN_PATH,
107
35
  AZURE_OPENAI_CONFIG_PATH,
108
36
  REPLACEMENTS_CONFIG_PATH
@@ -128,7 +56,8 @@ const state = {
128
56
  manualApprove: false,
129
57
  rateLimitWait: false,
130
58
  showToken: false,
131
- debug: false
59
+ debug: false,
60
+ verbose: false
132
61
  };
133
62
 
134
63
  //#endregion
@@ -137,10 +66,10 @@ const standardHeaders = () => ({
137
66
  "content-type": "application/json",
138
67
  accept: "application/json"
139
68
  });
140
- const COPILOT_VERSION = "0.26.7";
69
+ const COPILOT_VERSION = "0.37.6";
141
70
  const EDITOR_PLUGIN_VERSION = `copilot-chat/${COPILOT_VERSION}`;
142
71
  const USER_AGENT = `GitHubCopilotChat/${COPILOT_VERSION}`;
143
- const API_VERSION = "2025-04-01";
72
+ const API_VERSION = "2025-10-01";
144
73
  const copilotBaseUrl = (state$1) => state$1.accountType === "individual" ? "https://api.githubcopilot.com" : `https://api.${state$1.accountType}.githubcopilot.com`;
145
74
  const copilotHeaders = (state$1, vision = false) => {
146
75
  const headers = {
@@ -150,7 +79,7 @@ const copilotHeaders = (state$1, vision = false) => {
150
79
  "editor-version": `vscode/${state$1.vsCodeVersion}`,
151
80
  "editor-plugin-version": EDITOR_PLUGIN_VERSION,
152
81
  "user-agent": USER_AGENT,
153
- "openai-intent": "conversation-panel",
82
+ "openai-intent": "conversation-agent",
154
83
  "x-github-api-version": API_VERSION,
155
84
  "x-request-id": randomUUID(),
156
85
  "x-vscode-user-agent-library-version": "electron-fetch"
@@ -654,13 +583,13 @@ const checkUsage = defineCommand({
654
583
  const premiumUsed = premiumTotal - premium.remaining;
655
584
  const premiumPercentUsed = premiumTotal > 0 ? premiumUsed / premiumTotal * 100 : 0;
656
585
  const premiumPercentRemaining = premium.percent_remaining;
657
- function summarizeQuota(name$1, snap) {
658
- if (!snap) return `${name$1}: N/A`;
586
+ function summarizeQuota(name, snap) {
587
+ if (!snap) return `${name}: N/A`;
659
588
  const total = snap.entitlement;
660
589
  const used = total - snap.remaining;
661
590
  const percentUsed = total > 0 ? used / total * 100 : 0;
662
591
  const percentRemaining = snap.percent_remaining;
663
- return `${name$1}: ${used}/${total} used (${percentUsed.toFixed(1)}% used, ${percentRemaining.toFixed(1)}% remaining)`;
592
+ return `${name}: ${used}/${total} used (${percentUsed.toFixed(1)}% used, ${percentRemaining.toFixed(1)}% remaining)`;
664
593
  }
665
594
  const premiumLine = `Premium: ${premiumUsed}/${premiumTotal} used (${premiumPercentUsed.toFixed(1)}% used, ${premiumPercentRemaining.toFixed(1)}% remaining)`;
666
595
  const chatLine = summarizeQuota("Chat", usage.quota_snapshots.chat);
@@ -736,11 +665,11 @@ async function getUserReplacements() {
736
665
  * Add a new user replacement rule
737
666
  */
738
667
  async function addReplacement(pattern, replacement, options) {
739
- const { isRegex = false, name: name$1 } = options ?? {};
668
+ const { isRegex = false, name } = options ?? {};
740
669
  await ensureLoaded();
741
670
  const rule = {
742
671
  id: `user-${Date.now()}-${Math.random().toString(36).slice(2, 9)}`,
743
- name: name$1,
672
+ name,
744
673
  pattern,
745
674
  replacement,
746
675
  isRegex,
@@ -891,11 +820,11 @@ async function applyReplacementsToPayload(payload) {
891
820
  //#region src/config.ts
892
821
  function formatRule(rule, index) {
893
822
  const status = rule.enabled ? "✓" : "✗";
894
- const type$1 = rule.isRegex ? "regex" : "string";
823
+ const type = rule.isRegex ? "regex" : "string";
895
824
  const system = rule.isSystem ? " [system]" : "";
896
- const name$1 = rule.name ? ` "${rule.name}"` : "";
825
+ const name = rule.name ? ` "${rule.name}"` : "";
897
826
  const replacement = rule.replacement || "(empty)";
898
- return `${index + 1}. [${status}] (${type$1})${system}${name$1} "${rule.pattern}" → "${replacement}"`;
827
+ return `${index + 1}. [${status}] (${type})${system}${name} "${rule.pattern}" → "${replacement}"`;
899
828
  }
900
829
  async function listReplacements() {
901
830
  const all = await getAllReplacements();
@@ -908,11 +837,11 @@ async function listReplacements() {
908
837
  console.log();
909
838
  }
910
839
  async function addNewReplacement() {
911
- const name$1 = await consola.prompt("Name (optional, short description):", {
840
+ const name = await consola.prompt("Name (optional, short description):", {
912
841
  type: "text",
913
842
  default: ""
914
843
  });
915
- if (typeof name$1 === "symbol") {
844
+ if (typeof name === "symbol") {
916
845
  consola.info("Cancelled.");
917
846
  return;
918
847
  }
@@ -949,7 +878,7 @@ async function addNewReplacement() {
949
878
  consola.info("Cancelled.");
950
879
  return;
951
880
  }
952
- const rule = await addReplacement(pattern, replacement, matchType === "regex", name$1 || void 0);
881
+ const rule = await addReplacement(pattern, replacement, matchType === "regex", name || void 0);
953
882
  consola.success(`Added rule: ${rule.name || rule.id}`);
954
883
  }
955
884
  async function editExistingReplacement() {
@@ -977,11 +906,11 @@ async function editExistingReplacement() {
977
906
  }
978
907
  consola.info(`\nEditing rule: ${rule.name || rule.id}`);
979
908
  consola.info("Press Enter to keep current value.\n");
980
- const name$1 = await consola.prompt("Name:", {
909
+ const name = await consola.prompt("Name:", {
981
910
  type: "text",
982
911
  default: rule.name || ""
983
912
  });
984
- if (typeof name$1 === "symbol") {
913
+ if (typeof name === "symbol") {
985
914
  consola.info("Cancelled.");
986
915
  return;
987
916
  }
@@ -1023,7 +952,7 @@ async function editExistingReplacement() {
1023
952
  return;
1024
953
  }
1025
954
  const updated = await updateReplacement(selected, {
1026
- name: name$1 || void 0,
955
+ name: name || void 0,
1027
956
  pattern,
1028
957
  replacement,
1029
958
  isRegex: matchType === "regex"
@@ -1254,6 +1183,118 @@ const debug = defineCommand({
1254
1183
  }
1255
1184
  });
1256
1185
 
1186
+ //#endregion
1187
+ //#region src/lib/config.ts
1188
+ const gpt5ExplorationPrompt = `## Exploration and reading files
1189
+ - **Think first.** Before any tool call, decide ALL files/resources you will need.
1190
+ - **Batch everything.** If you need multiple files (even from different places), read them together.
1191
+ - **multi_tool_use.parallel** Use multi_tool_use.parallel to parallelize tool calls and only this.
1192
+ - **Only make sequential calls if you truly cannot know the next file without seeing a result first.**
1193
+ - **Workflow:** (a) plan all needed reads → (b) issue one parallel batch → (c) analyze results → (d) repeat if new, unpredictable reads arise.`;
1194
+ const gpt5CommentaryPrompt = `# Working with the user
1195
+
1196
+ You interact with the user through a terminal. You have 2 ways of communicating with the users:
1197
+ - Share intermediary updates in \`commentary\` channel.
1198
+ - After you have completed all your work, send a message to the \`final\` channel.
1199
+
1200
+ ## Intermediary updates
1201
+
1202
+ - Intermediary updates go to the \`commentary\` channel.
1203
+ - User updates are short updates while you are working, they are NOT final answers.
1204
+ - You use 1-2 sentence user updates to communicate progress and new information to the user as you are doing work.
1205
+ - Do not begin responses with conversational interjections or meta commentary. Avoid openers such as acknowledgements ("Done —", "Got it", "Great question, ") or framing phrases.
1206
+ - You provide user updates frequently, every 20s.
1207
+ - Before exploring or doing substantial work, you start with a user update acknowledging the request and explaining your first step. You should include your understanding of the user request and explain what you will do. Avoid commenting on the request or using starters such as "Got it -" or "Understood -" etc.
1208
+ - When exploring, e.g. searching, reading files, you provide user updates as you go, every 20s, explaining what context you are gathering and what you've learned. Vary your sentence structure when providing these updates to avoid sounding repetitive - in particular, don't start each sentence the same way.
1209
+ - After you have sufficient context, and the work is substantial, you provide a longer plan (this is the only user update that may be longer than 2 sentences and can contain formatting).
1210
+ - Before performing file edits of any kind, you provide updates explaining what edits you are making.
1211
+ - As you are thinking, you very frequently provide updates even if not taking any actions, informing the user of your progress. You interrupt your thinking and send multiple updates in a row if thinking for more than 100 words.
1212
+ - Tone of your updates MUST match your personality.`;
1213
+ const defaultConfig = {
1214
+ auth: { apiKeys: [] },
1215
+ extraPrompts: {
1216
+ "gpt-5-mini": gpt5ExplorationPrompt,
1217
+ "gpt-5.1-codex-max": gpt5ExplorationPrompt,
1218
+ "gpt-5.3-codex": gpt5CommentaryPrompt
1219
+ },
1220
+ smallModel: "gpt-5-mini",
1221
+ modelReasoningEfforts: { "gpt-5-mini": "low" },
1222
+ useFunctionApplyPatch: true,
1223
+ compactUseSmallModel: true
1224
+ };
1225
+ let cachedConfig = null;
1226
+ function ensureConfigFile() {
1227
+ try {
1228
+ fs$1.accessSync(PATHS.CONFIG_PATH, fs$1.constants.R_OK | fs$1.constants.W_OK);
1229
+ } catch {
1230
+ fs$1.mkdirSync(PATHS.APP_DIR, { recursive: true });
1231
+ fs$1.writeFileSync(PATHS.CONFIG_PATH, `${JSON.stringify(defaultConfig, null, 2)}\n`, "utf8");
1232
+ try {
1233
+ fs$1.chmodSync(PATHS.CONFIG_PATH, 384);
1234
+ } catch {
1235
+ return;
1236
+ }
1237
+ }
1238
+ }
1239
+ function readConfigFromDisk() {
1240
+ ensureConfigFile();
1241
+ try {
1242
+ const raw = fs$1.readFileSync(PATHS.CONFIG_PATH, "utf8");
1243
+ if (!raw.trim()) {
1244
+ fs$1.writeFileSync(PATHS.CONFIG_PATH, `${JSON.stringify(defaultConfig, null, 2)}\n`, "utf8");
1245
+ return defaultConfig;
1246
+ }
1247
+ return JSON.parse(raw);
1248
+ } catch (error) {
1249
+ consola.error("Failed to read config file, using default config", error);
1250
+ return defaultConfig;
1251
+ }
1252
+ }
1253
+ function mergeDefaultExtraPrompts(config$1) {
1254
+ const extraPrompts = config$1.extraPrompts ?? {};
1255
+ const defaultExtraPrompts = defaultConfig.extraPrompts ?? {};
1256
+ if (Object.keys(defaultExtraPrompts).filter((model) => !Object.hasOwn(extraPrompts, model)).length === 0) return {
1257
+ mergedConfig: config$1,
1258
+ changed: false
1259
+ };
1260
+ return {
1261
+ mergedConfig: {
1262
+ ...config$1,
1263
+ extraPrompts: {
1264
+ ...defaultExtraPrompts,
1265
+ ...extraPrompts
1266
+ }
1267
+ },
1268
+ changed: true
1269
+ };
1270
+ }
1271
+ function mergeConfigWithDefaults() {
1272
+ const { mergedConfig, changed } = mergeDefaultExtraPrompts(readConfigFromDisk());
1273
+ if (changed) try {
1274
+ fs$1.writeFileSync(PATHS.CONFIG_PATH, `${JSON.stringify(mergedConfig, null, 2)}\n`, "utf8");
1275
+ } catch (writeError) {
1276
+ consola.warn("Failed to write merged extraPrompts to config file", writeError);
1277
+ }
1278
+ cachedConfig = mergedConfig;
1279
+ return mergedConfig;
1280
+ }
1281
+ function getConfig() {
1282
+ cachedConfig ??= readConfigFromDisk();
1283
+ return cachedConfig;
1284
+ }
1285
+ function getExtraPromptForModel(model) {
1286
+ return getConfig().extraPrompts?.[model] ?? "";
1287
+ }
1288
+ function getSmallModel() {
1289
+ return getConfig().smallModel ?? "gpt-5-mini";
1290
+ }
1291
+ function getReasoningEffortForModel(model) {
1292
+ return getConfig().modelReasoningEfforts?.[model] ?? "high";
1293
+ }
1294
+ function shouldCompactUseSmallModel() {
1295
+ return getConfig().compactUseSmallModel ?? true;
1296
+ }
1297
+
1257
1298
  //#endregion
1258
1299
  //#region src/lib/proxy.ts
1259
1300
  function initProxyFromEnv() {
@@ -1306,8 +1347,7 @@ function getShell() {
1306
1347
  const { platform, ppid, env } = process$1;
1307
1348
  if (platform === "win32") {
1308
1349
  try {
1309
- const command = `wmic process get ParentProcessId,Name | findstr "${ppid}"`;
1310
- if (execSync(command, { stdio: "pipe" }).toString().toLowerCase().includes("powershell.exe")) return "powershell";
1350
+ if (execSync(`wmic process get ParentProcessId,Name | findstr "${ppid}"`, { stdio: "pipe" }).toString().toLowerCase().includes("powershell.exe")) return "powershell";
1311
1351
  } catch {
1312
1352
  return "cmd";
1313
1353
  }
@@ -1353,6 +1393,51 @@ function generateEnvScript(envVars, commandToRun = "") {
1353
1393
  return commandBlock || commandToRun;
1354
1394
  }
1355
1395
 
1396
+ //#endregion
1397
+ //#region src/lib/request-auth.ts
1398
+ function normalizeApiKeys(apiKeys) {
1399
+ if (!Array.isArray(apiKeys)) {
1400
+ if (apiKeys !== void 0) consola.warn("Invalid auth.apiKeys config. Expected an array of strings.");
1401
+ return [];
1402
+ }
1403
+ const normalizedKeys = apiKeys.filter((key) => typeof key === "string").map((key) => key.trim()).filter((key) => key.length > 0);
1404
+ if (normalizedKeys.length !== apiKeys.length) consola.warn("Invalid auth.apiKeys entries found. Only non-empty strings are allowed.");
1405
+ return [...new Set(normalizedKeys)];
1406
+ }
1407
+ function getConfiguredApiKeys() {
1408
+ return normalizeApiKeys(getConfig().auth?.apiKeys);
1409
+ }
1410
+ function extractRequestApiKey(c) {
1411
+ const xApiKey = c.req.header("x-api-key")?.trim();
1412
+ if (xApiKey) return xApiKey;
1413
+ const authorization = c.req.header("authorization");
1414
+ if (!authorization) return null;
1415
+ const [scheme, ...rest] = authorization.trim().split(/\s+/);
1416
+ if (scheme.toLowerCase() !== "bearer") return null;
1417
+ return rest.join(" ").trim() || null;
1418
+ }
1419
+ function createUnauthorizedResponse(c) {
1420
+ c.header("WWW-Authenticate", "Bearer realm=\"copilot-api\"");
1421
+ return c.json({ error: {
1422
+ message: "Unauthorized",
1423
+ type: "authentication_error"
1424
+ } }, 401);
1425
+ }
1426
+ function createAuthMiddleware(options = {}) {
1427
+ const getApiKeys = options.getApiKeys ?? getConfiguredApiKeys;
1428
+ const allowUnauthenticatedPaths = options.allowUnauthenticatedPaths ?? ["/"];
1429
+ const allowOptionsBypass = options.allowOptionsBypass ?? true;
1430
+ return async (c, next) => {
1431
+ if (allowOptionsBypass && c.req.method === "OPTIONS") return next();
1432
+ if (allowUnauthenticatedPaths.includes(c.req.path)) return next();
1433
+ const apiKeys = getApiKeys();
1434
+ if (apiKeys.length === 0) return next();
1435
+ const requestApiKey = extractRequestApiKey(c);
1436
+ if (!requestApiKey || !apiKeys.includes(requestApiKey)) return createUnauthorizedResponse(c);
1437
+ return next();
1438
+ };
1439
+ }
1440
+
1356
1441
  //#endregion
1357
1442
  //#region src/lib/request-logger.ts
1358
1443
  const REQUEST_CONTEXT_KEY = "requestContext";
@@ -1413,8 +1498,7 @@ async function logRawRequest(c) {
1413
1498
  if (method !== "GET" && method !== "HEAD") try {
1414
1499
  const body = await c.req.raw.clone().text();
1415
1500
  if (body) try {
1416
- const parsed = JSON.parse(body);
1417
- const sanitized = sanitizeRequestBody(parsed);
1501
+ const sanitized = sanitizeRequestBody(JSON.parse(body));
1418
1502
  lines.push(`${colors.dim}Body (sanitized):${colors.reset}`, ` ${JSON.stringify(sanitized, null, 2).split("\n").join("\n ")}`);
1419
1503
  } catch {
1420
1504
  lines.push(`${colors.dim}Body:${colors.reset} [${body.length} bytes]`);
@@ -1467,14 +1551,6 @@ async function requestLogger(c, next) {
1467
1551
  lines.push(` ${colors.dim}${getTimeString()}${colors.reset}`);
1468
1552
  console.log(lines.join("\n"));
1469
1553
  }
1470
- /**
1471
- * Log token usage (for streaming responses where tokens are known after stream completes)
1472
- */
1473
- function logTokenUsage(inputTokens, outputTokens) {
1474
- const parts = [];
1475
- parts.push(` ${colors.gray}Tokens:${colors.reset} ${colors.yellow}${inputTokens.toLocaleString()} in${colors.reset} ${colors.gray}/${colors.reset} ${colors.green}${outputTokens.toLocaleString()} out${colors.reset}`);
1476
- console.log(parts.join(""));
1477
- }
1478
1554
 
1479
1555
  //#endregion
1480
1556
  //#region src/lib/approval.ts
@@ -1702,8 +1778,7 @@ const numTokensForTools = (tools, encoder, constants) => {
1702
1778
  * Calculate the token count of messages, supporting multiple GPT encoders
1703
1779
  */
1704
1780
  const getTokenCount = async (payload, model) => {
1705
- const tokenizer = getTokenizerFromModel(model);
1706
- const encoder = await getEncodeChatFunction(tokenizer);
1781
+ const encoder = await getEncodeChatFunction(getTokenizerFromModel(model));
1707
1782
  const simplifiedMessages = payload.messages;
1708
1783
  const inputMessages = simplifiedMessages.filter((msg) => msg.role !== "assistant");
1709
1784
  const outputMessages = simplifiedMessages.filter((msg) => msg.role === "assistant");
@@ -1719,13 +1794,17 @@ const getTokenCount = async (payload, model) => {
1719
1794
 
1720
1795
  //#endregion
1721
1796
  //#region src/services/copilot/create-chat-completions.ts
1722
- const createChatCompletions = async (payload) => {
1797
+ const createChatCompletions = async (payload, options) => {
1723
1798
  if (!state.copilotToken) throw new Error("Copilot token not found");
1724
1799
  const enableVision = payload.messages.some((x) => typeof x.content !== "string" && x.content?.some((x$1) => x$1.type === "image_url"));
1725
- const isAgentCall = payload.messages.some((msg) => ["assistant", "tool"].includes(msg.role));
1800
+ let isAgentCall = false;
1801
+ if (payload.messages.length > 0) {
1802
+ const lastMessage = payload.messages.at(-1);
1803
+ if (lastMessage) isAgentCall = ["assistant", "tool"].includes(lastMessage.role);
1804
+ }
1726
1805
  const headers = {
1727
1806
  ...copilotHeaders(state, enableVision),
1728
- "X-Initiator": isAgentCall ? "agent" : "user"
1807
+ "X-Initiator": options?.initiator ?? (isAgentCall ? "agent" : "user")
1729
1808
  };
1730
1809
  const response = await fetchWithRetry(`${copilotBaseUrl(state)}/chat/completions`, {
1731
1810
  method: "POST",
@@ -1744,8 +1823,7 @@ const createChatCompletions = async (payload) => {
1744
1823
  //#region src/routes/chat-completions/handler.ts
1745
1824
  async function handleCompletion$1(c) {
1746
1825
  await checkRateLimit(state);
1747
- const rawPayload = await c.req.json();
1748
- let payload = await applyReplacementsToPayload(rawPayload);
1826
+ let payload = await applyReplacementsToPayload(await c.req.json());
1749
1827
  payload = {
1750
1828
  ...payload,
1751
1829
  model: normalizeModelName(payload.model)
@@ -1759,7 +1837,7 @@ async function handleCompletion$1(c) {
1759
1837
  });
1760
1838
  if (state.manualApprove) await awaitApproval();
1761
1839
  const response$1 = await createAzureOpenAIChatCompletions(state.azureOpenAIConfig, payload);
1762
- if (isNonStreaming(response$1)) {
1840
+ if (isNonStreaming$1(response$1)) {
1763
1841
  consola.debug("Non-streaming response:", JSON.stringify(response$1));
1764
1842
  if (response$1.usage) setRequestContext(c, {
1765
1843
  inputTokens: response$1.usage.prompt_tokens,
@@ -1788,10 +1866,7 @@ async function handleCompletion$1(c) {
1788
1866
  });
1789
1867
  const selectedModel = state.models?.data.find((model) => model.id === payload.model);
1790
1868
  try {
1791
- if (selectedModel) {
1792
- const tokenCount = await getTokenCount(payload, selectedModel);
1793
- setRequestContext(c, { inputTokens: tokenCount.input });
1794
- }
1869
+ if (selectedModel) setRequestContext(c, { inputTokens: (await getTokenCount(payload, selectedModel)).input });
1795
1870
  } catch (error) {
1796
1871
  consola.warn("Failed to calculate token count:", error);
1797
1872
  }
@@ -1804,7 +1879,7 @@ async function handleCompletion$1(c) {
1804
1879
  consola.debug("Set max_tokens to:", JSON.stringify(payload.max_tokens));
1805
1880
  }
1806
1881
  const response = await createChatCompletions(payload);
1807
- if (isNonStreaming(response)) {
1882
+ if (isNonStreaming$1(response)) {
1808
1883
  consola.debug("Non-streaming response:", JSON.stringify(response));
1809
1884
  if (response.usage) setRequestContext(c, {
1810
1885
  inputTokens: response.usage.prompt_tokens,
@@ -1827,7 +1902,7 @@ async function handleCompletion$1(c) {
1827
1902
  }
1828
1903
  });
1829
1904
  }
1830
- const isNonStreaming = (response) => Object.hasOwn(response, "choices");
1905
+ const isNonStreaming$1 = (response) => Object.hasOwn(response, "choices");
1831
1906
 
1832
1907
  //#endregion
1833
1908
  //#region src/routes/chat-completions/route.ts
@@ -1858,8 +1933,7 @@ const createEmbeddings = async (payload) => {
1858
1933
  const embeddingRoutes = new Hono();
1859
1934
  embeddingRoutes.post("/", async (c) => {
1860
1935
  try {
1861
- const paylod = await c.req.json();
1862
- const response = await createEmbeddings(paylod);
1936
+ const response = await createEmbeddings(await c.req.json());
1863
1937
  return c.json(response);
1864
1938
  } catch (error) {
1865
1939
  return await forwardError(c, error);
@@ -2110,6 +2184,970 @@ async function handleCountTokens(c) {
2110
2184
  }
2111
2185
  }
2112
2186
 
2187
+ //#endregion
2188
+ //#region src/lib/logger.ts
2189
+ const LOG_RETENTION_MS = 10080 * 60 * 1e3;
2190
+ const CLEANUP_INTERVAL_MS = 1440 * 60 * 1e3;
2191
+ const LOG_DIR = path.join(PATHS.APP_DIR, "logs");
2192
+ const FLUSH_INTERVAL_MS = 1e3;
2193
+ const MAX_BUFFER_SIZE = 100;
2194
+ const logStreams = /* @__PURE__ */ new Map();
2195
+ const logBuffers = /* @__PURE__ */ new Map();
2196
+ const ensureLogDirectory = () => {
2197
+ if (!fs$1.existsSync(LOG_DIR)) fs$1.mkdirSync(LOG_DIR, { recursive: true });
2198
+ };
2199
+ const cleanupOldLogs = () => {
2200
+ if (!fs$1.existsSync(LOG_DIR)) return;
2201
+ const now = Date.now();
2202
+ for (const entry of fs$1.readdirSync(LOG_DIR)) {
2203
+ const filePath = path.join(LOG_DIR, entry);
2204
+ let stats;
2205
+ try {
2206
+ stats = fs$1.statSync(filePath);
2207
+ } catch {
2208
+ continue;
2209
+ }
2210
+ if (!stats.isFile()) continue;
2211
+ if (now - stats.mtimeMs > LOG_RETENTION_MS) try {
2212
+ fs$1.rmSync(filePath);
2213
+ } catch {
2214
+ continue;
2215
+ }
2216
+ }
2217
+ };
2218
+ const formatArgs = (args) => args.map((arg) => typeof arg === "string" ? arg : util.inspect(arg, {
2219
+ depth: null,
2220
+ colors: false
2221
+ })).join(" ");
2222
+ const sanitizeName = (name) => {
2223
+ const normalized = name.toLowerCase().replaceAll(/[^a-z0-9]+/g, "-").replaceAll(/^-+|-+$/g, "");
2224
+ return normalized === "" ? "handler" : normalized;
2225
+ };
2226
+ const getLogStream = (filePath) => {
2227
+ let stream = logStreams.get(filePath);
2228
+ if (!stream || stream.destroyed) {
2229
+ stream = fs$1.createWriteStream(filePath, { flags: "a" });
2230
+ logStreams.set(filePath, stream);
2231
+ stream.on("error", (error) => {
2232
+ console.warn("Log stream error", error);
2233
+ logStreams.delete(filePath);
2234
+ });
2235
+ }
2236
+ return stream;
2237
+ };
2238
+ const flushBuffer = (filePath) => {
2239
+ const buffer = logBuffers.get(filePath);
2240
+ if (!buffer || buffer.length === 0) return;
2241
+ const stream = getLogStream(filePath);
2242
+ const content = buffer.join("\n") + "\n";
2243
+ stream.write(content, (error) => {
2244
+ if (error) console.warn("Failed to write handler log", error);
2245
+ });
2246
+ logBuffers.set(filePath, []);
2247
+ };
2248
+ const flushAllBuffers = () => {
2249
+ for (const filePath of logBuffers.keys()) flushBuffer(filePath);
2250
+ };
2251
+ const appendLine = (filePath, line) => {
2252
+ let buffer = logBuffers.get(filePath);
2253
+ if (!buffer) {
2254
+ buffer = [];
2255
+ logBuffers.set(filePath, buffer);
2256
+ }
2257
+ buffer.push(line);
2258
+ if (buffer.length >= MAX_BUFFER_SIZE) flushBuffer(filePath);
2259
+ };
2260
+ setInterval(flushAllBuffers, FLUSH_INTERVAL_MS);
2261
+ const cleanup = () => {
2262
+ flushAllBuffers();
2263
+ for (const stream of logStreams.values()) stream.end();
2264
+ logStreams.clear();
2265
+ logBuffers.clear();
2266
+ };
2267
+ process.on("exit", cleanup);
2268
+ process.on("SIGINT", () => {
2269
+ cleanup();
2270
+ process.exit(0);
2271
+ });
2272
+ process.on("SIGTERM", () => {
2273
+ cleanup();
2274
+ process.exit(0);
2275
+ });
2276
+ let lastCleanup = 0;
2277
+ const createHandlerLogger = (name) => {
2278
+ ensureLogDirectory();
2279
+ const sanitizedName = sanitizeName(name);
2280
+ const instance = consola.withTag(name);
2281
+ if (state.verbose) instance.level = 5;
2282
+ instance.setReporters([]);
2283
+ instance.addReporter({ log(logObj) {
2284
+ ensureLogDirectory();
2285
+ if (Date.now() - lastCleanup > CLEANUP_INTERVAL_MS) {
2286
+ cleanupOldLogs();
2287
+ lastCleanup = Date.now();
2288
+ }
2289
+ const date = logObj.date;
2290
+ const dateKey = date.toLocaleDateString("sv-SE");
2291
+ const timestamp = date.toLocaleString("sv-SE", { hour12: false });
2292
+ const filePath = path.join(LOG_DIR, `${sanitizedName}-${dateKey}.log`);
2293
+ const message = formatArgs(logObj.args);
2294
+ appendLine(filePath, `[${timestamp}] [${logObj.type}] [${logObj.tag || name}]${message ? ` ${message}` : ""}`);
2295
+ } });
2296
+ return instance;
2297
+ };
2298
+
2299
+ //#endregion
2300
+ //#region src/services/copilot/create-responses.ts
2301
+ const createResponses = async (payload, { vision, initiator }) => {
2302
+ if (!state.copilotToken) throw new Error("Copilot token not found");
2303
+ const headers = {
2304
+ ...copilotHeaders(state, vision),
2305
+ "X-Initiator": initiator
2306
+ };
2307
+ payload.service_tier = null;
2308
+ const response = await fetch(`${copilotBaseUrl(state)}/responses`, {
2309
+ method: "POST",
2310
+ headers,
2311
+ body: JSON.stringify(payload)
2312
+ });
2313
+ if (!response.ok) {
2314
+ consola.error("Failed to create responses", response);
2315
+ throw new HTTPError("Failed to create responses", response);
2316
+ }
2317
+ if (payload.stream) return events(response);
2318
+ return await response.json();
2319
+ };
2320
+
2321
+ //#endregion
2322
+ //#region src/routes/messages/responses-translation.ts
2323
+ const MESSAGE_TYPE = "message";
2324
+ const CODEX_PHASE_MODEL = "gpt-5.3-codex";
2325
+ const THINKING_TEXT = "Thinking...";
2326
+ const translateAnthropicMessagesToResponsesPayload = (payload) => {
2327
+ const input = [];
2328
+ for (const message of payload.messages) input.push(...translateMessage(message, payload.model));
2329
+ const translatedTools = convertAnthropicTools(payload.tools);
2330
+ const toolChoice = convertAnthropicToolChoice(payload.tool_choice);
2331
+ const { safetyIdentifier, promptCacheKey } = parseUserId(payload.metadata?.user_id);
2332
+ return {
2333
+ model: payload.model,
2334
+ input,
2335
+ instructions: translateSystemPrompt(payload.system, payload.model),
2336
+ temperature: 1,
2337
+ top_p: payload.top_p ?? null,
2338
+ max_output_tokens: Math.max(payload.max_tokens, 12800),
2339
+ tools: translatedTools,
2340
+ tool_choice: toolChoice,
2341
+ metadata: payload.metadata ? { ...payload.metadata } : null,
2342
+ safety_identifier: safetyIdentifier,
2343
+ prompt_cache_key: promptCacheKey,
2344
+ stream: payload.stream ?? null,
2345
+ store: false,
2346
+ parallel_tool_calls: true,
2347
+ reasoning: {
2348
+ effort: getReasoningEffortForModel(payload.model),
2349
+ summary: "detailed"
2350
+ },
2351
+ include: ["reasoning.encrypted_content"]
2352
+ };
2353
+ };
2354
// Route one Anthropic message to the role-specific translator.
const translateMessage = (message, model) =>
  message.role === "user"
    ? translateUserMessage(message)
    : translateAssistantMessage(message, model);

// Translate a user message: tool_result blocks become function_call_output
// items; runs of text/image blocks are batched into single message items.
const translateUserMessage = (message) => {
  const { content } = message;
  if (typeof content === "string") return [createMessage("user", content)];
  if (!Array.isArray(content)) return [];
  const items = [];
  const buffered = [];
  for (const block of content) {
    if (block.type === "tool_result") {
      flushPendingContent(buffered, items, { role: "user" });
      items.push(createFunctionCallOutput(block));
    } else {
      const converted = translateUserContentBlock(block);
      if (converted) buffered.push(converted);
    }
  }
  flushPendingContent(buffered, items, { role: "user" });
  return items;
};

// Translate an assistant message: tool_use blocks become function calls,
// signed thinking blocks become reasoning items, text is batched.
const translateAssistantMessage = (message, model) => {
  const phase = resolveAssistantPhase(model, message.content);
  if (typeof message.content === "string") return [createMessage("assistant", message.content, phase)];
  if (!Array.isArray(message.content)) return [];
  const items = [];
  const buffered = [];
  const flush = () => flushPendingContent(buffered, items, { role: "assistant", phase });
  for (const block of message.content) {
    if (block.type === "tool_use") {
      flush();
      items.push(createFunctionToolCall(block));
      continue;
    }
    // Only thinking blocks whose signature carries an "@"-joined item id can
    // be reconstructed as Responses reasoning items.
    if (block.type === "thinking" && block.signature?.includes("@")) {
      flush();
      items.push(createReasoningContent(block));
      continue;
    }
    const converted = translateAssistantContentBlock(block);
    if (converted) buffered.push(converted);
  }
  flush();
  return items;
};
2407
// User blocks: only text and images survive translation.
const translateUserContentBlock = (block) => {
  if (block.type === "text") return createTextContent(block.text);
  if (block.type === "image") return createImageContent(block);
  return undefined;
};

// Assistant blocks: only text survives translation here.
const translateAssistantContentBlock = (block) =>
  block.type === "text" ? createOutPutTextContent(block.text) : undefined;

// Drain buffered content into a single message item appended to `target`.
const flushPendingContent = (pendingContent, target, message) => {
  if (pendingContent.length > 0) {
    target.push(createMessage(message.role, [...pendingContent], message.phase));
    pendingContent.length = 0;
  }
};

// Build a message item; the phase tag only applies to assistant turns.
const createMessage = (role, content, phase) => {
  const item = { type: MESSAGE_TYPE, role, content };
  if (role === "assistant" && phase) item.phase = phase;
  return item;
};

// Codex-style models annotate assistant turns with a phase: text alongside a
// tool call is "commentary", text alone is the "final_answer".
const resolveAssistantPhase = (model, content) => {
  if (!shouldApplyCodexPhase(model)) return undefined;
  if (typeof content === "string") return "final_answer";
  if (!Array.isArray(content)) return undefined;
  const hasText = content.some((block) => block.type === "text");
  if (!hasText) return undefined;
  const hasToolUse = content.some((block) => block.type === "tool_use");
  return hasToolUse ? "commentary" : "final_answer";
};

const shouldApplyCodexPhase = (model) => model === CODEX_PHASE_MODEL;
2440
// Responses input text part.
const createTextContent = (text) => ({ type: "input_text", text });

// Responses output text part. NOTE: the "OutPut" casing is kept because
// sibling code references this exact name.
const createOutPutTextContent = (text) => ({ type: "output_text", text });

// Responses input image part, inlined as a data URL.
const createImageContent = (block) => ({
  type: "input_image",
  image_url: `data:${block.source.media_type};base64,${block.source.data}`,
  detail: "auto"
});
2453
// Rebuild a Responses reasoning item from a thinking block whose signature
// encodes "<encrypted_content>@<item id>" (see mapOutputToAnthropicContent).
const createReasoningContent = (block) => {
  const [signature, id] = (block.signature ?? "").split("@");
  // The placeholder thinking text is not real summary content; drop it.
  const thinking = block.thinking === THINKING_TEXT ? "" : block.thinking;
  const summary = thinking ? [{ type: "summary_text", text: thinking }] : [];
  return {
    id,
    type: "reasoning",
    summary,
    encrypted_content: signature
  };
};

// Translate a tool_use block into a completed Responses function call.
const createFunctionToolCall = (block) => ({
  type: "function_call",
  call_id: block.id,
  name: block.name,
  arguments: JSON.stringify(block.input),
  status: "completed"
});

// Translate a tool_result block into a function call output item; errors are
// reported as "incomplete".
const createFunctionCallOutput = (block) => ({
  type: "function_call_output",
  call_id: block.tool_use_id,
  output: convertToolResultContent(block.content),
  status: block.is_error ? "incomplete" : "completed"
});
2481
// Flatten an Anthropic system prompt (string or block array) into Responses
// `instructions`, appending the model-specific extra prompt to the first part.
const translateSystemPrompt = (system, model) => {
  if (!system) return null;
  const extraPrompt = getExtraPromptForModel(model);
  if (typeof system === "string") return system + extraPrompt;
  const parts = system.map((block, index) =>
    index === 0 ? block.text + extraPrompt : block.text
  );
  const joined = parts.join(" ");
  return joined.length > 0 ? joined : null;
};
2491
// Map Anthropic tool definitions onto Responses function tools; null when
// there is nothing to convert.
const convertAnthropicTools = (tools) => {
  if (!tools?.length) return null;
  return tools.map((tool) => {
    const converted = {
      type: "function",
      name: tool.name,
      parameters: tool.input_schema,
      strict: false
    };
    if (tool.description) converted.description = tool.description;
    return converted;
  });
};

// Map Anthropic tool_choice onto the Responses equivalent; anything
// unrecognized falls back to "auto".
const convertAnthropicToolChoice = (choice) => {
  if (choice?.type === "tool") {
    return choice.name ? { type: "function", name: choice.name } : "auto";
  }
  const mapping = { auto: "auto", any: "required", none: "none" };
  return (choice && mapping[choice.type]) || "auto";
};
2514
// Convert a final (non-streamed) Responses result into an Anthropic message,
// falling back to raw output_text when no structured content mapped over.
const translateResponsesResultToAnthropic = (response) => {
  const mapped = mapOutputToAnthropicContent(response.output);
  const content = mapped.length > 0 ? mapped : fallbackContentBlocks(response.output_text);
  return {
    id: response.id,
    type: "message",
    role: "assistant",
    content,
    model: response.model,
    stop_reason: mapResponsesStopReason(response),
    stop_sequence: null,
    usage: mapResponsesUsage(response)
  };
};
2531
// Map Responses output items onto Anthropic content blocks. The original
// switch had identical bodies for "message" and the default case, so both
// flatten to a text block here.
const mapOutputToAnthropicContent = (output) => {
  const blocks = [];
  for (const item of output) {
    if (item.type === "reasoning") {
      const thinking = extractReasoningText(item);
      if (thinking.length > 0) {
        blocks.push({
          type: "thinking",
          thinking,
          // "<encrypted>@<id>" round-trips through createReasoningContent.
          signature: `${item.encrypted_content ?? ""}@${item.id}`
        });
      }
    } else if (item.type === "function_call") {
      const toolUse = createToolUseContentBlock(item);
      if (toolUse) blocks.push(toolUse);
    } else {
      const text = combineMessageTextContent(item.content);
      if (text.length > 0) blocks.push({ type: "text", text });
    }
  }
  return blocks;
};
2566
// Concatenate all textual parts of a message item: output_text, refusal
// text, plus loose `text`/`reasoning` string fields on unknown shapes.
const combineMessageTextContent = (content) => {
  if (!Array.isArray(content)) return "";
  const parts = [];
  for (const block of content) {
    if (isResponseOutputText(block)) {
      parts.push(block.text);
    } else if (isResponseOutputRefusal(block)) {
      parts.push(block.refusal);
    } else if (typeof block.text === "string") {
      parts.push(block.text);
    } else if (typeof block.reasoning === "string") {
      parts.push(block.reasoning);
    }
  }
  return parts.join("");
};
2589
// Join all summary_text segments of a reasoning item. A missing/empty
// summary yields the THINKING_TEXT placeholder so the block still renders.
const extractReasoningText = (item) => {
  if (!item.summary || item.summary.length === 0) return THINKING_TEXT;
  if (!Array.isArray(item.summary)) return "";
  const segments = [];
  for (const block of item.summary) {
    if (typeof block.text === "string") segments.push(block.text);
  }
  return segments.join("").trim();
};

// Build an Anthropic tool_use block from a Responses function call; null
// when the call lacks a name or call_id.
const createToolUseContentBlock = (call) => {
  if (!call.name || !call.call_id) return null;
  return {
    type: "tool_use",
    id: call.call_id,
    name: call.name,
    input: parseFunctionCallArguments(call.arguments)
  };
};
2613
// Parse a function call's JSON arguments defensively: objects pass through,
// arrays get wrapped, and anything unparsable is preserved verbatim.
const parseFunctionCallArguments = (rawArguments) => {
  const isNonEmptyString = typeof rawArguments === "string" && rawArguments.trim().length > 0;
  if (!isNonEmptyString) return {};
  try {
    const decoded = JSON.parse(rawArguments);
    if (Array.isArray(decoded)) return { arguments: decoded };
    if (decoded !== null && typeof decoded === "object") return decoded;
  } catch (error) {
    consola.warn("Failed to parse function call arguments", {
      error,
      rawArguments
    });
  }
  // JSON primitives and broken JSON both fall back to the raw string.
  return { raw_arguments: rawArguments };
};
2627
// Wrap raw output_text in a single Anthropic text block (empty when absent).
const fallbackContentBlocks = (outputText) =>
  outputText ? [{ type: "text", text: outputText }] : [];

// Map a Responses terminal status onto an Anthropic stop_reason.
const mapResponsesStopReason = (response) => {
  const { status, incomplete_details: incompleteDetails } = response;
  if (status === "completed") {
    const calledTool = response.output.some((item) => item.type === "function_call");
    return calledTool ? "tool_use" : "end_turn";
  }
  if (status === "incomplete") {
    if (incompleteDetails?.reason === "max_output_tokens") return "max_tokens";
    if (incompleteDetails?.reason === "content_filter") return "end_turn";
  }
  return null;
};

// Convert Responses usage to Anthropic's shape: cached input tokens are
// subtracted from input_tokens and reported as cache_read_input_tokens.
const mapResponsesUsage = (response) => {
  const usage = response.usage;
  const cachedTokens = usage?.input_tokens_details?.cached_tokens;
  const result = {
    input_tokens: (usage?.input_tokens ?? 0) - (cachedTokens ?? 0),
    output_tokens: usage?.output_tokens ?? 0
  };
  if (cachedTokens !== undefined) result.cache_read_input_tokens = cachedTokens;
  return result;
};

// Narrowing helpers for loosely typed stream/content blocks.
const isRecord = (value) => value !== null && typeof value === "object";
const isResponseOutputText = (block) => isRecord(block) && block.type === "output_text";
const isResponseOutputRefusal = (block) => isRecord(block) && block.type === "refusal";
2658
// Split a client user_id of the form "user_<id>_account..._session_<sid>"
// into the safety identifier and prompt cache key; nulls when absent.
const parseUserId = (userId) => {
  const empty = { safetyIdentifier: null, promptCacheKey: null };
  if (typeof userId !== "string" || userId.length === 0) return empty;
  const userMatch = /user_([^_]+)_account/.exec(userId);
  const sessionMatch = /_session_(.+)$/.exec(userId);
  return {
    safetyIdentifier: userMatch ? userMatch[1] : null,
    promptCacheKey: sessionMatch ? sessionMatch[1] : null
  };
};
2671
// Normalize tool_result content for function_call_output: strings pass
// through, block arrays keep only text/image parts, anything else is "".
const convertToolResultContent = (content) => {
  if (typeof content === "string") return content;
  if (!Array.isArray(content)) return "";
  const converted = [];
  for (const block of content) {
    if (block.type === "text") {
      converted.push(createTextContent(block.text));
    } else if (block.type === "image") {
      converted.push(createImageContent(block));
    }
  }
  return converted;
};
2688
+
2689
+ //#endregion
2690
+ //#region src/routes/messages/responses-stream-translation.ts
2691
// Streams are aborted when function-call argument deltas contain a run of
// whitespace longer than this — presumably guarding against degenerate
// model output; TODO confirm the threshold's origin.
const MAX_CONSECUTIVE_FUNCTION_CALL_WHITESPACE = 20;
// Internal error type signalling invalid streamed function-call arguments;
// it is converted into an Anthropic "error" event rather than thrown to callers.
var FunctionCallArgumentsValidationError = class extends Error {
  constructor(message) {
    super(message);
    this.name = "FunctionCallArgumentsValidationError";
  }
};
2698
// Track runs of consecutive whitespace (space, CR, LF) across streamed
// function-call-argument chunks.
//
// Fix: the original ended the loop body with `if (char !== " ") count = 0;`
// — unreachable-false, since the preceding branch already `continue`s on
// every space — so the dead guard is removed. Behavior is unchanged: any
// non-counted character (including tabs) still resets the run.
//
// @param previousCount run length carried over from the previous chunk
// @param chunk         the newly received argument text
// @returns { nextCount, exceeded } — updated run length and whether it
//          crossed MAX_CONSECUTIVE_FUNCTION_CALL_WHITESPACE inside this chunk
const updateWhitespaceRunState = (previousCount, chunk) => {
  let count = previousCount;
  for (const char of chunk) {
    if (char === "\r" || char === "\n" || char === " ") {
      count += 1;
      if (count > MAX_CONSECUTIVE_FUNCTION_CALL_WHITESPACE) {
        return { nextCount: count, exceeded: true };
      }
    } else {
      count = 0;
    }
  }
  return { nextCount: count, exceeded: false };
};
2716
// Fresh per-request accumulator used while translating a Responses SSE
// stream into Anthropic stream events.
const createResponsesStreamState = () => {
  return {
    messageStartSent: false,
    messageCompleted: false,
    nextContentBlockIndex: 0,
    blockIndexByKey: new Map(),
    openBlocks: new Set(),
    blockHasDelta: new Set(),
    functionCallStateByOutputIndex: new Map()
  };
};
2725
// Dispatch one raw Responses SSE event to its translator; unknown event
// types yield no Anthropic events.
const translateResponsesStreamEvent = (rawEvent, state$1) => {
  switch (rawEvent.type) {
    case "response.created":
      return handleResponseCreated(rawEvent, state$1);
    case "response.output_item.added":
      return handleOutputItemAdded$1(rawEvent, state$1);
    case "response.output_item.done":
      return handleOutputItemDone$1(rawEvent, state$1);
    case "response.reasoning_summary_text.delta":
      return handleReasoningSummaryTextDelta(rawEvent, state$1);
    case "response.reasoning_summary_text.done":
      return handleReasoningSummaryTextDone(rawEvent, state$1);
    case "response.output_text.delta":
      return handleOutputTextDelta(rawEvent, state$1);
    case "response.output_text.done":
      return handleOutputTextDone(rawEvent, state$1);
    case "response.function_call_arguments.delta":
      return handleFunctionCallArgumentsDelta(rawEvent, state$1);
    case "response.function_call_arguments.done":
      return handleFunctionCallArgumentsDone(rawEvent, state$1);
    case "response.completed":
    case "response.incomplete":
      return handleResponseCompleted(rawEvent, state$1);
    case "response.failed":
      return handleResponseFailed(rawEvent, state$1);
    case "error":
      return handleErrorEvent(rawEvent, state$1);
    default:
      return [];
  }
};
2743
// The stream's first event carries the response header; emit message_start.
const handleResponseCreated = (rawEvent, state$1) => messageStart(state$1, rawEvent.response);

// When a function_call output item appears, open its tool_use block and emit
// any arguments that arrived inline with the item.
const handleOutputItemAdded$1 = (rawEvent, state$1) => {
  const out = [];
  const details = extractFunctionCallDetails(rawEvent);
  if (!details) return out;
  const blockIndex = openFunctionCallBlock(state$1, {
    outputIndex: details.outputIndex,
    toolCallId: details.toolCallId,
    name: details.name,
    events: out
  });
  const { initialArguments } = details;
  if (initialArguments !== undefined && initialArguments.length > 0) {
    out.push({
      type: "content_block_delta",
      index: blockIndex,
      delta: {
        type: "input_json_delta",
        partial_json: initialArguments
      }
    });
    state$1.blockHasDelta.add(blockIndex);
  }
  return out;
};
2770
// A completed reasoning item carries the encrypted payload; replay it as a
// signature_delta (with placeholder thinking text when no summary streamed).
//
// Fix: the original wrapped the emission in `if (signature)`, but the
// signature is built as `(encrypted ?? "") + "@" + id` and therefore always
// a non-empty (truthy) string — the guard was dead and has been removed.
const handleOutputItemDone$1 = (rawEvent, state$1) => {
  const out = [];
  const item = rawEvent.item;
  if (item.type !== "reasoning") return out;
  const blockIndex = openThinkingBlockIfNeeded(state$1, rawEvent.output_index, out);
  // Format mirrors createReasoningContent: "<encrypted_content>@<item id>".
  const signature = `${item.encrypted_content ?? ""}@${item.id}`;
  // Without any streamed summary, emit placeholder text so the thinking
  // block is not empty when the signature lands.
  if (!item.summary || item.summary.length === 0) {
    out.push({
      type: "content_block_delta",
      index: blockIndex,
      delta: {
        type: "thinking_delta",
        thinking: THINKING_TEXT
      }
    });
  }
  out.push({
    type: "content_block_delta",
    index: blockIndex,
    delta: {
      type: "signature_delta",
      signature
    }
  });
  state$1.blockHasDelta.add(blockIndex);
  return out;
};
2798
// Translate an incremental function-call-arguments chunk into an Anthropic
// input_json_delta, aborting the whole stream if validation fails.
const handleFunctionCallArgumentsDelta = (rawEvent, state$1) => {
  const events$1 = new Array();
  const outputIndex = rawEvent.output_index;
  const deltaText = rawEvent.delta;
  if (!deltaText) return events$1;
  const blockIndex = openFunctionCallBlock(state$1, {
    outputIndex,
    events: events$1
  });
  const functionCallState = state$1.functionCallStateByOutputIndex.get(outputIndex);
  // openFunctionCallBlock registers state for this output index just above,
  // so a miss here means the stream arrived out of order — surface an error.
  if (!functionCallState) return handleFunctionCallArgumentsValidationError(new FunctionCallArgumentsValidationError("Received function call arguments delta without an open tool call block."), state$1, events$1);
  // Guard against runaway whitespace inside streamed arguments; the run
  // count persists across chunks via functionCallState.
  const { nextCount, exceeded } = updateWhitespaceRunState(functionCallState.consecutiveWhitespaceCount, deltaText);
  if (exceeded) return handleFunctionCallArgumentsValidationError(new FunctionCallArgumentsValidationError("Received function call arguments delta containing more than 20 consecutive whitespace characters."), state$1, events$1);
  functionCallState.consecutiveWhitespaceCount = nextCount;
  events$1.push({
    type: "content_block_delta",
    index: blockIndex,
    delta: {
      type: "input_json_delta",
      partial_json: deltaText
    }
  });
  state$1.blockHasDelta.add(blockIndex);
  return events$1;
};
// Finalize a function call's arguments: if no deltas were ever emitted for
// this block, emit the full argument string once; then drop per-call state.
const handleFunctionCallArgumentsDone = (rawEvent, state$1) => {
  const events$1 = new Array();
  const outputIndex = rawEvent.output_index;
  const blockIndex = openFunctionCallBlock(state$1, {
    outputIndex,
    events: events$1
  });
  const finalArguments = typeof rawEvent.arguments === "string" ? rawEvent.arguments : void 0;
  if (!state$1.blockHasDelta.has(blockIndex) && finalArguments) {
    events$1.push({
      type: "content_block_delta",
      index: blockIndex,
      delta: {
        type: "input_json_delta",
        partial_json: finalArguments
      }
    });
    state$1.blockHasDelta.add(blockIndex);
  }
  state$1.functionCallStateByOutputIndex.delete(outputIndex);
  return events$1;
};
2845
// Forward a streamed text chunk as an Anthropic text_delta, opening the
// corresponding text block on first sight.
const handleOutputTextDelta = (rawEvent, state$1) => {
  const out = [];
  const deltaText = rawEvent.delta;
  if (!deltaText) return out;
  const blockIndex = openTextBlockIfNeeded(state$1, {
    outputIndex: rawEvent.output_index,
    contentIndex: rawEvent.content_index,
    events: out
  });
  out.push({
    type: "content_block_delta",
    index: blockIndex,
    delta: {
      type: "text_delta",
      text: deltaText
    }
  });
  state$1.blockHasDelta.add(blockIndex);
  return out;
};

// Forward a streamed reasoning-summary chunk as a thinking_delta.
const handleReasoningSummaryTextDelta = (rawEvent, state$1) => {
  const out = [];
  const blockIndex = openThinkingBlockIfNeeded(state$1, rawEvent.output_index, out);
  out.push({
    type: "content_block_delta",
    index: blockIndex,
    delta: {
      type: "thinking_delta",
      thinking: rawEvent.delta
    }
  });
  state$1.blockHasDelta.add(blockIndex);
  return out;
};

// On summary completion, emit the whole text only if no deltas streamed for
// this block (some upstreams send just the .done event).
const handleReasoningSummaryTextDone = (rawEvent, state$1) => {
  const out = [];
  const blockIndex = openThinkingBlockIfNeeded(state$1, rawEvent.output_index, out);
  if (rawEvent.text && !state$1.blockHasDelta.has(blockIndex)) {
    out.push({
      type: "content_block_delta",
      index: blockIndex,
      delta: {
        type: "thinking_delta",
        thinking: rawEvent.text
      }
    });
  }
  return out;
};
2898
// On text completion, emit the full text only when no deltas were streamed
// for this block — avoids duplicating content already sent incrementally.
const handleOutputTextDone = (rawEvent, state$1) => {
  const out = [];
  const blockIndex = openTextBlockIfNeeded(state$1, {
    outputIndex: rawEvent.output_index,
    contentIndex: rawEvent.content_index,
    events: out
  });
  if (rawEvent.text && !state$1.blockHasDelta.has(blockIndex)) {
    out.push({
      type: "content_block_delta",
      index: blockIndex,
      delta: {
        type: "text_delta",
        text: rawEvent.text
      }
    });
  }
  return out;
};
2918
// Close every open block, then emit the final message_delta/message_stop
// pair derived from the completed (or incomplete) response.
const handleResponseCompleted = (rawEvent, state$1) => {
  const out = [];
  closeAllOpenBlocks(state$1, out);
  const anthropic = translateResponsesResultToAnthropic(rawEvent.response);
  out.push(
    {
      type: "message_delta",
      delta: {
        stop_reason: anthropic.stop_reason,
        stop_sequence: anthropic.stop_sequence
      },
      usage: anthropic.usage
    },
    { type: "message_stop" }
  );
  state$1.messageCompleted = true;
  return out;
};

// A failed response closes all blocks and surfaces an api_error event.
const handleResponseFailed = (rawEvent, state$1) => {
  const out = [];
  closeAllOpenBlocks(state$1, out);
  const reason = rawEvent.response.error?.message ?? "The response failed due to an unknown error.";
  out.push(buildErrorEvent(reason));
  state$1.messageCompleted = true;
  return out;
};

// A raw "error" stream event terminates the message with an api_error.
const handleErrorEvent = (rawEvent, state$1) => {
  const fallback = "An unexpected error occurred during streaming.";
  const message = typeof rawEvent.message === "string" ? rawEvent.message : fallback;
  state$1.messageCompleted = true;
  return [buildErrorEvent(message)];
};

// Abort the stream after invalid function-call arguments: close blocks,
// mark the message complete, and append an error event to `events$1`.
const handleFunctionCallArgumentsValidationError = (error, state$1, events$1 = []) => {
  closeAllOpenBlocks(state$1, events$1);
  state$1.messageCompleted = true;
  events$1.push(buildErrorEvent(error.message));
  return events$1;
};
2955
// Emit the initial message_start event. Cached input tokens are subtracted
// from input_tokens and surfaced as cache_read_input_tokens instead.
const messageStart = (state$1, response) => {
  state$1.messageStartSent = true;
  const cachedTokens = response.usage?.input_tokens_details?.cached_tokens;
  const inputTokens = (response.usage?.input_tokens ?? 0) - (cachedTokens ?? 0);
  return [{
    type: "message_start",
    message: {
      id: response.id,
      type: "message",
      role: "assistant",
      content: [],
      model: response.model,
      stop_reason: null,
      stop_sequence: null,
      usage: {
        input_tokens: inputTokens,
        output_tokens: 0,
        cache_read_input_tokens: cachedTokens ?? 0
      }
    }
  }];
};
2977
// Shared allocator: return the stable content-block index for `key`,
// assigning the next free index on first sight.
const allocateBlockIndex = (state$1, key) => {
  let blockIndex = state$1.blockIndexByKey.get(key);
  if (blockIndex === undefined) {
    blockIndex = state$1.nextContentBlockIndex;
    state$1.nextContentBlockIndex += 1;
    state$1.blockIndexByKey.set(key, blockIndex);
  }
  return blockIndex;
};

// Ensure a text content block is open for (outputIndex, contentIndex);
// any other open blocks are closed first, since Anthropic blocks never
// interleave.
const openTextBlockIfNeeded = (state$1, params) => {
  const { outputIndex, contentIndex, events: events$1 } = params;
  const blockIndex = allocateBlockIndex(state$1, getBlockKey(outputIndex, contentIndex));
  if (!state$1.openBlocks.has(blockIndex)) {
    closeOpenBlocks(state$1, events$1);
    events$1.push({
      type: "content_block_start",
      index: blockIndex,
      content_block: {
        type: "text",
        text: ""
      }
    });
    state$1.openBlocks.add(blockIndex);
  }
  return blockIndex;
};

// Ensure a thinking block is open for outputIndex (content index fixed at 0).
const openThinkingBlockIfNeeded = (state$1, outputIndex, events$1) => {
  const blockIndex = allocateBlockIndex(state$1, getBlockKey(outputIndex, 0));
  if (!state$1.openBlocks.has(blockIndex)) {
    closeOpenBlocks(state$1, events$1);
    events$1.push({
      type: "content_block_start",
      index: blockIndex,
      content_block: {
        type: "thinking",
        thinking: ""
      }
    });
    state$1.openBlocks.add(blockIndex);
  }
  return blockIndex;
};
3022
// Emit content_block_stop for blockIndex if it is open, clearing its
// bookkeeping.
const closeBlockIfOpen = (state$1, blockIndex, events$1) => {
  if (!state$1.openBlocks.has(blockIndex)) return;
  events$1.push({
    type: "content_block_stop",
    index: blockIndex
  });
  state$1.openBlocks.delete(blockIndex);
  state$1.blockHasDelta.delete(blockIndex);
};

// Close every currently open block (iterate over a snapshot since
// closeBlockIfOpen mutates the set).
const closeOpenBlocks = (state$1, events$1) => {
  for (const blockIndex of [...state$1.openBlocks]) {
    closeBlockIfOpen(state$1, blockIndex, events$1);
  }
};

// Close all blocks and drop any in-flight function-call tracking.
const closeAllOpenBlocks = (state$1, events$1) => {
  closeOpenBlocks(state$1, events$1);
  state$1.functionCallStateByOutputIndex.clear();
};

// Anthropic-shaped api_error stream event.
const buildErrorEvent = (message) => ({
  type: "error",
  error: {
    type: "api_error",
    message
  }
});

// Stable key for a (output item, content part) pair.
const getBlockKey = (outputIndex, contentIndex) => `${outputIndex}:${contentIndex}`;
3046
// Ensure a tool_use content block exists and is open for the function call
// at `outputIndex`, creating its tracking state (with fallback id/name) on
// first use.
const openFunctionCallBlock = (state$1, params) => {
  const { outputIndex, toolCallId, name, events: events$1 } = params;
  let callState = state$1.functionCallStateByOutputIndex.get(outputIndex);
  if (!callState) {
    const newIndex = state$1.nextContentBlockIndex;
    state$1.nextContentBlockIndex += 1;
    callState = {
      blockIndex: newIndex,
      toolCallId: toolCallId ?? `tool_call_${newIndex}`,
      name: name ?? "function",
      consecutiveWhitespaceCount: 0
    };
    state$1.functionCallStateByOutputIndex.set(outputIndex, callState);
  }
  const { blockIndex } = callState;
  if (!state$1.openBlocks.has(blockIndex)) {
    closeOpenBlocks(state$1, events$1);
    events$1.push({
      type: "content_block_start",
      index: blockIndex,
      content_block: {
        type: "tool_use",
        id: callState.toolCallId,
        name: callState.name,
        input: {}
      }
    });
    state$1.openBlocks.add(blockIndex);
  }
  return blockIndex;
};
3077
// Pull the fields needed to open a tool_use block from an
// output_item.added event; undefined for non-function_call items.
const extractFunctionCallDetails = (rawEvent) => {
  const { item } = rawEvent;
  if (item.type !== "function_call") return undefined;
  return {
    outputIndex: rawEvent.output_index,
    toolCallId: item.call_id,
    name: item.name,
    initialArguments: item.arguments
  };
};
3087
+
3088
+ //#endregion
3089
+ //#region src/routes/responses/utils.ts
3090
// Derive per-request options for the Copilot Responses endpoint.
const getResponsesRequestOptions = (payload) => ({
  vision: hasVisionInput(payload),
  initiator: hasAgentInitiator(payload) ? "agent" : "user"
});

// Agent-initiated when the last input item is an assistant turn, or carries
// no usable role at all.
const hasAgentInitiator = (payload) => {
  const lastItem = getPayloadItems(payload).at(-1);
  if (!lastItem) return false;
  if (!("role" in lastItem) || !lastItem.role) return true;
  const role = typeof lastItem.role === "string" ? lastItem.role.toLowerCase() : "";
  return role === "assistant";
};

// True when any input item (recursively) contains an image part.
const hasVisionInput = (payload) => getPayloadItems(payload).some((item) => containsVisionContent(item));

// Normalize payload.input to an array of items; string inputs yield none.
const getPayloadItems = (payload) => {
  const { input } = payload;
  return Array.isArray(input) ? [...input] : [];
};

// Recursively scan a value and its `content` arrays for an input_image part
// (type match is case-insensitive).
const containsVisionContent = (value) => {
  if (!value) return false;
  if (Array.isArray(value)) return value.some((entry) => containsVisionContent(entry));
  if (typeof value !== "object") return false;
  const partType = typeof value.type === "string" ? value.type.toLowerCase() : undefined;
  if (partType === "input_image") return true;
  if (Array.isArray(value.content)) return value.content.some((entry) => containsVisionContent(entry));
  return false;
};
3120
+
3121
+ //#endregion
3122
+ //#region src/services/copilot/create-messages.ts
3123
// POST an Anthropic-format payload to Copilot's /v1/messages endpoint.
// Returns an SSE event iterator when payload.stream is set, otherwise the
// parsed JSON body. Throws Error when no Copilot token is cached, and
// HTTPError on a non-2xx response.
const createMessages = async (payload, anthropicBetaHeader, options) => {
  if (!state.copilotToken) throw new Error("Copilot token not found");
  // Request vision headers only when some message embeds an image block.
  const enableVision = payload.messages.some((message) => Array.isArray(message.content) && message.content.some((block) => block.type === "image"));
  // Heuristic: a trailing user message that is not purely tool_result blocks
  // means the user initiated this turn; otherwise the agent did.
  let isInitiateRequest = false;
  const lastMessage = payload.messages.at(-1);
  if (lastMessage?.role === "user") isInitiateRequest = Array.isArray(lastMessage.content) ? lastMessage.content.some((block) => block.type !== "tool_result") : true;
  const initiator = options?.initiator ?? (isInitiateRequest ? "user" : "agent");
  const headers = {
    ...copilotHeaders(state, enableVision),
    "X-Initiator": initiator
  };
  if (anthropicBetaHeader) {
    // The claude-code beta flag is stripped before forwarding — presumably
    // unsupported by the upstream endpoint; TODO confirm. Remaining flags
    // pass through unchanged.
    const filteredBeta = anthropicBetaHeader.split(",").map((item) => item.trim()).filter((item) => item !== "claude-code-20250219").join(",");
    if (filteredBeta) headers["anthropic-beta"] = filteredBeta;
  } else if (payload.thinking?.budget_tokens) headers["anthropic-beta"] = "interleaved-thinking-2025-05-14";
  const response = await fetch(`${copilotBaseUrl(state)}/v1/messages`, {
    method: "POST",
    headers,
    body: JSON.stringify(payload)
  });
  if (!response.ok) {
    consola.error("Failed to create messages", response);
    throw new HTTPError("Failed to create messages", response);
  }
  // Streaming responses are surfaced as a fetch-event-stream iterator.
  if (payload.stream) return events(response);
  return await response.json();
};
3150
+
2113
3151
  //#endregion
2114
3152
  //#region src/routes/messages/stream-translation.ts
2115
3153
  function isToolBlockOpen(state$1) {
@@ -2133,18 +3171,6 @@ function createMessageDeltaEvents(finishReason, usage) {
2133
3171
  }
2134
3172
  }, { type: "message_stop" }];
2135
3173
  }
2136
- function createFallbackMessageDeltaEvents(state$1) {
2137
- if (state$1.messageDeltaSent) return [];
2138
- if (state$1.pendingFinishReason) {
2139
- const usage = state$1.pendingUsage ?? {
2140
- prompt_tokens: 0,
2141
- completion_tokens: 0,
2142
- cached_tokens: 0
2143
- };
2144
- return createMessageDeltaEvents(state$1.pendingFinishReason, usage);
2145
- }
2146
- return [];
2147
- }
2148
3174
  function translateChunkToAnthropicEvents(chunk, state$1, originalModel) {
2149
3175
  const events$1 = [];
2150
3176
  if (chunk.usage) {
@@ -2279,111 +3305,264 @@ function translateChunkToAnthropicEvents(chunk, state$1, originalModel) {
2279
3305
  }
2280
3306
 
2281
3307
  //#endregion
2282
- //#region src/routes/messages/handler.ts
2283
- /** Collect all chunks and extract usage data */
2284
- async function collectChunksWithUsage(eventStream) {
2285
- const chunks = [];
2286
- let usage = null;
2287
- for await (const event of eventStream) {
2288
- if (!event.data || event.data === "[DONE]") continue;
3308
+ //#region src/routes/messages/subagent-marker.ts
3309
+ const subagentMarkerPrefix = "__SUBAGENT_MARKER__";
3310
+ const parseSubagentMarkerFromFirstUser = (payload) => {
3311
+ const firstUserMessage = payload.messages.find((msg) => msg.role === "user");
3312
+ if (!firstUserMessage || !Array.isArray(firstUserMessage.content)) return null;
3313
+ for (const block of firstUserMessage.content) {
3314
+ if (block.type !== "text") continue;
3315
+ const marker = parseSubagentMarkerFromSystemReminder(block.text);
3316
+ if (marker) return marker;
3317
+ }
3318
+ return null;
3319
+ };
3320
+ const parseSubagentMarkerFromSystemReminder = (text) => {
3321
+ const startTag = "<system-reminder>";
3322
+ const endTag = "</system-reminder>";
3323
+ let searchFrom = 0;
3324
+ while (true) {
3325
+ const reminderStart = text.indexOf(startTag, searchFrom);
3326
+ if (reminderStart === -1) break;
3327
+ const contentStart = reminderStart + 17;
3328
+ const reminderEnd = text.indexOf(endTag, contentStart);
3329
+ if (reminderEnd === -1) break;
3330
+ const reminderContent = text.slice(contentStart, reminderEnd);
3331
+ const markerIndex = reminderContent.indexOf(subagentMarkerPrefix);
3332
+ if (markerIndex === -1) {
3333
+ searchFrom = reminderEnd + 18;
3334
+ continue;
3335
+ }
3336
+ const markerJson = reminderContent.slice(markerIndex + 19).trim();
2289
3337
  try {
2290
- const chunk = JSON.parse(event.data);
2291
- chunks.push(chunk);
2292
- if (chunk.usage) usage = {
2293
- prompt_tokens: chunk.usage.prompt_tokens,
2294
- completion_tokens: chunk.usage.completion_tokens,
2295
- cached_tokens: chunk.usage.prompt_tokens_details?.cached_tokens ?? 0
2296
- };
2297
- } catch (error) {
2298
- consola.error("Failed to parse chunk:", error, event.data);
3338
+ const parsed = JSON.parse(markerJson);
3339
+ if (!parsed.session_id || !parsed.agent_id || !parsed.agent_type) {
3340
+ searchFrom = reminderEnd + 18;
3341
+ continue;
3342
+ }
3343
+ return parsed;
3344
+ } catch {
3345
+ searchFrom = reminderEnd + 18;
3346
+ continue;
2299
3347
  }
2300
3348
  }
2301
- return {
2302
- chunks,
2303
- usage
2304
- };
2305
- }
3349
+ return null;
3350
+ };
3351
+
3352
+ //#endregion
3353
+ //#region src/routes/messages/handler.ts
3354
+ const logger$1 = createHandlerLogger("messages-handler");
3355
+ const compactSystemPromptStart = "You are a helpful AI assistant tasked with summarizing conversations";
2306
3356
  async function handleCompletion(c) {
2307
3357
  await checkRateLimit(state);
2308
3358
  const anthropicPayload = await c.req.json();
2309
- consola.debug("Anthropic request payload:", JSON.stringify(anthropicPayload));
2310
- const translatedPayload = translateToOpenAI(anthropicPayload);
2311
- let openAIPayload = await applyReplacementsToPayload(translatedPayload);
2312
- openAIPayload = {
2313
- ...openAIPayload,
2314
- model: normalizeModelName(openAIPayload.model)
2315
- };
3359
+ logger$1.debug("Anthropic request payload:", JSON.stringify(anthropicPayload));
3360
+ const subagentMarker = parseSubagentMarkerFromFirstUser(anthropicPayload);
3361
+ const initiatorOverride = subagentMarker ? "agent" : void 0;
3362
+ if (subagentMarker) logger$1.debug("Detected Subagent marker:", JSON.stringify(subagentMarker));
3363
+ const isCompact = isCompactRequest(anthropicPayload);
3364
+ const anthropicBeta = c.req.header("anthropic-beta");
3365
+ logger$1.debug("Anthropic Beta header:", anthropicBeta);
3366
+ const noTools = !anthropicPayload.tools || anthropicPayload.tools.length === 0;
3367
+ if (anthropicBeta && noTools && !isCompact) anthropicPayload.model = getSmallModel();
3368
+ if (isCompact) {
3369
+ logger$1.debug("Is compact request:", isCompact);
3370
+ if (shouldCompactUseSmallModel()) anthropicPayload.model = getSmallModel();
3371
+ } else mergeToolResultForClaude(anthropicPayload);
2316
3372
  if (state.manualApprove) await awaitApproval();
2317
- const isAzureModel = isAzureOpenAIModel(openAIPayload.model);
2318
- if (isAzureModel) {
2319
- if (!state.azureOpenAIConfig) return c.json({ error: "Azure OpenAI not configured" }, 500);
2320
- setRequestContext(c, {
2321
- provider: "Azure OpenAI",
2322
- model: openAIPayload.model
2323
- });
2324
- } else setRequestContext(c, {
2325
- provider: "Copilot",
2326
- model: openAIPayload.model
3373
+ const selectedModel = state.models?.data.find((m) => m.id === anthropicPayload.model);
3374
+ if (shouldUseMessagesApi(selectedModel)) return await handleWithMessagesApi(c, anthropicPayload, {
3375
+ anthropicBetaHeader: anthropicBeta,
3376
+ initiatorOverride,
3377
+ selectedModel
2327
3378
  });
2328
- if (anthropicPayload.stream) {
2329
- const streamPayload = {
2330
- ...openAIPayload,
2331
- stream: true,
2332
- stream_options: { include_usage: true }
3379
+ if (shouldUseResponsesApi(selectedModel)) return await handleWithResponsesApi(c, anthropicPayload, initiatorOverride);
3380
+ return await handleWithChatCompletions(c, anthropicPayload, initiatorOverride);
3381
+ }
3382
+ const RESPONSES_ENDPOINT$1 = "/responses";
3383
+ const MESSAGES_ENDPOINT = "/v1/messages";
3384
+ const handleWithChatCompletions = async (c, anthropicPayload, initiatorOverride) => {
3385
+ let finalPayload = await applyReplacementsToPayload(translateToOpenAI(anthropicPayload));
3386
+ finalPayload = {
3387
+ ...finalPayload,
3388
+ model: normalizeModelName(finalPayload.model)
3389
+ };
3390
+ logger$1.debug("Translated OpenAI request payload:", JSON.stringify(finalPayload));
3391
+ const response = isAzureOpenAIModel(finalPayload.model) && state.azureOpenAIConfig ? await createAzureOpenAIChatCompletions(state.azureOpenAIConfig, finalPayload) : await createChatCompletions(finalPayload, { initiator: initiatorOverride });
3392
+ if (isNonStreaming(response)) {
3393
+ logger$1.debug("Non-streaming response from Copilot:", JSON.stringify(response).slice(-400));
3394
+ const anthropicResponse = translateToAnthropic(response);
3395
+ logger$1.debug("Translated Anthropic response:", JSON.stringify(anthropicResponse));
3396
+ return c.json(anthropicResponse);
3397
+ }
3398
+ logger$1.debug("Streaming response from Copilot");
3399
+ return streamSSE(c, async (stream) => {
3400
+ const streamState = {
3401
+ messageStartSent: false,
3402
+ contentBlockIndex: 0,
3403
+ contentBlockOpen: false,
3404
+ toolCalls: {}
2333
3405
  };
2334
- const azureConfig = state.azureOpenAIConfig;
2335
- const eventStream = isAzureModel && azureConfig ? await createAzureOpenAIChatCompletions(azureConfig, streamPayload) : await createChatCompletions(streamPayload);
2336
- return streamSSE(c, async (stream) => {
2337
- const { chunks, usage } = await collectChunksWithUsage(eventStream);
2338
- consola.debug(`[stream] Collected ${chunks.length} chunks, usage:`, usage);
2339
- if (usage) {
2340
- setRequestContext(c, {
2341
- inputTokens: usage.prompt_tokens,
2342
- outputTokens: usage.completion_tokens
3406
+ for await (const rawEvent of response) {
3407
+ logger$1.debug("Copilot raw stream event:", JSON.stringify(rawEvent));
3408
+ if (rawEvent.data === "[DONE]") break;
3409
+ if (!rawEvent.data) continue;
3410
+ const events$1 = translateChunkToAnthropicEvents(JSON.parse(rawEvent.data), streamState);
3411
+ for (const event of events$1) {
3412
+ logger$1.debug("Translated Anthropic event:", JSON.stringify(event));
3413
+ await stream.writeSSE({
3414
+ event: event.type,
3415
+ data: JSON.stringify(event)
2343
3416
  });
2344
- logTokenUsage(usage.prompt_tokens, usage.completion_tokens);
2345
3417
  }
2346
- const streamState = {
2347
- messageStartSent: false,
2348
- contentBlockOpen: false,
2349
- contentBlockIndex: 0,
2350
- toolCalls: {},
2351
- pendingUsage: usage ?? void 0
2352
- };
2353
- for (const chunk of chunks) {
2354
- const events$1 = translateChunkToAnthropicEvents(chunk, streamState, anthropicPayload.model);
2355
- for (const evt of events$1) {
2356
- consola.debug(`[stream] Emitting event: ${evt.type}`);
3418
+ }
3419
+ });
3420
+ };
3421
+ const handleWithResponsesApi = async (c, anthropicPayload, initiatorOverride) => {
3422
+ const responsesPayload = translateAnthropicMessagesToResponsesPayload(anthropicPayload);
3423
+ logger$1.debug("Translated Responses payload:", JSON.stringify(responsesPayload));
3424
+ const { vision, initiator } = getResponsesRequestOptions(responsesPayload);
3425
+ const response = await createResponses(responsesPayload, {
3426
+ vision,
3427
+ initiator: initiatorOverride ?? initiator
3428
+ });
3429
+ if (responsesPayload.stream && isAsyncIterable$1(response)) {
3430
+ logger$1.debug("Streaming response from Copilot (Responses API)");
3431
+ return streamSSE(c, async (stream) => {
3432
+ const streamState = createResponsesStreamState();
3433
+ for await (const chunk of response) {
3434
+ if (chunk.event === "ping") {
2357
3435
  await stream.writeSSE({
2358
- event: evt.type,
2359
- data: JSON.stringify(evt)
3436
+ event: "ping",
3437
+ data: "{\"type\":\"ping\"}"
2360
3438
  });
3439
+ continue;
3440
+ }
3441
+ const data = chunk.data;
3442
+ if (!data) continue;
3443
+ logger$1.debug("Responses raw stream event:", data);
3444
+ const events$1 = translateResponsesStreamEvent(JSON.parse(data), streamState);
3445
+ for (const event of events$1) {
3446
+ const eventData = JSON.stringify(event);
3447
+ logger$1.debug("Translated Anthropic event:", eventData);
3448
+ await stream.writeSSE({
3449
+ event: event.type,
3450
+ data: eventData
3451
+ });
3452
+ }
3453
+ if (streamState.messageCompleted) {
3454
+ logger$1.debug("Message completed, ending stream");
3455
+ break;
2361
3456
  }
2362
3457
  }
2363
- const fallbackEvents = createFallbackMessageDeltaEvents(streamState);
2364
- consola.debug(`[stream] Fallback events: ${fallbackEvents.length}, messageDeltaSent: ${streamState.messageDeltaSent}`);
2365
- for (const evt of fallbackEvents) {
2366
- consola.debug(`[stream] Emitting fallback event: ${evt.type}`);
3458
+ if (!streamState.messageCompleted) {
3459
+ logger$1.warn("Responses stream ended without completion; sending error event");
3460
+ const errorEvent = buildErrorEvent("Responses stream ended without completion");
2367
3461
  await stream.writeSSE({
2368
- event: evt.type,
2369
- data: JSON.stringify(evt)
3462
+ event: errorEvent.type,
3463
+ data: JSON.stringify(errorEvent)
2370
3464
  });
2371
3465
  }
2372
3466
  });
2373
3467
  }
2374
- const nonStreamPayload = {
2375
- ...openAIPayload,
2376
- stream: false
2377
- };
2378
- const azureConfigNonStream = state.azureOpenAIConfig;
2379
- const response = isAzureModel && azureConfigNonStream ? await createAzureOpenAIChatCompletions(azureConfigNonStream, nonStreamPayload) : await createChatCompletions(nonStreamPayload);
2380
- if (response.usage) setRequestContext(c, {
2381
- inputTokens: response.usage.prompt_tokens,
2382
- outputTokens: response.usage.completion_tokens
2383
- });
2384
- const anthropicResponse = translateToAnthropic(response, anthropicPayload.model);
3468
+ logger$1.debug("Non-streaming Responses result:", JSON.stringify(response).slice(-400));
3469
+ const anthropicResponse = translateResponsesResultToAnthropic(response);
3470
+ logger$1.debug("Translated Anthropic response:", JSON.stringify(anthropicResponse));
2385
3471
  return c.json(anthropicResponse);
2386
- }
3472
+ };
3473
+ const handleWithMessagesApi = async (c, anthropicPayload, options) => {
3474
+ const { anthropicBetaHeader, initiatorOverride, selectedModel } = options ?? {};
3475
+ for (const msg of anthropicPayload.messages) if (msg.role === "assistant" && Array.isArray(msg.content)) msg.content = msg.content.filter((block) => {
3476
+ if (block.type !== "thinking") return true;
3477
+ return block.thinking && block.thinking !== "Thinking..." && block.signature && !block.signature.includes("@");
3478
+ });
3479
+ if (selectedModel?.capabilities.supports.adaptive_thinking) {
3480
+ anthropicPayload.thinking = { type: "adaptive" };
3481
+ anthropicPayload.output_config = { effort: getAnthropicEffortForModel(anthropicPayload.model) };
3482
+ }
3483
+ logger$1.debug("Translated Messages payload:", JSON.stringify(anthropicPayload));
3484
+ const response = await createMessages(anthropicPayload, anthropicBetaHeader, { initiator: initiatorOverride });
3485
+ if (isAsyncIterable$1(response)) {
3486
+ logger$1.debug("Streaming response from Copilot (Messages API)");
3487
+ return streamSSE(c, async (stream) => {
3488
+ for await (const event of response) {
3489
+ const eventName = event.event;
3490
+ const data = event.data ?? "";
3491
+ logger$1.debug("Messages raw stream event:", data);
3492
+ await stream.writeSSE({
3493
+ event: eventName,
3494
+ data
3495
+ });
3496
+ }
3497
+ });
3498
+ }
3499
+ logger$1.debug("Non-streaming Messages result:", JSON.stringify(response).slice(-400));
3500
+ return c.json(response);
3501
+ };
3502
+ const shouldUseResponsesApi = (selectedModel) => {
3503
+ return selectedModel?.supported_endpoints?.includes(RESPONSES_ENDPOINT$1) ?? false;
3504
+ };
3505
+ const shouldUseMessagesApi = (selectedModel) => {
3506
+ return selectedModel?.supported_endpoints?.includes(MESSAGES_ENDPOINT) ?? false;
3507
+ };
3508
+ const isNonStreaming = (response) => Object.hasOwn(response, "choices");
3509
+ const isAsyncIterable$1 = (value) => Boolean(value) && typeof value[Symbol.asyncIterator] === "function";
3510
+ const getAnthropicEffortForModel = (model) => {
3511
+ const reasoningEffort = getReasoningEffortForModel(model);
3512
+ if (reasoningEffort === "xhigh") return "max";
3513
+ if (reasoningEffort === "none" || reasoningEffort === "minimal") return "low";
3514
+ return reasoningEffort;
3515
+ };
3516
+ const isCompactRequest = (anthropicPayload) => {
3517
+ const system = anthropicPayload.system;
3518
+ if (typeof system === "string") return system.startsWith(compactSystemPromptStart);
3519
+ if (!Array.isArray(system)) return false;
3520
+ return system.some((msg) => typeof msg.text === "string" && msg.text.startsWith(compactSystemPromptStart));
3521
+ };
3522
+ const mergeContentWithText = (tr, textBlock) => {
3523
+ if (typeof tr.content === "string") return {
3524
+ ...tr,
3525
+ content: `${tr.content}\n\n${textBlock.text}`
3526
+ };
3527
+ return {
3528
+ ...tr,
3529
+ content: [...tr.content, textBlock]
3530
+ };
3531
+ };
3532
+ const mergeContentWithTexts = (tr, textBlocks) => {
3533
+ if (typeof tr.content === "string") {
3534
+ const appendedTexts = textBlocks.map((tb) => tb.text).join("\n\n");
3535
+ return {
3536
+ ...tr,
3537
+ content: `${tr.content}\n\n${appendedTexts}`
3538
+ };
3539
+ }
3540
+ return {
3541
+ ...tr,
3542
+ content: [...tr.content, ...textBlocks]
3543
+ };
3544
+ };
3545
+ const mergeToolResultForClaude = (anthropicPayload) => {
3546
+ for (const msg of anthropicPayload.messages) {
3547
+ if (msg.role !== "user" || !Array.isArray(msg.content)) continue;
3548
+ const toolResults = [];
3549
+ const textBlocks = [];
3550
+ let valid = true;
3551
+ for (const block of msg.content) if (block.type === "tool_result") toolResults.push(block);
3552
+ else if (block.type === "text") textBlocks.push(block);
3553
+ else {
3554
+ valid = false;
3555
+ break;
3556
+ }
3557
+ if (!valid || toolResults.length === 0 || textBlocks.length === 0) continue;
3558
+ msg.content = mergeToolResult(toolResults, textBlocks);
3559
+ }
3560
+ };
3561
+ const mergeToolResult = (toolResults, textBlocks) => {
3562
+ if (toolResults.length === textBlocks.length) return toolResults.map((tr, i) => mergeContentWithText(tr, textBlocks[i]));
3563
+ const lastIndex = toolResults.length - 1;
3564
+ return toolResults.map((tr, i) => i === lastIndex ? mergeContentWithTexts(tr, textBlocks) : tr);
3565
+ };
2387
3566
 
2388
3567
  //#endregion
2389
3568
  //#region src/routes/messages/route.ts
@@ -2454,20 +3633,16 @@ replacementsRoute.post("/", async (c) => {
2454
3633
  return c.json(rule, 201);
2455
3634
  });
2456
3635
  replacementsRoute.delete("/:id", async (c) => {
2457
- const id = c.req.param("id");
2458
- if (!await removeReplacement(id)) return c.json({ error: "Replacement not found or is a system rule" }, 404);
3636
+ if (!await removeReplacement(c.req.param("id"))) return c.json({ error: "Replacement not found or is a system rule" }, 404);
2459
3637
  return c.json({ success: true });
2460
3638
  });
2461
3639
  replacementsRoute.patch("/:id", async (c) => {
2462
- const id = c.req.param("id");
2463
- const body = await c.req.json();
2464
- const rule = await updateReplacement(id, body);
3640
+ const rule = await updateReplacement(c.req.param("id"), await c.req.json());
2465
3641
  if (!rule) return c.json({ error: "Replacement not found or is a system rule" }, 404);
2466
3642
  return c.json(rule);
2467
3643
  });
2468
3644
  replacementsRoute.patch("/:id/toggle", async (c) => {
2469
- const id = c.req.param("id");
2470
- const rule = await toggleReplacement(id);
3645
+ const rule = await toggleReplacement(c.req.param("id"));
2471
3646
  if (!rule) return c.json({ error: "Replacement not found or is a system rule" }, 404);
2472
3647
  return c.json(rule);
2473
3648
  });
@@ -2476,6 +3651,130 @@ replacementsRoute.delete("/", async (c) => {
2476
3651
  return c.json({ success: true });
2477
3652
  });
2478
3653
 
3654
+ //#endregion
3655
+ //#region src/routes/responses/stream-id-sync.ts
3656
+ const createStreamIdTracker = () => ({ outputItems: /* @__PURE__ */ new Map() });
3657
+ const fixStreamIds = (data, event, tracker) => {
3658
+ if (!data) return data;
3659
+ const parsed = JSON.parse(data);
3660
+ switch (event) {
3661
+ case "response.output_item.added": return handleOutputItemAdded(parsed, tracker);
3662
+ case "response.output_item.done": return handleOutputItemDone(parsed, tracker);
3663
+ default: return handleItemId(parsed, tracker);
3664
+ }
3665
+ };
3666
+ const handleOutputItemAdded = (parsed, tracker) => {
3667
+ if (!parsed.item.id) {
3668
+ let randomSuffix = "";
3669
+ while (randomSuffix.length < 16) randomSuffix += Math.random().toString(36).slice(2);
3670
+ parsed.item.id = `oi_${parsed.output_index}_${randomSuffix.slice(0, 16)}`;
3671
+ }
3672
+ const outputIndex = parsed.output_index;
3673
+ tracker.outputItems.set(outputIndex, parsed.item.id);
3674
+ return JSON.stringify(parsed);
3675
+ };
3676
+ const handleOutputItemDone = (parsed, tracker) => {
3677
+ const outputIndex = parsed.output_index;
3678
+ const originalId = tracker.outputItems.get(outputIndex);
3679
+ if (originalId) parsed.item.id = originalId;
3680
+ return JSON.stringify(parsed);
3681
+ };
3682
+ const handleItemId = (parsed, tracker) => {
3683
+ const outputIndex = parsed.output_index;
3684
+ if (outputIndex !== void 0) {
3685
+ const itemId = tracker.outputItems.get(outputIndex);
3686
+ if (itemId) parsed.item_id = itemId;
3687
+ }
3688
+ return JSON.stringify(parsed);
3689
+ };
3690
+
3691
+ //#endregion
3692
+ //#region src/routes/responses/handler.ts
3693
+ const logger = createHandlerLogger("responses-handler");
3694
+ const RESPONSES_ENDPOINT = "/responses";
3695
+ const handleResponses = async (c) => {
3696
+ await checkRateLimit(state);
3697
+ const payload = await c.req.json();
3698
+ setRequestContext(c, {
3699
+ provider: "Copilot (Responses)",
3700
+ model: payload.model
3701
+ });
3702
+ logger.debug("Responses request payload:", JSON.stringify(payload));
3703
+ useFunctionApplyPatch(payload);
3704
+ removeWebSearchTool(payload);
3705
+ if (!((state.models?.data.find((model) => model.id === payload.model))?.supported_endpoints?.includes(RESPONSES_ENDPOINT) ?? false)) return c.json({ error: {
3706
+ message: "This model does not support the responses endpoint. Please choose a different model.",
3707
+ type: "invalid_request_error"
3708
+ } }, 400);
3709
+ const { vision, initiator } = getResponsesRequestOptions(payload);
3710
+ if (state.manualApprove) await awaitApproval();
3711
+ const response = await createResponses(payload, {
3712
+ vision,
3713
+ initiator
3714
+ });
3715
+ if (isStreamingRequested(payload) && isAsyncIterable(response)) {
3716
+ logger.debug("Forwarding native Responses stream");
3717
+ return streamSSE(c, async (stream) => {
3718
+ const idTracker = createStreamIdTracker();
3719
+ for await (const chunk of response) {
3720
+ logger.debug("Responses stream chunk:", JSON.stringify(chunk));
3721
+ const processedData = fixStreamIds(chunk.data ?? "", chunk.event, idTracker);
3722
+ await stream.writeSSE({
3723
+ id: chunk.id,
3724
+ event: chunk.event,
3725
+ data: processedData
3726
+ });
3727
+ }
3728
+ });
3729
+ }
3730
+ logger.debug("Forwarding native Responses result:", JSON.stringify(response).slice(-400));
3731
+ return c.json(response);
3732
+ };
3733
+ const isAsyncIterable = (value) => Boolean(value) && typeof value[Symbol.asyncIterator] === "function";
3734
+ const isStreamingRequested = (payload) => Boolean(payload.stream);
3735
+ const useFunctionApplyPatch = (payload) => {
3736
+ if (getConfig().useFunctionApplyPatch ?? true) {
3737
+ logger.debug("Using function tool apply_patch for responses");
3738
+ if (Array.isArray(payload.tools)) {
3739
+ const toolsArr = payload.tools;
3740
+ for (let i = 0; i < toolsArr.length; i++) {
3741
+ const t = toolsArr[i];
3742
+ if (t.type === "custom" && t.name === "apply_patch") toolsArr[i] = {
3743
+ type: "function",
3744
+ name: t.name,
3745
+ description: "Use the `apply_patch` tool to edit files",
3746
+ parameters: {
3747
+ type: "object",
3748
+ properties: { input: {
3749
+ type: "string",
3750
+ description: "The entire contents of the apply_patch command"
3751
+ } },
3752
+ required: ["input"]
3753
+ },
3754
+ strict: false
3755
+ };
3756
+ }
3757
+ }
3758
+ }
3759
+ };
3760
+ const removeWebSearchTool = (payload) => {
3761
+ if (!Array.isArray(payload.tools) || payload.tools.length === 0) return;
3762
+ payload.tools = payload.tools.filter((t) => {
3763
+ return t.type !== "web_search";
3764
+ });
3765
+ };
3766
+
3767
+ //#endregion
3768
+ //#region src/routes/responses/route.ts
3769
+ const responsesRoutes = new Hono();
3770
+ responsesRoutes.post("/", async (c) => {
3771
+ try {
3772
+ return await handleResponses(c);
3773
+ } catch (error) {
3774
+ return await forwardError(c, error);
3775
+ }
3776
+ });
3777
+
2479
3778
  //#endregion
2480
3779
  //#region src/routes/token/route.ts
2481
3780
  const tokenRoute = new Hono();
@@ -2509,6 +3808,7 @@ usageRoute.get("/", async (c) => {
2509
3808
  const server = new Hono();
2510
3809
  server.use(requestLogger);
2511
3810
  server.use(cors());
3811
+ server.use("*", createAuthMiddleware());
2512
3812
  server.get("/", (c) => c.text("Server running"));
2513
3813
  server.route("/chat/completions", completionRoutes);
2514
3814
  server.route("/models", modelRoutes);
@@ -2516,15 +3816,17 @@ server.route("/embeddings", embeddingRoutes);
2516
3816
  server.route("/usage", usageRoute);
2517
3817
  server.route("/token", tokenRoute);
2518
3818
  server.route("/replacements", replacementsRoute);
3819
+ server.route("/responses", responsesRoutes);
2519
3820
  server.route("/v1/chat/completions", completionRoutes);
2520
3821
  server.route("/v1/models", modelRoutes);
2521
3822
  server.route("/v1/embeddings", embeddingRoutes);
3823
+ server.route("/v1/responses", responsesRoutes);
2522
3824
  server.route("/v1/messages", messageRoutes);
2523
3825
 
2524
3826
  //#endregion
2525
3827
  //#region src/start.ts
2526
3828
  async function runServer(options) {
2527
- consola.info(`copilot-api v${package_default.version}`);
3829
+ consola.info(`copilot-api v${version}`);
2528
3830
  if (options.insecure) {
2529
3831
  process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
2530
3832
  consola.warn("SSL certificate verification disabled (insecure mode)");
@@ -2541,8 +3843,10 @@ async function runServer(options) {
2541
3843
  state.rateLimitWait = options.rateLimitWait;
2542
3844
  state.showToken = options.showToken;
2543
3845
  state.debug = options.debug;
3846
+ state.verbose = options.verbose;
2544
3847
  if (options.debug) consola.info("Debug mode enabled - raw HTTP requests will be logged");
2545
3848
  await ensurePaths();
3849
+ mergeConfigWithDefaults();
2546
3850
  await cacheVSCodeVersion();
2547
3851
  if (options.githubToken) {
2548
3852
  state.githubToken = options.githubToken;
@@ -2686,10 +3990,10 @@ const start = defineCommand({
2686
3990
 
2687
3991
  //#endregion
2688
3992
  //#region src/main.ts
2689
- const main = defineCommand({
3993
+ await runMain(defineCommand({
2690
3994
  meta: {
2691
3995
  name: "copilot-api",
2692
- version: package_default.version,
3996
+ version,
2693
3997
  description: "A wrapper around GitHub Copilot API to make it OpenAI compatible, making it usable for other tools."
2694
3998
  },
2695
3999
  subCommands: {
@@ -2699,8 +4003,7 @@ const main = defineCommand({
2699
4003
  debug,
2700
4004
  config
2701
4005
  }
2702
- });
2703
- await runMain(main);
4006
+ }));
2704
4007
 
2705
4008
  //#endregion
2706
4009
  export { };