@jeffreycao/copilot-api 1.5.3-beta.0 → 1.5.3-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/main.js +1 -1
- package/dist/{server-DAyVU7jA.js → server-DP3qGqqy.js} +308 -252
- package/dist/server-DP3qGqqy.js.map +1 -0
- package/dist/{start-B49um7QG.js → start-BBCMQ5y3.js} +2 -2
- package/dist/{start-B49um7QG.js.map → start-BBCMQ5y3.js.map} +1 -1
- package/package.json +2 -1
- package/dist/server-DAyVU7jA.js.map +0 -1
|
@@ -86,6 +86,9 @@ const FLUSH_INTERVAL_MS = 1e3;
|
|
|
86
86
|
const MAX_BUFFER_SIZE = 100;
|
|
87
87
|
const logStreams = /* @__PURE__ */ new Map();
|
|
88
88
|
const logBuffers = /* @__PURE__ */ new Map();
|
|
89
|
+
let runtimeInitialized = false;
|
|
90
|
+
let flushInterval;
|
|
91
|
+
let cleanupInterval;
|
|
89
92
|
const ensureLogDirectory = () => {
|
|
90
93
|
if (!fs.existsSync(LOG_DIR)) fs.mkdirSync(LOG_DIR, { recursive: true });
|
|
91
94
|
};
|
|
@@ -116,17 +119,8 @@ const sanitizeName = (name) => {
|
|
|
116
119
|
const normalized = name.toLowerCase().replaceAll(/[^a-z0-9]+/g, "-").replaceAll(/^-+|-+$/g, "");
|
|
117
120
|
return normalized === "" ? "handler" : normalized;
|
|
118
121
|
};
|
|
119
|
-
const
|
|
120
|
-
|
|
121
|
-
if (!stream || stream.destroyed) {
|
|
122
|
-
stream = fs.createWriteStream(filePath, { flags: "a" });
|
|
123
|
-
logStreams.set(filePath, stream);
|
|
124
|
-
stream.on("error", (error) => {
|
|
125
|
-
console.warn("Log stream error", error);
|
|
126
|
-
logStreams.delete(filePath);
|
|
127
|
-
});
|
|
128
|
-
}
|
|
129
|
-
return stream;
|
|
122
|
+
const maybeUnref = (timer) => {
|
|
123
|
+
timer.unref();
|
|
130
124
|
};
|
|
131
125
|
const flushBuffer = (filePath) => {
|
|
132
126
|
const buffer = logBuffers.get(filePath);
|
|
@@ -141,6 +135,52 @@ const flushBuffer = (filePath) => {
|
|
|
141
135
|
const flushAllBuffers = () => {
|
|
142
136
|
for (const filePath of logBuffers.keys()) flushBuffer(filePath);
|
|
143
137
|
};
|
|
138
|
+
const cleanup = () => {
|
|
139
|
+
if (flushInterval) {
|
|
140
|
+
clearInterval(flushInterval);
|
|
141
|
+
flushInterval = void 0;
|
|
142
|
+
}
|
|
143
|
+
if (cleanupInterval) {
|
|
144
|
+
clearInterval(cleanupInterval);
|
|
145
|
+
cleanupInterval = void 0;
|
|
146
|
+
}
|
|
147
|
+
flushAllBuffers();
|
|
148
|
+
for (const stream of logStreams.values()) stream.end();
|
|
149
|
+
logStreams.clear();
|
|
150
|
+
logBuffers.clear();
|
|
151
|
+
};
|
|
152
|
+
const initializeLoggerRuntime = () => {
|
|
153
|
+
if (runtimeInitialized) return;
|
|
154
|
+
runtimeInitialized = true;
|
|
155
|
+
ensureLogDirectory();
|
|
156
|
+
cleanupOldLogs();
|
|
157
|
+
flushInterval = setInterval(flushAllBuffers, FLUSH_INTERVAL_MS);
|
|
158
|
+
maybeUnref(flushInterval);
|
|
159
|
+
cleanupInterval = setInterval(cleanupOldLogs, CLEANUP_INTERVAL_MS);
|
|
160
|
+
maybeUnref(cleanupInterval);
|
|
161
|
+
process.once("exit", cleanup);
|
|
162
|
+
process.once("SIGINT", () => {
|
|
163
|
+
cleanup();
|
|
164
|
+
process.exit(0);
|
|
165
|
+
});
|
|
166
|
+
process.once("SIGTERM", () => {
|
|
167
|
+
cleanup();
|
|
168
|
+
process.exit(0);
|
|
169
|
+
});
|
|
170
|
+
};
|
|
171
|
+
const getLogStream = (filePath) => {
|
|
172
|
+
initializeLoggerRuntime();
|
|
173
|
+
let stream = logStreams.get(filePath);
|
|
174
|
+
if (!stream || stream.destroyed) {
|
|
175
|
+
stream = fs.createWriteStream(filePath, { flags: "a" });
|
|
176
|
+
logStreams.set(filePath, stream);
|
|
177
|
+
stream.on("error", (error) => {
|
|
178
|
+
console.warn("Log stream error", error);
|
|
179
|
+
logStreams.delete(filePath);
|
|
180
|
+
});
|
|
181
|
+
}
|
|
182
|
+
return stream;
|
|
183
|
+
};
|
|
144
184
|
const appendLine = (filePath, line) => {
|
|
145
185
|
let buffer = logBuffers.get(filePath);
|
|
146
186
|
if (!buffer) {
|
|
@@ -150,35 +190,23 @@ const appendLine = (filePath, line) => {
|
|
|
150
190
|
buffer.push(line);
|
|
151
191
|
if (buffer.length >= MAX_BUFFER_SIZE) flushBuffer(filePath);
|
|
152
192
|
};
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
193
|
+
const debugLazy = (logger$7, factory) => {
|
|
194
|
+
if (!state.verbose) return;
|
|
195
|
+
logger$7.debug(...factory());
|
|
196
|
+
};
|
|
197
|
+
const debugJson = (logger$7, label, value) => {
|
|
198
|
+
debugLazy(logger$7, () => [label, JSON.stringify(value)]);
|
|
199
|
+
};
|
|
200
|
+
const debugJsonTail = (logger$7, label, { value, tailLength = 400 }) => {
|
|
201
|
+
debugLazy(logger$7, () => [label, JSON.stringify(value).slice(-tailLength)]);
|
|
159
202
|
};
|
|
160
|
-
process.on("exit", cleanup);
|
|
161
|
-
process.on("SIGINT", () => {
|
|
162
|
-
cleanup();
|
|
163
|
-
process.exit(0);
|
|
164
|
-
});
|
|
165
|
-
process.on("SIGTERM", () => {
|
|
166
|
-
cleanup();
|
|
167
|
-
process.exit(0);
|
|
168
|
-
});
|
|
169
|
-
let lastCleanup = 0;
|
|
170
203
|
const createHandlerLogger = (name) => {
|
|
171
|
-
ensureLogDirectory();
|
|
172
204
|
const sanitizedName = sanitizeName(name);
|
|
173
205
|
const instance = consola.withTag(name);
|
|
174
206
|
if (state.verbose) instance.level = 5;
|
|
175
207
|
instance.setReporters([]);
|
|
176
208
|
instance.addReporter({ log(logObj) {
|
|
177
|
-
|
|
178
|
-
if (Date.now() - lastCleanup > CLEANUP_INTERVAL_MS) {
|
|
179
|
-
cleanupOldLogs();
|
|
180
|
-
lastCleanup = Date.now();
|
|
181
|
-
}
|
|
209
|
+
initializeLoggerRuntime();
|
|
182
210
|
const traceId = requestContext.getStore()?.traceId;
|
|
183
211
|
const date = logObj.date;
|
|
184
212
|
const dateKey = date.toLocaleDateString("sv-SE");
|
|
@@ -469,7 +497,10 @@ const logger$6 = createHandlerLogger("chat-completions-handler");
|
|
|
469
497
|
async function handleCompletion$1(c) {
|
|
470
498
|
await checkRateLimit(state);
|
|
471
499
|
let payload = await c.req.json();
|
|
472
|
-
logger$6
|
|
500
|
+
debugJsonTail(logger$6, "Request payload:", {
|
|
501
|
+
value: payload,
|
|
502
|
+
tailLength: 400
|
|
503
|
+
});
|
|
473
504
|
const selectedModel = state.models?.data.find((model) => model.id === payload.model);
|
|
474
505
|
try {
|
|
475
506
|
if (selectedModel) {
|
|
@@ -485,7 +516,7 @@ async function handleCompletion$1(c) {
|
|
|
485
516
|
...payload,
|
|
486
517
|
max_tokens: selectedModel?.capabilities.limits.max_output_tokens
|
|
487
518
|
};
|
|
488
|
-
logger$6
|
|
519
|
+
debugJson(logger$6, "Set max_tokens to:", payload.max_tokens);
|
|
489
520
|
}
|
|
490
521
|
const requestId = generateRequestIdFromPayload(payload);
|
|
491
522
|
logger$6.debug("Generated request ID:", requestId);
|
|
@@ -496,13 +527,13 @@ async function handleCompletion$1(c) {
|
|
|
496
527
|
sessionId
|
|
497
528
|
});
|
|
498
529
|
if (isNonStreaming$1(response)) {
|
|
499
|
-
logger$6
|
|
530
|
+
debugJson(logger$6, "Non-streaming response:", response);
|
|
500
531
|
return c.json(response);
|
|
501
532
|
}
|
|
502
533
|
logger$6.debug("Streaming response");
|
|
503
534
|
return streamSSE(c, async (stream) => {
|
|
504
535
|
for await (const chunk of response) {
|
|
505
|
-
logger$6
|
|
536
|
+
debugJson(logger$6, "Streaming chunk:", chunk);
|
|
506
537
|
await stream.writeSSE(chunk);
|
|
507
538
|
}
|
|
508
539
|
});
|
|
@@ -1884,6 +1915,107 @@ const createMessages = async (payload, anthropicBetaHeader, options) => {
|
|
|
1884
1915
|
return await response.json();
|
|
1885
1916
|
};
|
|
1886
1917
|
|
|
1918
|
+
//#endregion
|
|
1919
|
+
//#region src/routes/messages/preprocess.ts
|
|
1920
|
+
const compactSystemPromptStart = "You are a helpful AI assistant tasked with summarizing conversations";
|
|
1921
|
+
const compactTextOnlyGuard = "CRITICAL: Respond with TEXT ONLY. Do NOT call any tools.";
|
|
1922
|
+
const compactSummaryPromptStart = "Your task is to create a detailed summary of the conversation so far";
|
|
1923
|
+
const compactMessageSections = ["Pending Tasks:", "Current Work:"];
|
|
1924
|
+
const getAnthropicEffortForModel = (model) => {
|
|
1925
|
+
const reasoningEffort = getReasoningEffortForModel(model);
|
|
1926
|
+
if (reasoningEffort === "xhigh") return "max";
|
|
1927
|
+
if (reasoningEffort === "none" || reasoningEffort === "minimal") return "low";
|
|
1928
|
+
return reasoningEffort;
|
|
1929
|
+
};
|
|
1930
|
+
const getCompactCandidateText = (message) => {
|
|
1931
|
+
if (message.role !== "user") return "";
|
|
1932
|
+
if (typeof message.content === "string") return message.content;
|
|
1933
|
+
return message.content.filter((block) => block.type === "text").map((block) => block.text.startsWith("<system-reminder>") ? "" : block.text).filter((text) => text.length > 0).join("\n\n");
|
|
1934
|
+
};
|
|
1935
|
+
const isCompactMessage = (lastMessage) => {
|
|
1936
|
+
const text = getCompactCandidateText(lastMessage);
|
|
1937
|
+
if (!text) return false;
|
|
1938
|
+
return text.includes(compactTextOnlyGuard) && text.includes(compactSummaryPromptStart) && compactMessageSections.some((section) => text.includes(section));
|
|
1939
|
+
};
|
|
1940
|
+
const isCompactRequest = (anthropicPayload) => {
|
|
1941
|
+
const lastMessage = anthropicPayload.messages.at(-1);
|
|
1942
|
+
if (lastMessage && isCompactMessage(lastMessage)) return true;
|
|
1943
|
+
const system = anthropicPayload.system;
|
|
1944
|
+
if (typeof system === "string") return system.startsWith(compactSystemPromptStart);
|
|
1945
|
+
if (!Array.isArray(system)) return false;
|
|
1946
|
+
return system.some((msg) => typeof msg.text === "string" && msg.text.startsWith(compactSystemPromptStart));
|
|
1947
|
+
};
|
|
1948
|
+
const mergeContentWithText = (tr, textBlock) => {
|
|
1949
|
+
if (typeof tr.content === "string") return {
|
|
1950
|
+
...tr,
|
|
1951
|
+
content: `${tr.content}\n\n${textBlock.text}`
|
|
1952
|
+
};
|
|
1953
|
+
return {
|
|
1954
|
+
...tr,
|
|
1955
|
+
content: [...tr.content, textBlock]
|
|
1956
|
+
};
|
|
1957
|
+
};
|
|
1958
|
+
const mergeContentWithTexts = (tr, textBlocks) => {
|
|
1959
|
+
if (typeof tr.content === "string") {
|
|
1960
|
+
const appendedTexts = textBlocks.map((tb) => tb.text).join("\n\n");
|
|
1961
|
+
return {
|
|
1962
|
+
...tr,
|
|
1963
|
+
content: `${tr.content}\n\n${appendedTexts}`
|
|
1964
|
+
};
|
|
1965
|
+
}
|
|
1966
|
+
return {
|
|
1967
|
+
...tr,
|
|
1968
|
+
content: [...tr.content, ...textBlocks]
|
|
1969
|
+
};
|
|
1970
|
+
};
|
|
1971
|
+
const mergeToolResult = (toolResults, textBlocks) => {
|
|
1972
|
+
if (toolResults.length === textBlocks.length) return toolResults.map((tr, i) => mergeContentWithText(tr, textBlocks[i]));
|
|
1973
|
+
const lastIndex = toolResults.length - 1;
|
|
1974
|
+
return toolResults.map((tr, i) => i === lastIndex ? mergeContentWithTexts(tr, textBlocks) : tr);
|
|
1975
|
+
};
|
|
1976
|
+
const mergeToolResultForClaude = (anthropicPayload) => {
|
|
1977
|
+
for (const msg of anthropicPayload.messages) {
|
|
1978
|
+
if (msg.role !== "user" || !Array.isArray(msg.content)) continue;
|
|
1979
|
+
const toolResults = [];
|
|
1980
|
+
const textBlocks = [];
|
|
1981
|
+
let valid = true;
|
|
1982
|
+
for (const block of msg.content) if (block.type === "tool_result") toolResults.push(block);
|
|
1983
|
+
else if (block.type === "text") textBlocks.push(block);
|
|
1984
|
+
else {
|
|
1985
|
+
valid = false;
|
|
1986
|
+
break;
|
|
1987
|
+
}
|
|
1988
|
+
if (!valid || toolResults.length === 0 || textBlocks.length === 0) continue;
|
|
1989
|
+
msg.content = mergeToolResult(toolResults, textBlocks);
|
|
1990
|
+
}
|
|
1991
|
+
};
|
|
1992
|
+
const stripCacheControl = (payload) => {
|
|
1993
|
+
if (Array.isArray(payload.system)) for (const block of payload.system) {
|
|
1994
|
+
const systemBlock = block;
|
|
1995
|
+
const cacheControl = systemBlock.cache_control;
|
|
1996
|
+
if (cacheControl && typeof cacheControl === "object") {
|
|
1997
|
+
const { scope,...rest } = cacheControl;
|
|
1998
|
+
systemBlock.cache_control = rest;
|
|
1999
|
+
}
|
|
2000
|
+
}
|
|
2001
|
+
};
|
|
2002
|
+
const filterAssistantThinkingBlocks = (payload) => {
|
|
2003
|
+
for (const msg of payload.messages) if (msg.role === "assistant" && Array.isArray(msg.content)) msg.content = msg.content.filter((block) => {
|
|
2004
|
+
if (block.type !== "thinking") return true;
|
|
2005
|
+
return block.thinking && block.thinking !== "Thinking..." && block.signature && !block.signature.includes("@");
|
|
2006
|
+
});
|
|
2007
|
+
};
|
|
2008
|
+
const prepareMessagesApiPayload = (payload, selectedModel) => {
|
|
2009
|
+
stripCacheControl(payload);
|
|
2010
|
+
filterAssistantThinkingBlocks(payload);
|
|
2011
|
+
const toolChoice = payload.tool_choice;
|
|
2012
|
+
const disableThink = toolChoice?.type === "any" || toolChoice?.type === "tool";
|
|
2013
|
+
if (selectedModel?.capabilities.supports.adaptive_thinking && !disableThink) {
|
|
2014
|
+
payload.thinking = { type: "adaptive" };
|
|
2015
|
+
payload.output_config = { effort: getAnthropicEffortForModel(payload.model) };
|
|
2016
|
+
}
|
|
2017
|
+
};
|
|
2018
|
+
|
|
1887
2019
|
//#endregion
|
|
1888
2020
|
//#region src/routes/messages/stream-translation.ts
|
|
1889
2021
|
function isToolBlockOpen(state$1) {
|
|
@@ -2135,105 +2267,11 @@ function closeThinkingBlockIfOpen(state$1, events$1) {
|
|
|
2135
2267
|
}
|
|
2136
2268
|
|
|
2137
2269
|
//#endregion
|
|
2138
|
-
//#region src/routes/messages/
|
|
2139
|
-
const subagentMarkerPrefix = "__SUBAGENT_MARKER__";
|
|
2140
|
-
const parseSubagentMarkerFromFirstUser = (payload) => {
|
|
2141
|
-
const firstUserMessage = payload.messages.find((msg) => msg.role === "user");
|
|
2142
|
-
if (!firstUserMessage || !Array.isArray(firstUserMessage.content)) return null;
|
|
2143
|
-
for (const block of firstUserMessage.content) {
|
|
2144
|
-
if (block.type !== "text") continue;
|
|
2145
|
-
const marker = parseSubagentMarkerFromSystemReminder(block.text);
|
|
2146
|
-
if (marker) return marker;
|
|
2147
|
-
}
|
|
2148
|
-
return null;
|
|
2149
|
-
};
|
|
2150
|
-
const parseSubagentMarkerFromSystemReminder = (text) => {
|
|
2151
|
-
const startTag = "<system-reminder>";
|
|
2152
|
-
const endTag = "</system-reminder>";
|
|
2153
|
-
let searchFrom = 0;
|
|
2154
|
-
while (true) {
|
|
2155
|
-
const reminderStart = text.indexOf(startTag, searchFrom);
|
|
2156
|
-
if (reminderStart === -1) break;
|
|
2157
|
-
const contentStart = reminderStart + 17;
|
|
2158
|
-
const reminderEnd = text.indexOf(endTag, contentStart);
|
|
2159
|
-
if (reminderEnd === -1) break;
|
|
2160
|
-
const reminderContent = text.slice(contentStart, reminderEnd);
|
|
2161
|
-
const markerIndex = reminderContent.indexOf(subagentMarkerPrefix);
|
|
2162
|
-
if (markerIndex === -1) {
|
|
2163
|
-
searchFrom = reminderEnd + 18;
|
|
2164
|
-
continue;
|
|
2165
|
-
}
|
|
2166
|
-
const markerJson = reminderContent.slice(markerIndex + 19).trim();
|
|
2167
|
-
try {
|
|
2168
|
-
const parsed = JSON.parse(markerJson);
|
|
2169
|
-
if (!parsed.session_id || !parsed.agent_id || !parsed.agent_type) {
|
|
2170
|
-
searchFrom = reminderEnd + 18;
|
|
2171
|
-
continue;
|
|
2172
|
-
}
|
|
2173
|
-
return parsed;
|
|
2174
|
-
} catch {
|
|
2175
|
-
searchFrom = reminderEnd + 18;
|
|
2176
|
-
continue;
|
|
2177
|
-
}
|
|
2178
|
-
}
|
|
2179
|
-
return null;
|
|
2180
|
-
};
|
|
2181
|
-
|
|
2182
|
-
//#endregion
|
|
2183
|
-
//#region src/routes/messages/handler.ts
|
|
2184
|
-
const logger$5 = createHandlerLogger("messages-handler");
|
|
2185
|
-
const compactSystemPromptStart = "You are a helpful AI assistant tasked with summarizing conversations";
|
|
2186
|
-
const compactTextOnlyGuard = "CRITICAL: Respond with TEXT ONLY. Do NOT call any tools.";
|
|
2187
|
-
const compactSummaryPromptStart = "Your task is to create a detailed summary of the conversation so far";
|
|
2188
|
-
const compactMessageSections = ["Pending Tasks:", "Current Work:"];
|
|
2189
|
-
async function handleCompletion(c) {
|
|
2190
|
-
await checkRateLimit(state);
|
|
2191
|
-
const anthropicPayload = await c.req.json();
|
|
2192
|
-
logger$5.debug("Anthropic request payload:", JSON.stringify(anthropicPayload));
|
|
2193
|
-
const subagentMarker = parseSubagentMarkerFromFirstUser(anthropicPayload);
|
|
2194
|
-
if (subagentMarker) logger$5.debug("Detected Subagent marker:", JSON.stringify(subagentMarker));
|
|
2195
|
-
const sessionId = getRootSessionId(anthropicPayload, c);
|
|
2196
|
-
logger$5.debug("Extracted session ID:", sessionId);
|
|
2197
|
-
const isCompact = isCompactRequest(anthropicPayload);
|
|
2198
|
-
const anthropicBeta = c.req.header("anthropic-beta");
|
|
2199
|
-
logger$5.debug("Anthropic Beta header:", anthropicBeta);
|
|
2200
|
-
const noTools = !anthropicPayload.tools || anthropicPayload.tools.length === 0;
|
|
2201
|
-
if (anthropicBeta && noTools && !isCompact) anthropicPayload.model = getSmallModel();
|
|
2202
|
-
if (isCompact) logger$5.debug("Is compact request:", isCompact);
|
|
2203
|
-
else mergeToolResultForClaude(anthropicPayload);
|
|
2204
|
-
const requestId = generateRequestIdFromPayload(anthropicPayload, sessionId);
|
|
2205
|
-
logger$5.debug("Generated request ID:", requestId);
|
|
2206
|
-
if (state.manualApprove) await awaitApproval();
|
|
2207
|
-
const selectedModel = findEndpointModel(anthropicPayload.model);
|
|
2208
|
-
anthropicPayload.model = selectedModel?.id ?? anthropicPayload.model;
|
|
2209
|
-
if (shouldUseMessagesApi(selectedModel)) return await handleWithMessagesApi(c, anthropicPayload, {
|
|
2210
|
-
anthropicBetaHeader: anthropicBeta,
|
|
2211
|
-
subagentMarker,
|
|
2212
|
-
selectedModel,
|
|
2213
|
-
requestId,
|
|
2214
|
-
sessionId,
|
|
2215
|
-
isCompact
|
|
2216
|
-
});
|
|
2217
|
-
if (shouldUseResponsesApi(selectedModel)) return await handleWithResponsesApi(c, anthropicPayload, {
|
|
2218
|
-
subagentMarker,
|
|
2219
|
-
selectedModel,
|
|
2220
|
-
requestId,
|
|
2221
|
-
sessionId,
|
|
2222
|
-
isCompact
|
|
2223
|
-
});
|
|
2224
|
-
return await handleWithChatCompletions(c, anthropicPayload, {
|
|
2225
|
-
subagentMarker,
|
|
2226
|
-
requestId,
|
|
2227
|
-
sessionId,
|
|
2228
|
-
isCompact
|
|
2229
|
-
});
|
|
2230
|
-
}
|
|
2231
|
-
const RESPONSES_ENDPOINT$1 = "/responses";
|
|
2232
|
-
const MESSAGES_ENDPOINT = "/v1/messages";
|
|
2270
|
+
//#region src/routes/messages/api-flows.ts
|
|
2233
2271
|
const handleWithChatCompletions = async (c, anthropicPayload, options) => {
|
|
2234
|
-
const { subagentMarker, requestId, sessionId, isCompact } = options;
|
|
2272
|
+
const { logger: logger$7, subagentMarker, requestId, sessionId, isCompact } = options;
|
|
2235
2273
|
const openAIPayload = translateToOpenAI(anthropicPayload);
|
|
2236
|
-
logger$
|
|
2274
|
+
debugJson(logger$7, "Translated OpenAI request payload:", openAIPayload);
|
|
2237
2275
|
const response = await createChatCompletions(openAIPayload, {
|
|
2238
2276
|
subagentMarker,
|
|
2239
2277
|
requestId,
|
|
@@ -2241,12 +2279,12 @@ const handleWithChatCompletions = async (c, anthropicPayload, options) => {
|
|
|
2241
2279
|
isCompact
|
|
2242
2280
|
});
|
|
2243
2281
|
if (isNonStreaming(response)) {
|
|
2244
|
-
logger$
|
|
2282
|
+
debugJson(logger$7, "Non-streaming response from Copilot:", response);
|
|
2245
2283
|
const anthropicResponse = translateToAnthropic(response);
|
|
2246
|
-
logger$
|
|
2284
|
+
debugJson(logger$7, "Translated Anthropic response:", anthropicResponse);
|
|
2247
2285
|
return c.json(anthropicResponse);
|
|
2248
2286
|
}
|
|
2249
|
-
logger$
|
|
2287
|
+
logger$7.debug("Streaming response from Copilot");
|
|
2250
2288
|
return streamSSE(c, async (stream) => {
|
|
2251
2289
|
const streamState = {
|
|
2252
2290
|
messageStartSent: false,
|
|
@@ -2256,27 +2294,28 @@ const handleWithChatCompletions = async (c, anthropicPayload, options) => {
|
|
|
2256
2294
|
thinkingBlockOpen: false
|
|
2257
2295
|
};
|
|
2258
2296
|
for await (const rawEvent of response) {
|
|
2259
|
-
logger$
|
|
2297
|
+
debugJson(logger$7, "Copilot raw stream event:", rawEvent);
|
|
2260
2298
|
if (rawEvent.data === "[DONE]") break;
|
|
2261
2299
|
if (!rawEvent.data) continue;
|
|
2262
2300
|
const chunk = JSON.parse(rawEvent.data);
|
|
2263
2301
|
const events$1 = translateChunkToAnthropicEvents(chunk, streamState);
|
|
2264
2302
|
for (const event of events$1) {
|
|
2265
|
-
|
|
2303
|
+
const eventData = JSON.stringify(event);
|
|
2304
|
+
debugLazy(logger$7, () => ["Translated Anthropic event:", eventData]);
|
|
2266
2305
|
await stream.writeSSE({
|
|
2267
2306
|
event: event.type,
|
|
2268
|
-
data:
|
|
2307
|
+
data: eventData
|
|
2269
2308
|
});
|
|
2270
2309
|
}
|
|
2271
2310
|
}
|
|
2272
2311
|
});
|
|
2273
2312
|
};
|
|
2274
2313
|
const handleWithResponsesApi = async (c, anthropicPayload, options) => {
|
|
2275
|
-
const { subagentMarker, selectedModel, requestId, sessionId, isCompact } = options;
|
|
2314
|
+
const { logger: logger$7, subagentMarker, selectedModel, requestId, sessionId, isCompact } = options;
|
|
2276
2315
|
const responsesPayload = translateAnthropicMessagesToResponsesPayload(anthropicPayload);
|
|
2277
2316
|
applyResponsesApiContextManagement(responsesPayload, selectedModel?.capabilities.limits.max_prompt_tokens);
|
|
2278
2317
|
compactInputByLatestCompaction(responsesPayload);
|
|
2279
|
-
logger$
|
|
2318
|
+
debugJson(logger$7, "Translated Responses payload:", responsesPayload);
|
|
2280
2319
|
const { vision, initiator } = getResponsesRequestOptions(responsesPayload);
|
|
2281
2320
|
const response = await createResponses(responsesPayload, {
|
|
2282
2321
|
vision,
|
|
@@ -2287,7 +2326,7 @@ const handleWithResponsesApi = async (c, anthropicPayload, options) => {
|
|
|
2287
2326
|
isCompact
|
|
2288
2327
|
});
|
|
2289
2328
|
if (responsesPayload.stream && isAsyncIterable$1(response)) {
|
|
2290
|
-
logger$
|
|
2329
|
+
logger$7.debug("Streaming response from Copilot (Responses API)");
|
|
2291
2330
|
return streamSSE(c, async (stream) => {
|
|
2292
2331
|
const streamState = createResponsesStreamState();
|
|
2293
2332
|
for await (const chunk of response) {
|
|
@@ -2300,23 +2339,23 @@ const handleWithResponsesApi = async (c, anthropicPayload, options) => {
|
|
|
2300
2339
|
}
|
|
2301
2340
|
const data = chunk.data;
|
|
2302
2341
|
if (!data) continue;
|
|
2303
|
-
logger$
|
|
2342
|
+
debugLazy(logger$7, () => ["Responses raw stream event:", data]);
|
|
2304
2343
|
const events$1 = translateResponsesStreamEvent(JSON.parse(data), streamState);
|
|
2305
2344
|
for (const event of events$1) {
|
|
2306
2345
|
const eventData = JSON.stringify(event);
|
|
2307
|
-
logger$
|
|
2346
|
+
debugLazy(logger$7, () => ["Translated Anthropic event:", eventData]);
|
|
2308
2347
|
await stream.writeSSE({
|
|
2309
2348
|
event: event.type,
|
|
2310
2349
|
data: eventData
|
|
2311
2350
|
});
|
|
2312
2351
|
}
|
|
2313
2352
|
if (streamState.messageCompleted) {
|
|
2314
|
-
logger$
|
|
2353
|
+
logger$7.debug("Message completed, ending stream");
|
|
2315
2354
|
break;
|
|
2316
2355
|
}
|
|
2317
2356
|
}
|
|
2318
2357
|
if (!streamState.messageCompleted) {
|
|
2319
|
-
logger$
|
|
2358
|
+
logger$7.warn("Responses stream ended without completion; sending error event");
|
|
2320
2359
|
const errorEvent = buildErrorEvent("Responses stream ended without completion");
|
|
2321
2360
|
await stream.writeSSE({
|
|
2322
2361
|
event: errorEvent.type,
|
|
@@ -2325,25 +2364,18 @@ const handleWithResponsesApi = async (c, anthropicPayload, options) => {
|
|
|
2325
2364
|
}
|
|
2326
2365
|
});
|
|
2327
2366
|
}
|
|
2328
|
-
logger$
|
|
2367
|
+
debugJsonTail(logger$7, "Non-streaming Responses result:", {
|
|
2368
|
+
value: response,
|
|
2369
|
+
tailLength: 400
|
|
2370
|
+
});
|
|
2329
2371
|
const anthropicResponse = translateResponsesResultToAnthropic(response);
|
|
2330
|
-
logger$
|
|
2372
|
+
debugJson(logger$7, "Translated Anthropic response:", anthropicResponse);
|
|
2331
2373
|
return c.json(anthropicResponse);
|
|
2332
2374
|
};
|
|
2333
2375
|
const handleWithMessagesApi = async (c, anthropicPayload, options) => {
|
|
2334
|
-
const { anthropicBetaHeader, subagentMarker, selectedModel, requestId, sessionId, isCompact } = options;
|
|
2335
|
-
|
|
2336
|
-
|
|
2337
|
-
if (block.type !== "thinking") return true;
|
|
2338
|
-
return block.thinking && block.thinking !== "Thinking..." && block.signature && !block.signature.includes("@");
|
|
2339
|
-
});
|
|
2340
|
-
const toolChoice = anthropicPayload.tool_choice;
|
|
2341
|
-
const disableThink = toolChoice?.type === "any" || toolChoice?.type === "tool";
|
|
2342
|
-
if (selectedModel?.capabilities.supports.adaptive_thinking && !disableThink) {
|
|
2343
|
-
anthropicPayload.thinking = { type: "adaptive" };
|
|
2344
|
-
anthropicPayload.output_config = { effort: getAnthropicEffortForModel(anthropicPayload.model) };
|
|
2345
|
-
}
|
|
2346
|
-
logger$5.debug("Translated Messages payload:", JSON.stringify(anthropicPayload));
|
|
2376
|
+
const { logger: logger$7, anthropicBetaHeader, subagentMarker, selectedModel, requestId, sessionId, isCompact } = options;
|
|
2377
|
+
prepareMessagesApiPayload(anthropicPayload, selectedModel);
|
|
2378
|
+
debugJson(logger$7, "Translated Messages payload:", anthropicPayload);
|
|
2347
2379
|
const response = await createMessages(anthropicPayload, anthropicBetaHeader, {
|
|
2348
2380
|
subagentMarker,
|
|
2349
2381
|
requestId,
|
|
@@ -2351,12 +2383,12 @@ const handleWithMessagesApi = async (c, anthropicPayload, options) => {
|
|
|
2351
2383
|
isCompact
|
|
2352
2384
|
});
|
|
2353
2385
|
if (isAsyncIterable$1(response)) {
|
|
2354
|
-
logger$
|
|
2386
|
+
logger$7.debug("Streaming response from Copilot (Messages API)");
|
|
2355
2387
|
return streamSSE(c, async (stream) => {
|
|
2356
2388
|
for await (const event of response) {
|
|
2357
2389
|
const eventName = event.event;
|
|
2358
2390
|
const data = event.data ?? "";
|
|
2359
|
-
logger$
|
|
2391
|
+
debugLazy(logger$7, () => ["Messages raw stream event:", data]);
|
|
2360
2392
|
await stream.writeSSE({
|
|
2361
2393
|
event: eventName,
|
|
2362
2394
|
data
|
|
@@ -2364,95 +2396,116 @@ const handleWithMessagesApi = async (c, anthropicPayload, options) => {
|
|
|
2364
2396
|
}
|
|
2365
2397
|
});
|
|
2366
2398
|
}
|
|
2367
|
-
logger$
|
|
2399
|
+
debugJsonTail(logger$7, "Non-streaming Messages result:", {
|
|
2400
|
+
value: response,
|
|
2401
|
+
tailLength: 400
|
|
2402
|
+
});
|
|
2368
2403
|
return c.json(response);
|
|
2369
2404
|
};
|
|
2370
|
-
const shouldUseResponsesApi = (selectedModel) => {
|
|
2371
|
-
return selectedModel?.supported_endpoints?.includes(RESPONSES_ENDPOINT$1) ?? false;
|
|
2372
|
-
};
|
|
2373
|
-
const shouldUseMessagesApi = (selectedModel) => {
|
|
2374
|
-
if (!isMessagesApiEnabled()) return false;
|
|
2375
|
-
return selectedModel?.supported_endpoints?.includes(MESSAGES_ENDPOINT) ?? false;
|
|
2376
|
-
};
|
|
2377
2405
|
const isNonStreaming = (response) => Object.hasOwn(response, "choices");
|
|
2378
2406
|
const isAsyncIterable$1 = (value) => Boolean(value) && typeof value[Symbol.asyncIterator] === "function";
|
|
2379
|
-
|
|
2380
|
-
|
|
2381
|
-
|
|
2382
|
-
|
|
2383
|
-
|
|
2384
|
-
|
|
2385
|
-
|
|
2386
|
-
|
|
2387
|
-
|
|
2388
|
-
|
|
2389
|
-
|
|
2390
|
-
const isCompactMessage = (lastMessage) => {
|
|
2391
|
-
const text = getCompactCandidateText(lastMessage);
|
|
2392
|
-
if (!text) return false;
|
|
2393
|
-
return text.includes(compactTextOnlyGuard) && text.includes(compactSummaryPromptStart) && compactMessageSections.some((section) => text.includes(section));
|
|
2394
|
-
};
|
|
2395
|
-
const isCompactRequest = (anthropicPayload) => {
|
|
2396
|
-
const lastMessage = anthropicPayload.messages.at(-1);
|
|
2397
|
-
if (lastMessage && isCompactMessage(lastMessage)) return true;
|
|
2398
|
-
const system = anthropicPayload.system;
|
|
2399
|
-
if (typeof system === "string") return system.startsWith(compactSystemPromptStart);
|
|
2400
|
-
if (!Array.isArray(system)) return false;
|
|
2401
|
-
return system.some((msg) => typeof msg.text === "string" && msg.text.startsWith(compactSystemPromptStart));
|
|
2402
|
-
};
|
|
2403
|
-
const mergeContentWithText = (tr, textBlock) => {
|
|
2404
|
-
if (typeof tr.content === "string") return {
|
|
2405
|
-
...tr,
|
|
2406
|
-
content: `${tr.content}\n\n${textBlock.text}`
|
|
2407
|
-
};
|
|
2408
|
-
return {
|
|
2409
|
-
...tr,
|
|
2410
|
-
content: [...tr.content, textBlock]
|
|
2411
|
-
};
|
|
2412
|
-
};
|
|
2413
|
-
const mergeContentWithTexts = (tr, textBlocks) => {
|
|
2414
|
-
if (typeof tr.content === "string") {
|
|
2415
|
-
const appendedTexts = textBlocks.map((tb) => tb.text).join("\n\n");
|
|
2416
|
-
return {
|
|
2417
|
-
...tr,
|
|
2418
|
-
content: `${tr.content}\n\n${appendedTexts}`
|
|
2419
|
-
};
|
|
2407
|
+
|
|
2408
|
+
//#endregion
|
|
2409
|
+
//#region src/routes/messages/subagent-marker.ts
|
|
2410
|
+
const subagentMarkerPrefix = "__SUBAGENT_MARKER__";
|
|
2411
|
+
const parseSubagentMarkerFromFirstUser = (payload) => {
|
|
2412
|
+
const firstUserMessage = payload.messages.find((msg) => msg.role === "user");
|
|
2413
|
+
if (!firstUserMessage || !Array.isArray(firstUserMessage.content)) return null;
|
|
2414
|
+
for (const block of firstUserMessage.content) {
|
|
2415
|
+
if (block.type !== "text") continue;
|
|
2416
|
+
const marker = parseSubagentMarkerFromSystemReminder(block.text);
|
|
2417
|
+
if (marker) return marker;
|
|
2420
2418
|
}
|
|
2421
|
-
return
|
|
2422
|
-
...tr,
|
|
2423
|
-
content: [...tr.content, ...textBlocks]
|
|
2424
|
-
};
|
|
2419
|
+
return null;
|
|
2425
2420
|
};
|
|
2426
|
-
const
|
|
2427
|
-
|
|
2428
|
-
|
|
2429
|
-
|
|
2430
|
-
|
|
2431
|
-
|
|
2432
|
-
|
|
2433
|
-
|
|
2434
|
-
|
|
2435
|
-
|
|
2436
|
-
|
|
2421
|
+
const parseSubagentMarkerFromSystemReminder = (text) => {
|
|
2422
|
+
const startTag = "<system-reminder>";
|
|
2423
|
+
const endTag = "</system-reminder>";
|
|
2424
|
+
let searchFrom = 0;
|
|
2425
|
+
while (true) {
|
|
2426
|
+
const reminderStart = text.indexOf(startTag, searchFrom);
|
|
2427
|
+
if (reminderStart === -1) break;
|
|
2428
|
+
const contentStart = reminderStart + 17;
|
|
2429
|
+
const reminderEnd = text.indexOf(endTag, contentStart);
|
|
2430
|
+
if (reminderEnd === -1) break;
|
|
2431
|
+
const reminderContent = text.slice(contentStart, reminderEnd);
|
|
2432
|
+
const markerIndex = reminderContent.indexOf(subagentMarkerPrefix);
|
|
2433
|
+
if (markerIndex === -1) {
|
|
2434
|
+
searchFrom = reminderEnd + 18;
|
|
2435
|
+
continue;
|
|
2436
|
+
}
|
|
2437
|
+
const markerJson = reminderContent.slice(markerIndex + 19).trim();
|
|
2438
|
+
try {
|
|
2439
|
+
const parsed = JSON.parse(markerJson);
|
|
2440
|
+
if (!parsed.session_id || !parsed.agent_id || !parsed.agent_type) {
|
|
2441
|
+
searchFrom = reminderEnd + 18;
|
|
2442
|
+
continue;
|
|
2443
|
+
}
|
|
2444
|
+
return parsed;
|
|
2445
|
+
} catch {
|
|
2446
|
+
searchFrom = reminderEnd + 18;
|
|
2447
|
+
continue;
|
|
2437
2448
|
}
|
|
2438
|
-
if (!valid || toolResults.length === 0 || textBlocks.length === 0) continue;
|
|
2439
|
-
msg.content = mergeToolResult(toolResults, textBlocks);
|
|
2440
2449
|
}
|
|
2450
|
+
return null;
|
|
2441
2451
|
};
|
|
2442
|
-
|
|
2443
|
-
|
|
2444
|
-
|
|
2445
|
-
|
|
2452
|
+
|
|
2453
|
+
//#endregion
|
|
2454
|
+
//#region src/routes/messages/handler.ts
const logger$5 = createHandlerLogger("messages-handler");
/**
 * Entry point for Anthropic-style /v1/messages completions.
 * Normalizes the incoming payload, resolves the backend model, then
 * dispatches to one of three upstream API styles: native Messages,
 * Responses, or Chat Completions (the fallback).
 *
 * @param c - request context (Hono-style: c.req.json(), c.req.header()) — TODO confirm framework
 * @returns the upstream handler's response
 */
async function handleCompletion(c) {
	await checkRateLimit(state);
	const anthropicPayload = await c.req.json();
	debugJson(logger$5, "Anthropic request payload:", anthropicPayload);
	// A marker embedded in the first user message identifies a subagent run.
	const subagentMarker = parseSubagentMarkerFromFirstUser(anthropicPayload);
	if (subagentMarker) debugJson(logger$5, "Detected Subagent marker:", subagentMarker);
	const sessionId = getRootSessionId(anthropicPayload, c);
	logger$5.debug("Extracted session ID:", sessionId);
	const isCompact = isCompactRequest(anthropicPayload);
	const anthropicBeta = c.req.header("anthropic-beta");
	logger$5.debug("Anthropic Beta header:", anthropicBeta);
	const noTools = !anthropicPayload.tools || anthropicPayload.tools.length === 0;
	// Beta requests with no tools (and not a compact request) are rerouted to
	// the small model — presumably a cost/latency optimization; verify intent.
	if (anthropicBeta && noTools && !isCompact) anthropicPayload.model = getSmallModel();
	// Compact requests skip the tool-result merge pass; all others get it.
	if (isCompact) logger$5.debug("Is compact request:", isCompact);
	else mergeToolResultForClaude(anthropicPayload);
	const requestId = generateRequestIdFromPayload(anthropicPayload, sessionId);
	logger$5.debug("Generated request ID:", requestId);
	// Optional human-in-the-loop gate before contacting the upstream.
	if (state.manualApprove) await awaitApproval();
	const selectedModel = findEndpointModel(anthropicPayload.model);
	// Rewrite the model to the endpoint's canonical id when one was resolved.
	anthropicPayload.model = selectedModel?.id ?? anthropicPayload.model;
	// Dispatch priority: Messages API, then Responses API, then Chat Completions.
	if (shouldUseMessagesApi(selectedModel)) return await handleWithMessagesApi(c, anthropicPayload, {
		anthropicBetaHeader: anthropicBeta,
		subagentMarker,
		selectedModel,
		requestId,
		sessionId,
		isCompact,
		logger: logger$5
	});
	if (shouldUseResponsesApi(selectedModel)) return await handleWithResponsesApi(c, anthropicPayload, {
		subagentMarker,
		selectedModel,
		requestId,
		sessionId,
		isCompact,
		logger: logger$5
	});
	return await handleWithChatCompletions(c, anthropicPayload, {
		subagentMarker,
		requestId,
		sessionId,
		isCompact,
		logger: logger$5
	});
}
|
|
2501
|
+
const RESPONSES_ENDPOINT$1 = "/responses";
const MESSAGES_ENDPOINT = "/v1/messages";
/**
 * Whether the selected model should be served through the Responses API,
 * i.e. it advertises the "/responses" endpoint in supported_endpoints.
 * Returns false when no model was resolved or the list is absent.
 */
const shouldUseResponsesApi = (selectedModel) => {
	const endpoints = selectedModel?.supported_endpoints;
	if (endpoints == null) return false;
	return endpoints.includes(RESPONSES_ENDPOINT$1);
};
|
|
2447
|
-
const
|
|
2448
|
-
if (
|
|
2449
|
-
|
|
2450
|
-
const cc = b.cache_control;
|
|
2451
|
-
if (cc && typeof cc === "object") {
|
|
2452
|
-
const { scope,...rest } = cc;
|
|
2453
|
-
b.cache_control = rest;
|
|
2454
|
-
}
|
|
2455
|
-
}
|
|
2506
|
/**
 * Whether the selected model should be served through the native Messages
 * API: requires the feature flag (isMessagesApiEnabled) to be on AND the
 * model to advertise the "/v1/messages" endpoint.
 */
const shouldUseMessagesApi = (selectedModel) => {
	if (!isMessagesApiEnabled()) return false;
	const endpoints = selectedModel?.supported_endpoints;
	if (endpoints == null) return false;
	return endpoints.includes(MESSAGES_ENDPOINT);
};
|
|
2457
2510
|
|
|
2458
2511
|
//#endregion
|
|
@@ -2622,10 +2675,10 @@ async function handleProviderMessages(c) {
|
|
|
2622
2675
|
payload.temperature ??= modelConfig?.temperature;
|
|
2623
2676
|
payload.top_p ??= modelConfig?.topP;
|
|
2624
2677
|
payload.top_k ??= modelConfig?.topK;
|
|
2625
|
-
logger$3
|
|
2678
|
+
debugJson(logger$3, "provider.messages.request", {
|
|
2626
2679
|
payload,
|
|
2627
2680
|
provider
|
|
2628
|
-
})
|
|
2681
|
+
});
|
|
2629
2682
|
const upstreamResponse = await forwardProviderMessages(providerConfig, payload, c.req.raw.headers);
|
|
2630
2683
|
if (!upstreamResponse.ok) {
|
|
2631
2684
|
logger$3.error("Failed to create responses", upstreamResponse);
|
|
@@ -2668,7 +2721,7 @@ async function handleProviderMessages(c) {
|
|
|
2668
2721
|
}
|
|
2669
2722
|
const jsonBody = await upstreamResponse.json();
|
|
2670
2723
|
adjustInputTokens(providerConfig, jsonBody.usage);
|
|
2671
|
-
logger$3
|
|
2724
|
+
debugJson(logger$3, "provider.messages.no_stream result:", jsonBody);
|
|
2672
2725
|
return c.json(jsonBody);
|
|
2673
2726
|
} catch (error) {
|
|
2674
2727
|
logger$3.error("provider.messages.error", {
|
|
@@ -2681,7 +2734,7 @@ async function handleProviderMessages(c) {
|
|
|
2681
2734
|
const adjustInputTokens = (providerConfig, usage) => {
|
|
2682
2735
|
if (!providerConfig.adjustInputTokens || !usage) return;
|
|
2683
2736
|
usage.input_tokens = Math.max(0, (usage.input_tokens ?? 0) - (usage.cache_read_input_tokens ?? 0) - (usage.cache_creation_input_tokens ?? 0));
|
|
2684
|
-
logger$3
|
|
2737
|
+
debugJson(logger$3, "provider.messages.adjusted_usage:", usage);
|
|
2685
2738
|
};
|
|
2686
2739
|
|
|
2687
2740
|
//#endregion
|
|
@@ -2773,7 +2826,7 @@ const RESPONSES_ENDPOINT = "/responses";
|
|
|
2773
2826
|
const handleResponses = async (c) => {
|
|
2774
2827
|
await checkRateLimit(state);
|
|
2775
2828
|
const payload = await c.req.json();
|
|
2776
|
-
logger$1
|
|
2829
|
+
debugJson(logger$1, "Responses request payload:", payload);
|
|
2777
2830
|
const requestId = generateRequestIdFromPayload({ messages: payload.input });
|
|
2778
2831
|
logger$1.debug("Generated request ID:", requestId);
|
|
2779
2832
|
const sessionId = getUUID(requestId);
|
|
@@ -2787,7 +2840,7 @@ const handleResponses = async (c) => {
|
|
|
2787
2840
|
type: "invalid_request_error"
|
|
2788
2841
|
} }, 400);
|
|
2789
2842
|
applyResponsesApiContextManagement(payload, selectedModel?.capabilities.limits.max_prompt_tokens);
|
|
2790
|
-
logger$1
|
|
2843
|
+
debugJson(logger$1, "Translated Responses payload:", payload);
|
|
2791
2844
|
const { vision, initiator } = getResponsesRequestOptions(payload);
|
|
2792
2845
|
if (state.manualApprove) await awaitApproval();
|
|
2793
2846
|
const response = await createResponses(payload, {
|
|
@@ -2801,7 +2854,7 @@ const handleResponses = async (c) => {
|
|
|
2801
2854
|
return streamSSE(c, async (stream) => {
|
|
2802
2855
|
const idTracker = createStreamIdTracker();
|
|
2803
2856
|
for await (const chunk of response) {
|
|
2804
|
-
logger$1
|
|
2857
|
+
debugJson(logger$1, "Responses stream chunk:", chunk);
|
|
2805
2858
|
const processedData = fixStreamIds(chunk.data ?? "", chunk.event, idTracker);
|
|
2806
2859
|
await stream.writeSSE({
|
|
2807
2860
|
id: chunk.id,
|
|
@@ -2811,7 +2864,10 @@ const handleResponses = async (c) => {
|
|
|
2811
2864
|
}
|
|
2812
2865
|
});
|
|
2813
2866
|
}
|
|
2814
|
-
logger$1
|
|
2867
|
+
debugJsonTail(logger$1, "Forwarding native Responses result:", {
|
|
2868
|
+
value: response,
|
|
2869
|
+
tailLength: 400
|
|
2870
|
+
});
|
|
2815
2871
|
return c.json(response);
|
|
2816
2872
|
};
|
|
2817
2873
|
// True when the value implements the async iteration protocol
// (has a callable [Symbol.asyncIterator]); false for any falsy value.
const isAsyncIterable = (value) => {
	if (!value) return false;
	return typeof value[Symbol.asyncIterator] === "function";
};
|
|
@@ -2920,4 +2976,4 @@ server.route("/:provider/v1/models", providerModelRoutes);
|
|
|
2920
2976
|
|
|
2921
2977
|
//#endregion
|
|
2922
2978
|
export { server };
|
|
2923
|
-
//# sourceMappingURL=server-
|
|
2979
|
+
//# sourceMappingURL=server-DP3qGqqy.js.map
|