@hsupu/copilot-api 0.7.22 → 0.7.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/config.example.yaml +1 -1
- package/dist/main.mjs +12 -11
- package/dist/main.mjs.map +1 -1
- package/package.json +1 -1
package/config.example.yaml
CHANGED
|
@@ -127,7 +127,7 @@ anthropic:
|
|
|
127
127
|
|
|
128
128
|
openai-responses:
|
|
129
129
|
# Convert `call_xxx` IDs to `fc_xxx` in input. Required when clients send conversation history with
|
|
130
|
-
# Chat Completions-format tool call IDs to the Responses API (which requires `fc_` prefix). Default: false.
|
|
130
|
+
# Chat Completions-format tool call IDs to the Responses API (which requires `fc_` prefix). Default: true.
|
|
131
131
|
normalize_call_ids: true
|
|
132
132
|
|
|
133
133
|
# ============================================================================
|
package/dist/main.mjs
CHANGED
|
@@ -57,7 +57,7 @@ const state = {
|
|
|
57
57
|
streamIdleTimeout: 300,
|
|
58
58
|
systemPromptOverrides: [],
|
|
59
59
|
stripReadToolResultTags: false,
|
|
60
|
-
normalizeResponsesCallIds: false,
|
|
60
|
+
normalizeResponsesCallIds: true,
|
|
61
61
|
verbose: false
|
|
62
62
|
};
|
|
63
63
|
|
|
@@ -4782,7 +4782,7 @@ const setupClaudeCode = defineCommand({
|
|
|
4782
4782
|
|
|
4783
4783
|
//#endregion
|
|
4784
4784
|
//#region package.json
|
|
4785
|
-
var version = "0.7.22";
|
|
4785
|
+
var version = "0.7.23";
|
|
4786
4786
|
|
|
4787
4787
|
//#endregion
|
|
4788
4788
|
//#region src/lib/context/error-persistence.ts
|
|
@@ -6013,13 +6013,11 @@ function createTokenRefreshStrategy() {
|
|
|
6013
6013
|
function createResponsesAdapter(selectedModel, headersCapture) {
|
|
6014
6014
|
return {
|
|
6015
6015
|
format: "openai-responses",
|
|
6016
|
-
sanitize: (p) => {
|
|
6017
|
-
|
|
6018
|
-
|
|
6019
|
-
|
|
6020
|
-
|
|
6021
|
-
};
|
|
6022
|
-
},
|
|
6016
|
+
sanitize: (p) => ({
|
|
6017
|
+
payload: p,
|
|
6018
|
+
blocksRemoved: 0,
|
|
6019
|
+
systemReminderRemovals: 0
|
|
6020
|
+
}),
|
|
6023
6021
|
execute: (p) => executeWithAdaptiveRateLimit(() => createResponses(p, {
|
|
6024
6022
|
resolvedModel: selectedModel,
|
|
6025
6023
|
headersCapture
|
|
@@ -6114,7 +6112,8 @@ function sendErrorAndClose(ws, message, code) {
|
|
|
6114
6112
|
} catch {}
|
|
6115
6113
|
}
|
|
6116
6114
|
/** Handle a response.create message over WebSocket */
|
|
6117
|
-
async function handleResponseCreate(ws, payload) {
|
|
6115
|
+
async function handleResponseCreate(ws, rawPayload) {
|
|
6116
|
+
let payload = rawPayload;
|
|
6118
6117
|
const requestedModel = payload.model;
|
|
6119
6118
|
const resolvedModel = resolveModelName(requestedModel);
|
|
6120
6119
|
payload.model = resolvedModel;
|
|
@@ -6124,6 +6123,7 @@ async function handleResponseCreate(ws, payload) {
|
|
|
6124
6123
|
return;
|
|
6125
6124
|
}
|
|
6126
6125
|
payload.instructions = await processResponsesInstructions(payload.instructions, payload.model);
|
|
6126
|
+
if (state.normalizeResponsesCallIds) payload = normalizeCallIds(payload);
|
|
6127
6127
|
const tuiLogId = tuiLogger.startRequest({
|
|
6128
6128
|
method: "WS",
|
|
6129
6129
|
path: "/v1/responses",
|
|
@@ -10192,7 +10192,7 @@ modelsRoutes.get("/:model", async (c) => {
|
|
|
10192
10192
|
//#region src/routes/responses/handler.ts
|
|
10193
10193
|
/** Handle an inbound Responses API request */
|
|
10194
10194
|
async function handleResponses(c) {
|
|
10195
|
-
|
|
10195
|
+
let payload = await c.req.json();
|
|
10196
10196
|
const clientModel = payload.model;
|
|
10197
10197
|
const resolvedModel = resolveModelName(clientModel);
|
|
10198
10198
|
if (resolvedModel !== clientModel) {
|
|
@@ -10204,6 +10204,7 @@ async function handleResponses(c) {
|
|
|
10204
10204
|
throw new HTTPError(msg, 400, msg);
|
|
10205
10205
|
}
|
|
10206
10206
|
payload.instructions = await processResponsesInstructions(payload.instructions, payload.model);
|
|
10207
|
+
if (state.normalizeResponsesCallIds) payload = normalizeCallIds(payload);
|
|
10207
10208
|
const tuiLogId = c.get("tuiLogId");
|
|
10208
10209
|
const reqCtx = getRequestContextManager().create({
|
|
10209
10210
|
endpoint: "openai-responses",
|