@jsonstudio/llms 0.6.230 → 0.6.467
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/dist/conversion/codecs/gemini-openai-codec.js +24 -2
- package/dist/conversion/compat/actions/gemini-web-search.d.ts +17 -0
- package/dist/conversion/compat/actions/gemini-web-search.js +68 -0
- package/dist/conversion/compat/actions/glm-image-content.d.ts +2 -0
- package/dist/conversion/compat/actions/glm-image-content.js +83 -0
- package/dist/conversion/compat/actions/glm-vision-prompt.d.ts +11 -0
- package/dist/conversion/compat/actions/glm-vision-prompt.js +177 -0
- package/dist/conversion/compat/actions/glm-web-search.js +25 -28
- package/dist/conversion/compat/actions/iflow-web-search.d.ts +18 -0
- package/dist/conversion/compat/actions/iflow-web-search.js +87 -0
- package/dist/conversion/compat/actions/universal-shape-filter.js +11 -0
- package/dist/conversion/compat/profiles/chat-gemini.json +17 -0
- package/dist/conversion/compat/profiles/chat-glm.json +194 -184
- package/dist/conversion/compat/profiles/chat-iflow.json +199 -195
- package/dist/conversion/compat/profiles/chat-lmstudio.json +43 -43
- package/dist/conversion/compat/profiles/chat-qwen.json +20 -20
- package/dist/conversion/compat/profiles/responses-c4m.json +42 -42
- package/dist/conversion/config/sample-config.json +1 -1
- package/dist/conversion/hub/pipeline/compat/compat-pipeline-executor.js +24 -0
- package/dist/conversion/hub/pipeline/compat/compat-types.d.ts +8 -0
- package/dist/conversion/hub/pipeline/hub-pipeline.js +32 -1
- package/dist/conversion/hub/pipeline/session-identifiers.d.ts +9 -0
- package/dist/conversion/hub/pipeline/session-identifiers.js +76 -0
- package/dist/conversion/hub/pipeline/stages/resp_inbound/resp_inbound_stage1_sse_decode/index.js +31 -2
- package/dist/conversion/hub/pipeline/target-utils.js +6 -0
- package/dist/conversion/hub/process/chat-process.js +186 -40
- package/dist/conversion/hub/response/provider-response.d.ts +13 -1
- package/dist/conversion/hub/response/provider-response.js +84 -35
- package/dist/conversion/hub/response/server-side-tools.js +61 -4
- package/dist/conversion/hub/semantic-mappers/gemini-mapper.js +123 -3
- package/dist/conversion/hub/semantic-mappers/responses-mapper.js +17 -1
- package/dist/conversion/hub/standardized-bridge.js +14 -0
- package/dist/conversion/responses/responses-openai-bridge.js +110 -6
- package/dist/conversion/shared/anthropic-message-utils.js +133 -9
- package/dist/conversion/shared/bridge-message-utils.js +137 -10
- package/dist/conversion/shared/errors.d.ts +20 -0
- package/dist/conversion/shared/errors.js +28 -0
- package/dist/conversion/shared/responses-conversation-store.js +30 -3
- package/dist/conversion/shared/responses-output-builder.js +111 -8
- package/dist/conversion/shared/tool-filter-pipeline.js +1 -0
- package/dist/filters/special/request-toolcalls-stringify.d.ts +13 -0
- package/dist/filters/special/request-toolcalls-stringify.js +103 -3
- package/dist/filters/special/response-tool-text-canonicalize.d.ts +16 -0
- package/dist/filters/special/response-tool-text-canonicalize.js +27 -3
- package/dist/router/virtual-router/bootstrap.js +44 -12
- package/dist/router/virtual-router/classifier.js +13 -17
- package/dist/router/virtual-router/engine.d.ts +39 -0
- package/dist/router/virtual-router/engine.js +755 -55
- package/dist/router/virtual-router/features.js +1 -1
- package/dist/router/virtual-router/message-utils.js +36 -24
- package/dist/router/virtual-router/provider-registry.d.ts +15 -0
- package/dist/router/virtual-router/provider-registry.js +42 -1
- package/dist/router/virtual-router/routing-instructions.d.ts +34 -0
- package/dist/router/virtual-router/routing-instructions.js +383 -0
- package/dist/router/virtual-router/sticky-session-store.d.ts +3 -0
- package/dist/router/virtual-router/sticky-session-store.js +110 -0
- package/dist/router/virtual-router/token-counter.js +14 -3
- package/dist/router/virtual-router/tool-signals.js +0 -22
- package/dist/router/virtual-router/types.d.ts +80 -0
- package/dist/router/virtual-router/types.js +2 -1
- package/dist/servertool/engine.d.ts +27 -0
- package/dist/servertool/engine.js +101 -0
- package/dist/servertool/flow-types.d.ts +40 -0
- package/dist/servertool/flow-types.js +1 -0
- package/dist/servertool/handlers/vision.d.ts +1 -0
- package/dist/servertool/handlers/vision.js +194 -0
- package/dist/servertool/handlers/web-search.d.ts +1 -0
- package/dist/servertool/handlers/web-search.js +791 -0
- package/dist/servertool/orchestration-types.d.ts +33 -0
- package/dist/servertool/orchestration-types.js +1 -0
- package/dist/servertool/registry.d.ts +18 -0
- package/dist/servertool/registry.js +27 -0
- package/dist/servertool/server-side-tools.d.ts +8 -0
- package/dist/servertool/server-side-tools.js +208 -0
- package/dist/servertool/types.d.ts +94 -0
- package/dist/servertool/types.js +1 -0
- package/dist/servertool/vision-tool.d.ts +2 -0
- package/dist/servertool/vision-tool.js +185 -0
- package/dist/sse/sse-to-json/builders/response-builder.js +6 -3
- package/package.json +1 -1

@@ -1,3 +1,4 @@
+import { recordStage } from '../pipeline/stages/utils.js';
 import { ChatFormatAdapter } from '../format-adapters/chat-format-adapter.js';
 import { ResponsesFormatAdapter } from '../format-adapters/responses-format-adapter.js';
 import { AnthropicFormatAdapter } from '../format-adapters/anthropic-format-adapter.js';

@@ -12,45 +13,36 @@ import { runRespProcessStage2Finalize } from '../pipeline/stages/resp_process/re
 import { runRespOutboundStage1ClientRemap } from '../pipeline/stages/resp_outbound/resp_outbound_stage1_client_remap/index.js';
 import { runRespOutboundStage2SseStream } from '../pipeline/stages/resp_outbound/resp_outbound_stage2_sse_stream/index.js';
 import { recordResponsesResponse } from '../../shared/responses-conversation-store.js';
-import {
-function resolveChatReasoningMode(entryEndpoint) {
-    const envRaw = (process.env.ROUTECODEX_CHAT_REASONING_MODE || process.env.RCC_CHAT_REASONING_MODE || '').trim().toLowerCase();
-    const map = {
-        keep: 'keep',
-        drop: 'drop',
-        discard: 'drop',
-        text: 'append_to_content',
-        append: 'append_to_content',
-        append_text: 'append_to_content',
-        append_to_content: 'append_to_content'
-    };
-    if (envRaw && map[envRaw]) {
-        return map[envRaw];
-    }
-    return 'keep';
-}
+import { runServerToolOrchestration } from '../../../servertool/engine.js';
 const PROVIDER_RESPONSE_REGISTRY = {
     'openai-chat': {
-        protocol: 'openai-chat',
         createFormatAdapter: () => new ChatFormatAdapter(),
         createMapper: () => new OpenAIChatResponseMapper()
     },
     'openai-responses': {
-        protocol: 'openai-responses',
         createFormatAdapter: () => new ResponsesFormatAdapter(),
         createMapper: () => new ResponsesResponseMapper()
     },
     'anthropic-messages': {
-        protocol: 'anthropic-messages',
         createFormatAdapter: () => new AnthropicFormatAdapter(),
         createMapper: () => new AnthropicResponseMapper()
     },
     'gemini-chat': {
-        protocol: 'gemini-chat',
         createFormatAdapter: () => new GeminiFormatAdapter(),
         createMapper: () => new GeminiResponseMapper()
     }
 };
+function isServerToolFollowup(context) {
+    const raw = context.serverToolFollowup;
+    if (raw === true) {
+        return true;
+    }
+    if (typeof raw === 'string') {
+        const v = raw.trim().toLowerCase();
+        return v === '1' || v === 'true';
+    }
+    return false;
+}
 function resolveClientProtocol(entryEndpoint) {
     const lowered = (entryEndpoint || '').toLowerCase();
     if (lowered.includes('/v1/responses'))

@@ -86,8 +78,28 @@ function applyModelOverride(payload, model) {
         /* ignore */
     }
 }
+function resolveChatReasoningMode(_entryEndpoint) {
+    // Current default policy: keep the reasoning_content field as-is, with no extra concatenation or removal.
+    return 'keep';
+}
 export async function convertProviderResponse(options) {
     const clientProtocol = resolveClientProtocol(options.entryEndpoint);
+    const hasServerToolSupport = Boolean(options.providerInvoker) || Boolean(options.reenterPipeline);
+    const skipServerTools = isServerToolFollowup(options.context) || !hasServerToolSupport;
+    // For internal hops triggered by server-side tools (second/third hop), disable SSE aggregation
+    // and always return the complete ChatCompletion JSON so it can be parsed directly inside llms,
+    // instead of receiving an __sse_responses readable stream.
+    const wantsStream = isServerToolFollowup(options.context) ? false : options.wantsStream;
+    try {
+        // eslint-disable-next-line no-console
+        console.log(`\x1b[38;5;33m[servertool][orchestrator][debug] requestId=${options.context.requestId} ` +
+            `protocol=${options.providerProtocol} endpoint=${options.entryEndpoint} ` +
+            `skipServerTools=${skipServerTools} hasInvoker=${Boolean(options.providerInvoker)} ` +
+            `hasReenter=${Boolean(options.reenterPipeline)}\x1b[0m`);
+    }
+    catch {
+        /* logging best-effort */
+    }
     const displayModel = extractDisplayModel(options.context);
     const plan = PROVIDER_RESPONSE_REGISTRY[options.providerProtocol];
     if (!plan) {

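For orientation, a minimal sketch of the follow-up flag coercion used above and its effect on streaming; the values and helper names are illustrative, not taken from a real request:

```js
// Sketch only: mirrors isServerToolFollowup plus the wantsStream decision in the hunk above.
const isFollowup = (raw) => raw === true ||
    (typeof raw === 'string' && ['1', 'true'].includes(raw.trim().toLowerCase()));
const decide = (context, clientWantsStream) => (isFollowup(context.serverToolFollowup) ? false : clientWantsStream);
console.log(decide({ serverToolFollowup: 'true' }, true));  // false: internal second hop gets full ChatCompletion JSON
console.log(decide({}, true));                              // true: first hop keeps the client's streaming preference
```
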
@@ -97,7 +109,7 @@ export async function convertProviderResponse(options) {
         providerProtocol: options.providerProtocol,
         payload: options.providerResponse,
         adapterContext: options.context,
-        wantsStream
+        wantsStream,
         stageRecorder: options.stageRecorder
     });
     const formatAdapter = plan.createFormatAdapter();

@@ -138,18 +150,55 @@ export async function convertProviderResponse(options) {
         mapper,
         stageRecorder: options.stageRecorder
     });
-    //
-
-
-
-
-
-
-
-
-
+    // Record the semantically mapped ChatCompletion so the server-side tool flow can be replayed.
+    recordStage(options.stageRecorder, 'resp_inbound_stage3_semantic_map.chat', chatResponse);
+    // Check whether ServerTool orchestration is needed.
+    // Uses the new ChatEnvelope-level servertool implementation.
+    let effectiveChatResponse = chatResponse;
+    if (!skipServerTools && options.reenterPipeline) {
+        try {
+            // eslint-disable-next-line no-console
+            console.log(`\x1b[38;5;33m[servertool][orchestrator] start requestId=${options.context.requestId} ` +
+                `protocol=${options.providerProtocol} endpoint=${options.entryEndpoint}\x1b[0m`);
+        }
+        catch {
+            /* logging best-effort */
+        }
+        const orchestration = await runServerToolOrchestration({
+            chat: chatResponse,
+            adapterContext: options.context,
+            requestId: options.context.requestId,
+            entryEndpoint: options.entryEndpoint,
+            providerProtocol: options.providerProtocol,
+            providerInvoker: options.providerInvoker,
+            reenterPipeline: options.reenterPipeline
+        });
+        if (orchestration.executed) {
+            const flowLabel = orchestration.flowId ?? 'servertool_flow';
+            try {
+                // eslint-disable-next-line no-console
+                console.log(`\x1b[38;5;33m[servertool][orchestrator] completed requestId=${options.context.requestId} ` +
+                    `mode=${flowLabel}\x1b[0m`);
+            }
+            catch {
+                /* logging best-effort */
+            }
+            effectiveChatResponse = orchestration.chat;
+        }
+        else {
+            try {
+                // eslint-disable-next-line no-console
+                console.log(`\x1b[38;5;33m[servertool][orchestrator] skipped requestId=${options.context.requestId} ` +
+                    'reason=no_servertool_match\x1b[0m');
+            }
+            catch {
+                /* logging best-effort */
+            }
+        }
+    }
+    // If no servertool ran, continue with the original processing flow.
     const governanceResult = await runRespProcessStage1ToolGovernance({
-        payload:
+        payload: effectiveChatResponse,
         entryEndpoint: options.entryEndpoint,
         requestId: options.context.requestId,
         clientProtocol,

@@ -159,7 +208,7 @@ export async function convertProviderResponse(options) {
         payload: governanceResult.governedPayload,
         entryEndpoint: options.entryEndpoint,
         requestId: options.context.requestId,
-        wantsStream
+        wantsStream,
         reasoningMode: resolveChatReasoningMode(options.entryEndpoint),
         stageRecorder: options.stageRecorder
     });

@@ -176,7 +225,7 @@ export async function convertProviderResponse(options) {
         clientPayload,
         clientProtocol,
         requestId: options.context.requestId,
-        wantsStream
+        wantsStream,
         stageRecorder: options.stageRecorder
     });
     if (outbound.stream) {

@@ -32,7 +32,28 @@ function extractToolCalls(chatResponse) {
     return calls;
 }
 function extractTextFromChatLike(payload) {
-
+    // 1) Unwrap common wrapper layers: data / response nodes.
+    let current = payload;
+    const visited = new Set();
+    while (current && typeof current === 'object' && !Array.isArray(current) && !visited.has(current)) {
+        visited.add(current);
+        if (Array.isArray(current.choices) || Array.isArray(current.output)) {
+            break;
+        }
+        const data = current.data;
+        if (data && typeof data === 'object' && !Array.isArray(data)) {
+            current = data;
+            continue;
+        }
+        const response = current.response;
+        if (response && typeof response === 'object' && !Array.isArray(response)) {
+            current = response;
+            continue;
+        }
+        break;
+    }
+    // 2) Prefer extracting from choices[].message.content (OpenAI/GLM compatible).
+    const choices = getArray(current.choices);
     if (!choices.length)
         return '';
     const first = asObject(choices[0]);

@@ -43,7 +64,7 @@ function extractTextFromChatLike(payload) {
         return '';
     const content = message.content;
     if (typeof content === 'string')
-        return content;
+        return content.trim();
     const parts = getArray(content);
     const texts = [];
     for (const part of parts) {

@@ -55,9 +76,45 @@ function extractTextFromChatLike(payload) {
             if (typeof record.text === 'string') {
                 texts.push(record.text);
             }
+            else if (typeof record.content === 'string') {
+                texts.push(record.content);
+            }
+        }
+    }
+    const joinedFromChoices = texts.join('\n').trim();
+    if (joinedFromChoices) {
+        return joinedFromChoices;
+    }
+    // 3) Fallback: extract from output[].content[] (some Responses/custom backends).
+    const output = current.output;
+    if (Array.isArray(output)) {
+        const altTexts = [];
+        for (const entry of output) {
+            if (!entry || typeof entry !== 'object')
+                continue;
+            const blocks = entry.content;
+            const blockArray = Array.isArray(blocks) ? blocks : [];
+            for (const block of blockArray) {
+                if (!block || typeof block !== 'object')
+                    continue;
+                const record = block;
+                if (typeof record.text === 'string') {
+                    altTexts.push(record.text);
+                }
+                else if (typeof record.output_text === 'string') {
+                    altTexts.push(record.output_text);
+                }
+                else if (typeof record.content === 'string') {
+                    altTexts.push(record.content);
+                }
+            }
+        }
+        const joined = altTexts.join('\n').trim();
+        if (joined) {
+            return joined;
         }
     }
-    return
+    return '';
 }
 function getWebSearchConfig(ctx) {
     const raw = ctx.webSearch;

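Two hypothetical payload shapes (not from any real provider) that the extraction order above is meant to cover, the first resolved through the wrapper unwrap and trim, the second through the output[].content[] fallback:

```js
// Wrapped ChatCompletion: unwrapped via the data node, then trimmed -> 'hello world'
const wrapped = { data: { choices: [{ message: { content: '  hello world  ' } }] } };
// Choices exist but carry no text, so the Responses-style output blocks are used -> 'fallback text'
const responsesLike = {
    choices: [{ message: { content: [] } }],
    output: [{ content: [{ type: 'output_text', text: 'fallback text' }] }]
};
```
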
@@ -131,7 +188,7 @@ function resolveEnvServerSideToolsEnabled() {
         return false;
     if (raw === '1' || raw === 'true' || raw === 'yes')
         return true;
-    if (raw === 'web_search'
+    if (raw === 'web_search')
         return true;
     return false;
 }

@@ -171,6 +171,117 @@ function collectParameters(payload) {
     }
     return Object.keys(params).length ? params : undefined;
 }
+function appendChatContentToGeminiParts(message, targetParts) {
+    const content = message.content;
+    if (typeof content === 'string') {
+        const text = content.trim();
+        if (text.length) {
+            targetParts.push({ text });
+        }
+        return;
+    }
+    if (!Array.isArray(content)) {
+        return;
+    }
+    const items = content;
+    for (const block of items) {
+        if (block == null)
+            continue;
+        if (typeof block === 'string') {
+            const text = block.trim();
+            if (text.length) {
+                targetParts.push({ text });
+            }
+            continue;
+        }
+        if (typeof block !== 'object') {
+            const text = String(block);
+            if (text.trim().length) {
+                targetParts.push({ text: text.trim() });
+            }
+            continue;
+        }
+        const record = block;
+        const rawType = record.type;
+        const type = typeof rawType === 'string' ? rawType.toLowerCase() : '';
+        // Text-style blocks
+        if (!type || type === 'text') {
+            const textValue = typeof record.text === 'string'
+                ? record.text
+                : typeof record.content === 'string'
+                    ? record.content
+                    : '';
+            const text = textValue.trim();
+            if (text.length) {
+                targetParts.push({ text });
+            }
+            continue;
+        }
+        // Image-style blocks -> Gemini inlineData
+        if (type === 'image' || type === 'image_url') {
+            // Prefer OpenAI-style image_url.url, but also accept uri/url/data.
+            let url;
+            const imageUrlRaw = record.image_url;
+            if (typeof imageUrlRaw === 'string') {
+                url = imageUrlRaw;
+            }
+            else if (imageUrlRaw && typeof imageUrlRaw === 'object' && typeof imageUrlRaw.url === 'string') {
+                url = imageUrlRaw.url;
+            }
+            else if (typeof record.uri === 'string') {
+                url = record.uri;
+            }
+            else if (typeof record.url === 'string') {
+                url = record.url;
+            }
+            else if (typeof record.data === 'string') {
+                url = record.data;
+            }
+            const trimmed = (url ?? '').trim();
+            if (!trimmed.length) {
+                // Fallback: at least emit a textual marker so the content is not lost entirely.
+                targetParts.push({ text: '[image]' });
+                continue;
+            }
+            let mimeType;
+            let data;
+            // data:URL → inlineData { mimeType, data }
+            if (trimmed.startsWith('data:')) {
+                const match = /^data:([^;,]+)?(?:;base64)?,(.*)$/s.exec(trimmed);
+                if (match) {
+                    mimeType = (match[1] || '').trim() || undefined;
+                    data = match[2] || '';
+                }
+            }
+            if (data && data.trim().length) {
+                const inline = {
+                    inlineData: {
+                        data: data.trim()
+                    }
+                };
+                if (mimeType && mimeType.length) {
+                    inline.inlineData.mimeType = mimeType;
+                }
+                targetParts.push(inline);
+            }
+            else {
+                // Non-data: URLs are passed through as plain-text URLs for now, keeping the semantics visible.
+                targetParts.push({ text: trimmed });
+            }
+            continue;
+        }
+        // Default: fall back to a textual JSON representation so content is not silently dropped.
+        try {
+            const jsonText = JSON.stringify(record);
+            if (jsonText.trim().length) {
+                targetParts.push({ text: jsonText });
+            }
+        }
+        catch {
+            // ignore malformed block
+        }
+    }
+}
 function buildGeminiRequestFromChat(chat, metadata) {
     const contents = [];
     const emittedToolOutputs = new Set();

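A runnable sketch (not package code) of how an OpenAI-style image block carrying a data: URL ends up as a Gemini part under the rules above; the regex is the same one used in the hunk:

```js
const block = { type: 'image_url', image_url: { url: 'data:image/png;base64,iVBORw0KGgo=' } };
const match = /^data:([^;,]+)?(?:;base64)?,(.*)$/s.exec(block.image_url.url);
const part = match && match[2]
    ? { inlineData: { mimeType: (match[1] || '').trim() || undefined, data: match[2] } }
    : { text: block.image_url.url };   // non-data: URLs stay visible as plain text
console.log(part);                     // { inlineData: { mimeType: 'image/png', data: 'iVBORw0KGgo=' } }
```
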
@@ -191,9 +302,7 @@ function buildGeminiRequestFromChat(chat, metadata) {
             role: mapChatRoleToGemini(message.role),
             parts: []
         };
-
-            entry.parts.push({ text: message.content });
-        }
+        appendChatContentToGeminiParts(message, entry.parts);
         const toolCalls = Array.isArray(message.tool_calls) ? message.tool_calls : [];
         for (const tc of toolCalls) {
             if (!tc || typeof tc !== 'object')

@@ -335,7 +444,18 @@ function safeParseJson(value) {
     }
 }
 function ensureFunctionResponsePayload(value) {
+    // In the CloudCode/Gemini CLI protocol, the Gemini function_response.response field corresponds to a
+    // protobuf Struct (a JSON object), not a top-level array.
+    // Normalize here:
+    // - objects: pass through unchanged;
+    // - arrays: wrap as { result: [...] } so an array never becomes the Struct root;
+    // - primitives: wrap as { result: value }, mapping undefined to null.
     if (value && typeof value === 'object') {
+        if (Array.isArray(value)) {
+            return {
+                result: value
+            };
+        }
         return value;
     }
     return {

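A minimal re-sketch of the normalization rules spelled out in the comments above; the helper name here is hypothetical and the package's own implementation tail is not shown in this hunk:

```js
function toFunctionResponseStruct(value) {
    if (value && typeof value === 'object') {
        return Array.isArray(value) ? { result: value } : value;   // an array must not become the Struct root
    }
    return { result: value === undefined ? null : value };         // primitives are wrapped; undefined maps to null
}
console.log(toFunctionResponseStruct([1, 2]));   // { result: [ 1, 2 ] }
console.log(toFunctionResponseStruct('done'));   // { result: 'done' }
```
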
@@ -148,6 +148,20 @@ function serializeSystemContent(message) {
     }
     return undefined;
 }
+function mergeMetadata(a, b) {
+    if (!a && !b) {
+        return undefined;
+    }
+    if (!a && b) {
+        return jsonClone(b);
+    }
+    if (a && !b) {
+        return jsonClone(a);
+    }
+    const left = jsonClone(a);
+    const right = jsonClone(b);
+    return { ...left, ...right };
+}
 export class ResponsesSemanticMapper {
     async toChat(format, ctx) {
         const payload = format.payload || {};

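The merge above is a shallow spread (after jsonClone deep-copies each side) in which the right-hand envelope metadata wins per key; for example, with made-up values:

```js
const captured = { trace: 'abc', webSearch: { force: false } };
const envelope = { webSearch: { force: true } };
console.log({ ...captured, ...envelope });   // { trace: 'abc', webSearch: { force: true } }
```
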
@@ -209,13 +223,15 @@ export class ResponsesSemanticMapper {
             .map(message => serializeSystemContent(message))
             .filter((content) => typeof content === 'string' && content.length > 0);
         const capturedContext = chat.metadata?.responsesContext;
+        const envelopeMetadata = chat.metadata && isJsonObject(chat.metadata) ? chat.metadata : undefined;
         const responsesContext = isJsonObject(capturedContext)
             ? {
                 ...capturedContext,
+                metadata: mergeMetadata(capturedContext.metadata, envelopeMetadata),
                 originalSystemMessages
             }
             : {
-                metadata:
+                metadata: envelopeMetadata,
                 originalSystemMessages
             };
         const responsesResult = buildResponsesRequestFromChat(requestShape, responsesContext);

@@ -60,6 +60,20 @@ export function standardizedToChatEnvelope(request, options) {
     const metadata = {
         context: adapterContext
     };
+    const sourceMeta = (request.metadata && typeof request.metadata === 'object'
+        ? request.metadata
+        : undefined);
+    if (sourceMeta) {
+        if (sourceMeta.webSearch && typeof sourceMeta.webSearch === 'object') {
+            metadata.webSearch = sourceMeta.webSearch;
+        }
+        if (sourceMeta.forceWebSearch === true) {
+            metadata.forceWebSearch = true;
+        }
+        if (sourceMeta.forceVision === true) {
+            metadata.forceVision = true;
+        }
+    }
     if (typeof adapterContext.toolCallIdStyle === 'string' && adapterContext.toolCallIdStyle.length) {
         metadata.toolCallIdStyle = adapterContext.toolCallIdStyle;
     }

@@ -3,6 +3,7 @@ import { evaluateResponsesHostPolicy } from './responses-host-policy.js';
 import { convertMessagesToBridgeInput, convertBridgeInputToChatMessages } from '../shared/bridge-message-utils.js';
 import { createToolCallIdTransformer, enforceToolCallIdStyle, resolveToolCallIdStyle, stripInternalToolingMetadata, sanitizeResponsesFunctionName } from '../shared/responses-tool-utils.js';
 import { mapBridgeToolsToChat, mapChatToolsToBridge } from '../shared/tool-mapping.js';
+import { ProviderProtocolError } from '../shared/errors.js';
 // --- Utilities (ported strictly) ---
 import { canonicalizeChatResponseTools } from '../shared/tool-canonicalizer.js';
 import { normalizeMessageReasoningTools } from '../shared/reasoning-tool-normalizer.js';

@@ -95,7 +96,16 @@ export function buildChatRequestFromResponses(payload, context) {
     // No tool governance on the Responses path; it is handled uniformly in the later Chat stage.
     // No system tips for MCP on OpenAI Responses path (avoid leaking tool names)
     if (!messages.length) {
-        throw new
+        throw new ProviderProtocolError('Responses payload produced no chat messages', {
+            code: 'MALFORMED_REQUEST',
+            protocol: 'openai-responses',
+            providerType: 'responses',
+            details: {
+                context: 'buildChatRequestFromResponses',
+                inputLength: Array.isArray(context.input) ? context.input.length : undefined,
+                requestId: context.requestId
+            }
+        });
     }
     // If only system messages are present (no user/assistant/tool), a later bridge action injects a fallback user message from instructions.
     const result = { model: payload.model, messages };

@@ -190,17 +200,50 @@ function mergeResponsesTools(originalTools, fromChat) {
 export function buildResponsesRequestFromChat(payload, ctx, extras) {
     const chat = unwrapData(payload);
     const out = {};
+    const forceWebSearch = !!ctx &&
+        isObject(ctx.metadata) &&
+        isObject(ctx.metadata.webSearch) &&
+        ctx.metadata.webSearch.force === true;
     // Basic fields
     out.model = chat.model;
     // tools: map back into the ResponsesToolDefinition shape
-    const
+    const chatTools = Array.isArray(chat.tools) ? chat.tools : [];
+    // For an openai-responses upstream, the builtin web_search is handled by the official server.
+    // The server-side web_search function injected on the Chat side (with engine/query/recency/count)
+    // is only used for the server-tool loop of non-Responses providers; when building the real
+    // `/v1/responses` request here we need to:
+    // 1) stop passing the function-style web_search upstream;
+    // 2) if the Chat side enabled web_search and the original request has no builtin web_search,
+    //    add a `{ type: "web_search" }` builtin tool for OpenAI Responses.
+    const hasServerSideWebSearch = !forceWebSearch && chatTools.some((tool) => {
+        const fn = tool && typeof tool === 'object' ? tool.function : undefined;
+        const name = typeof fn?.name === 'string' ? fn.name.trim().toLowerCase() : '';
+        return name === 'web_search';
+    });
+    const toolsForBridge = hasServerSideWebSearch
+        ? chatTools.filter((tool) => {
+            const fn = tool && typeof tool === 'object' ? tool.function : undefined;
+            const name = typeof fn?.name === 'string' ? fn.name.trim().toLowerCase() : '';
+            return name !== 'web_search';
+        })
+        : chatTools;
+    const responsesToolsFromChat = mapChatToolsToBridge(toolsForBridge, {
         sanitizeName: sanitizeResponsesFunctionName
     });
     // Prefer Chat‑normalized tools, but if the original Responses payload carried
     // non‑function tools (such as builtin `web_search`), merge them back so that
     // upstream `/v1/responses` providers see their original tool definitions.
     const originalTools = Array.isArray(ctx?.toolsRaw) ? ctx.toolsRaw : undefined;
-
+    let mergedTools = mergeResponsesTools(originalTools, responsesToolsFromChat);
+    if (hasServerSideWebSearch) {
+        const normalizeType = (value) => typeof value === 'string' ? value.trim().toLowerCase() : '';
+        const hasBuiltinWebSearch = (mergedTools && mergedTools.some((tool) => normalizeType(tool.type) === 'web_search')) ||
+            (originalTools && originalTools.some((tool) => normalizeType(tool.type) === 'web_search'));
+        if (!hasBuiltinWebSearch) {
+            const injected = { type: 'web_search' };
+            mergedTools = mergedTools ? [...mergedTools, injected] : [injected];
+        }
+    }
     if (mergedTools?.length) {
         out.tools = mergedTools;
     }

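The net effect on the tools array, shown with made-up entries; this is a sketch of the outcome only, since the real path goes through mapChatToolsToBridge and mergeResponsesTools:

```js
const chatTools = [
    { type: 'function', function: { name: 'web_search' } },   // injected server-side helper, not sent to /v1/responses
    { type: 'function', function: { name: 'get_weather' } }
];
const upstreamTools = chatTools
    .filter(t => (t.function?.name || '').trim().toLowerCase() !== 'web_search')
    .concat([{ type: 'web_search' }]);                         // builtin tool added when the original request had none
console.log(upstreamTools);   // [ { type: 'function', function: { name: 'get_weather' } }, { type: 'web_search' } ]
```
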
@@ -503,15 +546,76 @@ export function buildResponsesPayloadFromChat(payload, context) {
     if (response.object === 'response' && Array.isArray(response.output)) {
         return response;
     }
-
-
+    const hasChoicesArray = Array.isArray(response.choices);
+    const choicesLength = hasChoicesArray ? response.choices.length : 0;
+    // Graceful fallback for provider payloads that do not contain a valid
+    // ChatCompletion-style choices array (e.g. certain compat error envelopes).
+    if (!hasChoicesArray || choicesLength === 0) {
+        const rawStatus = response.status;
+        const statusCode = typeof rawStatus === 'string' && rawStatus.trim().length
+            ? rawStatus.trim()
+            : typeof rawStatus === 'number'
+                ? String(rawStatus)
+                : undefined;
+        const message = typeof response.msg === 'string' && response.msg.trim().length
+            ? response.msg.trim()
+            : typeof response.message === 'string' && response.message.trim().length
+                ? response.message.trim()
+                : 'Upstream returned non-standard Chat completion payload (missing choices).';
+        const out = {
+            id: response.id || `resp-${Date.now()}`,
+            object: 'response',
+            created_at: response.created_at || response.created || Math.floor(Date.now() / 1000),
+            model: response.model,
+            status: 'failed',
+            output: []
+        };
+        if (message) {
+            out.output_text = message;
+            out.error = {
+                type: 'provider_error',
+                code: statusCode,
+                message
+            };
+        }
+        if (context) {
+            for (const k of ['metadata', 'parallel_tool_calls', 'tool_choice', 'include']) {
+                if (context[k] !== undefined)
+                    out[k] = context[k];
+            }
+            if (!shouldStripHostManagedFields(context) && context.store !== undefined) {
+                out.store = context.store;
+            }
+        }
+        if (typeof response.request_id === 'string') {
+            out.request_id = response.request_id;
+        }
+        else if (typeof response.id === 'string') {
+            out.request_id = response.id;
+        }
+        else if (typeof context?.requestId === 'string') {
+            out.request_id = context.requestId;
+        }
+        if (out.metadata) {
+            stripInternalToolingMetadata(out.metadata);
+        }
+        return out;
     }
     const canonical = canonicalizeChatResponseTools(response);
     const choices = Array.isArray(canonical?.choices) ? canonical.choices : [];
     const primaryChoice = choices[0] && typeof choices[0] === 'object' ? choices[0] : undefined;
     const message = primaryChoice && typeof primaryChoice.message === 'object' ? primaryChoice.message : undefined;
     if (!message) {
-        throw new
+        throw new ProviderProtocolError('Responses bridge could not locate assistant message in Chat completion', {
+            code: 'MALFORMED_RESPONSE',
+            protocol: 'openai-chat',
+            providerType: 'openai',
+            details: {
+                context: 'buildResponsesPayloadFromChat',
+                choicesLength: choices.length,
+                requestId: context?.requestId
+            }
+        });
     }
     if (message) {
         try {