extrait 0.2.0 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -1
- package/dist/index.cjs +733 -39
- package/dist/index.js +733 -39
- package/dist/mcp.d.ts +2 -0
- package/dist/types.d.ts +1 -0
- package/package.json +3 -2
package/dist/index.cjs
CHANGED
|
@@ -1294,10 +1294,36 @@ async function executeMCPToolCalls(calls, toolset, context) {
|
|
|
1294
1294
|
throw new Error("Received a function tool call without id or name.");
|
|
1295
1295
|
}
|
|
1296
1296
|
const tool = toolset.byName.get(toolName);
|
|
1297
|
+
const parsedArguments = parseToolArguments(call.arguments);
|
|
1297
1298
|
if (!tool) {
|
|
1298
|
-
|
|
1299
|
+
const errorMessage = context.request.unknownToolError ? context.request.unknownToolError(toolName) : `Tool "${toolName}" is not registered in the current toolset.`;
|
|
1300
|
+
const metadata2 = {
|
|
1301
|
+
id: callId,
|
|
1302
|
+
type: call.type ?? "function",
|
|
1303
|
+
name: toolName,
|
|
1304
|
+
arguments: parsedArguments,
|
|
1305
|
+
error: errorMessage
|
|
1306
|
+
};
|
|
1307
|
+
const startedAt2 = new Date().toISOString();
|
|
1308
|
+
const execution = {
|
|
1309
|
+
callId,
|
|
1310
|
+
type: metadata2.type,
|
|
1311
|
+
name: toolName,
|
|
1312
|
+
clientId: "__unregistered__",
|
|
1313
|
+
remoteName: toolName,
|
|
1314
|
+
arguments: parsedArguments,
|
|
1315
|
+
error: errorMessage,
|
|
1316
|
+
round: context.round,
|
|
1317
|
+
provider: context.provider,
|
|
1318
|
+
model: context.model,
|
|
1319
|
+
handledLocally: true,
|
|
1320
|
+
startedAt: startedAt2,
|
|
1321
|
+
durationMs: 0
|
|
1322
|
+
};
|
|
1323
|
+
emitToolExecution(context.request, execution);
|
|
1324
|
+
out.push({ call: metadata2, execution });
|
|
1325
|
+
continue;
|
|
1299
1326
|
}
|
|
1300
|
-
const parsedArguments = parseToolArguments(call.arguments);
|
|
1301
1327
|
const args = isRecord(parsedArguments) ? parsedArguments : {};
|
|
1302
1328
|
const metadata = {
|
|
1303
1329
|
id: callId,
|
|
@@ -1597,8 +1623,14 @@ function createOpenAICompatibleAdapter(options) {
|
|
|
1597
1623
|
async stream(request, callbacks = {}) {
|
|
1598
1624
|
const usesResponses = shouldUseResponsesAPI(options, request);
|
|
1599
1625
|
const usesMCP = hasMCPClients(request.mcpClients);
|
|
1600
|
-
if (usesResponses
|
|
1601
|
-
|
|
1626
|
+
if (usesResponses) {
|
|
1627
|
+
if (usesMCP) {
|
|
1628
|
+
return streamWithResponsesAPIWithMCP(options, fetcher, responsesPath, request, callbacks);
|
|
1629
|
+
}
|
|
1630
|
+
return streamWithResponsesAPIPassThrough(options, fetcher, responsesPath, request, callbacks);
|
|
1631
|
+
}
|
|
1632
|
+
if (usesMCP) {
|
|
1633
|
+
return streamWithChatCompletionsWithMCP(options, fetcher, path, request, callbacks);
|
|
1602
1634
|
}
|
|
1603
1635
|
const response = await fetcher(buildURL(options.baseURL, path), {
|
|
1604
1636
|
method: "POST",
|
|
@@ -1883,6 +1915,316 @@ async function completeWithResponsesAPIWithMCP(options, fetcher, path, request)
|
|
|
1883
1915
|
toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
|
|
1884
1916
|
};
|
|
1885
1917
|
}
|
|
1918
|
+
async function streamWithChatCompletionsWithMCP(options, fetcher, path, request, callbacks) {
|
|
1919
|
+
const maxToolRounds = normalizeMaxToolRounds(request.maxToolRounds ?? options.defaultMaxToolRounds);
|
|
1920
|
+
let messages = buildMessages(request);
|
|
1921
|
+
let aggregatedUsage;
|
|
1922
|
+
let finishReason;
|
|
1923
|
+
let lastPayload;
|
|
1924
|
+
const executedToolCalls = [];
|
|
1925
|
+
const toolExecutions = [];
|
|
1926
|
+
callbacks.onStart?.();
|
|
1927
|
+
for (let round = 1;round <= maxToolRounds + 1; round += 1) {
|
|
1928
|
+
const mcpToolset = await resolveMCPToolset(request.mcpClients);
|
|
1929
|
+
const transportTools = toProviderFunctionTools(mcpToolset);
|
|
1930
|
+
const response = await fetcher(buildURL(options.baseURL, path), {
|
|
1931
|
+
method: "POST",
|
|
1932
|
+
headers: buildHeaders(options),
|
|
1933
|
+
body: JSON.stringify(cleanUndefined({
|
|
1934
|
+
...options.defaultBody,
|
|
1935
|
+
...request.body,
|
|
1936
|
+
model: options.model,
|
|
1937
|
+
messages,
|
|
1938
|
+
temperature: request.temperature,
|
|
1939
|
+
max_tokens: request.maxTokens,
|
|
1940
|
+
tools: transportTools,
|
|
1941
|
+
tool_choice: request.toolChoice,
|
|
1942
|
+
parallel_tool_calls: request.parallelToolCalls,
|
|
1943
|
+
stream: true
|
|
1944
|
+
}))
|
|
1945
|
+
});
|
|
1946
|
+
if (!response.ok) {
|
|
1947
|
+
const message = await response.text();
|
|
1948
|
+
throw new Error(`HTTP ${response.status}: ${message}`);
|
|
1949
|
+
}
|
|
1950
|
+
let roundText = "";
|
|
1951
|
+
let roundUsage;
|
|
1952
|
+
let roundFinishReason;
|
|
1953
|
+
const streamedToolCalls = new Map;
|
|
1954
|
+
await consumeSSE(response, (data) => {
|
|
1955
|
+
if (data === "[DONE]") {
|
|
1956
|
+
return;
|
|
1957
|
+
}
|
|
1958
|
+
const json = safeJSONParse(data);
|
|
1959
|
+
if (!isRecord2(json)) {
|
|
1960
|
+
return;
|
|
1961
|
+
}
|
|
1962
|
+
lastPayload = json;
|
|
1963
|
+
const delta = pickAssistantDelta(json);
|
|
1964
|
+
const chunkUsage = pickUsage(json);
|
|
1965
|
+
const chunkFinishReason = pickFinishReason(json);
|
|
1966
|
+
collectOpenAIStreamToolCalls(json, streamedToolCalls);
|
|
1967
|
+
roundUsage = mergeUsage(roundUsage, chunkUsage);
|
|
1968
|
+
if (chunkFinishReason) {
|
|
1969
|
+
roundFinishReason = chunkFinishReason;
|
|
1970
|
+
}
|
|
1971
|
+
if (delta) {
|
|
1972
|
+
roundText += delta;
|
|
1973
|
+
callbacks.onToken?.(delta);
|
|
1974
|
+
}
|
|
1975
|
+
if (delta || chunkUsage || chunkFinishReason) {
|
|
1976
|
+
const chunk = {
|
|
1977
|
+
textDelta: delta,
|
|
1978
|
+
raw: json,
|
|
1979
|
+
usage: chunkUsage,
|
|
1980
|
+
finishReason: chunkFinishReason
|
|
1981
|
+
};
|
|
1982
|
+
callbacks.onChunk?.(chunk);
|
|
1983
|
+
}
|
|
1984
|
+
});
|
|
1985
|
+
aggregatedUsage = mergeUsage(aggregatedUsage, roundUsage);
|
|
1986
|
+
if (roundFinishReason) {
|
|
1987
|
+
finishReason = roundFinishReason;
|
|
1988
|
+
}
|
|
1989
|
+
const calledTools = buildOpenAIStreamToolCalls(streamedToolCalls);
|
|
1990
|
+
if (calledTools.length === 0) {
|
|
1991
|
+
const out2 = {
|
|
1992
|
+
text: roundText,
|
|
1993
|
+
raw: lastPayload,
|
|
1994
|
+
usage: aggregatedUsage,
|
|
1995
|
+
finishReason,
|
|
1996
|
+
toolCalls: executedToolCalls.length > 0 ? executedToolCalls : undefined,
|
|
1997
|
+
toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
|
|
1998
|
+
};
|
|
1999
|
+
callbacks.onComplete?.(out2);
|
|
2000
|
+
return out2;
|
|
2001
|
+
}
|
|
2002
|
+
if (round > maxToolRounds) {
|
|
2003
|
+
throw new Error(`Tool call loop exceeded maxToolRounds (${maxToolRounds}).`);
|
|
2004
|
+
}
|
|
2005
|
+
const outputs = await executeMCPToolCalls(calledTools, mcpToolset, {
|
|
2006
|
+
round,
|
|
2007
|
+
request,
|
|
2008
|
+
provider: "openai-compatible",
|
|
2009
|
+
model: options.model
|
|
2010
|
+
});
|
|
2011
|
+
executedToolCalls.push(...outputs.map((entry) => entry.call));
|
|
2012
|
+
toolExecutions.push(...outputs.map((entry) => entry.execution));
|
|
2013
|
+
const assistantMessage = buildOpenAIAssistantToolMessage(roundText, calledTools);
|
|
2014
|
+
const toolMessages = outputs.map((entry) => ({
|
|
2015
|
+
role: "tool",
|
|
2016
|
+
tool_call_id: entry.call.id,
|
|
2017
|
+
content: stringifyToolOutput(entry.call.error ? { error: entry.call.error } : entry.call.output)
|
|
2018
|
+
}));
|
|
2019
|
+
messages = [...messages, assistantMessage, ...toolMessages];
|
|
2020
|
+
}
|
|
2021
|
+
const out = {
|
|
2022
|
+
text: "",
|
|
2023
|
+
raw: lastPayload,
|
|
2024
|
+
usage: aggregatedUsage,
|
|
2025
|
+
finishReason,
|
|
2026
|
+
toolCalls: executedToolCalls.length > 0 ? executedToolCalls : undefined,
|
|
2027
|
+
toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
|
|
2028
|
+
};
|
|
2029
|
+
callbacks.onComplete?.(out);
|
|
2030
|
+
return out;
|
|
2031
|
+
}
|
|
2032
|
+
async function streamWithResponsesAPIPassThrough(options, fetcher, path, request, callbacks) {
|
|
2033
|
+
const body = isRecord2(request.body) ? request.body : undefined;
|
|
2034
|
+
const response = await fetcher(buildURL(options.baseURL, path), {
|
|
2035
|
+
method: "POST",
|
|
2036
|
+
headers: buildHeaders(options),
|
|
2037
|
+
body: JSON.stringify(cleanUndefined({
|
|
2038
|
+
...options.defaultBody,
|
|
2039
|
+
...request.body,
|
|
2040
|
+
model: options.model,
|
|
2041
|
+
input: buildResponsesInput(request),
|
|
2042
|
+
previous_response_id: pickString(body?.previous_response_id),
|
|
2043
|
+
temperature: request.temperature,
|
|
2044
|
+
max_output_tokens: request.maxTokens,
|
|
2045
|
+
stream: true
|
|
2046
|
+
}))
|
|
2047
|
+
});
|
|
2048
|
+
if (!response.ok) {
|
|
2049
|
+
const message = await response.text();
|
|
2050
|
+
throw new Error(`HTTP ${response.status}: ${message}`);
|
|
2051
|
+
}
|
|
2052
|
+
callbacks.onStart?.();
|
|
2053
|
+
let text = "";
|
|
2054
|
+
let usage;
|
|
2055
|
+
let finishReason;
|
|
2056
|
+
let lastPayload;
|
|
2057
|
+
await consumeSSE(response, (data) => {
|
|
2058
|
+
if (data === "[DONE]") {
|
|
2059
|
+
return;
|
|
2060
|
+
}
|
|
2061
|
+
const json = safeJSONParse(data);
|
|
2062
|
+
if (!isRecord2(json)) {
|
|
2063
|
+
return;
|
|
2064
|
+
}
|
|
2065
|
+
const roundPayload = pickResponsesStreamPayload(json);
|
|
2066
|
+
if (roundPayload) {
|
|
2067
|
+
lastPayload = roundPayload;
|
|
2068
|
+
}
|
|
2069
|
+
const delta = pickResponsesStreamTextDelta(json);
|
|
2070
|
+
const chunkUsage = pickResponsesStreamUsage(json);
|
|
2071
|
+
const chunkFinishReason = pickResponsesStreamFinishReason(json);
|
|
2072
|
+
usage = mergeUsage(usage, chunkUsage);
|
|
2073
|
+
if (chunkFinishReason) {
|
|
2074
|
+
finishReason = chunkFinishReason;
|
|
2075
|
+
}
|
|
2076
|
+
if (delta) {
|
|
2077
|
+
text += delta;
|
|
2078
|
+
callbacks.onToken?.(delta);
|
|
2079
|
+
}
|
|
2080
|
+
if (delta || chunkUsage || chunkFinishReason) {
|
|
2081
|
+
const chunk = {
|
|
2082
|
+
textDelta: delta,
|
|
2083
|
+
raw: json,
|
|
2084
|
+
usage: chunkUsage,
|
|
2085
|
+
finishReason: chunkFinishReason
|
|
2086
|
+
};
|
|
2087
|
+
callbacks.onChunk?.(chunk);
|
|
2088
|
+
}
|
|
2089
|
+
});
|
|
2090
|
+
const finalPayload = lastPayload ?? {};
|
|
2091
|
+
const out = {
|
|
2092
|
+
text: text.length > 0 ? text : pickResponsesText(finalPayload) || pickAssistantText(finalPayload),
|
|
2093
|
+
raw: finalPayload,
|
|
2094
|
+
usage: mergeUsage(usage, pickUsage(finalPayload)),
|
|
2095
|
+
finishReason: finishReason ?? pickResponsesFinishReason(finalPayload) ?? pickFinishReason(finalPayload)
|
|
2096
|
+
};
|
|
2097
|
+
callbacks.onComplete?.(out);
|
|
2098
|
+
return out;
|
|
2099
|
+
}
|
|
2100
|
+
async function streamWithResponsesAPIWithMCP(options, fetcher, path, request, callbacks) {
|
|
2101
|
+
const maxToolRounds = normalizeMaxToolRounds(request.maxToolRounds ?? options.defaultMaxToolRounds);
|
|
2102
|
+
let input = buildResponsesInput(request);
|
|
2103
|
+
let previousResponseId = pickString(isRecord2(request.body) ? request.body.previous_response_id : undefined);
|
|
2104
|
+
let aggregatedUsage;
|
|
2105
|
+
let finishReason;
|
|
2106
|
+
let lastPayload;
|
|
2107
|
+
const executedToolCalls = [];
|
|
2108
|
+
const toolExecutions = [];
|
|
2109
|
+
callbacks.onStart?.();
|
|
2110
|
+
for (let round = 1;round <= maxToolRounds + 1; round += 1) {
|
|
2111
|
+
const mcpToolset = await resolveMCPToolset(request.mcpClients);
|
|
2112
|
+
const transportTools = toResponsesTools(toProviderFunctionTools(mcpToolset));
|
|
2113
|
+
const response = await fetcher(buildURL(options.baseURL, path), {
|
|
2114
|
+
method: "POST",
|
|
2115
|
+
headers: buildHeaders(options),
|
|
2116
|
+
body: JSON.stringify(cleanUndefined({
|
|
2117
|
+
...options.defaultBody,
|
|
2118
|
+
...request.body,
|
|
2119
|
+
model: options.model,
|
|
2120
|
+
input,
|
|
2121
|
+
previous_response_id: previousResponseId,
|
|
2122
|
+
temperature: request.temperature,
|
|
2123
|
+
max_output_tokens: request.maxTokens,
|
|
2124
|
+
tools: transportTools,
|
|
2125
|
+
tool_choice: request.toolChoice,
|
|
2126
|
+
parallel_tool_calls: request.parallelToolCalls,
|
|
2127
|
+
stream: true
|
|
2128
|
+
}))
|
|
2129
|
+
});
|
|
2130
|
+
if (!response.ok) {
|
|
2131
|
+
const message = await response.text();
|
|
2132
|
+
throw new Error(`HTTP ${response.status}: ${message}`);
|
|
2133
|
+
}
|
|
2134
|
+
let roundText = "";
|
|
2135
|
+
let roundUsage;
|
|
2136
|
+
let roundFinishReason;
|
|
2137
|
+
let roundPayload;
|
|
2138
|
+
const streamedToolCalls = new Map;
|
|
2139
|
+
await consumeSSE(response, (data) => {
|
|
2140
|
+
if (data === "[DONE]") {
|
|
2141
|
+
return;
|
|
2142
|
+
}
|
|
2143
|
+
const json = safeJSONParse(data);
|
|
2144
|
+
if (!isRecord2(json)) {
|
|
2145
|
+
return;
|
|
2146
|
+
}
|
|
2147
|
+
const payload = pickResponsesStreamPayload(json);
|
|
2148
|
+
if (payload) {
|
|
2149
|
+
roundPayload = payload;
|
|
2150
|
+
lastPayload = payload;
|
|
2151
|
+
}
|
|
2152
|
+
const delta = pickResponsesStreamTextDelta(json);
|
|
2153
|
+
const chunkUsage = pickResponsesStreamUsage(json);
|
|
2154
|
+
const chunkFinishReason = pickResponsesStreamFinishReason(json);
|
|
2155
|
+
collectResponsesStreamToolCalls(json, streamedToolCalls);
|
|
2156
|
+
roundUsage = mergeUsage(roundUsage, chunkUsage);
|
|
2157
|
+
if (chunkFinishReason) {
|
|
2158
|
+
roundFinishReason = chunkFinishReason;
|
|
2159
|
+
}
|
|
2160
|
+
if (delta) {
|
|
2161
|
+
roundText += delta;
|
|
2162
|
+
callbacks.onToken?.(delta);
|
|
2163
|
+
}
|
|
2164
|
+
if (delta || chunkUsage || chunkFinishReason) {
|
|
2165
|
+
const chunk = {
|
|
2166
|
+
textDelta: delta,
|
|
2167
|
+
raw: json,
|
|
2168
|
+
usage: chunkUsage,
|
|
2169
|
+
finishReason: chunkFinishReason
|
|
2170
|
+
};
|
|
2171
|
+
callbacks.onChunk?.(chunk);
|
|
2172
|
+
}
|
|
2173
|
+
});
|
|
2174
|
+
aggregatedUsage = mergeUsage(aggregatedUsage, roundUsage);
|
|
2175
|
+
const payloadUsage = roundPayload ? pickUsage(roundPayload) : undefined;
|
|
2176
|
+
aggregatedUsage = mergeUsage(aggregatedUsage, payloadUsage);
|
|
2177
|
+
if (roundFinishReason) {
|
|
2178
|
+
finishReason = roundFinishReason;
|
|
2179
|
+
} else if (roundPayload) {
|
|
2180
|
+
finishReason = pickResponsesFinishReason(roundPayload) ?? finishReason;
|
|
2181
|
+
}
|
|
2182
|
+
const payloadToolCalls = roundPayload ? pickResponsesToolCalls(roundPayload) : [];
|
|
2183
|
+
const streamedCalls = buildResponsesStreamToolCalls(streamedToolCalls);
|
|
2184
|
+
const providerToolCalls = payloadToolCalls.length > 0 ? payloadToolCalls : streamedCalls;
|
|
2185
|
+
const functionCalls = providerToolCalls.filter((toolCall) => toolCall.type === "function" && typeof toolCall.id === "string" && typeof toolCall.name === "string");
|
|
2186
|
+
if (functionCalls.length === 0) {
|
|
2187
|
+
const finalText = roundText.length > 0 ? roundText : roundPayload ? pickResponsesText(roundPayload) || pickAssistantText(roundPayload) : "";
|
|
2188
|
+
const out2 = {
|
|
2189
|
+
text: finalText,
|
|
2190
|
+
raw: roundPayload ?? lastPayload,
|
|
2191
|
+
usage: aggregatedUsage,
|
|
2192
|
+
finishReason,
|
|
2193
|
+
toolCalls: executedToolCalls.length > 0 ? executedToolCalls : undefined,
|
|
2194
|
+
toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
|
|
2195
|
+
};
|
|
2196
|
+
callbacks.onComplete?.(out2);
|
|
2197
|
+
return out2;
|
|
2198
|
+
}
|
|
2199
|
+
if (round > maxToolRounds) {
|
|
2200
|
+
throw new Error(`Tool call loop exceeded maxToolRounds (${maxToolRounds}).`);
|
|
2201
|
+
}
|
|
2202
|
+
const outputs = await executeMCPToolCalls(functionCalls, mcpToolset, {
|
|
2203
|
+
round,
|
|
2204
|
+
request,
|
|
2205
|
+
provider: "openai-compatible",
|
|
2206
|
+
model: options.model
|
|
2207
|
+
});
|
|
2208
|
+
executedToolCalls.push(...outputs.map((entry) => entry.call));
|
|
2209
|
+
toolExecutions.push(...outputs.map((entry) => entry.execution));
|
|
2210
|
+
input = outputs.map((entry) => ({
|
|
2211
|
+
type: "function_call_output",
|
|
2212
|
+
call_id: entry.call.id,
|
|
2213
|
+
output: stringifyToolOutput(entry.call.error ? { error: entry.call.error } : entry.call.output)
|
|
2214
|
+
}));
|
|
2215
|
+
previousResponseId = pickString(roundPayload?.id);
|
|
2216
|
+
}
|
|
2217
|
+
const out = {
|
|
2218
|
+
text: pickResponsesText(lastPayload ?? {}) || pickAssistantText(lastPayload ?? {}),
|
|
2219
|
+
raw: lastPayload,
|
|
2220
|
+
usage: aggregatedUsage,
|
|
2221
|
+
finishReason,
|
|
2222
|
+
toolCalls: executedToolCalls.length > 0 ? executedToolCalls : undefined,
|
|
2223
|
+
toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
|
|
2224
|
+
};
|
|
2225
|
+
callbacks.onComplete?.(out);
|
|
2226
|
+
return out;
|
|
2227
|
+
}
|
|
1886
2228
|
function shouldUseResponsesAPI(options, request) {
|
|
1887
2229
|
if (options.path?.includes("/responses")) {
|
|
1888
2230
|
return true;
|
|
@@ -2039,6 +2381,190 @@ function pickAssistantDelta(payload) {
|
|
|
2039
2381
|
}
|
|
2040
2382
|
return "";
|
|
2041
2383
|
}
|
|
2384
|
+
function collectOpenAIStreamToolCalls(payload, state) {
|
|
2385
|
+
const choices = payload.choices;
|
|
2386
|
+
if (!Array.isArray(choices) || choices.length === 0 || !isRecord2(choices[0])) {
|
|
2387
|
+
return;
|
|
2388
|
+
}
|
|
2389
|
+
const delta = choices[0].delta;
|
|
2390
|
+
if (!isRecord2(delta) || !Array.isArray(delta.tool_calls)) {
|
|
2391
|
+
return;
|
|
2392
|
+
}
|
|
2393
|
+
for (const rawToolCall of delta.tool_calls) {
|
|
2394
|
+
if (!isRecord2(rawToolCall)) {
|
|
2395
|
+
continue;
|
|
2396
|
+
}
|
|
2397
|
+
const index = toFiniteNumber(rawToolCall.index);
|
|
2398
|
+
const toolIndex = index !== undefined ? Math.floor(index) : 0;
|
|
2399
|
+
const existing = state.get(toolIndex) ?? {
|
|
2400
|
+
index: toolIndex,
|
|
2401
|
+
argumentsText: ""
|
|
2402
|
+
};
|
|
2403
|
+
const id = pickString(rawToolCall.id);
|
|
2404
|
+
if (id) {
|
|
2405
|
+
existing.id = id;
|
|
2406
|
+
}
|
|
2407
|
+
const type = pickString(rawToolCall.type);
|
|
2408
|
+
if (type) {
|
|
2409
|
+
existing.type = type;
|
|
2410
|
+
}
|
|
2411
|
+
const functionCall = isRecord2(rawToolCall.function) ? rawToolCall.function : undefined;
|
|
2412
|
+
const name = pickString(functionCall?.name);
|
|
2413
|
+
if (name) {
|
|
2414
|
+
existing.name = `${existing.name ?? ""}${name}`;
|
|
2415
|
+
}
|
|
2416
|
+
const argumentsDelta = pickString(functionCall?.arguments);
|
|
2417
|
+
if (argumentsDelta) {
|
|
2418
|
+
existing.argumentsText += argumentsDelta;
|
|
2419
|
+
}
|
|
2420
|
+
state.set(toolIndex, existing);
|
|
2421
|
+
}
|
|
2422
|
+
}
|
|
2423
|
+
function buildOpenAIStreamToolCalls(state) {
|
|
2424
|
+
return [...state.values()].sort((a, b) => a.index - b.index).map((entry) => ({
|
|
2425
|
+
id: entry.id ?? "",
|
|
2426
|
+
type: entry.type ?? "function",
|
|
2427
|
+
name: entry.name,
|
|
2428
|
+
arguments: entry.argumentsText.length > 0 ? entry.argumentsText : {}
|
|
2429
|
+
}));
|
|
2430
|
+
}
|
|
2431
|
+
function buildOpenAIAssistantToolMessage(text, toolCalls) {
|
|
2432
|
+
return {
|
|
2433
|
+
role: "assistant",
|
|
2434
|
+
content: text,
|
|
2435
|
+
tool_calls: toolCalls.map((call) => ({
|
|
2436
|
+
id: call.id,
|
|
2437
|
+
type: "function",
|
|
2438
|
+
function: {
|
|
2439
|
+
name: call.name,
|
|
2440
|
+
arguments: typeof call.arguments === "string" ? call.arguments : JSON.stringify(call.arguments ?? {})
|
|
2441
|
+
}
|
|
2442
|
+
}))
|
|
2443
|
+
};
|
|
2444
|
+
}
|
|
2445
|
+
function pickResponsesStreamPayload(payload) {
|
|
2446
|
+
if (isRecord2(payload.response)) {
|
|
2447
|
+
return payload.response;
|
|
2448
|
+
}
|
|
2449
|
+
if ("output" in payload || "output_text" in payload || "status" in payload || "id" in payload) {
|
|
2450
|
+
return payload;
|
|
2451
|
+
}
|
|
2452
|
+
return;
|
|
2453
|
+
}
|
|
2454
|
+
function pickResponsesStreamTextDelta(payload) {
|
|
2455
|
+
const eventType = pickString(payload.type) ?? "";
|
|
2456
|
+
if (!eventType.includes("output_text.delta")) {
|
|
2457
|
+
return "";
|
|
2458
|
+
}
|
|
2459
|
+
const direct = pickString(payload.delta);
|
|
2460
|
+
if (direct) {
|
|
2461
|
+
return direct;
|
|
2462
|
+
}
|
|
2463
|
+
if (isRecord2(payload.delta)) {
|
|
2464
|
+
return pickString(payload.delta.text) ?? pickString(payload.delta.output_text) ?? "";
|
|
2465
|
+
}
|
|
2466
|
+
return "";
|
|
2467
|
+
}
|
|
2468
|
+
function pickResponsesStreamUsage(payload) {
|
|
2469
|
+
const direct = pickUsage(payload);
|
|
2470
|
+
if (direct) {
|
|
2471
|
+
return direct;
|
|
2472
|
+
}
|
|
2473
|
+
if (isRecord2(payload.response)) {
|
|
2474
|
+
return pickUsage(payload.response);
|
|
2475
|
+
}
|
|
2476
|
+
return;
|
|
2477
|
+
}
|
|
2478
|
+
function pickResponsesStreamFinishReason(payload) {
|
|
2479
|
+
const eventType = pickString(payload.type);
|
|
2480
|
+
if (eventType === "response.completed") {
|
|
2481
|
+
return "completed";
|
|
2482
|
+
}
|
|
2483
|
+
if (eventType === "response.failed") {
|
|
2484
|
+
return "failed";
|
|
2485
|
+
}
|
|
2486
|
+
const directStatus = pickString(payload.status);
|
|
2487
|
+
if (directStatus) {
|
|
2488
|
+
return directStatus;
|
|
2489
|
+
}
|
|
2490
|
+
if (isRecord2(payload.response)) {
|
|
2491
|
+
return pickString(payload.response.status);
|
|
2492
|
+
}
|
|
2493
|
+
return;
|
|
2494
|
+
}
|
|
2495
|
+
function collectResponsesStreamToolCalls(payload, state) {
|
|
2496
|
+
if (isRecord2(payload.response)) {
|
|
2497
|
+
collectResponsesStreamToolCallsFromOutput(payload.response.output, state);
|
|
2498
|
+
}
|
|
2499
|
+
collectResponsesStreamToolCallsFromOutput(payload.output, state);
|
|
2500
|
+
if (isRecord2(payload.item)) {
|
|
2501
|
+
const itemKey = pickString(payload.item_id) ?? pickString(payload.call_id);
|
|
2502
|
+
collectResponsesStreamToolCallsFromItem(payload.item, state, itemKey);
|
|
2503
|
+
}
|
|
2504
|
+
if (isRecord2(payload.output_item)) {
|
|
2505
|
+
const itemKey = pickString(payload.item_id) ?? pickString(payload.call_id);
|
|
2506
|
+
collectResponsesStreamToolCallsFromItem(payload.output_item, state, itemKey);
|
|
2507
|
+
}
|
|
2508
|
+
const eventType = pickString(payload.type) ?? "";
|
|
2509
|
+
if (eventType.includes("function_call_arguments.delta")) {
|
|
2510
|
+
const key = pickString(payload.item_id) ?? pickString(payload.call_id) ?? "function_call";
|
|
2511
|
+
const existing = state.get(key) ?? {
|
|
2512
|
+
key,
|
|
2513
|
+
argumentsText: ""
|
|
2514
|
+
};
|
|
2515
|
+
const delta = pickString(payload.delta) ?? (isRecord2(payload.delta) ? pickString(payload.delta.text) ?? pickString(payload.delta.arguments) : undefined) ?? pickString(payload.arguments_delta);
|
|
2516
|
+
if (delta) {
|
|
2517
|
+
existing.argumentsText += delta;
|
|
2518
|
+
}
|
|
2519
|
+
state.set(key, existing);
|
|
2520
|
+
}
|
|
2521
|
+
}
|
|
2522
|
+
function collectResponsesStreamToolCallsFromOutput(output, state) {
|
|
2523
|
+
if (!Array.isArray(output)) {
|
|
2524
|
+
return;
|
|
2525
|
+
}
|
|
2526
|
+
for (const item of output) {
|
|
2527
|
+
if (!isRecord2(item)) {
|
|
2528
|
+
continue;
|
|
2529
|
+
}
|
|
2530
|
+
collectResponsesStreamToolCallsFromItem(item, state);
|
|
2531
|
+
}
|
|
2532
|
+
}
|
|
2533
|
+
function collectResponsesStreamToolCallsFromItem(item, state, forcedKey) {
|
|
2534
|
+
const type = pickString(item.type);
|
|
2535
|
+
if (type !== "function_call" && !type?.includes("tool") && !type?.includes("mcp")) {
|
|
2536
|
+
return;
|
|
2537
|
+
}
|
|
2538
|
+
const key = forcedKey ?? pickString(item.call_id) ?? pickString(item.id) ?? `call_${state.size}`;
|
|
2539
|
+
const existing = state.get(key) ?? {
|
|
2540
|
+
key,
|
|
2541
|
+
argumentsText: ""
|
|
2542
|
+
};
|
|
2543
|
+
const callId = pickString(item.call_id) ?? pickString(item.id);
|
|
2544
|
+
if (callId) {
|
|
2545
|
+
existing.id = callId;
|
|
2546
|
+
}
|
|
2547
|
+
if (type) {
|
|
2548
|
+
existing.type = type;
|
|
2549
|
+
}
|
|
2550
|
+
const name = pickString(item.name);
|
|
2551
|
+
if (name) {
|
|
2552
|
+
existing.name = name;
|
|
2553
|
+
}
|
|
2554
|
+
const argumentsText = pickString(item.arguments);
|
|
2555
|
+
if (argumentsText && argumentsText.length >= existing.argumentsText.length) {
|
|
2556
|
+
existing.argumentsText = argumentsText;
|
|
2557
|
+
}
|
|
2558
|
+
state.set(key, existing);
|
|
2559
|
+
}
|
|
2560
|
+
function buildResponsesStreamToolCalls(state) {
|
|
2561
|
+
return [...state.values()].map((entry) => ({
|
|
2562
|
+
id: entry.id ?? entry.key,
|
|
2563
|
+
type: entry.type === "function_call" ? "function" : entry.type ?? "function",
|
|
2564
|
+
name: entry.name,
|
|
2565
|
+
arguments: entry.argumentsText.length > 0 ? entry.argumentsText : {}
|
|
2566
|
+
}));
|
|
2567
|
+
}
|
|
2042
2568
|
function pickResponsesText(payload) {
|
|
2043
2569
|
const outputText = payload.output_text;
|
|
2044
2570
|
if (typeof outputText === "string") {
|
|
@@ -2131,22 +2657,6 @@ function pickResponsesFinishReason(payload) {
|
|
|
2131
2657
|
}
|
|
2132
2658
|
return;
|
|
2133
2659
|
}
|
|
2134
|
-
async function streamViaComplete(callbacks, complete) {
|
|
2135
|
-
callbacks.onStart?.();
|
|
2136
|
-
const response = await complete();
|
|
2137
|
-
if (response.text.length > 0) {
|
|
2138
|
-
callbacks.onToken?.(response.text);
|
|
2139
|
-
}
|
|
2140
|
-
callbacks.onChunk?.({
|
|
2141
|
-
textDelta: response.text,
|
|
2142
|
-
raw: response.raw,
|
|
2143
|
-
done: true,
|
|
2144
|
-
usage: response.usage,
|
|
2145
|
-
finishReason: response.finishReason
|
|
2146
|
-
});
|
|
2147
|
-
callbacks.onComplete?.(response);
|
|
2148
|
-
return response;
|
|
2149
|
-
}
|
|
2150
2660
|
|
|
2151
2661
|
// src/providers/anthropic-compatible.ts
|
|
2152
2662
|
var DEFAULT_ANTHROPIC_MAX_TOKENS = 1024;
|
|
@@ -2165,7 +2675,7 @@ function createAnthropicCompatibleAdapter(options) {
|
|
|
2165
2675
|
},
|
|
2166
2676
|
async stream(request, callbacks = {}) {
|
|
2167
2677
|
if (hasMCPClients(request.mcpClients)) {
|
|
2168
|
-
return
|
|
2678
|
+
return streamWithMCPToolLoop(options, fetcher, path, request, callbacks);
|
|
2169
2679
|
}
|
|
2170
2680
|
const response = await fetcher(buildURL(options.baseURL, path), {
|
|
2171
2681
|
method: "POST",
|
|
@@ -2339,6 +2849,127 @@ async function completeWithMCPToolLoop(options, fetcher, path, request) {
|
|
|
2339
2849
|
toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
|
|
2340
2850
|
};
|
|
2341
2851
|
}
|
|
2852
|
+
async function streamWithMCPToolLoop(options, fetcher, path, request, callbacks) {
|
|
2853
|
+
const maxToolRounds = normalizeMaxToolRounds(request.maxToolRounds ?? options.defaultMaxToolRounds);
|
|
2854
|
+
let messages = [{ role: "user", content: request.prompt }];
|
|
2855
|
+
let aggregatedUsage;
|
|
2856
|
+
let finishReason;
|
|
2857
|
+
let lastPayload;
|
|
2858
|
+
const toolCalls = [];
|
|
2859
|
+
const toolExecutions = [];
|
|
2860
|
+
callbacks.onStart?.();
|
|
2861
|
+
for (let round = 1;round <= maxToolRounds + 1; round += 1) {
|
|
2862
|
+
const mcpToolset = await resolveMCPToolset(request.mcpClients);
|
|
2863
|
+
const tools = toAnthropicTools(toProviderFunctionTools(mcpToolset));
|
|
2864
|
+
const response = await fetcher(buildURL(options.baseURL, path), {
|
|
2865
|
+
method: "POST",
|
|
2866
|
+
headers: buildHeaders2(options),
|
|
2867
|
+
body: JSON.stringify(cleanUndefined({
|
|
2868
|
+
...options.defaultBody,
|
|
2869
|
+
...request.body,
|
|
2870
|
+
model: options.model,
|
|
2871
|
+
system: request.systemPrompt,
|
|
2872
|
+
messages,
|
|
2873
|
+
temperature: request.temperature,
|
|
2874
|
+
max_tokens: resolveMaxTokens(request.maxTokens, options.defaultMaxTokens),
|
|
2875
|
+
tools,
|
|
2876
|
+
tool_choice: toAnthropicToolChoice(request.toolChoice),
|
|
2877
|
+
stream: true
|
|
2878
|
+
}))
|
|
2879
|
+
});
|
|
2880
|
+
if (!response.ok) {
|
|
2881
|
+
const message = await response.text();
|
|
2882
|
+
throw new Error(`HTTP ${response.status}: ${message}`);
|
|
2883
|
+
}
|
|
2884
|
+
let roundText = "";
|
|
2885
|
+
let roundUsage;
|
|
2886
|
+
let roundFinishReason;
|
|
2887
|
+
const streamedToolCalls = new Map;
|
|
2888
|
+
await consumeSSE(response, (data) => {
|
|
2889
|
+
if (data === "[DONE]") {
|
|
2890
|
+
return;
|
|
2891
|
+
}
|
|
2892
|
+
const json = safeJSONParse(data);
|
|
2893
|
+
if (!isRecord2(json)) {
|
|
2894
|
+
return;
|
|
2895
|
+
}
|
|
2896
|
+
lastPayload = json;
|
|
2897
|
+
const delta = pickAnthropicDelta(json);
|
|
2898
|
+
const chunkUsage = pickUsage2(json);
|
|
2899
|
+
const chunkFinishReason = pickFinishReason2(json);
|
|
2900
|
+
collectAnthropicStreamToolCalls(json, streamedToolCalls);
|
|
2901
|
+
roundUsage = mergeUsage(roundUsage, chunkUsage);
|
|
2902
|
+
if (chunkFinishReason) {
|
|
2903
|
+
roundFinishReason = chunkFinishReason;
|
|
2904
|
+
}
|
|
2905
|
+
if (delta) {
|
|
2906
|
+
roundText += delta;
|
|
2907
|
+
callbacks.onToken?.(delta);
|
|
2908
|
+
}
|
|
2909
|
+
if (delta || chunkUsage || chunkFinishReason) {
|
|
2910
|
+
const chunk = {
|
|
2911
|
+
textDelta: delta,
|
|
2912
|
+
raw: json,
|
|
2913
|
+
usage: chunkUsage,
|
|
2914
|
+
finishReason: chunkFinishReason
|
|
2915
|
+
};
|
|
2916
|
+
callbacks.onChunk?.(chunk);
|
|
2917
|
+
}
|
|
2918
|
+
});
|
|
2919
|
+
aggregatedUsage = mergeUsage(aggregatedUsage, roundUsage);
|
|
2920
|
+
if (roundFinishReason) {
|
|
2921
|
+
finishReason = roundFinishReason;
|
|
2922
|
+
}
|
|
2923
|
+
const calledTools = buildAnthropicStreamToolCalls(streamedToolCalls);
|
|
2924
|
+
if (calledTools.length === 0) {
|
|
2925
|
+
const out2 = {
|
|
2926
|
+
text: roundText,
|
|
2927
|
+
raw: lastPayload,
|
|
2928
|
+
usage: aggregatedUsage,
|
|
2929
|
+
finishReason,
|
|
2930
|
+
toolCalls: toolCalls.length > 0 ? toolCalls : undefined,
|
|
2931
|
+
toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
|
|
2932
|
+
};
|
|
2933
|
+
callbacks.onComplete?.(out2);
|
|
2934
|
+
return out2;
|
|
2935
|
+
}
|
|
2936
|
+
if (round > maxToolRounds) {
|
|
2937
|
+
throw new Error(`Tool call loop exceeded maxToolRounds (${maxToolRounds}).`);
|
|
2938
|
+
}
|
|
2939
|
+
const toolResultContent = [];
|
|
2940
|
+
const outputs = await executeMCPToolCalls(calledTools, mcpToolset, {
|
|
2941
|
+
round,
|
|
2942
|
+
request,
|
|
2943
|
+
provider: "anthropic-compatible",
|
|
2944
|
+
model: options.model
|
|
2945
|
+
});
|
|
2946
|
+
toolCalls.push(...outputs.map((entry) => entry.call));
|
|
2947
|
+
toolExecutions.push(...outputs.map((entry) => entry.execution));
|
|
2948
|
+
for (const entry of outputs) {
|
|
2949
|
+
toolResultContent.push({
|
|
2950
|
+
type: "tool_result",
|
|
2951
|
+
tool_use_id: entry.call.id,
|
|
2952
|
+
...entry.call.error ? { is_error: true } : {},
|
|
2953
|
+
content: stringifyToolOutput(entry.call.error ? { error: entry.call.error } : entry.call.output)
|
|
2954
|
+
});
|
|
2955
|
+
}
|
|
2956
|
+
messages = [
|
|
2957
|
+
...messages,
|
|
2958
|
+
{ role: "assistant", content: buildAnthropicAssistantToolContent(roundText, calledTools) },
|
|
2959
|
+
{ role: "user", content: toolResultContent }
|
|
2960
|
+
];
|
|
2961
|
+
}
|
|
2962
|
+
const out = {
|
|
2963
|
+
text: "",
|
|
2964
|
+
raw: lastPayload,
|
|
2965
|
+
usage: aggregatedUsage,
|
|
2966
|
+
finishReason,
|
|
2967
|
+
toolCalls: toolCalls.length > 0 ? toolCalls : undefined,
|
|
2968
|
+
toolExecutions: toolExecutions.length > 0 ? toolExecutions : undefined
|
|
2969
|
+
};
|
|
2970
|
+
callbacks.onComplete?.(out);
|
|
2971
|
+
return out;
|
|
2972
|
+
}
|
|
2342
2973
|
function buildHeaders2(options) {
|
|
2343
2974
|
return {
|
|
2344
2975
|
"content-type": "application/json",
|
|
@@ -2401,6 +3032,83 @@ function pickAnthropicDelta(payload) {
|
|
|
2401
3032
|
}
|
|
2402
3033
|
return "";
|
|
2403
3034
|
}
|
|
3035
|
+
function collectAnthropicStreamToolCalls(payload, state) {
  // Folds one Anthropic SSE event into `state`, a Map keyed by content-block
  // index that accumulates tool_use metadata and streamed JSON argument text.
  const kind = pickString(payload.type);
  if (!kind) {
    return;
  }
  // Fetch (or lazily create and register) the accumulator slot for an index.
  const slotFor = (rawIndex) => {
    const position = pickContentBlockIndex(rawIndex);
    const slot = state.get(position) ?? { index: position, argumentsText: "" };
    state.set(position, slot);
    return slot;
  };
  if (kind === "content_block_start") {
    const block = payload.content_block;
    if (!isRecord2(block) || pickString(block.type) !== "tool_use") {
      return;
    }
    const slot = slotFor(payload.index);
    const blockId = pickString(block.id);
    if (blockId) {
      slot.id = blockId;
    }
    const blockName = pickString(block.name);
    if (blockName) {
      slot.name = blockName;
    }
    // Some servers emit an eager `input` object on the start event; keep it
    // as a fallback for when no input_json_delta text arrives.
    if ("input" in block) {
      slot.input = block.input;
    }
    return;
  }
  if (kind === "content_block_delta") {
    const delta = payload.delta;
    if (!isRecord2(delta) || pickString(delta.type) !== "input_json_delta") {
      return;
    }
    const slot = slotFor(payload.index);
    const fragment = pickString(delta.partial_json);
    if (fragment) {
      slot.argumentsText += fragment;
    }
  }
}
|
|
3081
|
+
// Normalizes a raw content-block index into an integer Map key.
// Non-numeric / non-finite input falls back to slot 0.
function pickContentBlockIndex(value) {
  const parsed = toFiniteNumber(value);
  return parsed === undefined ? 0 : Math.floor(parsed);
}
|
|
3088
|
+
// Flattens the per-index streaming accumulator map into an ordered list of
// tool-call records (ordered by content-block index).
function buildAnthropicStreamToolCalls(state) {
  const entries = Array.from(state.values());
  entries.sort((left, right) => left.index - right.index);
  const calls = [];
  for (const entry of entries) {
    // Streamed JSON text wins; otherwise fall back to the eagerly provided
    // input object, or an empty object when neither is present.
    let args;
    if (entry.argumentsText.length > 0) {
      args = entry.argumentsText;
    } else {
      args = entry.input ?? {};
    }
    calls.push({
      id: entry.id ?? "",
      type: "function",
      name: entry.name,
      arguments: args
    });
  }
  return calls;
}
|
|
3096
|
+
// Rebuilds an assistant turn as Anthropic content blocks: an optional leading
// text block followed by one tool_use block per collected call.
function buildAnthropicAssistantToolContent(text, toolCalls) {
  const blocks = text.length > 0 ? [{ type: "text", text }] : [];
  for (const call of toolCalls) {
    // Arguments may still be the raw streamed JSON string; parse those and
    // pass already-object arguments through untouched.
    let input = call.arguments;
    if (typeof input === "string") {
      input = parseToolArguments(input);
    }
    blocks.push({
      type: "tool_use",
      id: call.id,
      name: call.name,
      // The API expects a plain object for input; anything else collapses
      // to an empty object.
      input: isRecord2(input) ? input : {}
    });
  }
  return blocks;
}
|
|
2404
3112
|
function pickUsage2(payload) {
|
|
2405
3113
|
const fromUsage = extractUsageObject(payload.usage);
|
|
2406
3114
|
if (fromUsage) {
|
|
@@ -2480,22 +3188,6 @@ function toAnthropicToolChoice(value) {
|
|
|
2480
3188
|
}
|
|
2481
3189
|
return value;
|
|
2482
3190
|
}
|
|
2483
|
-
// Emulates a streaming interface on top of a one-shot completion: the whole
// response is delivered as a single token plus a single terminal chunk.
async function streamViaComplete2(callbacks, complete) {
  callbacks.onStart?.();
  const result = await complete();
  const { text, raw, usage, finishReason } = result;
  // Only surface a token event when there is actual text to report.
  if (text.length > 0) {
    callbacks.onToken?.(text);
  }
  const finalChunk = {
    textDelta: text,
    raw,
    done: true,
    usage,
    finishReason
  };
  callbacks.onChunk?.(finalChunk);
  callbacks.onComplete?.(result);
  return result;
}
|
|
2499
3191
|
|
|
2500
3192
|
// src/providers/registry.ts
|
|
2501
3193
|
class InMemoryProviderRegistry {
|
|
@@ -3974,7 +4666,8 @@ async function createMCPClient(options) {
|
|
|
3974
4666
|
return wrapMCPClient({
|
|
3975
4667
|
id: options.id,
|
|
3976
4668
|
client,
|
|
3977
|
-
transport
|
|
4669
|
+
transport,
|
|
4670
|
+
toolCallTimeoutMs: options.toolCallTimeoutMs
|
|
3978
4671
|
});
|
|
3979
4672
|
}
|
|
3980
4673
|
function wrapMCPClient(options) {
|
|
@@ -3994,7 +4687,8 @@ function wrapMCPClient(options) {
|
|
|
3994
4687
|
};
|
|
3995
4688
|
},
|
|
3996
4689
|
async callTool(params) {
|
|
3997
|
-
|
|
4690
|
+
const callOptions = options.toolCallTimeoutMs === undefined ? undefined : { timeout: options.toolCallTimeoutMs };
|
|
4691
|
+
return options.client.callTool(params, undefined, callOptions);
|
|
3998
4692
|
},
|
|
3999
4693
|
async close() {
|
|
4000
4694
|
await options.client.close();
|