graphlit-client 1.0.20250713002 → 1.0.20250714001
- package/dist/client.js +31 -3
- package/dist/streaming/providers.js +40 -4
- package/package.json +1 -1
package/dist/client.js
CHANGED
@@ -2454,6 +2454,8 @@ class Graphlit {
         // ALWAYS log when there's a tool-related issue for debugging
         const hasToolCalls = mistralMessages.some((m) => m.tool_calls?.length > 0);
         const hasToolResponses = mistralMessages.some((m) => m.role === "tool");
+        // Count tool responses to determine if we should pass tools
+        const toolResponseCount = mistralMessages.filter((m) => m.role === "tool").length;
         if (hasToolCalls ||
             hasToolResponses ||
             process.env.DEBUG_GRAPHLIT_SDK_STREAMING_MESSAGES) {
@@ -2461,13 +2463,17 @@ class Graphlit {
             console.log(JSON.stringify(mistralMessages, null, 2));
             // Count tool calls and responses
             const toolCallCount = mistralMessages.reduce((count, m) => count + (m.tool_calls?.length || 0), 0);
-            const toolResponseCount = mistralMessages.filter((m) => m.role === "tool").length;
             console.log(`🔍 [Mistral] Tool calls: ${toolCallCount}, Tool responses: ${toolResponseCount}`);
             if (toolResponseCount > 0) {
                 console.log(`🔍 [Mistral] IMPORTANT: We have tool responses, should we still pass tools?`);
             }
         }
-
+        // Mistral API requires that we don't pass tools when sending tool results
+        const shouldPassTools = toolResponseCount === 0 ? tools : undefined;
+        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
+            console.log(`🔍 [Mistral] Passing tools: ${shouldPassTools ? 'YES' : 'NO'} (tool responses in messages: ${toolResponseCount})`);
+        }
+        await this.streamWithMistral(specification, mistralMessages, shouldPassTools, uiAdapter, (message, calls, usage) => {
            roundMessage = message;
            toolCalls = calls;
            if (usage) {
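The two hunks above hoist the tool-response count out of the debug block and use it to gate the tools array: per the comment in the diff, Mistral's chat API requires that tool definitions not be passed again when the messages already contain tool results. A minimal standalone sketch of that gating, with illustrative (non-SDK) type and helper names:

```ts
// Illustrative sketch of the shouldPassTools gating above; names are hypothetical.
type MistralMessage = {
  role: "system" | "user" | "assistant" | "tool";
  content?: string;
  tool_calls?: { id: string; function: { name: string; arguments: string } }[];
};

type ToolDefinition = { name: string; description?: string; schema?: unknown };

// Forward tool definitions only while the conversation contains no tool results.
function selectTools(
  messages: MistralMessage[],
  tools: ToolDefinition[] | undefined,
): ToolDefinition[] | undefined {
  const toolResponseCount = messages.filter((m) => m.role === "tool").length;
  return toolResponseCount === 0 ? tools : undefined;
}
```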
@@ -2860,6 +2866,8 @@ class Graphlit {
            (OpenAI
                ? new OpenAI({
                    apiKey: process.env.OPENAI_API_KEY || "",
+                   maxRetries: 3,
+                   timeout: 60000, // 60 seconds
                })
                : (() => {
                    throw new Error("OpenAI module not available");
@@ -2882,6 +2890,8 @@ class Graphlit {
            (Anthropic
                ? new Anthropic({
                    apiKey: process.env.ANTHROPIC_API_KEY || "",
+                   maxRetries: 3,
+                   timeout: 60000, // 60 seconds
                })
                : (() => {
                    throw new Error("Anthropic module not available");
@@ -2976,6 +2986,8 @@ class Graphlit {
                ? new OpenAI({
                    apiKey: process.env.CEREBRAS_API_KEY || "",
                    baseURL: "https://api.cerebras.ai/v1",
+                   maxRetries: 3,
+                   timeout: 60000, // 60 seconds
                })
                : (() => {
                    throw new Error("OpenAI module not available for Cerebras");
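The three hunks above (and the Deepseek and xAI hunks further down) add the same two client options, maxRetries: 3 and timeout: 60000, which both the openai and @anthropic-ai/sdk packages accept at construction time. A minimal sketch of the equivalent standalone construction, with the Cerebras case shown as an OpenAI-compatible client:

```ts
import OpenAI from "openai";
import Anthropic from "@anthropic-ai/sdk";

// Both SDKs retry transient failures internally; maxRetries caps the attempts
// and timeout (milliseconds) bounds each underlying request.
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY || "",
  maxRetries: 3,
  timeout: 60000, // 60 seconds
});

const anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY || "",
  maxRetries: 3,
  timeout: 60000,
});

// Cerebras (like Deepseek and xAI below) reuses the OpenAI constructor with a
// different baseURL, so the same two options apply there unchanged.
const cerebras = new OpenAI({
  apiKey: process.env.CEREBRAS_API_KEY || "",
  baseURL: "https://api.cerebras.ai/v1",
  maxRetries: 3,
  timeout: 60000,
});
```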
@@ -3023,7 +3035,19 @@ class Graphlit {
            if (!apiKey) {
                throw new Error("MISTRAL_API_KEY environment variable is required for Mistral streaming");
            }
-           return new Mistral({
+           return new Mistral({
+               apiKey,
+               retryConfig: {
+                   strategy: "backoff",
+                   backoff: {
+                       initialInterval: 1000,
+                       maxInterval: 60000,
+                       exponent: 2,
+                       maxElapsedTime: 300000, // 5 minutes
+                   },
+                   retryConnectionErrors: true,
+               },
+           });
            })()
            : (() => {
                throw new Error("Mistral module not available");
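The retryConfig passed to the Mistral constructor above describes an exponential backoff: with initialInterval: 1000, exponent: 2, and maxInterval: 60000, the wait before attempt n is roughly min(60000, 1000 * 2^n) milliseconds, and retrying stops once maxElapsedTime (5 minutes) of waiting has accumulated. A hedged sketch of that schedule, illustrating what the four parameters mean rather than reproducing the SDK's exact implementation (which may add jitter):

```ts
// Approximate delays implied by the retryConfig above; purely illustrative.
function backoffDelays(
  initialInterval = 1000,
  exponent = 2,
  maxInterval = 60000,
  maxElapsedTime = 300000,
): number[] {
  const delays: number[] = [];
  let elapsed = 0;
  for (let attempt = 0; ; attempt++) {
    const delay = Math.min(maxInterval, initialInterval * exponent ** attempt);
    if (elapsed + delay > maxElapsedTime) break; // respect the 5-minute budget
    delays.push(delay);
    elapsed += delay;
  }
  return delays;
}

// backoffDelays() → [1000, 2000, 4000, 8000, 16000, 32000, 60000, 60000, 60000]
```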
@@ -3069,6 +3093,8 @@ class Graphlit {
                ? new OpenAI({
                    baseURL: "https://api.deepseek.com",
                    apiKey: process.env.DEEPSEEK_API_KEY || "",
+                   maxRetries: 3,
+                   timeout: 60000, // 60 seconds
                })
                : null);
            if (!deepseekClient) {
@@ -3090,6 +3116,8 @@ class Graphlit {
                ? new OpenAI({
                    baseURL: "https://api.x.ai/v1",
                    apiKey: process.env.XAI_API_KEY || "",
+                   maxRetries: 3,
+                   timeout: 60000, // 60 seconds
                })
                : null);
            if (!xaiClient) {
package/dist/streaming/providers.js
CHANGED
@@ -1094,8 +1094,15 @@ onEvent, onComplete, abortSignal) {
                // Check for any final text we might have missed
                if (part.text) {
                    const finalText = part.text;
+                   // Skip if this is just the complete message we already have
+                   if (finalText === fullMessage) {
+                       if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
+                           console.log(`[Google] Skipping duplicate final text (matches fullMessage exactly)`);
+                       }
+                       continue;
+                   }
                    // Only add if it's not already included in fullMessage
-                   if (!fullMessage.endsWith(finalText)) {
+                   if (!fullMessage.includes(finalText) && !fullMessage.endsWith(finalText)) {
                        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
                            console.log(`[Google] Adding final text: ${finalText.length} chars`);
                        }
@@ -1105,6 +1112,9 @@ onEvent, onComplete, abortSignal) {
                            token: finalText,
                        });
                    }
+                   else if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
+                       console.log(`[Google] Skipping final text (already in message): ${finalText.length} chars`);
+                   }
                }
                // Check for function calls
                if (part.functionCall &&
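These two hunks tighten the duplicate detection for Google's trailing part.text: a chunk that exactly matches the accumulated message is skipped outright, and text already contained anywhere in the message (not just as its suffix) is no longer re-emitted. A standalone sketch of that decision, with a hypothetical helper name:

```ts
// Illustrative sketch of the dedup decision above; shouldEmitFinalText is not an SDK export.
function shouldEmitFinalText(fullMessage: string, finalText: string): boolean {
  if (finalText === fullMessage) return false;        // exact duplicate of the whole message
  if (fullMessage.includes(finalText)) return false;  // already present somewhere in the message
  if (fullMessage.endsWith(finalText)) return false;  // kept to mirror the diff, though includes() subsumes it
  return true;                                        // genuinely new trailing text: emit it
}

// A final chunk repeating the whole answer is dropped; a real continuation is appended.
shouldEmitFinalText("Hello world", "Hello world"); // false
shouldEmitFinalText("Hello", " world");            // true
```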
@@ -2008,6 +2018,7 @@ onEvent, onComplete, abortSignal) {
        };
        // Add tools if provided
        if (tools && tools.length > 0) {
+           console.log(`[Mistral] Adding ${tools.length} tools to stream config`);
            streamConfig.tools = tools.map((tool) => ({
                type: "function",
                function: {
@@ -2017,6 +2028,9 @@ onEvent, onComplete, abortSignal) {
                },
            }));
        }
+       else {
+           console.log(`[Mistral] No tools provided - tools parameter is ${tools === undefined ? 'undefined' : 'empty array'}`);
+       }
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`[Mistral] Stream config:`, JSON.stringify({
                ...streamConfig,
@@ -2058,9 +2072,24 @@ onEvent, onComplete, abortSignal) {
                });
            }
        }
-
-
-
+       if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
+           console.log(`[Mistral] Attempting to create stream with retry configuration`);
+       }
+       // Log final config being sent
+       console.log(`[Mistral] Sending request with tools: ${streamConfig.tools ? 'YES' : 'NO'}`);
+       stream = await mistralClient.chat.stream(streamConfig, {
+           retries: {
+               strategy: "backoff",
+               backoff: {
+                   initialInterval: 1000,
+                   maxInterval: 60000,
+                   exponent: 2,
+                   maxElapsedTime: 300000, // 5 minutes
+               },
+               retryConnectionErrors: true,
+           },
+           retryCodes: ["429", "500", "502", "503", "504"],
+           ...(abortSignal && { fetchOptions: { signal: abortSignal } }),
        });
    }
    catch (error) {
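Besides the per-request retries and retryCodes options, the new chat.stream call threads an optional AbortSignal through fetchOptions.signal via a conditional spread, so callers can cancel an in-flight stream. A hedged usage sketch of that wiring (the buildRequestOptions helper is illustrative, not part of the SDK):

```ts
// How an optional AbortSignal can be attached to request options, as in the diff above.
function buildRequestOptions(abortSignal?: AbortSignal): Record<string, unknown> {
  return {
    retryCodes: ["429", "500", "502", "503", "504"],
    // Only attach fetchOptions when a signal was actually provided.
    ...(abortSignal && { fetchOptions: { signal: abortSignal } }),
  };
}

// Caller side: abort the stream by aborting the controller.
const controller = new AbortController();
const options = buildRequestOptions(controller.signal);
// ...pass `options` as the second argument to the streaming call...
setTimeout(() => controller.abort(), 30000); // e.g. give up after 30 seconds
```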
@@ -2218,6 +2247,13 @@ onEvent, onComplete, abortSignal) {
            rateLimitError.statusCode = 429;
            throw rateLimitError;
        }
+       if (error.message?.includes("500") ||
+           error.message?.includes("Service unavailable") ||
+           error.message?.includes("INTERNAL_SERVER_ERROR")) {
+           const serviceError = new Error("Mistral API service is temporarily unavailable (500). This is a temporary issue with Mistral's servers. Please try again later.");
+           serviceError.statusCode = 500;
+           throw serviceError;
+       }
        // Re-throw with more context
        throw new Error(`Mistral streaming failed: ${error.message || "Unknown error"}`);
    }
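The final hunk classifies Mistral 5xx failures the same way the existing 429 branch does: the error message is sniffed for known markers and the re-thrown error carries a statusCode that downstream retry logic can inspect. A small sketch of that pattern (classifyMistralError is a hypothetical helper, not an SDK export):

```ts
// Illustrative sketch of the error-classification pattern added above.
function classifyMistralError(error: Error): Error {
  const message = error.message ?? "";
  if (
    message.includes("500") ||
    message.includes("Service unavailable") ||
    message.includes("INTERNAL_SERVER_ERROR")
  ) {
    const serviceError = new Error(
      "Mistral API service is temporarily unavailable (500). This is a temporary issue with Mistral's servers. Please try again later.",
    ) as Error & { statusCode?: number };
    serviceError.statusCode = 500; // lets callers treat the failure as retryable
    return serviceError;
  }
  // Fall back to the generic wrapper used at the end of the catch block.
  return new Error(`Mistral streaming failed: ${message || "Unknown error"}`);
}
```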