graphlit-client 1.0.20250625001 → 1.0.20250627002
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +235 -5
- package/dist/client.d.ts +24 -1
- package/dist/client.js +280 -55
- package/dist/generated/graphql-documents.d.ts +19 -0
- package/dist/generated/graphql-documents.js +1161 -0
- package/dist/generated/graphql-types.d.ts +1940 -85
- package/dist/generated/graphql-types.js +51 -0
- package/dist/streaming/llm-formatters.js +68 -5
- package/dist/streaming/providers.d.ts +18 -13
- package/dist/streaming/providers.js +690 -167
- package/dist/streaming/ui-event-adapter.d.ts +7 -0
- package/dist/streaming/ui-event-adapter.js +55 -0
- package/dist/types/internal.d.ts +11 -0
- package/dist/types/ui-events.d.ts +9 -0
- package/package.json +1 -1
package/dist/client.js
CHANGED
@@ -8,7 +8,7 @@ import * as Documents from "./generated/graphql-documents.js";
 import * as dotenv from "dotenv";
 import { getServiceType, getModelName } from "./model-mapping.js";
 import { UIEventAdapter } from "./streaming/ui-event-adapter.js";
-import { formatMessagesForOpenAI, formatMessagesForAnthropic, formatMessagesForGoogle, } from "./streaming/llm-formatters.js";
+import { formatMessagesForOpenAI, formatMessagesForAnthropic, formatMessagesForGoogle, formatMessagesForMistral, formatMessagesForBedrock, } from "./streaming/llm-formatters.js";
 import { streamWithOpenAI, streamWithAnthropic, streamWithGoogle, streamWithGroq, streamWithCerebras, streamWithCohere, streamWithMistral, streamWithBedrock, streamWithDeepseek, } from "./streaming/providers.js";
 // Optional imports for streaming LLM clients
 // These are peer dependencies and may not be installed

@@ -20,6 +20,7 @@ let Anthropic;
 let GoogleGenerativeAI;
 let Groq;
 let CohereClient;
+let CohereClientV2;
 let Mistral;
 let BedrockRuntimeClient;
 try {

@@ -74,6 +75,7 @@ catch (e) {
 }
 try {
     CohereClient = optionalRequire("cohere-ai").CohereClient;
+    CohereClientV2 = optionalRequire("cohere-ai").CohereClientV2;
     if (process.env.DEBUG_GRAPHLIT_SDK_INITIALIZATION) {
         console.log("[SDK Loading] Cohere SDK loaded successfully");
     }

@@ -792,6 +794,15 @@ class Graphlit {
            correlationId: correlationId,
        });
    }
+    async retrieveView(prompt, id, retrievalStrategy, rerankingStrategy, correlationId) {
+        return this.mutateAndCheckError(Documents.RetrieveView, {
+            prompt: prompt,
+            id: id,
+            retrievalStrategy: retrievalStrategy,
+            rerankingStrategy: rerankingStrategy,
+            correlationId: correlationId,
+        });
+    }
    async retrieveSources(prompt, filter, augmentedFilter, retrievalStrategy, rerankingStrategy, correlationId) {
        return this.mutateAndCheckError(Documents.RetrieveSources, {
            prompt: prompt,

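The new retrieveView method mirrors retrieveSources, but scopes retrieval to a single saved view. A minimal usage sketch (the view id and the undefined strategy arguments are hypothetical placeholders, not values from this diff):

    import { Graphlit } from "graphlit-client";

    const client = new Graphlit();
    // Retrieve against one view; strategies are optional and may be omitted.
    const response = await client.retrieveView(
      "What changed in the latest release?", // prompt
      "YOUR-VIEW-ID",                        // id (hypothetical placeholder)
      undefined,                             // retrievalStrategy
      undefined,                             // rerankingStrategy
    );
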
@@ -928,6 +939,12 @@ class Graphlit {
            teamId: teamId,
        });
    }
+    async queryDiscordGuilds(properties) {
+        return this.queryAndCheckError(Documents.QueryDiscordGuilds, { properties: properties });
+    }
+    async queryDiscordChannels(properties) {
+        return this.queryAndCheckError(Documents.QueryDiscordChannels, { properties: properties });
+    }
    async querySlackChannels(properties) {
        return this.queryAndCheckError(Documents.QuerySlackChannels, { properties: properties });
    }

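Both Discord helpers follow the existing querySlackChannels pattern: pass a properties object and the wrapped GraphQL query is checked for errors. A hedged sketch (the exact properties shape comes from the generated types; the token field shown here is an assumption):

    // Assumed input shape; consult dist/generated/graphql-types for the real one.
    const guilds = await client.queryDiscordGuilds({ token: process.env.DISCORD_BOT_TOKEN ?? "" });
    const channels = await client.queryDiscordChannels({ token: process.env.DISCORD_BOT_TOKEN ?? "" });
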
@@ -1026,6 +1043,108 @@ class Graphlit {
    async queryModels(filter) {
        return this.queryAndCheckError(Documents.QueryModels, { filter: filter });
    }
+    async createConnector(connector) {
+        return this.mutateAndCheckError(Documents.CreateConnector, { connector: connector });
+    }
+    async updateConnector(connector) {
+        return this.mutateAndCheckError(Documents.UpdateConnector, { connector: connector });
+    }
+    /*
+    public async upsertConnector(
+      connector: Types.ConnectorInput
+    ): Promise<Types.UpsertConnectorMutation> {
+      return this.mutateAndCheckError<
+        Types.UpsertConnectorMutation,
+        { connector: Types.ConnectorInput }
+      >(Documents.UpsertConnector, { connector: connector });
+    }
+    */
+    async deleteConnector(id) {
+        return this.mutateAndCheckError(Documents.DeleteConnector, { id: id });
+    }
+    /*
+    public async deleteConnectors(
+      ids: string[],
+      isSynchronous?: boolean
+    ): Promise<Types.DeleteConnectorsMutation> {
+      return this.mutateAndCheckError<
+        Types.DeleteConnectorsMutation,
+        { ids: string[]; isSynchronous?: boolean }
+      >(Documents.DeleteConnectors, { ids: ids, isSynchronous: isSynchronous });
+    }
+
+    public async deleteAllConnectors(
+      filter?: Types.ConnectorFilter,
+      isSynchronous?: boolean,
+      correlationId?: string
+    ): Promise<Types.DeleteAllConnectorsMutation> {
+      return this.mutateAndCheckError<
+        Types.DeleteAllConnectorsMutation,
+        {
+          filter?: Types.ConnectorFilter;
+          isSynchronous?: boolean;
+          correlationId?: string;
+        }
+      >(Documents.DeleteAllConnectors, {
+        filter: filter,
+        isSynchronous: isSynchronous,
+        correlationId: correlationId,
+      });
+    }
+    */
+    async getConnector(id) {
+        return this.queryAndCheckError(Documents.GetConnector, { id: id });
+    }
+    async queryConnectors(filter) {
+        return this.queryAndCheckError(Documents.QueryConnectors, { filter: filter });
+    }
+    async countConnectors(filter) {
+        return this.queryAndCheckError(Documents.CountConnectors, { filter: filter });
+    }
+    /*
+    public async connectorExists(
+      filter?: Types.ConnectorFilter
+    ): Promise<Types.ConnectorExistsQuery> {
+      return this.queryAndCheckError<
+        Types.QueryConnectorsQuery,
+        { filter?: Types.ConnectorFilter }
+      >(Documents.ConnectorExists, { filter: filter });
+    }
+    */
+    async createView(view) {
+        return this.mutateAndCheckError(Documents.CreateView, { view: view });
+    }
+    async updateView(view) {
+        return this.mutateAndCheckError(Documents.UpdateView, { view: view });
+    }
+    async upsertView(view) {
+        return this.mutateAndCheckError(Documents.UpsertView, { view: view });
+    }
+    async deleteView(id) {
+        return this.mutateAndCheckError(Documents.DeleteView, { id: id });
+    }
+    async deleteViews(ids, isSynchronous) {
+        return this.mutateAndCheckError(Documents.DeleteViews, { ids: ids, isSynchronous: isSynchronous });
+    }
+    async deleteAllViews(filter, isSynchronous, correlationId) {
+        return this.mutateAndCheckError(Documents.DeleteAllViews, {
+            filter: filter,
+            isSynchronous: isSynchronous,
+            correlationId: correlationId,
+        });
+    }
+    async getView(id) {
+        return this.queryAndCheckError(Documents.GetView, { id: id });
+    }
+    async queryViews(filter) {
+        return this.queryAndCheckError(Documents.QueryViews, { filter: filter });
+    }
+    async countViews(filter) {
+        return this.queryAndCheckError(Documents.CountViews, { filter: filter });
+    }
+    async viewExists(filter) {
+        return this.queryAndCheckError(Documents.ViewExists, { filter: filter });
+    }
    async createWorkflow(workflow) {
        return this.mutateAndCheckError(Documents.CreateWorkflow, { workflow: workflow });
    }

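The view methods give the SDK full CRUD plus count/exists coverage, matching the workflow methods that follow; the bulk connector mutations are present only as commented-out code. A lifecycle sketch (the input and filter fields shown are hypothetical; see Types.ViewInput and Types.ViewFilter for the real shapes):

    const created = await client.createView({ name: "Engineering docs" }); // hypothetical input
    const viewId = created.createView?.id;
    const found = await client.queryViews({ name: "Engineering docs" });   // hypothetical filter
    if (viewId) {
      await client.deleteView(viewId);
    }
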
@@ -1649,7 +1768,7 @@ class Graphlit {
     * @param specification - Optional specification to check compatibility
     * @returns true if streaming is available, false otherwise
     */
-    supportsStreaming(specification) {
+    supportsStreaming(specification, tools) {
        // If we have a full specification, check its service type
        if (specification) {
            const serviceType = specification.serviceType;

@@ -1677,12 +1796,26 @@ class Graphlit {
            case Types.ModelServiceTypes.Cerebras:
                return OpenAI !== undefined || this.cerebrasClient !== undefined;
            case Types.ModelServiceTypes.Cohere:
-                return CohereClient !== undefined || this.cohereClient !== undefined;
+                return (CohereClient !== undefined ||
+                    CohereClientV2 !== undefined ||
+                    this.cohereClient !== undefined);
            case Types.ModelServiceTypes.Mistral:
                return Mistral !== undefined || this.mistralClient !== undefined;
            case Types.ModelServiceTypes.Bedrock:
-                return BedrockRuntimeClient !== undefined ||
-                    this.bedrockClient !== undefined;
+                const hasBedrockClient = BedrockRuntimeClient !== undefined ||
+                    this.bedrockClient !== undefined;
+                // Bedrock Llama models don't support tools in streaming mode
+                if (hasBedrockClient && tools && tools.length > 0) {
+                    const bedrockModel = specification.bedrock?.model;
+                    if (bedrockModel === Types.BedrockModels.Llama_4Maverick_17B ||
+                        bedrockModel === Types.BedrockModels.Llama_4Scout_17B) {
+                        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
+                            console.log(`⚠️ [supportsStreaming] Bedrock Llama model ${bedrockModel} does not support tools in streaming mode - will fallback to non-streaming`);
+                        }
+                        return false; // Force fallback to promptAgent for tool support
+                    }
+                }
+                return hasBedrockClient;
            case Types.ModelServiceTypes.Deepseek:
                return OpenAI !== undefined || this.deepseekClient !== undefined;
            default:

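supportsStreaming now receives the pending tool definitions so it can veto streaming when a Bedrock Llama 4 model is paired with tools, a combination Bedrock rejects in streaming mode; streamAgent then falls back to promptAgent. A sketch of the check (fullSpec and tools are placeholders for a loaded specification and tool array):

    const canStream = client.supportsStreaming(fullSpec, tools);
    if (!canStream) {
      // streamAgent takes this branch internally: same conversation, non-streaming promptAgent.
    }
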
@@ -1696,7 +1829,9 @@ class Graphlit {
        const hasGoogle = GoogleGenerativeAI !== undefined || this.googleClient !== undefined;
        const hasGroq = Groq !== undefined || this.groqClient !== undefined;
        const hasCerebras = OpenAI !== undefined || this.cerebrasClient !== undefined;
-        const hasCohere = CohereClient !== undefined || this.cohereClient !== undefined;
+        const hasCohere = CohereClient !== undefined ||
+            CohereClientV2 !== undefined ||
+            this.cohereClient !== undefined;
        const hasMistral = Mistral !== undefined || this.mistralClient !== undefined;
        const hasBedrock = BedrockRuntimeClient !== undefined || this.bedrockClient !== undefined;
        return (hasOpenAI ||

@@ -1914,7 +2049,7 @@ class Graphlit {
            }
        }
        // Check streaming support - fallback to promptAgent if not supported
-        if (fullSpec && !this.supportsStreaming(fullSpec)) {
+        if (fullSpec && !this.supportsStreaming(fullSpec, tools)) {
            if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
                console.log("\n⚠️ [streamAgent] Streaming not supported, falling back to promptAgent with same conversation");
            }

@@ -1929,6 +2064,31 @@ class Graphlit {
            conversationId: actualConversationId,
            timestamp: new Date(),
        });
+        // Debug logging for fallback
+        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
+            console.log(`📊 [streamAgent fallback] promptAgent result:`, {
+                hasMessage: !!promptResult.message,
+                messageLength: promptResult.message?.length,
+                toolCallsCount: promptResult.toolCalls?.length || 0,
+                toolResultsCount: promptResult.toolResults?.length || 0,
+                toolCalls: promptResult.toolCalls,
+                toolResults: promptResult.toolResults?.map((tr) => ({
+                    name: tr.name,
+                    hasResult: !!tr.result,
+                    hasError: !!tr.error,
+                })),
+            });
+        }
+        // Emit tool events if there were tool calls
+        if (promptResult.toolCalls && promptResult.toolCalls.length > 0) {
+            for (const toolCall of promptResult.toolCalls) {
+                onEvent({
+                    type: "tool_update",
+                    toolCall: toolCall,
+                    status: "completed",
+                });
+            }
+        }
        // Emit the final message as a single update (simulating streaming)
        onEvent({
            type: "message_update",

@@ -1937,7 +2097,7 @@ class Graphlit {
                message: promptResult.message,
                role: Types.ConversationRoleTypes.Assistant,
                timestamp: new Date().toISOString(),
-                toolCalls: [],
+                toolCalls: promptResult.toolCalls || [],
            },
            isStreaming: false,
        });

@@ -1949,7 +2109,7 @@ class Graphlit {
                message: promptResult.message,
                role: Types.ConversationRoleTypes.Assistant,
                timestamp: new Date().toISOString(),
-                toolCalls: [],
+                toolCalls: promptResult.toolCalls || [],
            },
        });
        return; // Exit early after successful fallback

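Together these changes make the non-streaming fallback observable: each executed tool call is replayed as a completed tool_update event, and the final message payloads now carry promptResult.toolCalls instead of an empty array. A sketch of an onEvent handler that consumes the replayed events (field names as emitted above):

    const onEvent = (event: any) => {
      if (event.type === "tool_update" && event.status === "completed") {
        console.log("tool call finished:", event.toolCall);
      }
      if (event.type === "message_update") {
        // message.toolCalls is populated even on the fallback path now.
        console.log("tool calls on message:", event.message?.toolCalls);
      }
    };
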
@@ -2132,7 +2292,7 @@ class Graphlit {
        await this.streamWithOpenAI(specification, openaiMessages, tools, uiAdapter, (message, calls) => {
            roundMessage = message;
            toolCalls = calls;
-        });
+        }, abortSignal);
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`\n🏁 [Streaming] OpenAI native streaming completed (Round ${currentRound})`);
        }

@@ -2149,7 +2309,7 @@ class Graphlit {
        await this.streamWithAnthropic(specification, anthropicMessages, system, tools, uiAdapter, (message, calls) => {
            roundMessage = message;
            toolCalls = calls;
-        });
+        }, abortSignal);
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`\n🏁 [Streaming] Anthropic native streaming completed (Round ${currentRound})`);
        }

@@ -2168,7 +2328,7 @@ class Graphlit {
        tools, uiAdapter, (message, calls) => {
            roundMessage = message;
            toolCalls = calls;
-        });
+        }, abortSignal);
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`\n🏁 [Streaming] Google native streaming completed (Round ${currentRound})`);
        }

@@ -2185,7 +2345,7 @@ class Graphlit {
        await this.streamWithGroq(specification, groqMessages, tools, uiAdapter, (message, calls) => {
            roundMessage = message;
            toolCalls = calls;
-        });
+        }, abortSignal);
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`\n🏁 [Streaming] Groq native streaming completed (Round ${currentRound})`);
        }

@@ -2202,24 +2362,24 @@ class Graphlit {
        await this.streamWithCerebras(specification, cerebrasMessages, tools, uiAdapter, (message, calls) => {
            roundMessage = message;
            toolCalls = calls;
-        });
+        }, abortSignal);
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`\n🏁 [Streaming] Cerebras native streaming completed (Round ${currentRound})`);
        }
    }
    else if (serviceType === Types.ModelServiceTypes.Cohere &&
-        (CohereClient || this.cohereClient)) {
+        (CohereClient || CohereClientV2 || this.cohereClient)) {
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`\n✅ [Streaming] Using Cohere native streaming (Round ${currentRound})`);
        }
-
+        // V2 API uses raw messages, not formatted
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING_MESSAGES) {
-            console.log(`🔍 [Cohere] Sending ${
+            console.log(`🔍 [Cohere] Sending ${messages.length} messages to LLM`);
        }
-        await this.streamWithCohere(specification,
+        await this.streamWithCohere(specification, messages, tools, uiAdapter, (message, calls) => {
            roundMessage = message;
            toolCalls = calls;
-        });
+        }, abortSignal);
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`\n🏁 [Streaming] Cohere native streaming completed (Round ${currentRound})`);
        }

@@ -2230,13 +2390,26 @@ class Graphlit {
            console.log(`\n✅ [Streaming] Using Mistral native streaming (Round ${currentRound})`);
        }
        const mistralMessages = formatMessagesForMistral(messages);
-
-
+        // ALWAYS log when there's a tool-related issue for debugging
+        const hasToolCalls = mistralMessages.some((m) => m.tool_calls?.length > 0);
+        const hasToolResponses = mistralMessages.some((m) => m.role === "tool");
+        if (hasToolCalls ||
+            hasToolResponses ||
+            process.env.DEBUG_GRAPHLIT_SDK_STREAMING_MESSAGES) {
+            console.log(`🔍 [Mistral] Sending ${mistralMessages.length} messages to LLM:`);
+            console.log(JSON.stringify(mistralMessages, null, 2));
+            // Count tool calls and responses
+            const toolCallCount = mistralMessages.reduce((count, m) => count + (m.tool_calls?.length || 0), 0);
+            const toolResponseCount = mistralMessages.filter((m) => m.role === "tool").length;
+            console.log(`🔍 [Mistral] Tool calls: ${toolCallCount}, Tool responses: ${toolResponseCount}`);
+            if (toolResponseCount > 0) {
+                console.log(`🔍 [Mistral] IMPORTANT: We have tool responses, should we still pass tools?`);
+            }
        }
        await this.streamWithMistral(specification, mistralMessages, tools, uiAdapter, (message, calls) => {
            roundMessage = message;
            toolCalls = calls;
-        });
+        }, abortSignal);
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`\n🏁 [Streaming] Mistral native streaming completed (Round ${currentRound})`);
        }

@@ -2253,7 +2426,7 @@ class Graphlit {
        await this.streamWithBedrock(specification, bedrockMessages, system, tools, uiAdapter, (message, calls) => {
            roundMessage = message;
            toolCalls = calls;
-        });
+        }, abortSignal);
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`\n🏁 [Streaming] Bedrock native streaming completed (Round ${currentRound})`);
        }

@@ -2270,7 +2443,7 @@ class Graphlit {
        await this.streamWithDeepseek(specification, deepseekMessages, tools, uiAdapter, (message, calls) => {
            roundMessage = message;
            toolCalls = calls;
-        });
+        }, abortSignal);
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`\n🏁 [Streaming] Deepseek native streaming completed (Round ${currentRound})`);
        }

@@ -2408,13 +2581,16 @@ class Graphlit {
            result: result,
        });
        // Add tool response to messages
-        messages.push({
+        const toolMessage = {
            __typename: "ConversationMessage",
            role: Types.ConversationRoleTypes.Tool,
            message: typeof result === "string" ? result : JSON.stringify(result),
            toolCallId: toolCall.id,
            timestamp: new Date().toISOString(),
-        });
+        };
+        // Add tool name for Mistral compatibility
+        toolMessage.toolName = toolCall.name;
+        messages.push(toolMessage);
    }
    catch (error) {
        const errorMessage = error instanceof Error ? error.message : "Unknown error";

@@ -2430,13 +2606,16 @@ class Graphlit {
            error: errorMessage,
        });
        // Add error response
-        messages.push({
+        const errorToolMessage = {
            __typename: "ConversationMessage",
            role: Types.ConversationRoleTypes.Tool,
            message: `Error: ${errorMessage}`,
            toolCallId: toolCall.id,
            timestamp: new Date().toISOString(),
-        });
+        };
+        // Add tool name for Mistral compatibility
+        errorToolMessage.toolName = toolCall.name;
+        messages.push(errorToolMessage);
    }
    }
    }

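Naming the pushed object lets the SDK attach the tool name before appending it to the history: Mistral's chat API expects tool-role messages to identify the originating function, while the other providers only need toolCallId. The resulting shape, for both success and error results (ids and names here are illustrative):

    const toolMessage = {
      __typename: "ConversationMessage",
      role: Types.ConversationRoleTypes.Tool,
      message: '{"ok":true}',               // or `Error: ...` on failure
      toolCallId: "call_123",
      timestamp: new Date().toISOString(),
      toolName: "search",                   // added for Mistral compatibility
    };
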
@@ -2450,7 +2629,14 @@ class Graphlit {
        const completionTime = uiAdapter.getCompletionTime(); // Total time in milliseconds
        const ttft = uiAdapter.getTTFT(); // Time to first token in milliseconds
        const throughput = uiAdapter.getThroughput(); // Tokens per second
-        const completeResponse = await this.completeConversation(trimmedMessage, conversationId, completionTime, ttft, throughput, correlationId);
+        // Convert milliseconds to ISO 8601 duration format (e.g., "PT1.5S")
+        const millisecondsToTimeSpan = (ms) => {
+            if (ms === undefined)
+                return undefined;
+            const seconds = ms / 1000;
+            return `PT${seconds}S`;
+        };
+        const completeResponse = await this.completeConversation(trimmedMessage, conversationId, millisecondsToTimeSpan(completionTime), millisecondsToTimeSpan(ttft), throughput, correlationId);
        // Extract token count from the response
        finalTokens =
            completeResponse.completeConversation?.message?.tokens ?? undefined;

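completeConversation now receives the timing metrics as ISO 8601 duration strings rather than raw milliseconds. Worked examples of the conversion helper above:

    millisecondsToTimeSpan(1500);      // 1500 / 1000 = 1.5 -> "PT1.5S"
    millisecondsToTimeSpan(250);       // 0.25 s            -> "PT0.25S"
    millisecondsToTimeSpan(undefined); // metric missing    -> undefined
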
@@ -2604,7 +2790,7 @@ class Graphlit {
    /**
     * Stream with OpenAI client
     */
-    async streamWithOpenAI(specification, messages, tools, uiAdapter, onComplete) {
+    async streamWithOpenAI(specification, messages, tools, uiAdapter, onComplete, abortSignal) {
        // Check if we have either the OpenAI module or a provided client
        if (!OpenAI && !this.openaiClient) {
            throw new Error("OpenAI client not available");

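The abortSignal parameter added here (and to every streamWith* method below) threads a standard AbortSignal down to the provider SDK call, so one controller can cancel a stream mid-flight. A sketch, assuming the caller supplies the signal to streamAgent (the public option name and argument positions are assumptions, not shown in this hunk):

    const controller = new AbortController();
    setTimeout(() => controller.abort(), 2000); // cancel after two seconds

    // Hypothetical wiring; internally the streamWith* methods receive controller.signal.
    await client.streamAgent(prompt, onEvent, undefined, undefined, undefined, undefined, {
      abortSignal: controller.signal,
    });
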
@@ -2621,12 +2807,12 @@ class Graphlit {
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`🚀 [Graphlit SDK] Routing to OpenAI streaming provider | Spec: ${specification.name} (${specification.id}) | Messages: ${messages.length} | Tools: ${tools?.length || 0}`);
        }
-        await streamWithOpenAI(specification, messages, tools, openaiClient, (event) => uiAdapter.handleEvent(event), onComplete);
+        await streamWithOpenAI(specification, messages, tools, openaiClient, (event) => uiAdapter.handleEvent(event), onComplete, abortSignal);
    }
    /**
     * Stream with Anthropic client
     */
-    async streamWithAnthropic(specification, messages, systemPrompt, tools, uiAdapter, onComplete) {
+    async streamWithAnthropic(specification, messages, systemPrompt, tools, uiAdapter, onComplete, abortSignal) {
        // Check if we have either the Anthropic module or a provided client
        if (!Anthropic && !this.anthropicClient) {
            throw new Error("Anthropic client not available");

@@ -2643,12 +2829,43 @@ class Graphlit {
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`🚀 [Graphlit SDK] Routing to Anthropic streaming provider | Spec: ${specification.name} (${specification.id}) | Messages: ${messages.length} | Tools: ${tools?.length || 0} | SystemPrompt: ${systemPrompt ? "Yes" : "No"}`);
        }
-        await streamWithAnthropic(specification, messages, systemPrompt, tools, anthropicClient, (event) => uiAdapter.handleEvent(event), onComplete);
+        // Get thinking configuration from specification
+        const thinkingConfig = this.getThinkingConfig(specification);
+        if (thinkingConfig && process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
+            console.log(`🧠 [Graphlit SDK] Anthropic thinking enabled | Budget: ${thinkingConfig.budget_tokens} tokens`);
+        }
+        await streamWithAnthropic(specification, messages, systemPrompt, tools, anthropicClient, (event) => uiAdapter.handleEvent(event), onComplete, abortSignal, thinkingConfig);
+    }
+    /**
+     * Extract thinking configuration from specification
+     */
+    getThinkingConfig(specification) {
+        // Check Anthropic specifications
+        if (specification.serviceType === Types.ModelServiceTypes.Anthropic) {
+            const anthropic = specification.anthropic;
+            if (anthropic?.enableThinking) {
+                return {
+                    type: "enabled",
+                    budget_tokens: anthropic.thinkingTokenLimit || 10000,
+                };
+            }
+        }
+        // Check Google specifications (also supports thinking)
+        if (specification.serviceType === Types.ModelServiceTypes.Google) {
+            const google = specification.google;
+            if (google?.enableThinking) {
+                return {
+                    type: "enabled",
+                    budget_tokens: google.thinkingTokenLimit || 10000,
+                };
+            }
+        }
+        return undefined;
    }
    /**
     * Stream with Google client
     */
-    async streamWithGoogle(specification, messages, systemPrompt, tools, uiAdapter, onComplete) {
+    async streamWithGoogle(specification, messages, systemPrompt, tools, uiAdapter, onComplete, abortSignal) {
        // Check if we have either the Google module or a provided client
        if (!GoogleGenerativeAI && !this.googleClient) {
            throw new Error("Google GenerativeAI client not available");

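getThinkingConfig translates a specification's enableThinking/thinkingTokenLimit fields into the { type, budget_tokens } object that Anthropic's extended-thinking request parameter uses; Google specifications are mapped onto the same shape. The mapping in brief:

    // { anthropic: { enableThinking: true, thinkingTokenLimit: 4096 } } -> { type: "enabled", budget_tokens: 4096 }
    // { anthropic: { enableThinking: true } }                           -> { type: "enabled", budget_tokens: 10000 } (default)
    // any other service type, or enableThinking falsy                   -> undefined
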
@@ -2663,12 +2880,12 @@ class Graphlit {
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`🚀 [Graphlit SDK] Routing to Google streaming provider | Spec: ${specification.name} (${specification.id}) | Messages: ${messages.length} | Tools: ${tools?.length || 0} | SystemPrompt: ${systemPrompt ? "Yes" : "No"}`);
        }
-        await streamWithGoogle(specification, messages, systemPrompt, tools, googleClient, (event) => uiAdapter.handleEvent(event), onComplete);
+        await streamWithGoogle(specification, messages, systemPrompt, tools, googleClient, (event) => uiAdapter.handleEvent(event), onComplete, abortSignal);
    }
    /**
     * Stream with Groq client (OpenAI-compatible)
     */
-    async streamWithGroq(specification, messages, tools, uiAdapter, onComplete) {
+    async streamWithGroq(specification, messages, tools, uiAdapter, onComplete, abortSignal) {
        // Check if we have either the Groq module or a provided client
        if (!Groq && !this.groqClient) {
            throw new Error("Groq client not available");

@@ -2683,12 +2900,12 @@ class Graphlit {
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`🚀 [Graphlit SDK] Routing to Groq streaming provider | Spec: ${specification.name} (${specification.id}) | Messages: ${messages.length} | Tools: ${tools?.length || 0}`);
        }
-        await streamWithGroq(specification, messages, tools, groqClient, (event) => uiAdapter.handleEvent(event), onComplete);
+        await streamWithGroq(specification, messages, tools, groqClient, (event) => uiAdapter.handleEvent(event), onComplete, abortSignal);
    }
    /**
     * Stream with Cerebras client (OpenAI-compatible)
     */
-    async streamWithCerebras(specification, messages, tools, uiAdapter, onComplete) {
+    async streamWithCerebras(specification, messages, tools, uiAdapter, onComplete, abortSignal) {
        // Check if we have either the OpenAI module or a provided client
        if (!OpenAI && !this.cerebrasClient) {
            throw new Error("Cerebras client not available");

@@ -2706,32 +2923,34 @@ class Graphlit {
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`🚀 [Graphlit SDK] Routing to Cerebras streaming provider | Spec: ${specification.name} (${specification.id}) | Messages: ${messages.length} | Tools: ${tools?.length || 0}`);
        }
-        await streamWithCerebras(specification, messages, tools, cerebrasClient, (event) => uiAdapter.handleEvent(event), onComplete);
+        await streamWithCerebras(specification, messages, tools, cerebrasClient, (event) => uiAdapter.handleEvent(event), onComplete, abortSignal);
    }
    /**
     * Stream with Cohere client
     */
-    async streamWithCohere(specification, messages, tools, uiAdapter, onComplete) {
+    async streamWithCohere(specification, messages, tools, uiAdapter, onComplete, abortSignal) {
        // Check if we have either the Cohere module or a provided client
-        if (!CohereClient && !this.cohereClient) {
+        if (!CohereClient && !CohereClientV2 && !this.cohereClient) {
            throw new Error("Cohere client not available");
        }
-        // Use provided client or create a new one
+        // Use provided client or create a new one - prefer v2
        const cohereClient = this.cohereClient ||
-            (CohereClient
-                ? new CohereClient({ token: process.env.COHERE_API_KEY || "" })
-                : (() => {
-                    throw new Error("Cohere module not available");
-                })());
+            (CohereClientV2
+                ? new CohereClientV2({ token: process.env.COHERE_API_KEY || "" })
+                : CohereClient
+                    ? new CohereClient({ token: process.env.COHERE_API_KEY || "" })
+                    : (() => {
+                        throw new Error("Cohere module not available");
+                    })());
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`🚀 [Graphlit SDK] Routing to Cohere streaming provider | Spec: ${specification.name} (${specification.id}) | Messages: ${messages.length} | Tools: ${tools?.length || 0}`);
        }
-        await streamWithCohere(specification, messages, tools, cohereClient, (event) => uiAdapter.handleEvent(event), onComplete);
+        await streamWithCohere(specification, messages, tools, cohereClient, (event) => uiAdapter.handleEvent(event), onComplete, abortSignal);
    }
    /**
     * Stream with Mistral client
     */
-    async streamWithMistral(specification, messages, tools, uiAdapter, onComplete) {
+    async streamWithMistral(specification, messages, tools, uiAdapter, onComplete, abortSignal) {
        // Check if we have either the Mistral module or a provided client
        if (!Mistral && !this.mistralClient) {
            throw new Error("Mistral client not available");

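Client construction now prefers the v2 Cohere client when the cohere-ai package exports one, falling back to the legacy CohereClient; both read COHERE_API_KEY. The standalone equivalent of what the SDK builds when no client was injected:

    import { CohereClientV2 } from "cohere-ai";

    const cohere = new CohereClientV2({ token: process.env.COHERE_API_KEY || "" });
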
@@ -2739,19 +2958,25 @@ class Graphlit {
        // Use provided client or create a new one
        const mistralClient = this.mistralClient ||
            (Mistral
-                ? new Mistral({ apiKey: process.env.MISTRAL_API_KEY || "" })
+                ? (() => {
+                    const apiKey = process.env.MISTRAL_API_KEY;
+                    if (!apiKey) {
+                        throw new Error("MISTRAL_API_KEY environment variable is required for Mistral streaming");
+                    }
+                    return new Mistral({ apiKey });
+                })()
                : (() => {
                    throw new Error("Mistral module not available");
                })());
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`🚀 [Graphlit SDK] Routing to Mistral streaming provider | Spec: ${specification.name} (${specification.id}) | Messages: ${messages.length} | Tools: ${tools?.length || 0}`);
        }
-        await streamWithMistral(specification, messages, tools, mistralClient, (event) => uiAdapter.handleEvent(event), onComplete);
+        await streamWithMistral(specification, messages, tools, mistralClient, (event) => uiAdapter.handleEvent(event), onComplete, abortSignal);
    }
    /**
     * Stream with Bedrock client
     */
-    async streamWithBedrock(specification, messages, systemPrompt, tools, uiAdapter, onComplete) {
+    async streamWithBedrock(specification, messages, systemPrompt, tools, uiAdapter, onComplete, abortSignal) {
        // Check if we have either the Bedrock module or a provided client
        if (!BedrockRuntimeClient && !this.bedrockClient) {
            throw new Error("Bedrock client not available");

@@ -2768,12 +2993,12 @@ class Graphlit {
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`🚀 [Graphlit SDK] Routing to Bedrock streaming provider | Spec: ${specification.name} (${specification.id}) | Messages: ${messages.length} | Tools: ${tools?.length || 0} | SystemPrompt: ${systemPrompt ? "Yes" : "No"}`);
        }
-        await streamWithBedrock(specification, messages, systemPrompt, tools, bedrockClient, (event) => uiAdapter.handleEvent(event), onComplete);
+        await streamWithBedrock(specification, messages, systemPrompt, tools, bedrockClient, (event) => uiAdapter.handleEvent(event), onComplete, abortSignal);
    }
    /**
     * Stream with Deepseek client
     */
-    async streamWithDeepseek(specification, messages, tools, uiAdapter, onComplete) {
+    async streamWithDeepseek(specification, messages, tools, uiAdapter, onComplete, abortSignal) {
        // Check if we have either the OpenAI module or a provided Deepseek client
        if (!OpenAI && !this.deepseekClient) {
            throw new Error("Deepseek client not available (requires OpenAI SDK)");

@@ -2792,7 +3017,7 @@ class Graphlit {
        if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
            console.log(`🚀 [Graphlit SDK] Routing to Deepseek streaming provider | Spec: ${specification.name} (${specification.id}) | Messages: ${messages.length} | Tools: ${tools?.length || 0}`);
        }
-        await streamWithDeepseek(specification, messages, tools, deepseekClient, (event) => uiAdapter.handleEvent(event), onComplete);
+        await streamWithDeepseek(specification, messages, tools, deepseekClient, (event) => uiAdapter.handleEvent(event), onComplete, abortSignal);
    }
    // Helper method to execute tools for promptAgent
    async executeToolsForPromptAgent(toolCalls, toolHandlers, allToolCalls, signal) {