graphlit-client 1.0.20250611020 → 1.0.20250612002
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client.js +22 -10
- package/dist/streaming/llm-formatters.d.ts +17 -2
- package/dist/streaming/llm-formatters.js +96 -14
- package/dist/streaming/providers.js +3 -12
- package/package.json +1 -1
package/dist/client.js
CHANGED
@@ -1658,12 +1658,21 @@ class Graphlit {
|
|
1658
1658
|
// Use the formatted message from formatConversation which already includes
|
1659
1659
|
// all context, RAG results, and conversation history
|
1660
1660
|
if (formattedMessage) {
|
1661
|
-
|
1661
|
+
const messageToAdd = {
|
1662
1662
|
__typename: "ConversationMessage",
|
1663
1663
|
role: formattedMessage.role || Types.ConversationRoleTypes.User,
|
1664
1664
|
message: formattedMessage.message,
|
1665
1665
|
timestamp: formattedMessage.timestamp || new Date().toISOString(),
|
1666
|
-
}
|
1666
|
+
};
|
1667
|
+
// Add image data if provided
|
1668
|
+
if (mimeType && data) {
|
1669
|
+
messageToAdd.mimeType = mimeType;
|
1670
|
+
messageToAdd.data = data;
|
1671
|
+
if (process.env.DEBUG_GRAPHLIT_STREAMING) {
|
1672
|
+
console.log(`\nš¼ļø [Streaming] Adding image data to message: ${mimeType}, ${data.length} chars`);
|
1673
|
+
}
|
1674
|
+
}
|
1675
|
+
messages.push(messageToAdd);
|
1667
1676
|
}
|
1668
1677
|
else {
|
1669
1678
|
throw new Error("No formatted message returned from formatConversation");
|
@@ -2028,14 +2037,15 @@ class Graphlit {
|
|
2028
2037
|
* Stream with OpenAI client
|
2029
2038
|
*/
|
2030
2039
|
async streamWithOpenAI(specification, messages, tools, uiAdapter, onComplete) {
|
2031
|
-
if
|
2040
|
+
// Check if we have either the OpenAI module or a provided client
|
2041
|
+
if (!OpenAI && !this.openaiClient) {
|
2032
2042
|
throw new Error("OpenAI client not available");
|
2033
2043
|
}
|
2034
2044
|
// Use provided client or create a new one
|
2035
2045
|
const openaiClient = this.openaiClient ||
|
2036
|
-
new OpenAI({
|
2046
|
+
(OpenAI ? new OpenAI({
|
2037
2047
|
apiKey: process.env.OPENAI_API_KEY || "",
|
2038
|
-
});
|
2048
|
+
}) : (() => { throw new Error("OpenAI module not available"); })());
|
2039
2049
|
if (process.env.DEBUG_GRAPHLIT_STREAMING) {
|
2040
2050
|
console.log("\nš [Graphlit SDK] Routing to OpenAI streaming provider");
|
2041
2051
|
console.log(` š Specification: ${specification.name} (${specification.id})`);
|
@@ -2048,14 +2058,15 @@ class Graphlit {
|
|
2048
2058
|
* Stream with Anthropic client
|
2049
2059
|
*/
|
2050
2060
|
async streamWithAnthropic(specification, messages, systemPrompt, tools, uiAdapter, onComplete) {
|
2051
|
-
if
|
2061
|
+
// Check if we have either the Anthropic module or a provided client
|
2062
|
+
if (!Anthropic && !this.anthropicClient) {
|
2052
2063
|
throw new Error("Anthropic client not available");
|
2053
2064
|
}
|
2054
2065
|
// Use provided client or create a new one
|
2055
2066
|
const anthropicClient = this.anthropicClient ||
|
2056
|
-
new Anthropic({
|
2067
|
+
(Anthropic ? new Anthropic({
|
2057
2068
|
apiKey: process.env.ANTHROPIC_API_KEY || "",
|
2058
|
-
});
|
2069
|
+
}) : (() => { throw new Error("Anthropic module not available"); })());
|
2059
2070
|
if (process.env.DEBUG_GRAPHLIT_STREAMING) {
|
2060
2071
|
console.log("\nš [Graphlit SDK] Routing to Anthropic streaming provider");
|
2061
2072
|
console.log(` š Specification: ${specification.name} (${specification.id})`);
|
@@ -2069,12 +2080,13 @@ class Graphlit {
|
|
2069
2080
|
* Stream with Google client
|
2070
2081
|
*/
|
2071
2082
|
async streamWithGoogle(specification, messages, systemPrompt, tools, uiAdapter, onComplete) {
|
2072
|
-
if
|
2083
|
+
// Check if we have either the Google module or a provided client
|
2084
|
+
if (!GoogleGenerativeAI && !this.googleClient) {
|
2073
2085
|
throw new Error("Google GenerativeAI client not available");
|
2074
2086
|
}
|
2075
2087
|
// Use provided client or create a new one
|
2076
2088
|
const googleClient = this.googleClient ||
|
2077
|
-
new GoogleGenerativeAI(process.env.GOOGLE_API_KEY || "");
|
2089
|
+
(GoogleGenerativeAI ? new GoogleGenerativeAI(process.env.GOOGLE_API_KEY || "") : (() => { throw new Error("Google GenerativeAI module not available"); })());
|
2078
2090
|
if (process.env.DEBUG_GRAPHLIT_STREAMING) {
|
2079
2091
|
console.log("\nš [Graphlit SDK] Routing to Google streaming provider");
|
2080
2092
|
console.log(` š Specification: ${specification.name} (${specification.id})`);
|
@@ -4,7 +4,13 @@ import { ConversationMessage } from "../generated/graphql-types.js";
|
|
4
4
|
*/
|
5
5
|
export interface OpenAIMessage {
|
6
6
|
role: "system" | "user" | "assistant" | "tool";
|
7
|
-
content?: string
|
7
|
+
content?: string | Array<{
|
8
|
+
type: "text" | "image_url";
|
9
|
+
text?: string;
|
10
|
+
image_url?: {
|
11
|
+
url: string;
|
12
|
+
};
|
13
|
+
}>;
|
8
14
|
tool_calls?: Array<{
|
9
15
|
id: string;
|
10
16
|
type: "function";
|
@@ -21,8 +27,13 @@ export interface OpenAIMessage {
|
|
21
27
|
export interface AnthropicMessage {
|
22
28
|
role: "user" | "assistant";
|
23
29
|
content: string | Array<{
|
24
|
-
type: "text" | "tool_use" | "tool_result";
|
30
|
+
type: "text" | "image" | "tool_use" | "tool_result";
|
25
31
|
text?: string;
|
32
|
+
source?: {
|
33
|
+
type: "base64";
|
34
|
+
media_type: string;
|
35
|
+
data: string;
|
36
|
+
};
|
26
37
|
id?: string;
|
27
38
|
name?: string;
|
28
39
|
input?: unknown;
|
@@ -37,6 +48,10 @@ export interface GoogleMessage {
|
|
37
48
|
role: "user" | "model";
|
38
49
|
parts: Array<{
|
39
50
|
text?: string;
|
51
|
+
inlineData?: {
|
52
|
+
mimeType: string;
|
53
|
+
data: string;
|
54
|
+
};
|
40
55
|
functionCall?: {
|
41
56
|
name: string;
|
42
57
|
args: unknown;
|
@@ -53,10 +53,36 @@ export function formatMessagesForOpenAI(messages) {
|
|
53
53
|
});
|
54
54
|
break;
|
55
55
|
default: // User messages
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
|
56
|
+
// Check if this message has image data
|
57
|
+
if (message.mimeType && message.data) {
|
58
|
+
// Multi-modal message with image
|
59
|
+
const contentParts = [];
|
60
|
+
// Add text content if present
|
61
|
+
if (trimmedMessage) {
|
62
|
+
contentParts.push({
|
63
|
+
type: "text",
|
64
|
+
text: trimmedMessage,
|
65
|
+
});
|
66
|
+
}
|
67
|
+
// Add image content
|
68
|
+
contentParts.push({
|
69
|
+
type: "image_url",
|
70
|
+
image_url: {
|
71
|
+
url: `data:${message.mimeType};base64,${message.data}`,
|
72
|
+
},
|
73
|
+
});
|
74
|
+
formattedMessages.push({
|
75
|
+
role: "user",
|
76
|
+
content: contentParts,
|
77
|
+
});
|
78
|
+
}
|
79
|
+
else {
|
80
|
+
// Text-only message
|
81
|
+
formattedMessages.push({
|
82
|
+
role: "user",
|
83
|
+
content: trimmedMessage,
|
84
|
+
});
|
85
|
+
}
|
60
86
|
break;
|
61
87
|
}
|
62
88
|
}
|
@@ -122,10 +148,38 @@ export function formatMessagesForAnthropic(messages) {
|
|
122
148
|
});
|
123
149
|
break;
|
124
150
|
default: // User messages
|
125
|
-
|
126
|
-
|
127
|
-
|
128
|
-
|
151
|
+
// Check if this message has image data
|
152
|
+
if (message.mimeType && message.data) {
|
153
|
+
// Multi-modal message with image
|
154
|
+
const contentParts = [];
|
155
|
+
// Add text content if present
|
156
|
+
if (trimmedMessage) {
|
157
|
+
contentParts.push({
|
158
|
+
type: "text",
|
159
|
+
text: trimmedMessage,
|
160
|
+
});
|
161
|
+
}
|
162
|
+
// Add image content
|
163
|
+
contentParts.push({
|
164
|
+
type: "image",
|
165
|
+
source: {
|
166
|
+
type: "base64",
|
167
|
+
media_type: message.mimeType,
|
168
|
+
data: message.data,
|
169
|
+
},
|
170
|
+
});
|
171
|
+
formattedMessages.push({
|
172
|
+
role: "user",
|
173
|
+
content: contentParts,
|
174
|
+
});
|
175
|
+
}
|
176
|
+
else {
|
177
|
+
// Text-only message
|
178
|
+
formattedMessages.push({
|
179
|
+
role: "user",
|
180
|
+
content: trimmedMessage,
|
181
|
+
});
|
182
|
+
}
|
129
183
|
break;
|
130
184
|
}
|
131
185
|
}
|
@@ -138,9 +192,14 @@ export function formatMessagesForAnthropic(messages) {
|
|
138
192
|
export function formatMessagesForGoogle(messages) {
|
139
193
|
const formattedMessages = [];
|
140
194
|
for (const message of messages) {
|
141
|
-
if (!message.role
|
195
|
+
if (!message.role)
|
142
196
|
continue;
|
143
|
-
|
197
|
+
// Allow messages with image data even if they have no text content
|
198
|
+
const hasContent = message.message?.trim();
|
199
|
+
const hasImageData = message.mimeType && message.data;
|
200
|
+
if (!hasContent && !hasImageData)
|
201
|
+
continue;
|
202
|
+
const trimmedMessage = message.message?.trim() || "";
|
144
203
|
switch (message.role) {
|
145
204
|
case ConversationRoleTypes.System:
|
146
205
|
// Google handles system prompts differently, usually as part of the first user message
|
@@ -176,10 +235,33 @@ export function formatMessagesForGoogle(messages) {
|
|
176
235
|
});
|
177
236
|
break;
|
178
237
|
default: // User messages
|
179
|
-
|
180
|
-
|
181
|
-
|
182
|
-
|
238
|
+
// Check if this message has image data
|
239
|
+
if (message.mimeType && message.data) {
|
240
|
+
// Multi-modal message with image
|
241
|
+
const parts = [];
|
242
|
+
// Add text content if present
|
243
|
+
if (trimmedMessage) {
|
244
|
+
parts.push({ text: trimmedMessage });
|
245
|
+
}
|
246
|
+
// Add image content
|
247
|
+
parts.push({
|
248
|
+
inlineData: {
|
249
|
+
mimeType: message.mimeType,
|
250
|
+
data: message.data,
|
251
|
+
},
|
252
|
+
});
|
253
|
+
formattedMessages.push({
|
254
|
+
role: "user",
|
255
|
+
parts,
|
256
|
+
});
|
257
|
+
}
|
258
|
+
else {
|
259
|
+
// Text-only message
|
260
|
+
formattedMessages.push({
|
261
|
+
role: "user",
|
262
|
+
parts: [{ text: trimmedMessage }],
|
263
|
+
});
|
264
|
+
}
|
183
265
|
break;
|
184
266
|
}
|
185
267
|
}
|
@@ -201,10 +201,7 @@ onEvent, onComplete) {
|
|
201
201
|
onComplete(fullMessage, toolCalls);
|
202
202
|
}
|
203
203
|
catch (error) {
|
204
|
-
|
205
|
-
type: "error",
|
206
|
-
error: error instanceof Error ? error.message : "OpenAI streaming failed",
|
207
|
-
});
|
204
|
+
// Don't emit error event here - let the client handle it to avoid duplicates
|
208
205
|
throw error;
|
209
206
|
}
|
210
207
|
}
|
@@ -427,10 +424,7 @@ onEvent, onComplete) {
|
|
427
424
|
onComplete(fullMessage, validToolCalls);
|
428
425
|
}
|
429
426
|
catch (error) {
|
430
|
-
|
431
|
-
type: "error",
|
432
|
-
error: error instanceof Error ? error.message : "Anthropic streaming failed",
|
433
|
-
});
|
427
|
+
// Don't emit error event here - let the client handle it to avoid duplicates
|
434
428
|
throw error;
|
435
429
|
}
|
436
430
|
}
|
@@ -678,10 +672,7 @@ onEvent, onComplete) {
|
|
678
672
|
onComplete(fullMessage, toolCalls);
|
679
673
|
}
|
680
674
|
catch (error) {
|
681
|
-
|
682
|
-
type: "error",
|
683
|
-
error: error instanceof Error ? error.message : "Google streaming failed",
|
684
|
-
});
|
675
|
+
// Don't emit error event here - let the client handle it to avoid duplicates
|
685
676
|
throw error;
|
686
677
|
}
|
687
678
|
}
|