graphlit-client 1.0.20250612007 → 1.0.20250612009

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/client.js CHANGED
@@ -4,7 +4,7 @@ import { ApolloClient, InMemoryCache, createHttpLink, ApolloLink, ApolloError, }
  import * as Types from "./generated/graphql-types.js";
  import * as Documents from "./generated/graphql-documents.js";
  import * as dotenv from "dotenv";
- import { getServiceType } from "./model-mapping.js";
+ import { getServiceType, getModelName } from "./model-mapping.js";
  import { UIEventAdapter } from "./streaming/ui-event-adapter.js";
  import { formatMessagesForOpenAI, formatMessagesForAnthropic, formatMessagesForGoogle, } from "./streaming/llm-formatters.js";
  import { streamWithOpenAI, streamWithAnthropic, streamWithGoogle, } from "./streaming/providers.js";
@@ -1626,11 +1626,15 @@ class Graphlit {
  });
  return; // Exit early after successful fallback
  }
- // Create UI event adapter
+ // Create UI event adapter with model information
+ const modelName = fullSpec ? getModelName(fullSpec) : undefined;
+ const serviceType = fullSpec ? getServiceType(fullSpec) : undefined;
  uiAdapter = new UIEventAdapter(onEvent, actualConversationId, {
  smoothingEnabled: options?.smoothingEnabled ?? true,
  chunkingStrategy: options?.chunkingStrategy ?? "word",
  smoothingDelay: options?.smoothingDelay ?? 30,
+ model: modelName,
+ modelService: serviceType,
  });
  // Start the streaming conversation
  await this.executeStreamingAgent(prompt, actualConversationId, fullSpec, tools, toolHandlers, uiAdapter, maxRounds, abortSignal, mimeType, data, correlationId);
@@ -1678,11 +1682,20 @@ class Graphlit {
  // Format conversation once at the beginning
  const formatResponse = await this.formatConversation(prompt, conversationId, { id: specification.id }, tools, true, correlationId);
  const formattedMessage = formatResponse.formatConversation?.message;
+ const conversationHistory = formatResponse.formatConversation?.details?.messages;
  if (!formattedMessage?.message) {
  throw new Error("Failed to format conversation");
  }
  if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
- console.log("\nšŸ“‹ [formatConversation] Response", formattedMessage.message);
+ console.log("\nšŸ“‹ [formatConversation] Full response:", JSON.stringify(formatResponse, null, 2));
+ console.log("\nšŸ“‹ [formatConversation] Response - current message:", formattedMessage.message);
+ console.log(`šŸ“‹ [formatConversation] Conversation history: ${conversationHistory?.length || 0} messages`);
+ if (conversationHistory && conversationHistory.length > 0) {
+ console.log("šŸ“‹ [formatConversation] History messages:");
+ conversationHistory.forEach((msg, i) => {
+ console.log(` ${i + 1}. [${msg?.role}] ${msg?.message?.substring(0, 100)}...`);
+ });
+ }
  }
  // Build message array with conversation history
  const messages = [];
@@ -1695,9 +1708,41 @@ class Graphlit {
  timestamp: new Date().toISOString(),
  });
  }
- // Use the formatted message from formatConversation which already includes
- // all context, RAG results, and conversation history
- if (formattedMessage) {
+ // Use the full conversation history from formatConversation if available
+ if (conversationHistory && conversationHistory.length > 0) {
+ if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
+ console.log(`šŸ”„ [formatConversation] Using full conversation history with ${conversationHistory.length} messages`);
+ }
+ for (const historyMessage of conversationHistory) {
+ if (historyMessage) {
+ const messageToAdd = {
+ __typename: "ConversationMessage",
+ role: historyMessage.role || Types.ConversationRoleTypes.User,
+ message: historyMessage.message || "",
+ timestamp: historyMessage.timestamp || new Date().toISOString(),
+ };
+ // Add optional fields if present
+ if (historyMessage.author)
+ messageToAdd.author = historyMessage.author;
+ if (historyMessage.data)
+ messageToAdd.data = historyMessage.data;
+ if (historyMessage.mimeType)
+ messageToAdd.mimeType = historyMessage.mimeType;
+ if (historyMessage.toolCalls)
+ messageToAdd.toolCalls = historyMessage.toolCalls;
+ if (historyMessage.toolCallId)
+ messageToAdd.toolCallId = historyMessage.toolCallId;
+ if (historyMessage.toolCallResponse)
+ messageToAdd.toolCallResponse = historyMessage.toolCallResponse;
+ messages.push(messageToAdd);
+ }
+ }
+ }
+ else {
+ // Fallback to single formatted message (for backward compatibility)
+ if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
+ console.log("āš ļø [formatConversation] No conversation history available, using single formatted message");
+ }
  const messageToAdd = {
  __typename: "ConversationMessage",
  role: formattedMessage.role || Types.ConversationRoleTypes.User,
@@ -1714,9 +1759,6 @@ class Graphlit {
  }
  messages.push(messageToAdd);
  }
- else {
- throw new Error("No formatted message returned from formatConversation");
- }
  const serviceType = getServiceType(specification);
  // Handle tool calling loop locally
  while (currentRound < maxRounds) {
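
For readers skimming the hunk above: a minimal standalone sketch of the history-to-messages mapping it introduces, using a simplified, hypothetical HistoryMessage shape in place of the SDK's generated Types.ConversationMessage (illustrative only, not the shipped code):

// Sketch of the history-replay mapping added in executeStreamingAgent above.
// HistoryMessage and the plain-object message type are simplified assumptions.
interface HistoryMessage {
  role?: string;
  message?: string;
  timestamp?: string;
  author?: string;
  data?: string;
  mimeType?: string;
  toolCalls?: unknown[];
  toolCallId?: string;
  toolCallResponse?: string;
}

function buildMessagesFromHistory(history: (HistoryMessage | null)[]): Record<string, unknown>[] {
  const messages: Record<string, unknown>[] = [];
  for (const historyMessage of history) {
    if (!historyMessage) continue;
    const messageToAdd: Record<string, unknown> = {
      __typename: "ConversationMessage",
      role: historyMessage.role || "USER", // the SDK uses Types.ConversationRoleTypes.User
      message: historyMessage.message || "",
      timestamp: historyMessage.timestamp || new Date().toISOString(),
    };
    // Optional fields are only copied when present, mirroring the diff above.
    if (historyMessage.author) messageToAdd.author = historyMessage.author;
    if (historyMessage.data) messageToAdd.data = historyMessage.data;
    if (historyMessage.mimeType) messageToAdd.mimeType = historyMessage.mimeType;
    if (historyMessage.toolCalls) messageToAdd.toolCalls = historyMessage.toolCalls;
    if (historyMessage.toolCallId) messageToAdd.toolCallId = historyMessage.toolCallId;
    if (historyMessage.toolCallResponse) messageToAdd.toolCallResponse = historyMessage.toolCallResponse;
    messages.push(messageToAdd);
  }
  return messages;
}
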
@@ -9,8 +9,13 @@ export declare class UIEventAdapter {
  private onEvent;
  private conversationId;
  private model?;
+ private modelService?;
+ private tokenCount;
  private currentMessage;
  private isStreaming;
+ private streamStartTime;
+ private firstTokenTime;
+ private lastTokenTime;
  private activeToolCalls;
  private lastUpdateTime;
  private updateTimer?;
@@ -21,6 +26,8 @@ export declare class UIEventAdapter {
  smoothingEnabled?: boolean;
  chunkingStrategy?: ChunkingStrategy;
  smoothingDelay?: number;
+ model?: string;
+ modelService?: string;
  });
  /**
  * Process a raw streaming event and emit appropriate UI events
@@ -8,8 +8,13 @@ export class UIEventAdapter {
  onEvent;
  conversationId;
  model;
+ modelService;
+ tokenCount = 0;
  currentMessage = "";
  isStreaming = false;
+ streamStartTime = 0;
+ firstTokenTime = 0;
+ lastTokenTime = 0;
  activeToolCalls = new Map();
  lastUpdateTime = 0;
  updateTimer;
@@ -20,6 +25,8 @@ export class UIEventAdapter {
  this.onEvent = onEvent;
  this.conversationId = conversationId;
  this.smoothingDelay = options.smoothingDelay ?? 30;
+ this.model = options.model;
+ this.modelService = options.modelService;
  if (options.smoothingEnabled) {
  this.chunkBuffer = new ChunkBuffer(options.chunkingStrategy || "word");
  }
@@ -84,6 +91,9 @@ export class UIEventAdapter {
  handleStart(conversationId) {
  this.conversationId = conversationId;
  this.isStreaming = true;
+ this.streamStartTime = Date.now();
+ this.firstTokenTime = 0;
+ this.lastTokenTime = 0;
  this.emitUIEvent({
  type: "conversation_started",
  conversationId,
@@ -92,6 +102,12 @@ export class UIEventAdapter {
  });
  }
  handleToken(token) {
+ // Track timing for first token
+ const now = Date.now();
+ if (this.firstTokenTime === 0) {
+ this.firstTokenTime = now;
+ }
+ this.lastTokenTime = now;
  if (this.chunkBuffer) {
  const chunks = this.chunkBuffer.addToken(token);
  // Add chunks to queue for all chunking modes (character, word, sentence)
@@ -155,14 +171,30 @@ export class UIEventAdapter {
  globalThis.clearTimeout(this.updateTimer);
  this.updateTimer = undefined;
  }
- // DO NOT re-process chunks here - they should already be in currentMessage
- // Just clear any remaining state
+ // Process any remaining chunks before completing
+ if (this.chunkQueue.length > 0) {
+ // Add all remaining chunks to current message
+ const remainingChunks = this.chunkQueue.join('');
+ const chunkCount = this.chunkQueue.length;
+ this.currentMessage += remainingChunks;
+ this.chunkQueue.length = 0; // Clear the queue after processing
+ if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
+ console.log(`šŸ”š [UIEventAdapter] Processed ${chunkCount} remaining chunks: "${remainingChunks}"`);
+ }
+ }
+ // Flush any remaining content from the buffer
  if (this.chunkBuffer) {
- this.chunkBuffer.flush(); // Clear the buffer but don't use the result
+ const finalChunks = this.chunkBuffer.flush();
+ if (finalChunks.length > 0) {
+ const finalContent = finalChunks.join('');
+ this.currentMessage += finalContent;
+ if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
+ console.log(`šŸ”š [UIEventAdapter] Flushed buffer with ${finalChunks.length} chunks: "${finalContent}"`);
+ }
+ }
  }
- this.chunkQueue.length = 0; // Clear any remaining queue
  this.isStreaming = false;
- // Create final message
+ // Create final message with metadata
  const finalMessage = {
  __typename: "ConversationMessage",
  role: ConversationRoleTypes.Assistant,
@@ -170,7 +202,26 @@ export class UIEventAdapter {
  timestamp: new Date().toISOString(),
  tokens: undefined, // Will be set by caller if available
  toolCalls: Array.from(this.activeToolCalls.values()).map((t) => t.toolCall),
+ model: this.model,
+ modelService: this.modelService,
  };
+ // Add final timing metadata
+ if (this.streamStartTime > 0) {
+ const totalTime = Date.now() - this.streamStartTime;
+ // Final throughput (chars/second)
+ finalMessage.throughput = totalTime > 0
+ ? Math.round((this.currentMessage.length / totalTime) * 1000)
+ : 0;
+ // Total completion time in seconds
+ finalMessage.completionTime = totalTime / 1000;
+ // Add time to first token if we have it (useful metric)
+ if (this.firstTokenTime > 0) {
+ const ttft = this.firstTokenTime - this.streamStartTime;
+ if (process.env.DEBUG_GRAPHLIT_SDK_STREAMING) {
+ console.log(`ā±ļø [UIEventAdapter] TTFT: ${ttft}ms | Total: ${totalTime}ms | Throughput: ${finalMessage.throughput} chars/s`);
+ }
+ }
+ }
  this.emitUIEvent({
  type: "conversation_completed",
  message: finalMessage,
@@ -258,6 +309,27 @@ export class UIEventAdapter {
  message: this.currentMessage,
  timestamp: new Date().toISOString(),
  };
+ // Add model metadata if available
+ if (this.model) {
+ message.model = this.model;
+ }
+ if (this.modelService) {
+ message.modelService = this.modelService;
+ }
+ // Add timing metadata if streaming has started
+ if (this.streamStartTime > 0) {
+ const now = Date.now();
+ const elapsedTime = now - this.streamStartTime;
+ // Calculate throughput (chars/second)
+ const throughput = elapsedTime > 0
+ ? Math.round((this.currentMessage.length / elapsedTime) * 1000)
+ : 0;
+ message.throughput = throughput;
+ // Add completion time if we have it (in seconds to match API)
+ if (elapsedTime > 0) {
+ message.completionTime = elapsedTime / 1000;
+ }
+ }
  this.emitUIEvent({
  type: "message_update",
  message,
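
For SDK consumers, the new metadata surfaces on the message objects carried by message_update and conversation_completed events. Below is a minimal sketch of an onEvent handler that reads those fields; the event and message shapes are inferred from the hunks above and simplified, not the SDK's published typings:

// Sketch of an onEvent handler reading the new metadata fields emitted above.
interface StreamedMessage {
  message?: string;
  model?: string;          // model name, set from getModelName(fullSpec)
  modelService?: string;   // service type, set from getServiceType(fullSpec)
  throughput?: number;     // chars/second, computed by the adapter
  completionTime?: number; // seconds since streaming started
}

// Hypothetical union covering the event types visible in this diff.
type UIEvent =
  | { type: "conversation_started"; conversationId: string }
  | { type: "message_update"; message: StreamedMessage }
  | { type: "conversation_completed"; message: StreamedMessage };

function onEvent(event: UIEvent): void {
  if (event.type === "conversation_completed") {
    const { model, modelService, throughput, completionTime } = event.message;
    console.log(`Completed on ${modelService ?? "unknown"}/${model ?? "unknown"}`);
    console.log(`~${throughput ?? 0} chars/s over ${completionTime ?? 0}s`);
  }
}
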
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "graphlit-client",
- "version": "1.0.20250612007",
+ "version": "1.0.20250612009",
  "description": "Graphlit API Client for TypeScript",
  "type": "module",
  "main": "./dist/client.js",