@copilotkit/runtime 1.3.16-mme-azure-openai.0 → 1.3.16-mme-sdk-js.1

This diff compares the contents of two package versions that have been publicly released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registry.
Files changed (67)
  1. package/CHANGELOG.md +11 -3
  2. package/__snapshots__/schema/schema.graphql +1 -0
  3. package/dist/{chunk-MKZBH3Y6.mjs → chunk-2LR3IZ2Q.mjs} +2 -2
  4. package/dist/{chunk-MYZHUCL6.mjs → chunk-B74M7FXG.mjs} +1 -1
  5. package/dist/chunk-B74M7FXG.mjs.map +1 -0
  6. package/dist/{chunk-MVG266E4.mjs → chunk-BTAHXIKM.mjs} +37 -10
  7. package/dist/chunk-BTAHXIKM.mjs.map +1 -0
  8. package/dist/{chunk-CRSCH25P.mjs → chunk-CVLU3KWZ.mjs} +2 -2
  9. package/dist/{chunk-S5SYJ6JC.mjs → chunk-L23AVL2R.mjs} +2 -2
  10. package/dist/{chunk-YVZORQSA.mjs → chunk-VQXA3N5D.mjs} +675 -151
  11. package/dist/chunk-VQXA3N5D.mjs.map +1 -0
  12. package/dist/{chunk-BHSRGDL6.mjs → chunk-XE3SYKK4.mjs} +1 -4
  13. package/dist/chunk-XE3SYKK4.mjs.map +1 -0
  14. package/dist/{copilot-runtime-df3527ad.d.ts → copilot-runtime-543a59ae.d.ts} +29 -7
  15. package/dist/graphql/types/converted/index.d.ts +1 -1
  16. package/dist/graphql/types/converted/index.js.map +1 -1
  17. package/dist/graphql/types/converted/index.mjs +1 -1
  18. package/dist/{groq-adapter-798aff23.d.ts → groq-adapter-7aa25931.d.ts} +1 -34
  19. package/dist/{index-cff31380.d.ts → index-83ee522f.d.ts} +3 -2
  20. package/dist/index.d.ts +4 -4
  21. package/dist/index.js +747 -199
  22. package/dist/index.js.map +1 -1
  23. package/dist/index.mjs +13 -9
  24. package/dist/index.mjs.map +1 -1
  25. package/dist/{langserve-a14a6849.d.ts → langserve-f00629d2.d.ts} +1 -1
  26. package/dist/lib/index.d.ts +4 -4
  27. package/dist/lib/index.js +747 -199
  28. package/dist/lib/index.js.map +1 -1
  29. package/dist/lib/index.mjs +13 -9
  30. package/dist/lib/integrations/index.d.ts +4 -4
  31. package/dist/lib/integrations/index.js +4 -1
  32. package/dist/lib/integrations/index.js.map +1 -1
  33. package/dist/lib/integrations/index.mjs +5 -5
  34. package/dist/lib/integrations/nest/index.d.ts +3 -3
  35. package/dist/lib/integrations/nest/index.js +4 -1
  36. package/dist/lib/integrations/nest/index.js.map +1 -1
  37. package/dist/lib/integrations/nest/index.mjs +3 -3
  38. package/dist/lib/integrations/node-express/index.d.ts +3 -3
  39. package/dist/lib/integrations/node-express/index.js +4 -1
  40. package/dist/lib/integrations/node-express/index.js.map +1 -1
  41. package/dist/lib/integrations/node-express/index.mjs +3 -3
  42. package/dist/lib/integrations/node-http/index.d.ts +3 -3
  43. package/dist/lib/integrations/node-http/index.js +4 -1
  44. package/dist/lib/integrations/node-http/index.js.map +1 -1
  45. package/dist/lib/integrations/node-http/index.mjs +2 -2
  46. package/dist/service-adapters/index.d.ts +4 -4
  47. package/dist/service-adapters/index.js +0 -3
  48. package/dist/service-adapters/index.js.map +1 -1
  49. package/dist/service-adapters/index.mjs +1 -1
  50. package/package.json +6 -4
  51. package/src/agents/langgraph/event-source.ts +62 -94
  52. package/src/agents/langgraph/events.ts +27 -17
  53. package/src/graphql/types/converted/index.ts +5 -1
  54. package/src/graphql/types/enums.ts +1 -0
  55. package/src/lib/runtime/copilot-runtime.ts +58 -11
  56. package/src/lib/runtime/remote-action-constructors.ts +284 -0
  57. package/src/lib/runtime/remote-actions.ts +65 -159
  58. package/src/lib/runtime/remote-lg-cloud-action.ts +614 -0
  59. package/src/service-adapters/openai/openai-adapter.ts +0 -37
  60. package/src/service-adapters/openai/utils.ts +13 -9
  61. package/dist/chunk-BHSRGDL6.mjs.map +0 -1
  62. package/dist/chunk-MVG266E4.mjs.map +0 -1
  63. package/dist/chunk-MYZHUCL6.mjs.map +0 -1
  64. package/dist/chunk-YVZORQSA.mjs.map +0 -1
  65. /package/dist/{chunk-MKZBH3Y6.mjs.map → chunk-2LR3IZ2Q.mjs.map} +0 -0
  66. /package/dist/{chunk-CRSCH25P.mjs.map → chunk-CVLU3KWZ.mjs.map} +0 -0
  67. /package/dist/{chunk-S5SYJ6JC.mjs.map → chunk-L23AVL2R.mjs.map} +0 -0
package/src/lib/runtime/remote-lg-cloud-action.ts (new file)
@@ -0,0 +1,614 @@
+ import { Client } from "@langchain/langgraph-sdk";
+ import { randomUUID } from "node:crypto";
+ import { parse as parsePartialJson } from "partial-json";
+ import { Logger } from "pino";
+ import { ActionInput } from "../../graphql/inputs/action.input";
+ import { LangGraphCloudAgent, LangGraphCloudEndpoint } from "./remote-actions";
+ import { CopilotRequestContextProperties } from "../integrations";
+ import { Message, MessageType } from "../../graphql/types/converted";
+ import { MessageRole } from "../../graphql/types/enums";
+ import { CustomEventNames, LangGraphEventTypes } from "../../agents/langgraph/events";
+
+ type State = Record<string, any>;
+
+ type ExecutionAction = Pick<ActionInput, "name" | "description"> & { parameters: string };
+
+ interface ExecutionArgs extends Omit<LangGraphCloudEndpoint, "agents"> {
+   agent: LangGraphCloudAgent;
+   threadId: string;
+   nodeName: string;
+   messages: Message[];
+   state: State;
+   properties: CopilotRequestContextProperties;
+   actions: ExecutionAction[];
+   logger: Logger;
+ }
+
+ // The following types are our own definition to the messages accepted by LangGraph cloud, enhanced with some of our extra data.
+ interface ToolCall {
+   id: string;
+   name: string;
+   args: Record<string, unknown>;
+ }
+
+ type BaseLangGraphCloudMessage = Omit<
+   Message,
+   | "isResultMessage"
+   | "isTextMessage"
+   | "isActionExecutionMessage"
+   | "isAgentStateMessage"
+   | "type"
+   | "createdAt"
+ > & {
+   content: string;
+   role: MessageRole;
+   additional_kwargs?: Record<string, unknown>;
+   type: MessageType;
+ };
+
+ interface LangGraphCloudResultMessage extends BaseLangGraphCloudMessage {
+   tool_call_id: string;
+   name: string;
+ }
+
+ interface LangGraphCloudActionExecutionMessage extends BaseLangGraphCloudMessage {
+   tool_calls: ToolCall[];
+ }
+
+ type LangGraphCloudMessage =
+   | LangGraphCloudActionExecutionMessage
+   | LangGraphCloudResultMessage
+   | BaseLangGraphCloudMessage;
+
+ export async function execute(args: ExecutionArgs): Promise<ReadableStream<Uint8Array>> {
+   return new ReadableStream({
+     async start(controller) {
+       try {
+         await streamEvents(controller, args);
+         controller.close();
+       } catch (err) {}
+     },
+   });
+ }
+
+ async function streamEvents(controller: ReadableStreamDefaultController, args: ExecutionArgs) {
+   const {
+     deploymentUrl,
+     langsmithApiKey,
+     threadId: agrsInitialThreadId,
+     agent,
+     nodeName: initialNodeName,
+     state: initialState,
+     messages,
+     actions,
+     logger,
+   } = args;
+
+   let nodeName = initialNodeName;
+   let state = initialState;
+   const { name, assistantId: initialAssistantId } = agent;
+
+   // TODO: deploymentUrl is not required in local development
+   const client = new Client({ apiUrl: deploymentUrl, apiKey: langsmithApiKey });
+   let initialThreadId = agrsInitialThreadId;
+   const wasInitiatedWithExistingThread = !!initialThreadId;
+   if (initialThreadId && initialThreadId.startsWith("ck-")) {
+     initialThreadId = initialThreadId.substring(3);
+   }
+
+   const assistants = await client.assistants.search();
+   const retrievedAssistant = assistants.find((a) => a.name === name);
+   const threadId = initialThreadId ?? randomUUID();
+   if (initialThreadId === threadId) {
+     await client.threads.get(threadId);
+   } else {
+     await client.threads.create({ threadId: threadId });
+   }
+
+   let agentState = { values: {} };
+   if (wasInitiatedWithExistingThread) {
+     agentState = await client.threads.getState(threadId);
+   }
+   const agentStateValues = agentState.values as State;
+   state.messages = agentStateValues.messages;
+   const mode = wasInitiatedWithExistingThread && nodeName != "__end__" ? "continue" : "start";
+   let formattedMessages = [];
+   try {
+     formattedMessages = formatMessages(messages);
+   } catch (e) {
+     logger.error(e, `Error event thrown: ${e.message}`);
+   }
+   state = langGraphDefaultMergeState(state, formattedMessages, actions, name);
+
+   if (mode === "continue") {
+     await client.threads.updateState(threadId, { values: state, asNode: nodeName });
+   }
+
+   const assistantId = initialAssistantId ?? retrievedAssistant?.assistant_id;
+   if (!assistantId) {
+     console.error(`
+       No agent found for the agent name specified in CopilotKit provider
+       Please check your available agents or provide an agent ID in the LangGraph Cloud endpoint definition.\n
+
+       These are the available agents: [${assistants.map((a) => `${a.name} (ID: ${a.assistant_id})`).join(", ")}]
+     `);
+     throw new Error("No agent id found");
+   }
+   const graphInfo = await client.assistants.getGraph(assistantId);
+   const streamInput = mode === "start" ? state : null;
+
+   let streamingStateExtractor = new StreamingStateExtractor([]);
+   let prevNodeName = null;
+   let emitIntermediateStateUntilEnd = null;
+   let shouldExit = null;
+   let externalRunId = null;
+
+   const streamResponse = client.runs.stream(threadId, assistantId, {
+     input: streamInput,
+     streamMode: ["events", "values"],
+   });
+
+   const emit = (message: string) => controller.enqueue(new TextEncoder().encode(message));
+
+   let latestStateValues = {};
+
+   try {
+     for await (const chunk of streamResponse) {
+       if (!["events", "values", "error"].includes(chunk.event)) continue;
+
+       if (chunk.event === "error") {
+         logger.error(chunk, `Error event thrown: ${chunk.data.message}`);
+         throw new Error(`Error event thrown: ${chunk.data.message}`);
+       }
+
+       if (chunk.event === "values") {
+         latestStateValues = chunk.data;
+         continue;
+       }
+
+       const event = chunk.data;
+       const currentNodeName = event.name;
+       const eventType = event.event;
+       const runId = event.metadata.run_id;
+       externalRunId = runId;
+       const metadata = event.metadata;
+
+       shouldExit =
+         shouldExit != null
+           ? shouldExit
+           : eventType === LangGraphEventTypes.OnCustomEvent &&
+             event.name === CustomEventNames.CopilotKitExit;
+
+       const emitIntermediateState = metadata["copilotkit:emit-intermediate-state"];
+       const manuallyEmitIntermediateState =
+         eventType === LangGraphEventTypes.OnCustomEvent &&
+         event.name === CustomEventNames.CopilotKitManuallyEmitIntermediateState;
+
+       // we only want to update the node name under certain conditions
+       // since we don't need any internal node names to be sent to the frontend
+       if (graphInfo["nodes"].some((node) => node.id === currentNodeName)) {
+         nodeName = currentNodeName;
+       }
+
+       if (!nodeName) {
+         continue;
+       }
+
+       if (manuallyEmitIntermediateState) {
+         if (eventType === LangGraphEventTypes.OnChainEnd) {
+           state = event.data.output;
+           emit(
+             getStateSyncEvent({
+               threadId,
+               runId,
+               agentName: agent.name,
+               nodeName,
+               state: event.data.output,
+               running: true,
+               active: true,
+             }),
+           );
+         }
+         continue;
+       }
+
+       if (emitIntermediateState && emitIntermediateStateUntilEnd == null) {
+         emitIntermediateStateUntilEnd = nodeName;
+       }
+
+       if (emitIntermediateState && eventType === LangGraphEventTypes.OnChatModelStart) {
+         // reset the streaming state extractor
+         streamingStateExtractor = new StreamingStateExtractor(emitIntermediateState);
+       }
+
+       let updatedState = latestStateValues;
+
+       if (emitIntermediateState && eventType === LangGraphEventTypes.OnChatModelStream) {
+         streamingStateExtractor.bufferToolCalls(event);
+       }
+
+       if (emitIntermediateStateUntilEnd !== null) {
+         updatedState = {
+           ...updatedState,
+           ...streamingStateExtractor.extractState(),
+         };
+       }
+
+       if (
+         !emitIntermediateState &&
+         currentNodeName === emitIntermediateStateUntilEnd &&
+         eventType === LangGraphEventTypes.OnChainEnd
+       ) {
+         // stop emitting function call state
+         emitIntermediateStateUntilEnd = null;
+       }
+
+       const exitingNode =
+         nodeName === currentNodeName && eventType === LangGraphEventTypes.OnChainEnd;
+
+       if (
+         JSON.stringify(updatedState) !== JSON.stringify(state) ||
+         prevNodeName != nodeName ||
+         exitingNode
+       ) {
+         state = updatedState;
+         prevNodeName = nodeName;
+         emit(
+           getStateSyncEvent({
+             threadId,
+             runId,
+             agentName: agent.name,
+             nodeName,
+             state,
+             running: true,
+             active: !exitingNode,
+           }),
+         );
+       }
+
+       emit(JSON.stringify(event) + "\n");
+     }
+
+     state = await client.threads.getState(threadId);
+     const isEndNode = state.next.length === 0;
+     nodeName = Object.keys(state.metadata.writes)[0];
+
+     emit(
+       getStateSyncEvent({
+         threadId,
+         runId: externalRunId,
+         agentName: agent.name,
+         nodeName: isEndNode ? "__end__" : nodeName,
+         state: state.values,
+         running: !shouldExit,
+         active: false,
+       }),
+     );
+
+     return Promise.resolve();
+   } catch (e) {
+     // TODO: handle error state here.
+     return Promise.resolve();
+   }
+ }
+
+ function getStateSyncEvent({
+   threadId,
+   runId,
+   agentName,
+   nodeName,
+   state,
+   running,
+   active,
+ }: {
+   threadId: string;
+   runId: string;
+   agentName: string;
+   nodeName: string;
+   state: State;
+   running: boolean;
+   active: boolean;
+ }): string {
+   const stateWithoutMessages = Object.keys(state).reduce((acc, key) => {
+     if (key !== "messages") {
+       acc[key] = state[key];
+     }
+     return acc;
+   }, {} as State);
+
+   return (
+     JSON.stringify({
+       event: LangGraphEventTypes.OnCopilotKitStateSync,
+       thread_id: threadId,
+       run_id: runId,
+       agent_name: agentName,
+       node_name: nodeName,
+       active: active,
+       state: stateWithoutMessages,
+       running: running,
+       role: "assistant",
+     }) + "\n"
+   );
+ }
+
+ class StreamingStateExtractor {
+   private emitIntermediateState: { [key: string]: any }[];
+   private toolCallBuffer: { [key: string]: string };
+   private currentToolCall: string | null;
+   private previouslyParsableState: { [key: string]: any };
+
+   constructor(emitIntermediateState: { [key: string]: any }[]) {
+     this.emitIntermediateState = emitIntermediateState;
+     this.toolCallBuffer = {};
+     this.currentToolCall = null;
+     this.previouslyParsableState = {};
+   }
+
+   bufferToolCalls(event: {
+     data: { chunk: { tool_call_chunks: { name: string | null; args: string }[] } };
+   }) {
+     if (event.data.chunk.tool_call_chunks.length > 0) {
+       const chunk = event.data.chunk.tool_call_chunks[0];
+
+       if (chunk.name !== null) {
+         this.currentToolCall = chunk.name;
+         this.toolCallBuffer[this.currentToolCall] = chunk.args;
+       } else if (this.currentToolCall !== null) {
+         this.toolCallBuffer[this.currentToolCall] += chunk.args;
+       }
+     }
+   }
+
+   getEmitStateConfig(currentToolName: string): [string | null, string | null] {
+     for (const config of this.emitIntermediateState) {
+       const stateKey = config["state_key"];
+       const tool = config["tool"];
+       const toolArgument = config["tool_argument"];
+
+       if (currentToolName === tool) {
+         return [toolArgument, stateKey];
+       }
+     }
+     return [null, null];
+   }
+
+   extractState(): State {
+     const state: State = {};
+
+     for (const [key, value] of Object.entries(this.toolCallBuffer)) {
+       const [argumentName, stateKey] = this.getEmitStateConfig(key);
+
+       if (stateKey === null) {
+         continue;
+       }
+
+       let parsedValue;
+       try {
+         parsedValue = parsePartialJson(value);
+       } catch (error) {
+         if (key in this.previouslyParsableState) {
+           parsedValue = this.previouslyParsableState[key];
+         } else {
+           continue;
+         }
+       }
+
+       this.previouslyParsableState[key] = parsedValue;
+
+       if (!argumentName) {
+         state[stateKey] = parsedValue;
+       } else {
+         state[stateKey] = parsedValue[argumentName];
+       }
+     }
+
+     return state;
+   }
+ }
+
+ // Start of Selection
+ function langGraphDefaultMergeState(
+   state: State,
+   messages: LangGraphCloudMessage[],
+   actions: ExecutionAction[],
+   agentName: string,
+ ): State {
+   if (messages.length > 0 && "role" in messages[0] && messages[0].role === "system") {
+     // remove system message
+     messages = messages.slice(1);
+   }
+
+   // merge with existing messages
+   const mergedMessages: LangGraphCloudMessage[] = state.messages || [];
+   const existingMessageIds = new Set(mergedMessages.map((message) => message.id));
+   const existingToolCallResults = new Set<string>();
+
+   for (const message of mergedMessages) {
+     if ("tool_call_id" in message) {
+       existingToolCallResults.add(message.tool_call_id);
+     }
+   }
+
+   for (const message of messages) {
+     // filter tool calls to activate the agent itself
+     if (
+       "tool_calls" in message &&
+       message.tool_calls.length > 0 &&
+       message.tool_calls[0].name === agentName
+     ) {
+       continue;
+     }
+
+     // filter results from activating the agent
+     if ("name" in message && message.name === agentName) {
+       continue;
+     }
+
+     if (!existingMessageIds.has(message.id)) {
+       // skip duplicate tool call results
+       if ("tool_call_id" in message && existingToolCallResults.has(message.tool_call_id)) {
+         console.warn("Warning: Duplicate tool call result, skipping:", message.tool_call_id);
+         continue;
+       }
+
+       mergedMessages.push(message);
+       // TODO: should the below "else" block stay?
+     } else {
+       // Replace the message with the existing one
+       for (let i = 0; i < mergedMessages.length; i++) {
+         if (mergedMessages[i].id === message.id) {
+           if ("tool_calls" in message) {
+             if (
+               ("tool_calls" in mergedMessages[i] || "additional_kwargs" in mergedMessages[i]) &&
+               mergedMessages[i].content
+             ) {
+               message.tool_calls = mergedMessages[i]["tool_calls"];
+               message.additional_kwargs = mergedMessages[i].additional_kwargs;
+             }
+           }
+           mergedMessages[i] = message;
+         }
+       }
+     }
+   }
+
+   // fix wrong tool call ids
+   for (let i = 0; i < mergedMessages.length - 1; i++) {
+     const currentMessage = mergedMessages[i];
+     const nextMessage = mergedMessages[i + 1];
+
+     if (
+       "tool_calls" in currentMessage &&
+       currentMessage.tool_calls.length > 0 &&
+       "tool_call_id" in nextMessage
+     ) {
+       nextMessage.tool_call_id = currentMessage.tool_calls[0].id;
+     }
+   }
+
+   // try to auto-correct and log alignment issues
+   const correctedMessages: LangGraphCloudMessage[] = [];
+
+   for (let i = 0; i < mergedMessages.length; i++) {
+     const currentMessage = mergedMessages[i];
+     const nextMessage = mergedMessages[i + 1] || null;
+     const prevMessage = mergedMessages[i - 1] || null;
+
+     if ("tool_calls" in currentMessage && currentMessage.tool_calls.length > 0) {
+       if (!nextMessage) {
+         console.warn(
+           "No next message to auto-correct tool call, skipping:",
+           currentMessage.tool_calls[0].id,
+         );
+         continue;
+       }
+
+       if (
+         !("tool_call_id" in nextMessage) ||
+         nextMessage.tool_call_id !== currentMessage.tool_calls[0].id
+       ) {
+         const toolMessage = mergedMessages.find(
+           (m) => "tool_call_id" in m && m.tool_call_id === currentMessage.tool_calls[0].id,
+         );
+
+         if (toolMessage) {
+           console.warn(
+             "Auto-corrected tool call alignment issue:",
+             currentMessage.tool_calls[0].id,
+           );
+           correctedMessages.push(currentMessage, toolMessage);
+           continue;
+         } else {
+           console.warn(
+             "No corresponding tool call result found for tool call, skipping:",
+             currentMessage.tool_calls[0].id,
+           );
+           continue;
+         }
+       }
+
+       correctedMessages.push(currentMessage);
+       continue;
+     }
+
+     if ("tool_call_id" in currentMessage) {
+       if (!prevMessage || !("tool_calls" in prevMessage)) {
+         console.warn("No previous tool call, skipping tool call result:", currentMessage.id);
+         continue;
+       }
+
+       if (prevMessage.tool_calls && prevMessage.tool_calls[0].id !== currentMessage.tool_call_id) {
+         console.warn("Tool call id is incorrect, skipping tool call result:", currentMessage.id);
+         continue;
+       }
+
+       correctedMessages.push(currentMessage);
+       continue;
+     }
+
+     correctedMessages.push(currentMessage);
+   }
+
+   return deepMerge(state, {
+     messages: correctedMessages,
+     copilotkit: {
+       actions,
+     },
+   });
+ }
+
+ function deepMerge(obj1: State, obj2: State) {
+   let result = { ...obj1 };
+   for (let key in obj2) {
+     if (typeof obj2[key] === "object" && !Array.isArray(obj2[key])) {
+       if (obj1[key]) {
+         result[key] = deepMerge(obj1[key], obj2[key]);
+       } else {
+         result[key] = { ...obj2[key] };
+       }
+     } else {
+       result[key] = obj2[key];
+     }
+   }
+   return result;
+ }
+
+ function formatMessages(messages: Message[]): LangGraphCloudMessage[] {
+   return messages.map((message) => {
+     if (message.isTextMessage() && message.role === "assistant") {
+       return message;
+     }
+     if (message.isTextMessage() && message.role === "system") {
+       return message;
+     }
+     if (message.isTextMessage() && message.role === "user") {
+       return message;
+     }
+     if (message.isActionExecutionMessage()) {
+       const toolCall: ToolCall = {
+         name: message.name,
+         args: message.arguments,
+         id: message.id,
+       };
+       return {
+         type: message.type,
+         content: "",
+         tool_calls: [toolCall],
+         role: MessageRole.assistant,
+         id: message.id,
+       } satisfies LangGraphCloudActionExecutionMessage;
+     }
+     if (message.isResultMessage()) {
+       return {
+         type: message.type,
+         content: message.result,
+         id: message.id,
+         tool_call_id: message.actionExecutionId,
+         name: message.actionName,
+         role: MessageRole.tool,
+       } satisfies LangGraphCloudResultMessage;
+     }
+
+     throw new Error(`Unknown message type ${message.type}`);
+   });
+ }
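The `execute()` entry point added above returns a `ReadableStream<Uint8Array>` of newline-delimited JSON: raw LangGraph events interleaved with `OnCopilotKitStateSync` frames. As a rough illustration only (not code from the package; the helper name `readAgentEvents` is made up), a caller inside the runtime could consume that stream like this:

```ts
// Sketch of a consumer for the stream returned by execute(); assumes only what
// the code above shows: each emitted event is one JSON object followed by "\n".
async function readAgentEvents(stream: ReadableStream<Uint8Array>) {
  const reader = stream.getReader();
  const decoder = new TextDecoder();
  let buffer = "";

  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    const lines = buffer.split("\n");
    buffer = lines.pop() ?? ""; // keep any trailing partial line for the next chunk

    for (const line of lines) {
      if (!line.trim()) continue;
      const event = JSON.parse(line);
      // State-sync frames carry thread_id, run_id, node_name, state, running, active.
      console.log(event.event, event.node_name ?? "");
    }
  }
}
```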
package/src/service-adapters/openai/openai-adapter.ts
@@ -18,39 +18,6 @@
  *
  * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);
  * ```
- *
- * ## Example with Azure OpenAI
- *
- * ```ts
- * import { CopilotRuntime, OpenAIAdapter } from "@copilotkit/runtime";
- * import OpenAI from "openai";
- *
- * // The name of your Azure OpenAI Instance.
- * // https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal#create-a-resource
- * const instance = "<your instance name>";
- *
- * // Corresponds to your Model deployment within your OpenAI resource, e.g. my-gpt35-16k-deployment
- * // Navigate to the Azure OpenAI Studio to deploy a model.
- * const model = "<your model>";
- *
- * const apiKey = process.env["AZURE_OPENAI_API_KEY"];
- * if (!apiKey) {
- *   throw new Error("The AZURE_OPENAI_API_KEY environment variable is missing or empty.");
- * }
- *
- * const copilotKit = new CopilotRuntime();
- *
- * const openai = new OpenAI({
- *   apiKey,
- *   baseURL: `https://${instance}.openai.azure.com/openai/deployments/${model}`,
- *   defaultQuery: { "api-version": "2024-04-01-preview" },
- *   defaultHeaders: { "api-key": apiKey },
- * });
- *
- * const serviceAdapter = new OpenAIAdapter({ openai });
- *
- * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);
- * ```
  */
  import OpenAI from "openai";
  import {
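This hunk removes the Azure OpenAI example from the `OpenAIAdapter` doc comment. The adapter still appears to accept a preconfigured `OpenAI` client, so the removed snippet should continue to apply; the following is simply a condensed copy of it, with the same placeholder values, for readers who relied on it:

```ts
import { CopilotRuntime, OpenAIAdapter } from "@copilotkit/runtime";
import OpenAI from "openai";

// Placeholders as in the removed example: your Azure OpenAI resource and model deployment.
const instance = "<your instance name>";
const model = "<your model>";

const apiKey = process.env["AZURE_OPENAI_API_KEY"];
if (!apiKey) {
  throw new Error("The AZURE_OPENAI_API_KEY environment variable is missing or empty.");
}

const openai = new OpenAI({
  apiKey,
  baseURL: `https://${instance}.openai.azure.com/openai/deployments/${model}`,
  defaultQuery: { "api-version": "2024-04-01-preview" },
  defaultHeaders: { "api-key": apiKey },
});

const copilotKit = new CopilotRuntime();
const serviceAdapter = new OpenAIAdapter({ openai });
// ...then, inside a request handler:
// return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);
```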
@@ -145,10 +112,6 @@ export class OpenAIAdapter implements CopilotServiceAdapter {
  eventSource.stream(async (eventStream$) => {
    let mode: "function" | "message" | null = null;
    for await (const chunk of stream) {
-     if (chunk.choices.length === 0) {
-       continue;
-     }
-
      const toolCall = chunk.choices[0].delta.tool_calls?.[0];
      const content = chunk.choices[0].delta.content;

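The hunk above drops the guard that skipped stream chunks with an empty `choices` array; the loop now reads `chunk.choices[0].delta` directly. For orientation only, here is a standalone sketch of that OpenAI streaming pattern (not package code; the model name is a placeholder):

```ts
import OpenAI from "openai";

const openai = new OpenAI();

async function logDeltas() {
  const stream = await openai.chat.completions.create({
    model: "<your model>", // placeholder
    messages: [{ role: "user", content: "Hello" }],
    stream: true,
  });

  for await (const chunk of stream) {
    // Mirrors the adapter's loop: each chunk carries incremental deltas.
    const toolCall = chunk.choices[0]?.delta.tool_calls?.[0];
    const content = chunk.choices[0]?.delta.content;
    if (content) process.stdout.write(content);
    if (toolCall?.function?.name) console.log("\ntool call:", toolCall.function.name);
  }
}
```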
package/src/service-adapters/openai/utils.ts
@@ -1,11 +1,12 @@
- import {
-   ActionExecutionMessage,
-   Message,
-   ResultMessage,
-   TextMessage,
- } from "../../graphql/types/converted";
+ import { Message } from "../../graphql/types/converted";
  import { ActionInput } from "../../graphql/inputs/action.input";
- import { ChatCompletionMessageParam, ChatCompletionTool } from "openai/resources";
+ import {
+   ChatCompletionMessageParam,
+   ChatCompletionTool,
+   ChatCompletionUserMessageParam,
+   ChatCompletionAssistantMessageParam,
+   ChatCompletionSystemMessageParam,
+ } from "openai/resources";

  export function limitMessagesToTokenCount(
    messages: any[],
@@ -120,9 +121,12 @@ export function convertActionInputToOpenAITool(action: ActionInput): ChatComplet
  export function convertMessageToOpenAIMessage(message: Message): ChatCompletionMessageParam {
    if (message.isTextMessage()) {
      return {
-       role: message.role,
+       role: message.role as ChatCompletionUserMessageParam["role"],
        content: message.content,
-     };
+     } satisfies
+       | ChatCompletionUserMessageParam
+       | ChatCompletionAssistantMessageParam
+       | ChatCompletionSystemMessageParam;
    } else if (message.isActionExecutionMessage()) {
      return {
        role: "assistant",
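The utils.ts change narrows `role` and validates the returned literal with `satisfies` against the union of concrete OpenAI message param types. As a standalone illustration of that TypeScript pattern (hypothetical helper, not package code):

```ts
import type {
  ChatCompletionAssistantMessageParam,
  ChatCompletionSystemMessageParam,
  ChatCompletionUserMessageParam,
} from "openai/resources";

// `satisfies` checks the object literal against the union without widening the
// inferred type, so callers still see the narrow shape that was written.
function textParam(content: string) {
  return {
    role: "user",
    content,
  } satisfies
    | ChatCompletionUserMessageParam
    | ChatCompletionAssistantMessageParam
    | ChatCompletionSystemMessageParam;
}
```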