@copilotkit/runtime 1.4.0-pre-1-4-0.0 → 1.4.0-pre-1-4-0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/CHANGELOG.md +85 -3
  2. package/__snapshots__/schema/schema.graphql +1 -0
  3. package/dist/{chunk-3SXVSM2J.mjs → chunk-5DNXXJKY.mjs} +2 -2
  4. package/dist/{chunk-KDT43TMW.mjs → chunk-7ASURDKE.mjs} +3 -3
  5. package/dist/{chunk-KDT43TMW.mjs.map → chunk-7ASURDKE.mjs.map} +1 -1
  6. package/dist/{chunk-VGYN7TGZ.mjs → chunk-BNQDVBQH.mjs} +1 -1
  7. package/dist/chunk-BNQDVBQH.mjs.map +1 -0
  8. package/dist/{chunk-SBNW3ABC.mjs → chunk-ITRJ4KUY.mjs} +2 -2
  9. package/dist/{chunk-5LEQEQFA.mjs → chunk-KDMS6EKE.mjs} +108 -142
  10. package/dist/chunk-KDMS6EKE.mjs.map +1 -0
  11. package/dist/{chunk-5DFP5VJV.mjs → chunk-NR7QFSEY.mjs} +2 -2
  12. package/dist/{copilot-runtime-68acb673.d.ts → copilot-runtime-8d3f40c7.d.ts} +3 -3
  13. package/dist/graphql/types/converted/index.d.ts +1 -1
  14. package/dist/{groq-adapter-c30654bd.d.ts → groq-adapter-dbfba3eb.d.ts} +34 -1
  15. package/dist/{index-439dfa2a.d.ts → index-83ee522f.d.ts} +2 -1
  16. package/dist/index.d.ts +4 -4
  17. package/dist/index.js +107 -141
  18. package/dist/index.js.map +1 -1
  19. package/dist/index.mjs +6 -6
  20. package/dist/{langserve-0d844922.d.ts → langserve-f00629d2.d.ts} +1 -1
  21. package/dist/lib/index.d.ts +4 -4
  22. package/dist/lib/index.js +107 -141
  23. package/dist/lib/index.js.map +1 -1
  24. package/dist/lib/index.mjs +6 -6
  25. package/dist/lib/integrations/index.d.ts +4 -4
  26. package/dist/lib/integrations/index.js +2 -1
  27. package/dist/lib/integrations/index.js.map +1 -1
  28. package/dist/lib/integrations/index.mjs +4 -4
  29. package/dist/lib/integrations/nest/index.d.ts +3 -3
  30. package/dist/lib/integrations/nest/index.js +2 -1
  31. package/dist/lib/integrations/nest/index.js.map +1 -1
  32. package/dist/lib/integrations/nest/index.mjs +2 -2
  33. package/dist/lib/integrations/node-express/index.d.ts +3 -3
  34. package/dist/lib/integrations/node-express/index.js +2 -1
  35. package/dist/lib/integrations/node-express/index.js.map +1 -1
  36. package/dist/lib/integrations/node-express/index.mjs +2 -2
  37. package/dist/lib/integrations/node-http/index.d.ts +3 -3
  38. package/dist/lib/integrations/node-http/index.js +2 -1
  39. package/dist/lib/integrations/node-http/index.js.map +1 -1
  40. package/dist/lib/integrations/node-http/index.mjs +1 -1
  41. package/dist/service-adapters/index.d.ts +4 -4
  42. package/dist/service-adapters/index.js.map +1 -1
  43. package/dist/service-adapters/index.mjs +1 -1
  44. package/package.json +4 -4
  45. package/src/agents/langgraph/events.ts +1 -0
  46. package/src/graphql/types/enums.ts +1 -0
  47. package/src/lib/runtime/copilot-runtime.ts +2 -2
  48. package/src/lib/runtime/remote-lg-cloud-action.ts +137 -160
  49. package/src/service-adapters/openai/openai-adapter.ts +33 -0
  50. package/src/service-adapters/openai/utils.ts +13 -9
  51. package/dist/chunk-5LEQEQFA.mjs.map +0 -1
  52. package/dist/chunk-VGYN7TGZ.mjs.map +0 -1
  53. /package/dist/{chunk-3SXVSM2J.mjs.map → chunk-5DNXXJKY.mjs.map} +0 -0
  54. /package/dist/{chunk-SBNW3ABC.mjs.map → chunk-ITRJ4KUY.mjs.map} +0 -0
  55. /package/dist/{chunk-5DFP5VJV.mjs.map → chunk-NR7QFSEY.mjs.map} +0 -0
package/src/lib/runtime/remote-lg-cloud-action.ts

@@ -7,6 +7,7 @@ import { LangGraphCloudAgent, LangGraphCloudEndpoint } from "./remote-actions";
 import { CopilotRequestContextProperties } from "../integrations";
 import { Message, MessageType } from "../../graphql/types/converted";
 import { MessageRole } from "../../graphql/types/enums";
+import { CustomEventNames, LangGraphEventTypes } from "../../agents/langgraph/events";
 
 type State = Record<string, any>;
 
@@ -111,7 +112,13 @@ async function streamEvents(controller: ReadableStreamDefaultController, args: E
   const agentStateValues = agentState.values as State;
   state.messages = agentStateValues.messages;
   const mode = wasInitiatedWithExistingThread && nodeName != "__end__" ? "continue" : "start";
-  state = langGraphDefaultMergeState(state, formatMessages(messages), actions, name);
+  let formattedMessages = [];
+  try {
+    formattedMessages = formatMessages(messages);
+  } catch (e) {
+    logger.error(e, `Error event thrown: ${e.message}`);
+  }
+  state = langGraphDefaultMergeState(state, formattedMessages, actions, name);
 
   if (mode === "continue") {
     await client.threads.updateState(threadId, { values: state, asNode: nodeName });
@@ -133,7 +140,7 @@ async function streamEvents(controller: ReadableStreamDefaultController, args: E
   let streamingStateExtractor = new StreamingStateExtractor([]);
   let prevNodeName = null;
   let emitIntermediateStateUntilEnd = null;
-  let shouldExit = null;
+  let shouldExit = false;
   let externalRunId = null;
 
   const streamResponse = client.runs.stream(threadId, assistantId, {
@@ -144,163 +151,149 @@ async function streamEvents(controller: ReadableStreamDefaultController, args: E
   const emit = (message: string) => controller.enqueue(new TextEncoder().encode(message));
 
   let latestStateValues = {};
+  let updatedState = state;
 
-  for await (const chunk of streamResponse) {
-    if (!["events", "values", "error"].includes(chunk.event)) continue;
+  try {
+    for await (const chunk of streamResponse) {
+      if (!["events", "values", "error"].includes(chunk.event)) continue;
 
-    if (chunk.event === "error") {
-      logger.error(chunk, `Error event thrown: ${chunk.data.message}`);
-      throw new Error(`Error event thrown: ${chunk.data.message}`);
-    }
+      if (chunk.event === "error") {
+        logger.error(chunk, `Error event thrown: ${chunk.data.message}`);
+        throw new Error(`Error event thrown: ${chunk.data.message}`);
+      }
 
-    if (chunk.event === "values") {
-      latestStateValues = chunk.data;
-      continue;
-    }
+      if (chunk.event === "values") {
+        latestStateValues = chunk.data;
+        continue;
+      }
 
-    const event = chunk.data;
-    const currentNodeName = event.name;
-    const eventType = event.event;
-    const runId = event.metadata.run_id;
-    externalRunId = runId;
-    const metadata = event.metadata;
-
-    shouldExit = shouldExit != null ? shouldExit : metadata["copilotkit:exit"];
-    const emitIntermediateState = metadata["copilotkit:emit-intermediate-state"];
-    const forceEmitIntermediateState = metadata["copilotkit:force-emit-intermediate-state"];
-    const manuallyEmitMessage = metadata["copilotkit:manually-emit-messages"];
-    const manuallyEmitToolCall = metadata["copilotkit:manually-emit-tool-calls"];
-    // we only want to update the node name under certain conditions
-    // since we don't need any internal node names to be sent to the frontend
-    if (graphInfo["nodes"].some((node) => node.id === currentNodeName)) {
-      nodeName = currentNodeName;
-    }
+      const event = chunk.data;
+      const currentNodeName = event.name;
+      const eventType = event.event;
+      const runId = event.metadata.run_id;
+      externalRunId = runId;
+      const metadata = event.metadata;
+
+      shouldExit =
+        shouldExit ||
+        (eventType === LangGraphEventTypes.OnCustomEvent &&
+          event.name === CustomEventNames.CopilotKitExit);
+
+      const emitIntermediateState = metadata["copilotkit:emit-intermediate-state"];
+      const manuallyEmitIntermediateState =
+        eventType === LangGraphEventTypes.OnCustomEvent &&
+        event.name === CustomEventNames.CopilotKitManuallyEmitIntermediateState;
+
+      // we only want to update the node name under certain conditions
+      // since we don't need any internal node names to be sent to the frontend
+      if (graphInfo["nodes"].some((node) => node.id === currentNodeName)) {
+        nodeName = currentNodeName;
+
+        // only update state from values when entering or exiting a known node
+        if (
+          eventType === LangGraphEventTypes.OnChainStart ||
+          eventType === LangGraphEventTypes.OnChainEnd
+        ) {
+          updatedState = latestStateValues;
+        }
+      }
 
-    if (!nodeName) {
-      continue;
-    }
+      if (!nodeName) {
+        continue;
+      }
 
-    if (forceEmitIntermediateState) {
-      if (eventType === "on_chain_end") {
-        state = event.data.output;
+      if (manuallyEmitIntermediateState) {
+        updatedState = event.data;
         emit(
           getStateSyncEvent({
             threadId,
             runId,
             agentName: agent.name,
             nodeName,
-            state: event.data.output,
+            state: updatedState,
             running: true,
             active: true,
           }),
         );
+        continue;
       }
-      continue;
-    }
 
-    if (manuallyEmitMessage) {
-      if (eventType === "on_chain_end") {
-        state = event.data.output;
-        emit(
-          JSON.stringify({
-            event: "on_copilotkit_emit_message",
-            message: event.data.output,
-            messageId: randomUUID(),
-            role: MessageRole.assistant,
-          }) + "\n",
-        );
+      if (emitIntermediateState && emitIntermediateStateUntilEnd == null) {
+        emitIntermediateStateUntilEnd = nodeName;
       }
-      continue;
-    }
 
-    if (manuallyEmitToolCall) {
-      if (eventType === "on_chain_end") {
-        state = event.data.output;
-        emit(
-          JSON.stringify({
-            event: "on_copilotkit_emit_tool_call",
-            name: event.data.output.name,
-            args: event.data.output.args,
-            id: event.data.output.id,
-          }) + "\n",
-        );
+      if (emitIntermediateState && eventType === LangGraphEventTypes.OnChatModelStart) {
+        // reset the streaming state extractor
+        streamingStateExtractor = new StreamingStateExtractor(emitIntermediateState);
       }
-      continue;
-    }
 
-    if (emitIntermediateState && emitIntermediateStateUntilEnd == null) {
-      emitIntermediateStateUntilEnd = nodeName;
-    }
-
-    if (emitIntermediateState && eventType === "on_chat_model_start") {
-      // reset the streaming state extractor
-      streamingStateExtractor = new StreamingStateExtractor(emitIntermediateState);
-    }
-
-    let updatedState = latestStateValues;
+      if (emitIntermediateState && eventType === LangGraphEventTypes.OnChatModelStream) {
+        streamingStateExtractor.bufferToolCalls(event);
+      }
 
-    if (emitIntermediateState && eventType === "on_chat_model_stream") {
-      streamingStateExtractor.bufferToolCalls(event);
-    }
+      if (emitIntermediateStateUntilEnd !== null) {
+        updatedState = {
+          ...updatedState,
+          ...streamingStateExtractor.extractState(),
+        };
+      }
 
-    if (emitIntermediateStateUntilEnd !== null) {
-      updatedState = {
-        ...updatedState,
-        ...streamingStateExtractor.extractState(),
-      };
-    }
+      if (
+        !emitIntermediateState &&
+        currentNodeName === emitIntermediateStateUntilEnd &&
+        eventType === LangGraphEventTypes.OnChainEnd
+      ) {
+        // stop emitting function call state
+        emitIntermediateStateUntilEnd = null;
+      }
 
-    if (
-      !emitIntermediateState &&
-      currentNodeName === emitIntermediateStateUntilEnd &&
-      eventType === "on_chain_end"
-    ) {
-      // stop emitting function call state
-      emitIntermediateStateUntilEnd = null;
-    }
+      const exitingNode =
+        nodeName === currentNodeName && eventType === LangGraphEventTypes.OnChainEnd;
 
-    const exitingNode = nodeName === currentNodeName && eventType === "on_chain_end";
+      if (
+        JSON.stringify(updatedState) !== JSON.stringify(state) ||
+        prevNodeName != nodeName ||
+        exitingNode
+      ) {
+        state = updatedState;
+        prevNodeName = nodeName;
+        emit(
+          getStateSyncEvent({
+            threadId,
+            runId,
+            agentName: agent.name,
+            nodeName,
+            state,
+            running: true,
+            active: !exitingNode,
+          }),
+        );
+      }
 
-    if (
-      JSON.stringify(updatedState) !== JSON.stringify(state) ||
-      prevNodeName != nodeName ||
-      exitingNode
-    ) {
-      state = updatedState;
-      prevNodeName = nodeName;
-      emit(
-        getStateSyncEvent({
-          threadId,
-          runId,
-          agentName: agent.name,
-          nodeName,
-          state,
-          running: true,
-          active: !exitingNode,
-        }),
-      );
+      emit(JSON.stringify(event) + "\n");
     }
 
-    emit(JSON.stringify(event) + "\n");
+    state = await client.threads.getState(threadId);
+    const isEndNode = state.next.length === 0;
+    nodeName = Object.keys(state.metadata.writes)[0];
+
+    emit(
+      getStateSyncEvent({
+        threadId,
+        runId: externalRunId,
+        agentName: agent.name,
+        nodeName: isEndNode ? "__end__" : nodeName,
+        state: state.values,
+        running: !shouldExit,
+        active: false,
+      }),
+    );
+
+    return Promise.resolve();
+  } catch (e) {
+    // TODO: handle error state here.
+    return Promise.resolve();
   }
-
-  state = await client.threads.getState(threadId);
-  const isEndNode = state.next.length === 0;
-  nodeName = Object.keys(state.metadata.writes)[0];
-
-  emit(
-    getStateSyncEvent({
-      threadId,
-      runId: externalRunId,
-      agentName: agent.name,
-      nodeName: isEndNode ? "__end__" : nodeName,
-      state: state.values,
-      running: !shouldExit,
-      active: false,
-    }),
-  );
-
-  return Promise.resolve();
 }
 
 function getStateSyncEvent({
@@ -329,7 +322,7 @@ function getStateSyncEvent({
 
   return (
     JSON.stringify({
-      event: "on_copilotkit_state_sync",
+      event: LangGraphEventTypes.OnCopilotKitStateSync,
       thread_id: threadId,
       run_id: runId,
       agent_name: agentName,
@@ -361,10 +354,10 @@ class StreamingStateExtractor {
     if (event.data.chunk.tool_call_chunks.length > 0) {
       const chunk = event.data.chunk.tool_call_chunks[0];
 
-      if (chunk.name !== null) {
+      if (chunk.name !== null && chunk.name !== undefined) {
        this.currentToolCall = chunk.name;
        this.toolCallBuffer[this.currentToolCall] = chunk.args;
-      } else if (this.currentToolCall !== null) {
+      } else if (this.currentToolCall !== null && this.currentToolCall !== undefined) {
        this.toolCallBuffer[this.currentToolCall] += chunk.args;
      }
    }
@@ -466,15 +459,14 @@ function langGraphDefaultMergeState(
     } else {
       // Replace the message with the existing one
       for (let i = 0; i < mergedMessages.length; i++) {
-        if (mergedMessages[i].id === message.id) {
-          if ("tool_calls" in message) {
-            if (
-              ("tool_calls" in mergedMessages[i] || "additional_kwargs" in mergedMessages[i]) &&
-              mergedMessages[i].content
-            ) {
-              message.tool_calls = mergedMessages[i]["tool_calls"];
-              message.additional_kwargs = mergedMessages[i].additional_kwargs;
-            }
+        if (mergedMessages[i].id === message.id && message.role === "assistant") {
+          if (
+            ("tool_calls" in mergedMessages[i] || "additional_kwargs" in mergedMessages[i]) &&
+            mergedMessages[i].content
+          ) {
+            // @ts-expect-error -- message did not have a tool call, now it will
+            message.tool_calls = mergedMessages[i]["tool_calls"];
+            message.additional_kwargs = mergedMessages[i].additional_kwargs;
          }
           mergedMessages[i] = message;
        }
@@ -559,28 +551,13 @@ function langGraphDefaultMergeState(
     correctedMessages.push(currentMessage);
   }
 
-  return deepMerge(state, {
+  return {
+    ...state,
     messages: correctedMessages,
     copilotkit: {
       actions,
    },
-  });
-}
-
-function deepMerge(obj1: State, obj2: State) {
-  let result = { ...obj1 };
-  for (let key in obj2) {
-    if (typeof obj2[key] === "object" && !Array.isArray(obj2[key])) {
-      if (obj1[key]) {
-        result[key] = deepMerge(obj1[key], obj2[key]);
-      } else {
-        result[key] = { ...obj2[key] };
-      }
-    } else {
-      result[key] = obj2[key];
-    }
-  }
-  return result;
+  };
 }
 
 function formatMessages(messages: Message[]): LangGraphCloudMessage[] {
@@ -615,7 +592,7 @@ function formatMessages(messages: Message[]): LangGraphCloudMessage[] {
         id: message.id,
         tool_call_id: message.actionExecutionId,
         name: message.actionName,
-        role: MessageRole.assistant,
+        role: MessageRole.tool,
      } satisfies LangGraphCloudResultMessage;
    }
 
package/src/service-adapters/openai/openai-adapter.ts

@@ -18,6 +18,39 @@
  *
  * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);
  * ```
+ *
+ * ## Example with Azure OpenAI
+ *
+ * ```ts
+ * import { CopilotRuntime, OpenAIAdapter } from "@copilotkit/runtime";
+ * import OpenAI from "openai";
+ *
+ * // The name of your Azure OpenAI Instance.
+ * // https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal#create-a-resource
+ * const instance = "<your instance name>";
+ *
+ * // Corresponds to your Model deployment within your OpenAI resource, e.g. my-gpt35-16k-deployment
+ * // Navigate to the Azure OpenAI Studio to deploy a model.
+ * const model = "<your model>";
+ *
+ * const apiKey = process.env["AZURE_OPENAI_API_KEY"];
+ * if (!apiKey) {
+ *   throw new Error("The AZURE_OPENAI_API_KEY environment variable is missing or empty.");
+ * }
+ *
+ * const copilotKit = new CopilotRuntime();
+ *
+ * const openai = new OpenAI({
+ *   apiKey,
+ *   baseURL: `https://${instance}.openai.azure.com/openai/deployments/${model}`,
+ *   defaultQuery: { "api-version": "2024-04-01-preview" },
+ *   defaultHeaders: { "api-key": apiKey },
+ * });
+ *
+ * const serviceAdapter = new OpenAIAdapter({ openai });
+ *
+ * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);
+ * ```
  */
 import OpenAI from "openai";
 import {
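The JSDoc example above ends with `copilotKit.streamHttpServerResponse(req, res, serviceAdapter)`, which is meant to run inside an HTTP request handler. A minimal wiring sketch, assuming an Express server and a `/copilotkit` route (both illustrative choices, not part of this diff), reusing the call exactly as the doc comment shows:

```ts
import express from "express";
import OpenAI from "openai";
import { CopilotRuntime, OpenAIAdapter } from "@copilotkit/runtime";

// Azure OpenAI client configured as in the documentation example above.
const instance = "<your instance name>";
const model = "<your model>";
const apiKey = process.env["AZURE_OPENAI_API_KEY"]!;

const openai = new OpenAI({
  apiKey,
  baseURL: `https://${instance}.openai.azure.com/openai/deployments/${model}`,
  defaultQuery: { "api-version": "2024-04-01-preview" },
  defaultHeaders: { "api-key": apiKey },
});

const copilotKit = new CopilotRuntime();
const serviceAdapter = new OpenAIAdapter({ openai });

const app = express();

// Hypothetical route: each request is delegated to the runtime as documented above.
app.use("/copilotkit", (req, res) => {
  return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);
});

app.listen(4000);
```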
package/src/service-adapters/openai/utils.ts

@@ -1,11 +1,12 @@
-import {
-  ActionExecutionMessage,
-  Message,
-  ResultMessage,
-  TextMessage,
-} from "../../graphql/types/converted";
+import { Message } from "../../graphql/types/converted";
 import { ActionInput } from "../../graphql/inputs/action.input";
-import { ChatCompletionMessageParam, ChatCompletionTool } from "openai/resources";
+import {
+  ChatCompletionMessageParam,
+  ChatCompletionTool,
+  ChatCompletionUserMessageParam,
+  ChatCompletionAssistantMessageParam,
+  ChatCompletionSystemMessageParam,
+} from "openai/resources";
 
 export function limitMessagesToTokenCount(
   messages: any[],
@@ -120,9 +121,12 @@ export function convertActionInputToOpenAITool(action: ActionInput): ChatComplet
 export function convertMessageToOpenAIMessage(message: Message): ChatCompletionMessageParam {
   if (message.isTextMessage()) {
     return {
-      role: message.role,
+      role: message.role as ChatCompletionUserMessageParam["role"],
       content: message.content,
-    };
+    } satisfies
+      | ChatCompletionUserMessageParam
+      | ChatCompletionAssistantMessageParam
+      | ChatCompletionSystemMessageParam;
   } else if (message.isActionExecutionMessage()) {
     return {
       role: "assistant",