@copilotkit/runtime 1.50.0-beta.2 → 1.50.0-beta.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/dist/index.d.ts +71 -281
  2. package/dist/index.js +291 -274
  3. package/dist/index.js.map +1 -1
  4. package/dist/index.mjs +284 -263
  5. package/dist/index.mjs.map +1 -1
  6. package/dist/langgraph.d.ts +284 -0
  7. package/dist/langgraph.js +211 -0
  8. package/dist/langgraph.js.map +1 -0
  9. package/dist/langgraph.mjs +206 -0
  10. package/dist/langgraph.mjs.map +1 -0
  11. package/dist/v2/index.d.ts +1 -0
  12. package/dist/v2/index.js +7 -0
  13. package/dist/v2/index.js.map +1 -1
  14. package/dist/v2/index.mjs +1 -0
  15. package/dist/v2/index.mjs.map +1 -1
  16. package/package.json +48 -15
  17. package/src/langgraph.ts +1 -0
  18. package/src/lib/index.ts +41 -1
  19. package/src/lib/integrations/node-http/index.ts +129 -10
  20. package/src/lib/runtime/agent-integrations/{langgraph.agent.ts → langgraph/agent.ts} +5 -30
  21. package/src/lib/runtime/agent-integrations/langgraph/consts.ts +34 -0
  22. package/src/lib/runtime/agent-integrations/langgraph/index.ts +2 -0
  23. package/src/lib/runtime/copilot-runtime.ts +17 -40
  24. package/src/service-adapters/anthropic/anthropic-adapter.ts +16 -3
  25. package/src/service-adapters/bedrock/bedrock-adapter.ts +4 -1
  26. package/src/service-adapters/experimental/ollama/ollama-adapter.ts +2 -1
  27. package/src/service-adapters/google/google-genai-adapter.ts +9 -4
  28. package/src/service-adapters/groq/groq-adapter.ts +16 -3
  29. package/src/service-adapters/langchain/langchain-adapter.ts +5 -3
  30. package/src/service-adapters/langchain/langserve.ts +2 -1
  31. package/src/service-adapters/openai/openai-adapter.ts +17 -3
  32. package/src/service-adapters/openai/openai-assistant-adapter.ts +26 -11
  33. package/src/service-adapters/unify/unify-adapter.ts +3 -1
  34. package/src/v2/index.ts +1 -0
  35. package/tsup.config.ts +5 -2
package/dist/index.mjs CHANGED
@@ -1,32 +1,21 @@
  import 'reflect-metadata';
- import OpenAI from 'openai';
  import { TelemetryClient, CopilotKitLowLevelError, CopilotKitErrorCode, randomUUID, randomId, CopilotKitMisuseError, getZodParameters, readBody, CopilotKitError, CopilotKitAgentDiscoveryError, parseJson, convertJsonSchemaToZodSchema } from '@copilotkit/shared';
- import { AIMessage, HumanMessage, SystemMessage, ToolMessage } from '@langchain/core/messages';
+ import { HumanMessage, AIMessage, SystemMessage, ToolMessage } from '@langchain/core/messages';
  import { DynamicStructuredTool } from '@langchain/core/tools';
- import { awaitAllCallbacks } from '@langchain/core/callbacks/promises';
- import { ChatGoogle } from '@langchain/google-gauth';
- import { Groq } from 'groq-sdk';
  import { registerEnumType, Field, InputType, ObjectType, createUnionType, InterfaceType, Query, Ctx, Mutation, Arg, Resolver, buildSchemaSync } from 'type-graphql';
- import { map, ReplaySubject, shareReplay, finalize, firstValueFrom, skipWhile, takeWhile, filter, Subject, take, tap } from 'rxjs';
+ import { ReplaySubject, shareReplay, finalize, firstValueFrom, skipWhile, takeWhile, filter, Subject, take, tap } from 'rxjs';
  import { GraphQLJSON, GraphQLJSONObject } from 'graphql-scalars';
  import { Repeater } from 'graphql-yoga';
  import { plainToInstance } from 'class-transformer';
  import { GraphQLError } from 'graphql';
  import { createHash } from 'crypto';
  import { InMemoryAgentRunner, CopilotRuntime as CopilotRuntime$1, createCopilotEndpointSingleRoute } from '@copilotkitnext/runtime';
- import { EventType } from '@ag-ui/client';
- import { LangGraphAgent as LangGraphAgent$1 } from '@ag-ui/langgraph';
- export { LangGraphHttpAgent } from '@ag-ui/langgraph';
  import { BasicAgent } from '@copilotkitnext/agent';
  import { useDeferStream } from '@graphql-yoga/plugin-defer-stream';
  import createPinoLogger from 'pino';
  import pretty from 'pino-pretty';
  import { handle } from 'hono/vercel';
  import { Readable } from 'stream';
- import { RemoteRunnable } from 'langchain/runnables/remote';
- import Anthropic from '@anthropic-ai/sdk';
- import { Ollama } from '@langchain/community/llms/ollama';
- import { ChatBedrockConverse } from '@langchain/aws';

  var __create = Object.create;
  var __defProp = Object.defineProperty;
@@ -35,7 +24,14 @@ var __getOwnPropNames = Object.getOwnPropertyNames;
  var __getProtoOf = Object.getPrototypeOf;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
  var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
- var __commonJS = (cb, mod) => function __require() {
+ var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
+ get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
+ }) : x)(function(x) {
+ if (typeof require !== "undefined")
+ return require.apply(this, arguments);
+ throw new Error('Dynamic require of "' + x + '" is not supported');
+ });
+ var __commonJS = (cb, mod) => function __require2() {
  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
  };
  var __copyProps = (to, from, except, desc) => {
@@ -83,6 +79,11 @@ var require_package = __commonJS({
  import: "./dist/v2/index.mjs",
  require: "./dist/v2/index.js",
  types: "./dist/v2/index.d.ts"
+ },
+ "./langgraph": {
+ import: "./dist/langgraph.mjs",
+ require: "./dist/langgraph.js",
+ types: "./dist/langgraph.d.ts"
  }
  },
  types: "./dist/index.d.ts",
@@ -112,29 +113,21 @@ var require_package = __commonJS({
  tsconfig: "workspace:*",
  tsup: "^6.7.0",
  typescript: "^5.2.3",
- vitest: "^3.2.4",
- "zod-to-json-schema": "^3.23.5"
+ vitest: "^3.2.4"
  },
  dependencies: {
- "@anthropic-ai/sdk": "^0.57.0",
  "@copilotkit/shared": "workspace:*",
  "@copilotkitnext/agent": "0.0.23",
  "@copilotkitnext/runtime": "0.0.23",
  "@graphql-yoga/plugin-defer-stream": "^3.3.1",
- "@langchain/aws": "^0.1.9",
- "@langchain/community": "^0.3.29",
+ "@hono/node-server": "^1.13.5",
  "@langchain/core": "^0.3.38",
- "@langchain/google-gauth": "^0.1.0",
- "@langchain/langgraph-sdk": "^0.0.70",
- "@langchain/openai": "^0.4.2",
  "@scarf/scarf": "^1.3.0",
  "class-transformer": "^0.5.1",
  "class-validator": "^0.14.1",
  graphql: "^16.8.1",
  "graphql-scalars": "^1.23.0",
  "graphql-yoga": "^5.3.1",
- "groq-sdk": "^0.5.0",
- "@hono/node-server": "^1.13.5",
  hono: "^4.10.3",
  langchain: "^0.3.3",
  openai: "^4.85.1",
@@ -147,15 +140,51 @@ var require_package = __commonJS({
  zod: "^3.23.3"
  },
  peerDependencies: {
- "@ag-ui/client": ">=0.0.39",
+ "@anthropic-ai/sdk": "^0.57.0",
+ "@ag-ui/client": "^0.0.41",
  "@ag-ui/core": ">=0.0.39",
  "@ag-ui/encoder": ">=0.0.39",
  "@ag-ui/langgraph": ">=0.0.18",
- "@ag-ui/proto": ">=0.0.39"
+ "@ag-ui/proto": ">=0.0.39",
+ "@langchain/aws": "^0.1.9",
+ "@langchain/community": "^0.3.58",
+ "@langchain/google-gauth": "^0.1.0",
+ "@langchain/langgraph-sdk": "^0.0.70",
+ "@langchain/openai": "^0.4.2",
+ "groq-sdk": "^0.5.0",
+ langchain: "^0.3.3",
+ openai: "^4.85.1"
  },
  peerDependenciesMeta: {
+ "@anthropic-ai/sdk": {
+ optional: true
+ },
  "@ag-ui/langgraph": {
  optional: true
+ },
+ "@langchain/aws": {
+ optional: true
+ },
+ "@langchain/community": {
+ optional: true
+ },
+ "@langchain/google-gauth": {
+ optional: true
+ },
+ "@langchain/langgraph-sdk": {
+ optional: true
+ },
+ "@langchain/openai": {
+ optional: true
+ },
+ "groq-sdk": {
+ optional: true
+ },
+ langchain: {
+ optional: true
+ },
+ openai: {
+ optional: true
  }
  },
  keywords: [
@@ -407,13 +436,22 @@ var OpenAIAdapter = class {
  return "OpenAIAdapter";
  }
  constructor(params) {
- this._openai = (params == null ? void 0 : params.openai) || new OpenAI({});
+ if (params == null ? void 0 : params.openai) {
+ this._openai = params.openai;
+ }
  if (params == null ? void 0 : params.model) {
  this.model = params.model;
  }
  this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
  this.keepSystemRole = (params == null ? void 0 : params.keepSystemRole) ?? false;
  }
+ ensureOpenAI() {
+ if (!this._openai) {
+ const OpenAI = __require("openai").default;
+ this._openai = new OpenAI();
+ }
+ return this._openai;
+ }
  async process(request) {
  const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
  const tools = actions.map(convertActionInputToOpenAITool);
@@ -448,7 +486,8 @@ var OpenAIAdapter = class {
  };
  }
  try {
- const stream = this.openai.beta.chat.completions.stream({
+ const openai = this.ensureOpenAI();
+ const stream = openai.beta.chat.completions.stream({
  model,
  stream: true,
  messages: openaiMessages,
@@ -808,11 +847,14 @@ var LangChainAdapter = class {
  threadId
  };
  } finally {
+ const { awaitAllCallbacks } = __require("@langchain/core/callbacks/promises");
  await awaitAllCallbacks();
  }
  }
  };
  __name(LangChainAdapter, "LangChainAdapter");
+
+ // src/service-adapters/google/google-genai-adapter.ts
  var DEFAULT_MODEL2 = "gemini-1.5-pro";
  var GoogleGenerativeAIAdapter = class extends LangChainAdapter {
  provider = "google";
@@ -820,11 +862,14 @@ var GoogleGenerativeAIAdapter = class extends LangChainAdapter {
  constructor(options) {
  super({
  chainFn: async ({ messages, tools, threadId }) => {
+ const { ChatGoogle } = __require("@langchain/google-gauth");
+ const { AIMessage: AIMessage2 } = __require("@langchain/core/messages");
  const filteredMessages = messages.filter((message) => {
- if (!(message instanceof AIMessage)) {
+ if (!(message instanceof AIMessage2)) {
  return true;
  }
- return message.content && String(message.content).trim().length > 0 || message.tool_calls && message.tool_calls.length > 0;
+ const aiMsg = message;
+ return aiMsg.content && String(aiMsg.content).trim().length > 0 || aiMsg.tool_calls && aiMsg.tool_calls.length > 0;
  });
  this.model = (options == null ? void 0 : options.model) ?? "gemini-1.5-pro";
  const model = new ChatGoogle({
@@ -842,8 +887,10 @@ var GoogleGenerativeAIAdapter = class extends LangChainAdapter {
  }
  };
  __name(GoogleGenerativeAIAdapter, "GoogleGenerativeAIAdapter");
+
+ // src/service-adapters/openai/openai-assistant-adapter.ts
  var OpenAIAssistantAdapter = class {
- openai;
+ _openai;
  codeInterpreterEnabled;
  assistantId;
  fileSearchEnabled;
@@ -853,19 +900,29 @@ var OpenAIAssistantAdapter = class {
  return "OpenAIAssistantAdapter";
  }
  constructor(params) {
- this.openai = params.openai || new OpenAI({});
+ if (params.openai) {
+ this._openai = params.openai;
+ }
  this.codeInterpreterEnabled = params.codeInterpreterEnabled === false || true;
  this.fileSearchEnabled = params.fileSearchEnabled === false || true;
  this.assistantId = params.assistantId;
  this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
  this.keepSystemRole = (params == null ? void 0 : params.keepSystemRole) ?? false;
  }
+ ensureOpenAI() {
+ if (!this._openai) {
+ const OpenAI = __require("openai").default;
+ this._openai = new OpenAI({});
+ }
+ return this._openai;
+ }
  async process(request) {
  var _a, _b;
  const { messages, actions, eventSource, runId, forwardedParameters } = request;
  let threadId = (_b = (_a = request.extensions) == null ? void 0 : _a.openaiAssistantAPI) == null ? void 0 : _b.threadId;
+ const openai = this.ensureOpenAI();
  if (!threadId) {
- threadId = (await this.openai.beta.threads.create()).id;
+ threadId = (await openai.beta.threads.create()).id;
  }
  const lastMessage = messages.at(-1);
  let nextRunId = void 0;
@@ -889,7 +946,8 @@ var OpenAIAssistantAdapter = class {
  };
  }
  async submitToolOutputs(threadId, runId, messages, eventSource) {
- let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);
+ const openai = this.ensureOpenAI();
+ let run = await openai.beta.threads.runs.retrieve(threadId, runId);
  if (!run.required_action) {
  throw new Error("No tool outputs required");
  }
@@ -904,7 +962,7 @@ var OpenAIAssistantAdapter = class {
  output: message.result
  };
  });
- const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
+ const stream = openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
  tool_outputs: toolOutputs,
  ...this.disableParallelToolCalls && {
  parallel_tool_calls: false
@@ -914,6 +972,7 @@ var OpenAIAssistantAdapter = class {
  return runId;
  }
  async submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters) {
+ const openai = this.ensureOpenAI();
  messages = [
  ...messages
  ];
@@ -925,7 +984,7 @@ var OpenAIAssistantAdapter = class {
  if (userMessage.role !== "user") {
  throw new Error("No user message found");
  }
- await this.openai.beta.threads.messages.create(threadId, {
+ await openai.beta.threads.messages.create(threadId, {
  role: "user",
  content: userMessage.content
  });
@@ -943,7 +1002,7 @@ var OpenAIAssistantAdapter = class {
  }
  ] : []
  ];
- let stream = this.openai.beta.threads.runs.stream(threadId, {
+ let stream = openai.beta.threads.runs.stream(threadId, {
  assistant_id: this.assistantId,
  instructions,
  tools,
@@ -1062,6 +1121,7 @@ var UnifyAdapter = class {
  }
  async process(request) {
  const tools = request.actions.map(convertActionInputToOpenAITool);
+ const OpenAI = __require("openai").default;
  const openai = new OpenAI({
  apiKey: this.apiKey,
  baseURL: "https://api.unify.ai/v0/"
@@ -1173,12 +1233,21 @@ var GroqAdapter = class {
  return "GroqAdapter";
  }
  constructor(params) {
- this._groq = (params == null ? void 0 : params.groq) || new Groq({});
+ if (params == null ? void 0 : params.groq) {
+ this._groq = params.groq;
+ }
  if (params == null ? void 0 : params.model) {
  this.model = params.model;
  }
  this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
  }
+ ensureGroq() {
+ if (!this._groq) {
+ const { Groq } = __require("groq-sdk");
+ this._groq = new Groq({});
+ }
+ return this._groq;
+ }
  async process(request) {
  const { threadId, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
  const tools = actions.map(convertActionInputToOpenAITool);
@@ -1197,7 +1266,8 @@ var GroqAdapter = class {
  }
  let stream;
  try {
- stream = await this.groq.chat.completions.create({
+ const groq = this.ensureGroq();
+ stream = await groq.chat.completions.create({
  model,
  stream: true,
  messages: openaiMessages,
@@ -3306,199 +3376,6 @@ When using these tools:
  7. Always check tool responses to determine your next action`;
  }
  __name(generateMcpToolInstructions, "generateMcpToolInstructions");
-
- // src/agents/langgraph/events.ts
- var LangGraphEventTypes;
- (function(LangGraphEventTypes2) {
- LangGraphEventTypes2["OnChainStart"] = "on_chain_start";
- LangGraphEventTypes2["OnChainStream"] = "on_chain_stream";
- LangGraphEventTypes2["OnChainEnd"] = "on_chain_end";
- LangGraphEventTypes2["OnChatModelStart"] = "on_chat_model_start";
- LangGraphEventTypes2["OnChatModelStream"] = "on_chat_model_stream";
- LangGraphEventTypes2["OnChatModelEnd"] = "on_chat_model_end";
- LangGraphEventTypes2["OnToolStart"] = "on_tool_start";
- LangGraphEventTypes2["OnToolEnd"] = "on_tool_end";
- LangGraphEventTypes2["OnCopilotKitStateSync"] = "on_copilotkit_state_sync";
- LangGraphEventTypes2["OnCopilotKitEmitMessage"] = "on_copilotkit_emit_message";
- LangGraphEventTypes2["OnCopilotKitEmitToolCall"] = "on_copilotkit_emit_tool_call";
- LangGraphEventTypes2["OnCustomEvent"] = "on_custom_event";
- LangGraphEventTypes2["OnInterrupt"] = "on_interrupt";
- LangGraphEventTypes2["OnCopilotKitInterrupt"] = "on_copilotkit_interrupt";
- LangGraphEventTypes2["OnCopilotKitError"] = "on_copilotkit_error";
- })(LangGraphEventTypes || (LangGraphEventTypes = {}));
- var MetaEventNames;
- (function(MetaEventNames2) {
- MetaEventNames2["LangGraphInterruptEvent"] = "LangGraphInterruptEvent";
- MetaEventNames2["CopilotKitLangGraphInterruptEvent"] = "CopilotKitLangGraphInterruptEvent";
- })(MetaEventNames || (MetaEventNames = {}));
- var CustomEventNames;
- (function(CustomEventNames3) {
- CustomEventNames3["CopilotKitManuallyEmitMessage"] = "copilotkit_manually_emit_message";
- CustomEventNames3["CopilotKitManuallyEmitToolCall"] = "copilotkit_manually_emit_tool_call";
- CustomEventNames3["CopilotKitManuallyEmitIntermediateState"] = "copilotkit_manually_emit_intermediate_state";
- CustomEventNames3["CopilotKitExit"] = "copilotkit_exit";
- })(CustomEventNames || (CustomEventNames = {}));
- var CustomEventNames2;
- (function(CustomEventNames3) {
- CustomEventNames3["CopilotKitManuallyEmitMessage"] = "copilotkit_manually_emit_message";
- CustomEventNames3["CopilotKitManuallyEmitToolCall"] = "copilotkit_manually_emit_tool_call";
- CustomEventNames3["CopilotKitManuallyEmitIntermediateState"] = "copilotkit_manually_emit_intermediate_state";
- CustomEventNames3["CopilotKitExit"] = "copilotkit_exit";
- })(CustomEventNames2 || (CustomEventNames2 = {}));
- var LangGraphAgent = class extends LangGraphAgent$1 {
- constructor(config2) {
- super(config2);
- }
- // @ts-ignore
- clone() {
- return new LangGraphAgent(this.config);
- }
- dispatchEvent(event) {
- if (event.type === EventType.CUSTOM) {
- const customEvent = event;
- if (customEvent.name === "copilotkit_manually_emit_message") {
- this.subscriber.next({
- type: EventType.TEXT_MESSAGE_START,
- role: "assistant",
- messageId: customEvent.value.message_id,
- rawEvent: event
- });
- this.subscriber.next({
- type: EventType.TEXT_MESSAGE_CONTENT,
- messageId: customEvent.value.message_id,
- delta: customEvent.value.message,
- rawEvent: event
- });
- this.subscriber.next({
- type: EventType.TEXT_MESSAGE_END,
- messageId: customEvent.value.message_id,
- rawEvent: event
- });
- return true;
- }
- if (customEvent.name === "copilotkit_manually_emit_tool_call") {
- this.subscriber.next({
- type: EventType.TOOL_CALL_START,
- toolCallId: customEvent.value.id,
- toolCallName: customEvent.value.name,
- parentMessageId: customEvent.value.id,
- rawEvent: event
- });
- this.subscriber.next({
- type: EventType.TOOL_CALL_ARGS,
- toolCallId: customEvent.value.id,
- delta: customEvent.value.args,
- rawEvent: event
- });
- this.subscriber.next({
- type: EventType.TOOL_CALL_END,
- toolCallId: customEvent.value.id,
- rawEvent: event
- });
- return true;
- }
- if (customEvent.name === "copilotkit_manually_emit_intermediate_state") {
- this.activeRun.manuallyEmittedState = customEvent.value;
- this.dispatchEvent({
- type: EventType.STATE_SNAPSHOT,
- snapshot: this.getStateSnapshot({
- values: this.activeRun.manuallyEmittedState
- }),
- rawEvent: event
- });
- return true;
- }
- if (customEvent.name === "copilotkit_exit") {
- this.subscriber.next({
- type: EventType.CUSTOM,
- name: "Exit",
- value: true
- });
- return true;
- }
- }
- const rawEvent = event.rawEvent;
- if (!rawEvent) {
- this.subscriber.next(event);
- return true;
- }
- const isMessageEvent = event.type === EventType.TEXT_MESSAGE_START || event.type === EventType.TEXT_MESSAGE_CONTENT || event.type === EventType.TEXT_MESSAGE_END;
- const isToolEvent = event.type === EventType.TOOL_CALL_START || event.type === EventType.TOOL_CALL_ARGS || event.type === EventType.TOOL_CALL_END;
- if ("copilotkit:emit-tool-calls" in (rawEvent.metadata || {})) {
- if (rawEvent.metadata["copilotkit:emit-tool-calls"] === false && isToolEvent) {
- return false;
- }
- }
- if ("copilotkit:emit-messages" in (rawEvent.metadata || {})) {
- if (rawEvent.metadata["copilotkit:emit-messages"] === false && isMessageEvent) {
- return false;
- }
- }
- this.subscriber.next(event);
- return true;
- }
- // @ts-ignore
- run(input) {
- return super.run(input).pipe(map((processedEvent) => {
- var _a, _b, _c, _d, _e;
- if (processedEvent.type === EventType.RAW) {
- const event = processedEvent.event ?? processedEvent.rawEvent;
- const eventType = event.event;
- const toolCallData = (_c = (_b = (_a = event.data) == null ? void 0 : _a.chunk) == null ? void 0 : _b.tool_call_chunks) == null ? void 0 : _c[0];
- const toolCallUsedToPredictState = (_e = (_d = event.metadata) == null ? void 0 : _d["copilotkit:emit-intermediate-state"]) == null ? void 0 : _e.some((predictStateTool) => predictStateTool.tool === (toolCallData == null ? void 0 : toolCallData.name));
- if (eventType === LangGraphEventTypes.OnChatModelStream && toolCallUsedToPredictState) {
- return {
- type: EventType.CUSTOM,
- name: "PredictState",
- value: event.metadata["copilotkit:emit-intermediate-state"]
- };
- }
- }
- return processedEvent;
- }));
- }
- langGraphDefaultMergeState(state, messages, input) {
- const aguiMergedState = super.langGraphDefaultMergeState(state, messages, input);
- const { tools: returnedTools, "ag-ui": agui } = aguiMergedState;
- const rawCombinedTools = [
- ...returnedTools ?? [],
- ...(agui == null ? void 0 : agui.tools) ?? []
- ];
- const combinedTools = Array.from(new Map(rawCombinedTools.map((t) => [
- (t == null ? void 0 : t.id) ?? (t == null ? void 0 : t.name) ?? (t == null ? void 0 : t.key) ?? JSON.stringify(t),
- t
- ])).values());
- return {
- ...aguiMergedState,
- copilotkit: {
- actions: combinedTools,
- context: (agui == null ? void 0 : agui.context) ?? []
- }
- };
- }
- async getSchemaKeys() {
- const CONSTANT_KEYS = [
- "copilotkit"
- ];
- const schemaKeys = await super.getSchemaKeys();
- return {
- config: schemaKeys.config,
- input: schemaKeys.input ? [
- ...schemaKeys.input,
- ...CONSTANT_KEYS
- ] : null,
- output: schemaKeys.output ? [
- ...schemaKeys.output,
- ...CONSTANT_KEYS
- ] : null,
- context: schemaKeys.context ? [
- ...schemaKeys.context,
- ...CONSTANT_KEYS
- ] : null
- };
- }
- };
- __name(LangGraphAgent, "LangGraphAgent");
  var CopilotRuntime = class {
  params;
  observability;
@@ -3508,9 +3385,10 @@ var CopilotRuntime = class {
  _instance;
  constructor(params) {
  const agents = (params == null ? void 0 : params.agents) ?? {};
+ const endpointAgents = this.assignEndpointsToAgents((params == null ? void 0 : params.remoteEndpoints) ?? []);
  this.runtimeArgs = {
  agents: {
- ...this.assignEndpointsToAgents((params == null ? void 0 : params.remoteEndpoints) ?? []),
+ ...endpointAgents,
  ...agents
  },
  runner: (params == null ? void 0 : params.runner) ?? new InMemoryAgentRunner(),
@@ -3529,25 +3407,13 @@ var CopilotRuntime = class {
  return this._instance;
  }
  assignEndpointsToAgents(endpoints) {
- return endpoints.reduce((acc, endpoint) => {
- if (resolveEndpointType(endpoint) == EndpointType.LangGraphPlatform) {
- let lgAgents = {};
- const lgEndpoint = endpoint;
- lgEndpoint.agents.forEach((agent) => {
- const graphId = agent.assistantId ?? agent.name;
- lgAgents[graphId] = new LangGraphAgent({
- deploymentUrl: lgEndpoint.deploymentUrl,
- langsmithApiKey: lgEndpoint.langsmithApiKey,
- graphId
- });
- });
- return {
- ...acc,
- ...lgAgents
- };
- }
- return acc;
- }, {});
+ let result = {};
+ if (endpoints.some((endpoint) => resolveEndpointType(endpoint) == EndpointType.LangGraphPlatform)) {
+ throw new CopilotKitMisuseError({
+ message: 'LangGraphPlatformEndpoint in remoteEndpoints is deprecated. Please use the "agents" option instead with LangGraphAgent from "@copilotkit/runtime/langgraph". Example: agents: { myAgent: new LangGraphAgent({ deploymentUrl: "...", graphId: "..." }) }'
+ });
+ }
+ return result;
  }
  handleServiceAdapter(serviceAdapter) {
  this.runtimeArgs.agents = Promise.resolve(this.runtimeArgs.agents ?? {}).then(async (agents) => {
@@ -3891,6 +3757,38 @@ _ts_decorate19([
  AgentsResponse = _ts_decorate19([
  ObjectType()
  ], AgentsResponse);
+
+ // src/agents/langgraph/events.ts
+ var LangGraphEventTypes;
+ (function(LangGraphEventTypes2) {
+ LangGraphEventTypes2["OnChainStart"] = "on_chain_start";
+ LangGraphEventTypes2["OnChainStream"] = "on_chain_stream";
+ LangGraphEventTypes2["OnChainEnd"] = "on_chain_end";
+ LangGraphEventTypes2["OnChatModelStart"] = "on_chat_model_start";
+ LangGraphEventTypes2["OnChatModelStream"] = "on_chat_model_stream";
+ LangGraphEventTypes2["OnChatModelEnd"] = "on_chat_model_end";
+ LangGraphEventTypes2["OnToolStart"] = "on_tool_start";
+ LangGraphEventTypes2["OnToolEnd"] = "on_tool_end";
+ LangGraphEventTypes2["OnCopilotKitStateSync"] = "on_copilotkit_state_sync";
+ LangGraphEventTypes2["OnCopilotKitEmitMessage"] = "on_copilotkit_emit_message";
+ LangGraphEventTypes2["OnCopilotKitEmitToolCall"] = "on_copilotkit_emit_tool_call";
+ LangGraphEventTypes2["OnCustomEvent"] = "on_custom_event";
+ LangGraphEventTypes2["OnInterrupt"] = "on_interrupt";
+ LangGraphEventTypes2["OnCopilotKitInterrupt"] = "on_copilotkit_interrupt";
+ LangGraphEventTypes2["OnCopilotKitError"] = "on_copilotkit_error";
+ })(LangGraphEventTypes || (LangGraphEventTypes = {}));
+ var MetaEventNames;
+ (function(MetaEventNames2) {
+ MetaEventNames2["LangGraphInterruptEvent"] = "LangGraphInterruptEvent";
+ MetaEventNames2["CopilotKitLangGraphInterruptEvent"] = "CopilotKitLangGraphInterruptEvent";
+ })(MetaEventNames || (MetaEventNames = {}));
+ var CustomEventNames;
+ (function(CustomEventNames2) {
+ CustomEventNames2["CopilotKitManuallyEmitMessage"] = "copilotkit_manually_emit_message";
+ CustomEventNames2["CopilotKitManuallyEmitToolCall"] = "copilotkit_manually_emit_tool_call";
+ CustomEventNames2["CopilotKitManuallyEmitIntermediateState"] = "copilotkit_manually_emit_intermediate_state";
+ CustomEventNames2["CopilotKitExit"] = "copilotkit_exit";
+ })(CustomEventNames || (CustomEventNames = {}));
  function _ts_decorate20(decorators, target, key, desc) {
  var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
  if (typeof Reflect === "object" && typeof Reflect.decorate === "function")
@@ -4754,12 +4652,59 @@ function readableStreamToNodeStream(webStream) {
  }
  __name(readableStreamToNodeStream, "readableStreamToNodeStream");
  function getFullUrl(req) {
- const path = req.url || "/";
+ const expressPath = req.originalUrl ?? (req.baseUrl ? `${req.baseUrl}${req.url ?? ""}` : void 0);
+ const path = expressPath || req.url || "/";
  const host = req.headers["x-forwarded-host"] || req.headers.host || "localhost";
  const proto = req.headers["x-forwarded-proto"] || (req.socket.encrypted ? "https" : "http");
  return `${proto}://${host}${path}`;
  }
  __name(getFullUrl, "getFullUrl");
+ function toHeaders(rawHeaders) {
+ const headers = new Headers();
+ for (const [key, value] of Object.entries(rawHeaders)) {
+ if (value === void 0)
+ continue;
+ if (Array.isArray(value)) {
+ value.forEach((entry) => headers.append(key, entry));
+ continue;
+ }
+ headers.append(key, value);
+ }
+ return headers;
+ }
+ __name(toHeaders, "toHeaders");
+ function isStreamConsumed(req) {
+ const readableState = req._readableState;
+ return Boolean(req.readableEnded || req.complete || (readableState == null ? void 0 : readableState.ended) || (readableState == null ? void 0 : readableState.endEmitted));
+ }
+ __name(isStreamConsumed, "isStreamConsumed");
+ function synthesizeBodyFromParsedBody(parsedBody, headers) {
+ if (parsedBody === null || parsedBody === void 0) {
+ return {
+ body: null
+ };
+ }
+ if (parsedBody instanceof Buffer || parsedBody instanceof Uint8Array) {
+ return {
+ body: parsedBody
+ };
+ }
+ if (typeof parsedBody === "string") {
+ return {
+ body: parsedBody,
+ contentType: headers.get("content-type") ?? "text/plain"
+ };
+ }
+ return {
+ body: JSON.stringify(parsedBody),
+ contentType: "application/json"
+ };
+ }
+ __name(synthesizeBodyFromParsedBody, "synthesizeBodyFromParsedBody");
+ function isDisturbedOrLockedError(error) {
+ return error instanceof TypeError && typeof error.message === "string" && (error.message.includes("disturbed") || error.message.includes("locked"));
+ }
+ __name(isDisturbedOrLockedError, "isDisturbedOrLockedError");
  function copilotRuntimeNodeHttpEndpoint(options) {
  var _a;
  const commonConfig = getCommonConfig(options);
@@ -4787,14 +4732,59 @@ function copilotRuntimeNodeHttpEndpoint(options) {
  return /* @__PURE__ */ __name(async function handler(req, res) {
  const url = getFullUrl(req);
  const hasBody = req.method !== "GET" && req.method !== "HEAD";
- const request = new Request(url, {
+ const baseHeaders = toHeaders(req.headers);
+ const parsedBody = req.body;
+ const streamConsumed = isStreamConsumed(req) || parsedBody !== void 0;
+ const canStream = hasBody && !streamConsumed;
+ let requestBody = void 0;
+ let useDuplex = false;
+ if (hasBody && canStream) {
+ requestBody = req;
+ useDuplex = true;
+ }
+ if (hasBody && streamConsumed) {
+ if (parsedBody !== void 0) {
+ const synthesized = synthesizeBodyFromParsedBody(parsedBody, baseHeaders);
+ requestBody = synthesized.body ?? void 0;
+ baseHeaders.delete("content-length");
+ if (synthesized.contentType) {
+ baseHeaders.set("content-type", synthesized.contentType);
+ }
+ logger2.debug("Request stream already consumed; using parsed req.body to rebuild request.");
+ } else {
+ logger2.warn("Request stream consumed with no available body; sending empty payload.");
+ requestBody = void 0;
+ }
+ }
+ const buildRequest = /* @__PURE__ */ __name((body, headers, duplex) => new Request(url, {
  method: req.method,
- headers: req.headers,
- body: hasBody ? req : void 0,
- // Node/undici extension
- duplex: hasBody ? "half" : void 0
- });
- const response = await honoApp.fetch(request);
+ headers,
+ body,
+ duplex: duplex ? "half" : void 0
+ }), "buildRequest");
+ let response;
+ try {
+ response = await honoApp.fetch(buildRequest(requestBody, baseHeaders, useDuplex));
+ } catch (error) {
+ if (isDisturbedOrLockedError(error) && hasBody) {
+ logger2.warn("Encountered disturbed/locked request body; rebuilding request using parsed body or empty payload.");
+ const fallbackHeaders = new Headers(baseHeaders);
+ let fallbackBody;
+ if (parsedBody !== void 0) {
+ const synthesized = synthesizeBodyFromParsedBody(parsedBody, fallbackHeaders);
+ fallbackBody = synthesized.body ?? void 0;
+ fallbackHeaders.delete("content-length");
+ if (synthesized.contentType) {
+ fallbackHeaders.set("content-type", synthesized.contentType);
+ }
+ } else {
+ fallbackBody = void 0;
+ }
+ response = await honoApp.fetch(buildRequest(fallbackBody, fallbackHeaders, false));
+ } else {
+ throw error;
+ }
+ }
  res.statusCode = response.status;
  response.headers.forEach((value, key) => {
  res.setHeader(key, value);
@@ -4857,6 +4847,22 @@ function copilotRuntimeNestEndpoint(options) {
  return copilotRuntimeNodeHttpEndpoint(options);
  }
  __name(copilotRuntimeNestEndpoint, "copilotRuntimeNestEndpoint");
+
+ // src/lib/index.ts
+ var LangGraphAgent = class {
+ constructor() {
+ throw new Error("LangGraphAgent import from @copilotkit/runtime is deprecated. Please import it from @copilotkit/runtime/langgraph instead");
+ }
+ };
+ __name(LangGraphAgent, "LangGraphAgent");
+ var LangGraphHttpAgent = class {
+ constructor() {
+ throw new Error("LangGraphHttpAgent import from @copilotkit/runtime is deprecated. Please import it from @copilotkit/runtime/langgraph instead");
+ }
+ };
+ __name(LangGraphHttpAgent, "LangGraphHttpAgent");
+
+ // src/service-adapters/langchain/langserve.ts
  var RemoteChain = class {
  name;
  description;
@@ -4879,6 +4885,7 @@ var RemoteChain = class {
  description: this.description,
  parameters: this.parameters,
  handler: async (args) => {
+ const { RemoteRunnable } = __require("langchain/runnables/remote");
  const runnable = new RemoteRunnable({
  url: this.chainUrl
  });
@@ -5091,7 +5098,9 @@ var AnthropicAdapter = class {
  return "AnthropicAdapter";
  }
  constructor(params) {
- this._anthropic = (params == null ? void 0 : params.anthropic) || new Anthropic({});
+ if (params == null ? void 0 : params.anthropic) {
+ this._anthropic = params.anthropic;
+ }
  if (params == null ? void 0 : params.model) {
  this.model = params.model;
  }
@@ -5099,6 +5108,13 @@ var AnthropicAdapter = class {
  enabled: false
  };
  }
+ ensureAnthropic() {
+ if (!this._anthropic) {
+ const Anthropic = __require("@anthropic-ai/sdk").default;
+ this._anthropic = new Anthropic({});
+ }
+ return this._anthropic;
+ }
  /**
  * Adds cache control to system prompt
  */
@@ -5239,7 +5255,8 @@ var AnthropicAdapter = class {
  },
  stream: true
  };
- const stream = await this.anthropic.messages.create(createParams);
+ const anthropic = this.ensureAnthropic();
+ const stream = await anthropic.messages.create(createParams);
  eventSource.stream(async (eventStream$) => {
  let mode = null;
  let didOutputText = false;
@@ -5384,6 +5401,7 @@ var ExperimentalOllamaAdapter = class {
  }
  async process(request) {
  const { messages, actions, eventSource } = request;
+ const { Ollama } = __require("@langchain/community/llms/ollama");
  const ollama = new Ollama({
  model: this.model
  });
@@ -5411,6 +5429,8 @@ var ExperimentalOllamaAdapter = class {
  }
  };
  __name(ExperimentalOllamaAdapter, "ExperimentalOllamaAdapter");
+
+ // src/service-adapters/bedrock/bedrock-adapter.ts
  var DEFAULT_MODEL6 = "amazon.nova-lite-v1:0";
  var BedrockAdapter = class extends LangChainAdapter {
  provider = "bedrock";
@@ -5418,6 +5438,7 @@ var BedrockAdapter = class extends LangChainAdapter {
  constructor(options) {
  super({
  chainFn: async ({ messages, tools, threadId }) => {
+ const { ChatBedrockConverse } = __require("@langchain/aws");
  this.model = (options == null ? void 0 : options.model) ?? "amazon.nova-lite-v1:0";
  const model = new ChatBedrockConverse({
  model: this.model,
@@ -5446,6 +5467,6 @@ var EmptyAdapter = class {
  __name(EmptyAdapter, "EmptyAdapter");
  var ExperimentalEmptyAdapter = EmptyAdapter;

- export { AnthropicAdapter, BedrockAdapter, CopilotRuntime, CustomEventNames2 as CustomEventNames, EmptyAdapter, ExperimentalEmptyAdapter, ExperimentalOllamaAdapter, GoogleGenerativeAIAdapter, GroqAdapter, GuardrailsValidationFailureResponse, LangChainAdapter, LangGraphAgent, MessageStreamInterruptedResponse, OpenAIAdapter, OpenAIAssistantAdapter, RemoteChain, UnifyAdapter, UnknownErrorResponse, addCustomHeaderPlugin, buildSchema, config, convertMCPToolsToActions, convertServiceAdapterError, copilotKitEndpoint, copilotRuntimeNestEndpoint, copilotRuntimeNextJSAppRouterEndpoint, copilotRuntimeNextJSPagesRouterEndpoint, copilotRuntimeNodeExpressEndpoint, copilotRuntimeNodeHttpEndpoint, createContext, createLogger, extractParametersFromSchema, generateMcpToolInstructions, getCommonConfig, langGraphPlatformEndpoint, readableStreamToNodeStream, resolveEndpointType };
+ export { AnthropicAdapter, BedrockAdapter, CopilotRuntime, EmptyAdapter, ExperimentalEmptyAdapter, ExperimentalOllamaAdapter, GoogleGenerativeAIAdapter, GroqAdapter, GuardrailsValidationFailureResponse, LangChainAdapter, LangGraphAgent, LangGraphHttpAgent, MessageStreamInterruptedResponse, OpenAIAdapter, OpenAIAssistantAdapter, RemoteChain, UnifyAdapter, UnknownErrorResponse, addCustomHeaderPlugin, buildSchema, config, convertMCPToolsToActions, convertServiceAdapterError, copilotKitEndpoint, copilotRuntimeNestEndpoint, copilotRuntimeNextJSAppRouterEndpoint, copilotRuntimeNextJSPagesRouterEndpoint, copilotRuntimeNodeExpressEndpoint, copilotRuntimeNodeHttpEndpoint, createContext, createLogger, extractParametersFromSchema, generateMcpToolInstructions, getCommonConfig, langGraphPlatformEndpoint, readableStreamToNodeStream, resolveEndpointType };
  //# sourceMappingURL=out.js.map
  //# sourceMappingURL=index.mjs.map