@copilotkit/runtime 1.50.0-beta.2 → 1.50.0-beta.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35) hide show
  1. package/dist/index.d.ts +71 -281
  2. package/dist/index.js +291 -274
  3. package/dist/index.js.map +1 -1
  4. package/dist/index.mjs +284 -263
  5. package/dist/index.mjs.map +1 -1
  6. package/dist/langgraph.d.ts +284 -0
  7. package/dist/langgraph.js +211 -0
  8. package/dist/langgraph.js.map +1 -0
  9. package/dist/langgraph.mjs +206 -0
  10. package/dist/langgraph.mjs.map +1 -0
  11. package/dist/v2/index.d.ts +1 -0
  12. package/dist/v2/index.js +7 -0
  13. package/dist/v2/index.js.map +1 -1
  14. package/dist/v2/index.mjs +1 -0
  15. package/dist/v2/index.mjs.map +1 -1
  16. package/package.json +48 -15
  17. package/src/langgraph.ts +1 -0
  18. package/src/lib/index.ts +41 -1
  19. package/src/lib/integrations/node-http/index.ts +129 -10
  20. package/src/lib/runtime/agent-integrations/{langgraph.agent.ts → langgraph/agent.ts} +5 -30
  21. package/src/lib/runtime/agent-integrations/langgraph/consts.ts +34 -0
  22. package/src/lib/runtime/agent-integrations/langgraph/index.ts +2 -0
  23. package/src/lib/runtime/copilot-runtime.ts +17 -40
  24. package/src/service-adapters/anthropic/anthropic-adapter.ts +16 -3
  25. package/src/service-adapters/bedrock/bedrock-adapter.ts +4 -1
  26. package/src/service-adapters/experimental/ollama/ollama-adapter.ts +2 -1
  27. package/src/service-adapters/google/google-genai-adapter.ts +9 -4
  28. package/src/service-adapters/groq/groq-adapter.ts +16 -3
  29. package/src/service-adapters/langchain/langchain-adapter.ts +5 -3
  30. package/src/service-adapters/langchain/langserve.ts +2 -1
  31. package/src/service-adapters/openai/openai-adapter.ts +17 -3
  32. package/src/service-adapters/openai/openai-assistant-adapter.ts +26 -11
  33. package/src/service-adapters/unify/unify-adapter.ts +3 -1
  34. package/src/v2/index.ts +1 -0
  35. package/tsup.config.ts +5 -2
package/dist/index.js CHANGED
@@ -1,13 +1,9 @@
1
1
  'use strict';
2
2
 
3
3
  require('reflect-metadata');
4
- var OpenAI = require('openai');
5
4
  var shared = require('@copilotkit/shared');
6
5
  var messages = require('@langchain/core/messages');
7
6
  var tools = require('@langchain/core/tools');
8
- var promises = require('@langchain/core/callbacks/promises');
9
- var googleGauth = require('@langchain/google-gauth');
10
- var groqSdk = require('groq-sdk');
11
7
  var typeGraphql = require('type-graphql');
12
8
  var rxjs = require('rxjs');
13
9
  var graphqlScalars = require('graphql-scalars');
@@ -16,25 +12,17 @@ var classTransformer = require('class-transformer');
16
12
  var graphql = require('graphql');
17
13
  var crypto = require('crypto');
18
14
  var runtime = require('@copilotkitnext/runtime');
19
- var client = require('@ag-ui/client');
20
- var langgraph = require('@ag-ui/langgraph');
21
15
  var agent = require('@copilotkitnext/agent');
22
16
  var pluginDeferStream = require('@graphql-yoga/plugin-defer-stream');
23
17
  var createPinoLogger = require('pino');
24
18
  var pretty = require('pino-pretty');
25
19
  var vercel = require('hono/vercel');
26
20
  var stream = require('stream');
27
- var remote = require('langchain/runnables/remote');
28
- var Anthropic = require('@anthropic-ai/sdk');
29
- var ollama = require('@langchain/community/llms/ollama');
30
- var aws = require('@langchain/aws');
31
21
 
32
22
  function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }
33
23
 
34
- var OpenAI__default = /*#__PURE__*/_interopDefault(OpenAI);
35
24
  var createPinoLogger__default = /*#__PURE__*/_interopDefault(createPinoLogger);
36
25
  var pretty__default = /*#__PURE__*/_interopDefault(pretty);
37
- var Anthropic__default = /*#__PURE__*/_interopDefault(Anthropic);
38
26
 
39
27
  var __create = Object.create;
40
28
  var __defProp = Object.defineProperty;
@@ -43,7 +31,14 @@ var __getOwnPropNames = Object.getOwnPropertyNames;
43
31
  var __getProtoOf = Object.getPrototypeOf;
44
32
  var __hasOwnProp = Object.prototype.hasOwnProperty;
45
33
  var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
46
- var __commonJS = (cb, mod) => function __require() {
34
+ var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
35
+ get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
36
+ }) : x)(function(x) {
37
+ if (typeof require !== "undefined")
38
+ return require.apply(this, arguments);
39
+ throw new Error('Dynamic require of "' + x + '" is not supported');
40
+ });
41
+ var __commonJS = (cb, mod) => function __require2() {
47
42
  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
48
43
  };
49
44
  var __copyProps = (to, from, except, desc) => {
@@ -91,6 +86,11 @@ var require_package = __commonJS({
91
86
  import: "./dist/v2/index.mjs",
92
87
  require: "./dist/v2/index.js",
93
88
  types: "./dist/v2/index.d.ts"
89
+ },
90
+ "./langgraph": {
91
+ import: "./dist/langgraph.mjs",
92
+ require: "./dist/langgraph.js",
93
+ types: "./dist/langgraph.d.ts"
94
94
  }
95
95
  },
96
96
  types: "./dist/index.d.ts",
@@ -120,29 +120,21 @@ var require_package = __commonJS({
120
120
  tsconfig: "workspace:*",
121
121
  tsup: "^6.7.0",
122
122
  typescript: "^5.2.3",
123
- vitest: "^3.2.4",
124
- "zod-to-json-schema": "^3.23.5"
123
+ vitest: "^3.2.4"
125
124
  },
126
125
  dependencies: {
127
- "@anthropic-ai/sdk": "^0.57.0",
128
126
  "@copilotkit/shared": "workspace:*",
129
127
  "@copilotkitnext/agent": "0.0.23",
130
128
  "@copilotkitnext/runtime": "0.0.23",
131
129
  "@graphql-yoga/plugin-defer-stream": "^3.3.1",
132
- "@langchain/aws": "^0.1.9",
133
- "@langchain/community": "^0.3.29",
130
+ "@hono/node-server": "^1.13.5",
134
131
  "@langchain/core": "^0.3.38",
135
- "@langchain/google-gauth": "^0.1.0",
136
- "@langchain/langgraph-sdk": "^0.0.70",
137
- "@langchain/openai": "^0.4.2",
138
132
  "@scarf/scarf": "^1.3.0",
139
133
  "class-transformer": "^0.5.1",
140
134
  "class-validator": "^0.14.1",
141
135
  graphql: "^16.8.1",
142
136
  "graphql-scalars": "^1.23.0",
143
137
  "graphql-yoga": "^5.3.1",
144
- "groq-sdk": "^0.5.0",
145
- "@hono/node-server": "^1.13.5",
146
138
  hono: "^4.10.3",
147
139
  langchain: "^0.3.3",
148
140
  openai: "^4.85.1",
@@ -155,15 +147,51 @@ var require_package = __commonJS({
155
147
  zod: "^3.23.3"
156
148
  },
157
149
  peerDependencies: {
158
- "@ag-ui/client": ">=0.0.39",
150
+ "@anthropic-ai/sdk": "^0.57.0",
151
+ "@ag-ui/client": "^0.0.41",
159
152
  "@ag-ui/core": ">=0.0.39",
160
153
  "@ag-ui/encoder": ">=0.0.39",
161
154
  "@ag-ui/langgraph": ">=0.0.18",
162
- "@ag-ui/proto": ">=0.0.39"
155
+ "@ag-ui/proto": ">=0.0.39",
156
+ "@langchain/aws": "^0.1.9",
157
+ "@langchain/community": "^0.3.58",
158
+ "@langchain/google-gauth": "^0.1.0",
159
+ "@langchain/langgraph-sdk": "^0.0.70",
160
+ "@langchain/openai": "^0.4.2",
161
+ "groq-sdk": "^0.5.0",
162
+ langchain: "^0.3.3",
163
+ openai: "^4.85.1"
163
164
  },
164
165
  peerDependenciesMeta: {
166
+ "@anthropic-ai/sdk": {
167
+ optional: true
168
+ },
165
169
  "@ag-ui/langgraph": {
166
170
  optional: true
171
+ },
172
+ "@langchain/aws": {
173
+ optional: true
174
+ },
175
+ "@langchain/community": {
176
+ optional: true
177
+ },
178
+ "@langchain/google-gauth": {
179
+ optional: true
180
+ },
181
+ "@langchain/langgraph-sdk": {
182
+ optional: true
183
+ },
184
+ "@langchain/openai": {
185
+ optional: true
186
+ },
187
+ "groq-sdk": {
188
+ optional: true
189
+ },
190
+ langchain: {
191
+ optional: true
192
+ },
193
+ openai: {
194
+ optional: true
167
195
  }
168
196
  },
169
197
  keywords: [
@@ -415,13 +443,22 @@ var OpenAIAdapter = class {
415
443
  return "OpenAIAdapter";
416
444
  }
417
445
  constructor(params) {
418
- this._openai = (params == null ? void 0 : params.openai) || new OpenAI__default.default({});
446
+ if (params == null ? void 0 : params.openai) {
447
+ this._openai = params.openai;
448
+ }
419
449
  if (params == null ? void 0 : params.model) {
420
450
  this.model = params.model;
421
451
  }
422
452
  this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
423
453
  this.keepSystemRole = (params == null ? void 0 : params.keepSystemRole) ?? false;
424
454
  }
455
+ ensureOpenAI() {
456
+ if (!this._openai) {
457
+ const OpenAI = __require("openai").default;
458
+ this._openai = new OpenAI();
459
+ }
460
+ return this._openai;
461
+ }
425
462
  async process(request) {
426
463
  const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
427
464
  const tools = actions.map(convertActionInputToOpenAITool);
@@ -456,7 +493,8 @@ var OpenAIAdapter = class {
456
493
  };
457
494
  }
458
495
  try {
459
- const stream = this.openai.beta.chat.completions.stream({
496
+ const openai = this.ensureOpenAI();
497
+ const stream = openai.beta.chat.completions.stream({
460
498
  model,
461
499
  stream: true,
462
500
  messages: openaiMessages,
@@ -816,26 +854,32 @@ var LangChainAdapter = class {
816
854
  threadId
817
855
  };
818
856
  } finally {
819
- await promises.awaitAllCallbacks();
857
+ const { awaitAllCallbacks } = __require("@langchain/core/callbacks/promises");
858
+ await awaitAllCallbacks();
820
859
  }
821
860
  }
822
861
  };
823
862
  __name(LangChainAdapter, "LangChainAdapter");
863
+
864
+ // src/service-adapters/google/google-genai-adapter.ts
824
865
  var DEFAULT_MODEL2 = "gemini-1.5-pro";
825
866
  var GoogleGenerativeAIAdapter = class extends LangChainAdapter {
826
867
  provider = "google";
827
868
  model = DEFAULT_MODEL2;
828
869
  constructor(options) {
829
870
  super({
830
- chainFn: async ({ messages: messages$1, tools, threadId }) => {
831
- const filteredMessages = messages$1.filter((message) => {
832
- if (!(message instanceof messages.AIMessage)) {
871
+ chainFn: async ({ messages, tools, threadId }) => {
872
+ const { ChatGoogle } = __require("@langchain/google-gauth");
873
+ const { AIMessage: AIMessage2 } = __require("@langchain/core/messages");
874
+ const filteredMessages = messages.filter((message) => {
875
+ if (!(message instanceof AIMessage2)) {
833
876
  return true;
834
877
  }
835
- return message.content && String(message.content).trim().length > 0 || message.tool_calls && message.tool_calls.length > 0;
878
+ const aiMsg = message;
879
+ return aiMsg.content && String(aiMsg.content).trim().length > 0 || aiMsg.tool_calls && aiMsg.tool_calls.length > 0;
836
880
  });
837
881
  this.model = (options == null ? void 0 : options.model) ?? "gemini-1.5-pro";
838
- const model = new googleGauth.ChatGoogle({
882
+ const model = new ChatGoogle({
839
883
  apiKey: (options == null ? void 0 : options.apiKey) ?? process.env.GOOGLE_API_KEY,
840
884
  modelName: this.model,
841
885
  apiVersion: "v1beta"
@@ -850,8 +894,10 @@ var GoogleGenerativeAIAdapter = class extends LangChainAdapter {
850
894
  }
851
895
  };
852
896
  __name(GoogleGenerativeAIAdapter, "GoogleGenerativeAIAdapter");
897
+
898
+ // src/service-adapters/openai/openai-assistant-adapter.ts
853
899
  var OpenAIAssistantAdapter = class {
854
- openai;
900
+ _openai;
855
901
  codeInterpreterEnabled;
856
902
  assistantId;
857
903
  fileSearchEnabled;
@@ -861,19 +907,29 @@ var OpenAIAssistantAdapter = class {
861
907
  return "OpenAIAssistantAdapter";
862
908
  }
863
909
  constructor(params) {
864
- this.openai = params.openai || new OpenAI__default.default({});
910
+ if (params.openai) {
911
+ this._openai = params.openai;
912
+ }
865
913
  this.codeInterpreterEnabled = params.codeInterpreterEnabled === false || true;
866
914
  this.fileSearchEnabled = params.fileSearchEnabled === false || true;
867
915
  this.assistantId = params.assistantId;
868
916
  this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
869
917
  this.keepSystemRole = (params == null ? void 0 : params.keepSystemRole) ?? false;
870
918
  }
919
+ ensureOpenAI() {
920
+ if (!this._openai) {
921
+ const OpenAI = __require("openai").default;
922
+ this._openai = new OpenAI({});
923
+ }
924
+ return this._openai;
925
+ }
871
926
  async process(request) {
872
927
  var _a, _b;
873
928
  const { messages, actions, eventSource, runId, forwardedParameters } = request;
874
929
  let threadId = (_b = (_a = request.extensions) == null ? void 0 : _a.openaiAssistantAPI) == null ? void 0 : _b.threadId;
930
+ const openai = this.ensureOpenAI();
875
931
  if (!threadId) {
876
- threadId = (await this.openai.beta.threads.create()).id;
932
+ threadId = (await openai.beta.threads.create()).id;
877
933
  }
878
934
  const lastMessage = messages.at(-1);
879
935
  let nextRunId = void 0;
@@ -897,7 +953,8 @@ var OpenAIAssistantAdapter = class {
897
953
  };
898
954
  }
899
955
  async submitToolOutputs(threadId, runId, messages, eventSource) {
900
- let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);
956
+ const openai = this.ensureOpenAI();
957
+ let run = await openai.beta.threads.runs.retrieve(threadId, runId);
901
958
  if (!run.required_action) {
902
959
  throw new Error("No tool outputs required");
903
960
  }
@@ -912,7 +969,7 @@ var OpenAIAssistantAdapter = class {
912
969
  output: message.result
913
970
  };
914
971
  });
915
- const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
972
+ const stream = openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
916
973
  tool_outputs: toolOutputs,
917
974
  ...this.disableParallelToolCalls && {
918
975
  parallel_tool_calls: false
@@ -922,6 +979,7 @@ var OpenAIAssistantAdapter = class {
922
979
  return runId;
923
980
  }
924
981
  async submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters) {
982
+ const openai = this.ensureOpenAI();
925
983
  messages = [
926
984
  ...messages
927
985
  ];
@@ -933,7 +991,7 @@ var OpenAIAssistantAdapter = class {
933
991
  if (userMessage.role !== "user") {
934
992
  throw new Error("No user message found");
935
993
  }
936
- await this.openai.beta.threads.messages.create(threadId, {
994
+ await openai.beta.threads.messages.create(threadId, {
937
995
  role: "user",
938
996
  content: userMessage.content
939
997
  });
@@ -951,7 +1009,7 @@ var OpenAIAssistantAdapter = class {
951
1009
  }
952
1010
  ] : []
953
1011
  ];
954
- let stream = this.openai.beta.threads.runs.stream(threadId, {
1012
+ let stream = openai.beta.threads.runs.stream(threadId, {
955
1013
  assistant_id: this.assistantId,
956
1014
  instructions,
957
1015
  tools,
@@ -1070,7 +1128,8 @@ var UnifyAdapter = class {
1070
1128
  }
1071
1129
  async process(request) {
1072
1130
  const tools = request.actions.map(convertActionInputToOpenAITool);
1073
- const openai = new OpenAI__default.default({
1131
+ const OpenAI = __require("openai").default;
1132
+ const openai = new OpenAI({
1074
1133
  apiKey: this.apiKey,
1075
1134
  baseURL: "https://api.unify.ai/v0/"
1076
1135
  });
@@ -1181,12 +1240,21 @@ var GroqAdapter = class {
1181
1240
  return "GroqAdapter";
1182
1241
  }
1183
1242
  constructor(params) {
1184
- this._groq = (params == null ? void 0 : params.groq) || new groqSdk.Groq({});
1243
+ if (params == null ? void 0 : params.groq) {
1244
+ this._groq = params.groq;
1245
+ }
1185
1246
  if (params == null ? void 0 : params.model) {
1186
1247
  this.model = params.model;
1187
1248
  }
1188
1249
  this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
1189
1250
  }
1251
+ ensureGroq() {
1252
+ if (!this._groq) {
1253
+ const { Groq } = __require("groq-sdk");
1254
+ this._groq = new Groq({});
1255
+ }
1256
+ return this._groq;
1257
+ }
1190
1258
  async process(request) {
1191
1259
  const { threadId, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
1192
1260
  const tools = actions.map(convertActionInputToOpenAITool);
@@ -1205,7 +1273,8 @@ var GroqAdapter = class {
1205
1273
  }
1206
1274
  let stream;
1207
1275
  try {
1208
- stream = await this.groq.chat.completions.create({
1276
+ const groq = this.ensureGroq();
1277
+ stream = await groq.chat.completions.create({
1209
1278
  model,
1210
1279
  stream: true,
1211
1280
  messages: openaiMessages,
@@ -3314,199 +3383,6 @@ When using these tools:
3314
3383
  7. Always check tool responses to determine your next action`;
3315
3384
  }
3316
3385
  __name(generateMcpToolInstructions, "generateMcpToolInstructions");
3317
-
3318
- // src/agents/langgraph/events.ts
3319
- var LangGraphEventTypes;
3320
- (function(LangGraphEventTypes2) {
3321
- LangGraphEventTypes2["OnChainStart"] = "on_chain_start";
3322
- LangGraphEventTypes2["OnChainStream"] = "on_chain_stream";
3323
- LangGraphEventTypes2["OnChainEnd"] = "on_chain_end";
3324
- LangGraphEventTypes2["OnChatModelStart"] = "on_chat_model_start";
3325
- LangGraphEventTypes2["OnChatModelStream"] = "on_chat_model_stream";
3326
- LangGraphEventTypes2["OnChatModelEnd"] = "on_chat_model_end";
3327
- LangGraphEventTypes2["OnToolStart"] = "on_tool_start";
3328
- LangGraphEventTypes2["OnToolEnd"] = "on_tool_end";
3329
- LangGraphEventTypes2["OnCopilotKitStateSync"] = "on_copilotkit_state_sync";
3330
- LangGraphEventTypes2["OnCopilotKitEmitMessage"] = "on_copilotkit_emit_message";
3331
- LangGraphEventTypes2["OnCopilotKitEmitToolCall"] = "on_copilotkit_emit_tool_call";
3332
- LangGraphEventTypes2["OnCustomEvent"] = "on_custom_event";
3333
- LangGraphEventTypes2["OnInterrupt"] = "on_interrupt";
3334
- LangGraphEventTypes2["OnCopilotKitInterrupt"] = "on_copilotkit_interrupt";
3335
- LangGraphEventTypes2["OnCopilotKitError"] = "on_copilotkit_error";
3336
- })(LangGraphEventTypes || (LangGraphEventTypes = {}));
3337
- var MetaEventNames;
3338
- (function(MetaEventNames2) {
3339
- MetaEventNames2["LangGraphInterruptEvent"] = "LangGraphInterruptEvent";
3340
- MetaEventNames2["CopilotKitLangGraphInterruptEvent"] = "CopilotKitLangGraphInterruptEvent";
3341
- })(MetaEventNames || (MetaEventNames = {}));
3342
- var CustomEventNames;
3343
- (function(CustomEventNames3) {
3344
- CustomEventNames3["CopilotKitManuallyEmitMessage"] = "copilotkit_manually_emit_message";
3345
- CustomEventNames3["CopilotKitManuallyEmitToolCall"] = "copilotkit_manually_emit_tool_call";
3346
- CustomEventNames3["CopilotKitManuallyEmitIntermediateState"] = "copilotkit_manually_emit_intermediate_state";
3347
- CustomEventNames3["CopilotKitExit"] = "copilotkit_exit";
3348
- })(CustomEventNames || (CustomEventNames = {}));
3349
- exports.CustomEventNames = void 0;
3350
- (function(CustomEventNames3) {
3351
- CustomEventNames3["CopilotKitManuallyEmitMessage"] = "copilotkit_manually_emit_message";
3352
- CustomEventNames3["CopilotKitManuallyEmitToolCall"] = "copilotkit_manually_emit_tool_call";
3353
- CustomEventNames3["CopilotKitManuallyEmitIntermediateState"] = "copilotkit_manually_emit_intermediate_state";
3354
- CustomEventNames3["CopilotKitExit"] = "copilotkit_exit";
3355
- })(exports.CustomEventNames || (exports.CustomEventNames = {}));
3356
- var LangGraphAgent = class extends langgraph.LangGraphAgent {
3357
- constructor(config2) {
3358
- super(config2);
3359
- }
3360
- // @ts-ignore
3361
- clone() {
3362
- return new LangGraphAgent(this.config);
3363
- }
3364
- dispatchEvent(event) {
3365
- if (event.type === client.EventType.CUSTOM) {
3366
- const customEvent = event;
3367
- if (customEvent.name === "copilotkit_manually_emit_message") {
3368
- this.subscriber.next({
3369
- type: client.EventType.TEXT_MESSAGE_START,
3370
- role: "assistant",
3371
- messageId: customEvent.value.message_id,
3372
- rawEvent: event
3373
- });
3374
- this.subscriber.next({
3375
- type: client.EventType.TEXT_MESSAGE_CONTENT,
3376
- messageId: customEvent.value.message_id,
3377
- delta: customEvent.value.message,
3378
- rawEvent: event
3379
- });
3380
- this.subscriber.next({
3381
- type: client.EventType.TEXT_MESSAGE_END,
3382
- messageId: customEvent.value.message_id,
3383
- rawEvent: event
3384
- });
3385
- return true;
3386
- }
3387
- if (customEvent.name === "copilotkit_manually_emit_tool_call") {
3388
- this.subscriber.next({
3389
- type: client.EventType.TOOL_CALL_START,
3390
- toolCallId: customEvent.value.id,
3391
- toolCallName: customEvent.value.name,
3392
- parentMessageId: customEvent.value.id,
3393
- rawEvent: event
3394
- });
3395
- this.subscriber.next({
3396
- type: client.EventType.TOOL_CALL_ARGS,
3397
- toolCallId: customEvent.value.id,
3398
- delta: customEvent.value.args,
3399
- rawEvent: event
3400
- });
3401
- this.subscriber.next({
3402
- type: client.EventType.TOOL_CALL_END,
3403
- toolCallId: customEvent.value.id,
3404
- rawEvent: event
3405
- });
3406
- return true;
3407
- }
3408
- if (customEvent.name === "copilotkit_manually_emit_intermediate_state") {
3409
- this.activeRun.manuallyEmittedState = customEvent.value;
3410
- this.dispatchEvent({
3411
- type: client.EventType.STATE_SNAPSHOT,
3412
- snapshot: this.getStateSnapshot({
3413
- values: this.activeRun.manuallyEmittedState
3414
- }),
3415
- rawEvent: event
3416
- });
3417
- return true;
3418
- }
3419
- if (customEvent.name === "copilotkit_exit") {
3420
- this.subscriber.next({
3421
- type: client.EventType.CUSTOM,
3422
- name: "Exit",
3423
- value: true
3424
- });
3425
- return true;
3426
- }
3427
- }
3428
- const rawEvent = event.rawEvent;
3429
- if (!rawEvent) {
3430
- this.subscriber.next(event);
3431
- return true;
3432
- }
3433
- const isMessageEvent = event.type === client.EventType.TEXT_MESSAGE_START || event.type === client.EventType.TEXT_MESSAGE_CONTENT || event.type === client.EventType.TEXT_MESSAGE_END;
3434
- const isToolEvent = event.type === client.EventType.TOOL_CALL_START || event.type === client.EventType.TOOL_CALL_ARGS || event.type === client.EventType.TOOL_CALL_END;
3435
- if ("copilotkit:emit-tool-calls" in (rawEvent.metadata || {})) {
3436
- if (rawEvent.metadata["copilotkit:emit-tool-calls"] === false && isToolEvent) {
3437
- return false;
3438
- }
3439
- }
3440
- if ("copilotkit:emit-messages" in (rawEvent.metadata || {})) {
3441
- if (rawEvent.metadata["copilotkit:emit-messages"] === false && isMessageEvent) {
3442
- return false;
3443
- }
3444
- }
3445
- this.subscriber.next(event);
3446
- return true;
3447
- }
3448
- // @ts-ignore
3449
- run(input) {
3450
- return super.run(input).pipe(rxjs.map((processedEvent) => {
3451
- var _a, _b, _c, _d, _e;
3452
- if (processedEvent.type === client.EventType.RAW) {
3453
- const event = processedEvent.event ?? processedEvent.rawEvent;
3454
- const eventType = event.event;
3455
- const toolCallData = (_c = (_b = (_a = event.data) == null ? void 0 : _a.chunk) == null ? void 0 : _b.tool_call_chunks) == null ? void 0 : _c[0];
3456
- const toolCallUsedToPredictState = (_e = (_d = event.metadata) == null ? void 0 : _d["copilotkit:emit-intermediate-state"]) == null ? void 0 : _e.some((predictStateTool) => predictStateTool.tool === (toolCallData == null ? void 0 : toolCallData.name));
3457
- if (eventType === LangGraphEventTypes.OnChatModelStream && toolCallUsedToPredictState) {
3458
- return {
3459
- type: client.EventType.CUSTOM,
3460
- name: "PredictState",
3461
- value: event.metadata["copilotkit:emit-intermediate-state"]
3462
- };
3463
- }
3464
- }
3465
- return processedEvent;
3466
- }));
3467
- }
3468
- langGraphDefaultMergeState(state, messages, input) {
3469
- const aguiMergedState = super.langGraphDefaultMergeState(state, messages, input);
3470
- const { tools: returnedTools, "ag-ui": agui } = aguiMergedState;
3471
- const rawCombinedTools = [
3472
- ...returnedTools ?? [],
3473
- ...(agui == null ? void 0 : agui.tools) ?? []
3474
- ];
3475
- const combinedTools = Array.from(new Map(rawCombinedTools.map((t) => [
3476
- (t == null ? void 0 : t.id) ?? (t == null ? void 0 : t.name) ?? (t == null ? void 0 : t.key) ?? JSON.stringify(t),
3477
- t
3478
- ])).values());
3479
- return {
3480
- ...aguiMergedState,
3481
- copilotkit: {
3482
- actions: combinedTools,
3483
- context: (agui == null ? void 0 : agui.context) ?? []
3484
- }
3485
- };
3486
- }
3487
- async getSchemaKeys() {
3488
- const CONSTANT_KEYS = [
3489
- "copilotkit"
3490
- ];
3491
- const schemaKeys = await super.getSchemaKeys();
3492
- return {
3493
- config: schemaKeys.config,
3494
- input: schemaKeys.input ? [
3495
- ...schemaKeys.input,
3496
- ...CONSTANT_KEYS
3497
- ] : null,
3498
- output: schemaKeys.output ? [
3499
- ...schemaKeys.output,
3500
- ...CONSTANT_KEYS
3501
- ] : null,
3502
- context: schemaKeys.context ? [
3503
- ...schemaKeys.context,
3504
- ...CONSTANT_KEYS
3505
- ] : null
3506
- };
3507
- }
3508
- };
3509
- __name(LangGraphAgent, "LangGraphAgent");
3510
3386
  var CopilotRuntime = class {
3511
3387
  params;
3512
3388
  observability;
@@ -3516,9 +3392,10 @@ var CopilotRuntime = class {
3516
3392
  _instance;
3517
3393
  constructor(params) {
3518
3394
  const agents = (params == null ? void 0 : params.agents) ?? {};
3395
+ const endpointAgents = this.assignEndpointsToAgents((params == null ? void 0 : params.remoteEndpoints) ?? []);
3519
3396
  this.runtimeArgs = {
3520
3397
  agents: {
3521
- ...this.assignEndpointsToAgents((params == null ? void 0 : params.remoteEndpoints) ?? []),
3398
+ ...endpointAgents,
3522
3399
  ...agents
3523
3400
  },
3524
3401
  runner: (params == null ? void 0 : params.runner) ?? new runtime.InMemoryAgentRunner(),
@@ -3537,25 +3414,13 @@ var CopilotRuntime = class {
3537
3414
  return this._instance;
3538
3415
  }
3539
3416
  assignEndpointsToAgents(endpoints) {
3540
- return endpoints.reduce((acc, endpoint) => {
3541
- if (resolveEndpointType(endpoint) == EndpointType.LangGraphPlatform) {
3542
- let lgAgents = {};
3543
- const lgEndpoint = endpoint;
3544
- lgEndpoint.agents.forEach((agent) => {
3545
- const graphId = agent.assistantId ?? agent.name;
3546
- lgAgents[graphId] = new LangGraphAgent({
3547
- deploymentUrl: lgEndpoint.deploymentUrl,
3548
- langsmithApiKey: lgEndpoint.langsmithApiKey,
3549
- graphId
3550
- });
3551
- });
3552
- return {
3553
- ...acc,
3554
- ...lgAgents
3555
- };
3556
- }
3557
- return acc;
3558
- }, {});
3417
+ let result = {};
3418
+ if (endpoints.some((endpoint) => resolveEndpointType(endpoint) == EndpointType.LangGraphPlatform)) {
3419
+ throw new shared.CopilotKitMisuseError({
3420
+ message: 'LangGraphPlatformEndpoint in remoteEndpoints is deprecated. Please use the "agents" option instead with LangGraphAgent from "@copilotkit/runtime/langgraph". Example: agents: { myAgent: new LangGraphAgent({ deploymentUrl: "...", graphId: "..." }) }'
3421
+ });
3422
+ }
3423
+ return result;
3559
3424
  }
3560
3425
  handleServiceAdapter(serviceAdapter) {
3561
3426
  this.runtimeArgs.agents = Promise.resolve(this.runtimeArgs.agents ?? {}).then(async (agents) => {
@@ -3899,6 +3764,38 @@ _ts_decorate19([
3899
3764
  AgentsResponse = _ts_decorate19([
3900
3765
  typeGraphql.ObjectType()
3901
3766
  ], AgentsResponse);
3767
+
3768
+ // src/agents/langgraph/events.ts
3769
+ var LangGraphEventTypes;
3770
+ (function(LangGraphEventTypes2) {
3771
+ LangGraphEventTypes2["OnChainStart"] = "on_chain_start";
3772
+ LangGraphEventTypes2["OnChainStream"] = "on_chain_stream";
3773
+ LangGraphEventTypes2["OnChainEnd"] = "on_chain_end";
3774
+ LangGraphEventTypes2["OnChatModelStart"] = "on_chat_model_start";
3775
+ LangGraphEventTypes2["OnChatModelStream"] = "on_chat_model_stream";
3776
+ LangGraphEventTypes2["OnChatModelEnd"] = "on_chat_model_end";
3777
+ LangGraphEventTypes2["OnToolStart"] = "on_tool_start";
3778
+ LangGraphEventTypes2["OnToolEnd"] = "on_tool_end";
3779
+ LangGraphEventTypes2["OnCopilotKitStateSync"] = "on_copilotkit_state_sync";
3780
+ LangGraphEventTypes2["OnCopilotKitEmitMessage"] = "on_copilotkit_emit_message";
3781
+ LangGraphEventTypes2["OnCopilotKitEmitToolCall"] = "on_copilotkit_emit_tool_call";
3782
+ LangGraphEventTypes2["OnCustomEvent"] = "on_custom_event";
3783
+ LangGraphEventTypes2["OnInterrupt"] = "on_interrupt";
3784
+ LangGraphEventTypes2["OnCopilotKitInterrupt"] = "on_copilotkit_interrupt";
3785
+ LangGraphEventTypes2["OnCopilotKitError"] = "on_copilotkit_error";
3786
+ })(LangGraphEventTypes || (LangGraphEventTypes = {}));
3787
+ var MetaEventNames;
3788
+ (function(MetaEventNames2) {
3789
+ MetaEventNames2["LangGraphInterruptEvent"] = "LangGraphInterruptEvent";
3790
+ MetaEventNames2["CopilotKitLangGraphInterruptEvent"] = "CopilotKitLangGraphInterruptEvent";
3791
+ })(MetaEventNames || (MetaEventNames = {}));
3792
+ var CustomEventNames;
3793
+ (function(CustomEventNames2) {
3794
+ CustomEventNames2["CopilotKitManuallyEmitMessage"] = "copilotkit_manually_emit_message";
3795
+ CustomEventNames2["CopilotKitManuallyEmitToolCall"] = "copilotkit_manually_emit_tool_call";
3796
+ CustomEventNames2["CopilotKitManuallyEmitIntermediateState"] = "copilotkit_manually_emit_intermediate_state";
3797
+ CustomEventNames2["CopilotKitExit"] = "copilotkit_exit";
3798
+ })(CustomEventNames || (CustomEventNames = {}));
3902
3799
  function _ts_decorate20(decorators, target, key, desc) {
3903
3800
  var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
3904
3801
  if (typeof Reflect === "object" && typeof Reflect.decorate === "function")
@@ -4762,12 +4659,59 @@ function readableStreamToNodeStream(webStream) {
4762
4659
  }
4763
4660
  __name(readableStreamToNodeStream, "readableStreamToNodeStream");
4764
4661
  function getFullUrl(req) {
4765
- const path = req.url || "/";
4662
+ const expressPath = req.originalUrl ?? (req.baseUrl ? `${req.baseUrl}${req.url ?? ""}` : void 0);
4663
+ const path = expressPath || req.url || "/";
4766
4664
  const host = req.headers["x-forwarded-host"] || req.headers.host || "localhost";
4767
4665
  const proto = req.headers["x-forwarded-proto"] || (req.socket.encrypted ? "https" : "http");
4768
4666
  return `${proto}://${host}${path}`;
4769
4667
  }
4770
4668
  __name(getFullUrl, "getFullUrl");
4669
+ function toHeaders(rawHeaders) {
4670
+ const headers = new Headers();
4671
+ for (const [key, value] of Object.entries(rawHeaders)) {
4672
+ if (value === void 0)
4673
+ continue;
4674
+ if (Array.isArray(value)) {
4675
+ value.forEach((entry) => headers.append(key, entry));
4676
+ continue;
4677
+ }
4678
+ headers.append(key, value);
4679
+ }
4680
+ return headers;
4681
+ }
4682
+ __name(toHeaders, "toHeaders");
4683
+ function isStreamConsumed(req) {
4684
+ const readableState = req._readableState;
4685
+ return Boolean(req.readableEnded || req.complete || (readableState == null ? void 0 : readableState.ended) || (readableState == null ? void 0 : readableState.endEmitted));
4686
+ }
4687
+ __name(isStreamConsumed, "isStreamConsumed");
4688
+ function synthesizeBodyFromParsedBody(parsedBody, headers) {
4689
+ if (parsedBody === null || parsedBody === void 0) {
4690
+ return {
4691
+ body: null
4692
+ };
4693
+ }
4694
+ if (parsedBody instanceof Buffer || parsedBody instanceof Uint8Array) {
4695
+ return {
4696
+ body: parsedBody
4697
+ };
4698
+ }
4699
+ if (typeof parsedBody === "string") {
4700
+ return {
4701
+ body: parsedBody,
4702
+ contentType: headers.get("content-type") ?? "text/plain"
4703
+ };
4704
+ }
4705
+ return {
4706
+ body: JSON.stringify(parsedBody),
4707
+ contentType: "application/json"
4708
+ };
4709
+ }
4710
+ __name(synthesizeBodyFromParsedBody, "synthesizeBodyFromParsedBody");
4711
+ function isDisturbedOrLockedError(error) {
4712
+ return error instanceof TypeError && typeof error.message === "string" && (error.message.includes("disturbed") || error.message.includes("locked"));
4713
+ }
4714
+ __name(isDisturbedOrLockedError, "isDisturbedOrLockedError");
4771
4715
  function copilotRuntimeNodeHttpEndpoint(options) {
4772
4716
  var _a;
4773
4717
  const commonConfig = getCommonConfig(options);
@@ -4795,14 +4739,59 @@ function copilotRuntimeNodeHttpEndpoint(options) {
4795
4739
  return /* @__PURE__ */ __name(async function handler(req, res) {
4796
4740
  const url = getFullUrl(req);
4797
4741
  const hasBody = req.method !== "GET" && req.method !== "HEAD";
4798
- const request = new Request(url, {
4742
+ const baseHeaders = toHeaders(req.headers);
4743
+ const parsedBody = req.body;
4744
+ const streamConsumed = isStreamConsumed(req) || parsedBody !== void 0;
4745
+ const canStream = hasBody && !streamConsumed;
4746
+ let requestBody = void 0;
4747
+ let useDuplex = false;
4748
+ if (hasBody && canStream) {
4749
+ requestBody = req;
4750
+ useDuplex = true;
4751
+ }
4752
+ if (hasBody && streamConsumed) {
4753
+ if (parsedBody !== void 0) {
4754
+ const synthesized = synthesizeBodyFromParsedBody(parsedBody, baseHeaders);
4755
+ requestBody = synthesized.body ?? void 0;
4756
+ baseHeaders.delete("content-length");
4757
+ if (synthesized.contentType) {
4758
+ baseHeaders.set("content-type", synthesized.contentType);
4759
+ }
4760
+ logger2.debug("Request stream already consumed; using parsed req.body to rebuild request.");
4761
+ } else {
4762
+ logger2.warn("Request stream consumed with no available body; sending empty payload.");
4763
+ requestBody = void 0;
4764
+ }
4765
+ }
4766
+ const buildRequest = /* @__PURE__ */ __name((body, headers, duplex) => new Request(url, {
4799
4767
  method: req.method,
4800
- headers: req.headers,
4801
- body: hasBody ? req : void 0,
4802
- // Node/undici extension
4803
- duplex: hasBody ? "half" : void 0
4804
- });
4805
- const response = await honoApp.fetch(request);
4768
+ headers,
4769
+ body,
4770
+ duplex: duplex ? "half" : void 0
4771
+ }), "buildRequest");
4772
+ let response;
4773
+ try {
4774
+ response = await honoApp.fetch(buildRequest(requestBody, baseHeaders, useDuplex));
4775
+ } catch (error) {
4776
+ if (isDisturbedOrLockedError(error) && hasBody) {
4777
+ logger2.warn("Encountered disturbed/locked request body; rebuilding request using parsed body or empty payload.");
4778
+ const fallbackHeaders = new Headers(baseHeaders);
4779
+ let fallbackBody;
4780
+ if (parsedBody !== void 0) {
4781
+ const synthesized = synthesizeBodyFromParsedBody(parsedBody, fallbackHeaders);
4782
+ fallbackBody = synthesized.body ?? void 0;
4783
+ fallbackHeaders.delete("content-length");
4784
+ if (synthesized.contentType) {
4785
+ fallbackHeaders.set("content-type", synthesized.contentType);
4786
+ }
4787
+ } else {
4788
+ fallbackBody = void 0;
4789
+ }
4790
+ response = await honoApp.fetch(buildRequest(fallbackBody, fallbackHeaders, false));
4791
+ } else {
4792
+ throw error;
4793
+ }
4794
+ }
4806
4795
  res.statusCode = response.status;
4807
4796
  response.headers.forEach((value, key) => {
4808
4797
  res.setHeader(key, value);
@@ -4865,6 +4854,22 @@ function copilotRuntimeNestEndpoint(options) {
4865
4854
  return copilotRuntimeNodeHttpEndpoint(options);
4866
4855
  }
4867
4856
  __name(copilotRuntimeNestEndpoint, "copilotRuntimeNestEndpoint");
4857
+
4858
+ // src/lib/index.ts
4859
// Deprecation stub: the real LangGraphAgent moved to the
// @copilotkit/runtime/langgraph entry point. Any attempt to instantiate
// this re-export fails fast with a migration message.
var LangGraphAgent = class {
  constructor() {
    const deprecationMessage = "LangGraphAgent import from @copilotkit/runtime is deprecated. Please import it from @copilotkit/runtime/langgraph instead";
    throw new Error(deprecationMessage);
  }
};
__name(LangGraphAgent, "LangGraphAgent");
4865
// Deprecation stub: the real LangGraphHttpAgent moved to the
// @copilotkit/runtime/langgraph entry point. Any attempt to instantiate
// this re-export fails fast with a migration message.
var LangGraphHttpAgent = class {
  constructor() {
    const deprecationMessage = "LangGraphHttpAgent import from @copilotkit/runtime is deprecated. Please import it from @copilotkit/runtime/langgraph instead";
    throw new Error(deprecationMessage);
  }
};
__name(LangGraphHttpAgent, "LangGraphHttpAgent");
4871
+
4872
+ // src/service-adapters/langchain/langserve.ts
4868
4873
  var RemoteChain = class {
4869
4874
  name;
4870
4875
  description;
@@ -4887,7 +4892,8 @@ var RemoteChain = class {
4887
4892
  description: this.description,
4888
4893
  parameters: this.parameters,
4889
4894
  handler: async (args) => {
4890
- const runnable = new remote.RemoteRunnable({
4895
+ const { RemoteRunnable } = __require("langchain/runnables/remote");
4896
+ const runnable = new RemoteRunnable({
4891
4897
  url: this.chainUrl
4892
4898
  });
4893
4899
  let input;
@@ -5099,7 +5105,9 @@ var AnthropicAdapter = class {
5099
5105
  return "AnthropicAdapter";
5100
5106
  }
5101
5107
  constructor(params) {
5102
- this._anthropic = (params == null ? void 0 : params.anthropic) || new Anthropic__default.default({});
5108
+ if (params == null ? void 0 : params.anthropic) {
5109
+ this._anthropic = params.anthropic;
5110
+ }
5103
5111
  if (params == null ? void 0 : params.model) {
5104
5112
  this.model = params.model;
5105
5113
  }
@@ -5107,6 +5115,13 @@ var AnthropicAdapter = class {
5107
5115
  enabled: false
5108
5116
  };
5109
5117
  }
5118
+ ensureAnthropic() {
5119
+ if (!this._anthropic) {
5120
+ const Anthropic = __require("@anthropic-ai/sdk").default;
5121
+ this._anthropic = new Anthropic({});
5122
+ }
5123
+ return this._anthropic;
5124
+ }
5110
5125
  /**
5111
5126
  * Adds cache control to system prompt
5112
5127
  */
@@ -5247,7 +5262,8 @@ var AnthropicAdapter = class {
5247
5262
  },
5248
5263
  stream: true
5249
5264
  };
5250
- const stream = await this.anthropic.messages.create(createParams);
5265
+ const anthropic = this.ensureAnthropic();
5266
+ const stream = await anthropic.messages.create(createParams);
5251
5267
  eventSource.stream(async (eventStream$) => {
5252
5268
  let mode = null;
5253
5269
  let didOutputText = false;
@@ -5392,11 +5408,12 @@ var ExperimentalOllamaAdapter = class {
5392
5408
  }
5393
5409
  async process(request) {
5394
5410
  const { messages, actions, eventSource } = request;
5395
- const ollama$1 = new ollama.Ollama({
5411
+ const { Ollama } = __require("@langchain/community/llms/ollama");
5412
+ const ollama = new Ollama({
5396
5413
  model: this.model
5397
5414
  });
5398
5415
  const contents = messages.filter((m) => m.isTextMessage()).map((m) => m.content);
5399
- const _stream = await ollama$1.stream(contents);
5416
+ const _stream = await ollama.stream(contents);
5400
5417
  eventSource.stream(async (eventStream$) => {
5401
5418
  const currentMessageId = shared.randomId();
5402
5419
  eventStream$.sendTextMessageStart({
@@ -5419,6 +5436,8 @@ var ExperimentalOllamaAdapter = class {
5419
5436
  }
5420
5437
  };
5421
5438
  __name(ExperimentalOllamaAdapter, "ExperimentalOllamaAdapter");
5439
+
5440
+ // src/service-adapters/bedrock/bedrock-adapter.ts
5422
5441
  var DEFAULT_MODEL6 = "amazon.nova-lite-v1:0";
5423
5442
  var BedrockAdapter = class extends LangChainAdapter {
5424
5443
  provider = "bedrock";
@@ -5426,8 +5445,9 @@ var BedrockAdapter = class extends LangChainAdapter {
5426
5445
  constructor(options) {
5427
5446
  super({
5428
5447
  chainFn: async ({ messages, tools, threadId }) => {
5448
+ const { ChatBedrockConverse } = __require("@langchain/aws");
5429
5449
  this.model = (options == null ? void 0 : options.model) ?? "amazon.nova-lite-v1:0";
5430
- const model = new aws.ChatBedrockConverse({
5450
+ const model = new ChatBedrockConverse({
5431
5451
  model: this.model,
5432
5452
  region: (options == null ? void 0 : options.region) ?? "us-east-1",
5433
5453
  credentials: (options == null ? void 0 : options.credentials) ? {
@@ -5454,10 +5474,6 @@ var EmptyAdapter = class {
5454
5474
  __name(EmptyAdapter, "EmptyAdapter");
5455
5475
  var ExperimentalEmptyAdapter = EmptyAdapter;
5456
5476
 
5457
- Object.defineProperty(exports, 'LangGraphHttpAgent', {
5458
- enumerable: true,
5459
- get: function () { return langgraph.LangGraphHttpAgent; }
5460
- });
5461
5477
  exports.AnthropicAdapter = AnthropicAdapter;
5462
5478
  exports.BedrockAdapter = BedrockAdapter;
5463
5479
  exports.CopilotRuntime = CopilotRuntime;
@@ -5469,6 +5485,7 @@ exports.GroqAdapter = GroqAdapter;
5469
5485
  exports.GuardrailsValidationFailureResponse = GuardrailsValidationFailureResponse;
5470
5486
  exports.LangChainAdapter = LangChainAdapter;
5471
5487
  exports.LangGraphAgent = LangGraphAgent;
5488
+ exports.LangGraphHttpAgent = LangGraphHttpAgent;
5472
5489
  exports.MessageStreamInterruptedResponse = MessageStreamInterruptedResponse;
5473
5490
  exports.OpenAIAdapter = OpenAIAdapter;
5474
5491
  exports.OpenAIAssistantAdapter = OpenAIAssistantAdapter;