@copilotkit/runtime 1.6.0 → 1.7.0-next.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/CHANGELOG.md +28 -0
  2. package/README.md +75 -0
  3. package/dist/{chunk-WUMAYJP3.mjs → chunk-D6YNY2XB.mjs} +2 -2
  4. package/dist/chunk-PTC5JN3P.mjs +1 -0
  5. package/dist/{chunk-DUW72ZZB.mjs → chunk-QTRO3GPV.mjs} +206 -70
  6. package/dist/chunk-QTRO3GPV.mjs.map +1 -0
  7. package/dist/{chunk-MPI4JZZR.mjs → chunk-QZ6X33MR.mjs} +2 -2
  8. package/dist/{chunk-2RP2NR4F.mjs → chunk-RQS3BGAT.mjs} +2 -2
  9. package/dist/{copilot-runtime-15bfc4f4.d.ts → copilot-runtime-5103c7e7.d.ts} +66 -1
  10. package/dist/index.d.ts +1 -1
  11. package/dist/index.js +206 -69
  12. package/dist/index.js.map +1 -1
  13. package/dist/index.mjs +7 -5
  14. package/dist/index.mjs.map +1 -1
  15. package/dist/lib/index.d.ts +1 -1
  16. package/dist/lib/index.js +206 -69
  17. package/dist/lib/index.js.map +1 -1
  18. package/dist/lib/index.mjs +7 -5
  19. package/dist/lib/integrations/index.d.ts +2 -2
  20. package/dist/lib/integrations/index.js +4 -1
  21. package/dist/lib/integrations/index.js.map +1 -1
  22. package/dist/lib/integrations/index.mjs +4 -4
  23. package/dist/lib/integrations/nest/index.d.ts +1 -1
  24. package/dist/lib/integrations/nest/index.js +4 -1
  25. package/dist/lib/integrations/nest/index.js.map +1 -1
  26. package/dist/lib/integrations/nest/index.mjs +2 -2
  27. package/dist/lib/integrations/node-express/index.d.ts +1 -1
  28. package/dist/lib/integrations/node-express/index.js +4 -1
  29. package/dist/lib/integrations/node-express/index.js.map +1 -1
  30. package/dist/lib/integrations/node-express/index.mjs +2 -2
  31. package/dist/lib/integrations/node-http/index.d.ts +1 -1
  32. package/dist/lib/integrations/node-http/index.js +4 -1
  33. package/dist/lib/integrations/node-http/index.js.map +1 -1
  34. package/dist/lib/integrations/node-http/index.mjs +1 -1
  35. package/package.json +2 -2
  36. package/src/graphql/resolvers/copilot.resolver.ts +4 -0
  37. package/src/lib/index.ts +1 -0
  38. package/src/lib/logger.ts +48 -0
  39. package/src/lib/runtime/__tests__/remote-action-constructors.test.ts +45 -35
  40. package/src/lib/runtime/copilot-runtime.ts +176 -16
  41. package/src/lib/runtime/remote-action-constructors.ts +28 -68
  42. package/src/lib/runtime/remote-actions.ts +5 -5
  43. package/src/lib/streaming.ts +59 -0
  44. package/src/service-adapters/events.ts +3 -3
  45. package/dist/chunk-DFOKBSIS.mjs +0 -1
  46. package/dist/chunk-DUW72ZZB.mjs.map +0 -1
  47. package/dist/{chunk-WUMAYJP3.mjs.map → chunk-D6YNY2XB.mjs.map} +0 -0
  48. package/dist/{chunk-DFOKBSIS.mjs.map → chunk-PTC5JN3P.mjs.map} +0 -0
  49. package/dist/{chunk-MPI4JZZR.mjs.map → chunk-QZ6X33MR.mjs.map} +0 -0
  50. package/dist/{chunk-2RP2NR4F.mjs.map → chunk-RQS3BGAT.mjs.map} +0 -0
package/dist/index.mjs CHANGED
@@ -1,15 +1,15 @@
- import "./chunk-DFOKBSIS.mjs";
+ import "./chunk-PTC5JN3P.mjs";
  import {
  config,
  copilotRuntimeNextJSAppRouterEndpoint,
  copilotRuntimeNextJSPagesRouterEndpoint
- } from "./chunk-WUMAYJP3.mjs";
+ } from "./chunk-D6YNY2XB.mjs";
  import {
  copilotRuntimeNestEndpoint
- } from "./chunk-MPI4JZZR.mjs";
+ } from "./chunk-QZ6X33MR.mjs";
  import {
  copilotRuntimeNodeExpressEndpoint
- } from "./chunk-2RP2NR4F.mjs";
+ } from "./chunk-RQS3BGAT.mjs";
  import {
  CopilotRuntime,
  addCustomHeaderPlugin,
@@ -17,11 +17,12 @@ import {
  copilotKitEndpoint,
  copilotRuntimeNodeHttpEndpoint,
  createContext,
+ createLogger,
  flattenToolCallsNoDuplicates,
  getCommonConfig,
  langGraphPlatformEndpoint,
  resolveEndpointType
- } from "./chunk-DUW72ZZB.mjs";
+ } from "./chunk-QTRO3GPV.mjs";
  import {
  AnthropicAdapter,
  EmptyAdapter,
@@ -72,6 +73,7 @@ export {
  copilotRuntimeNodeExpressEndpoint,
  copilotRuntimeNodeHttpEndpoint,
  createContext,
+ createLogger,
  flattenToolCallsNoDuplicates,
  getCommonConfig,
  langGraphPlatformEndpoint,
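Note on this file: besides the chunk renames, the ESM entry point now re-exports createLogger. A minimal import sketch follows; the options passed to createLogger are assumptions, since its implementation (src/lib/logger.ts, +48 lines) is only listed, not shown, in this diff.

  import { createLogger } from "@copilotkit/runtime";

  // Hypothetical options; check src/lib/logger.ts for the real signature.
  const logger = createLogger({ component: "my-copilot-server", level: "debug" });
  logger.info("runtime booted");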
package/dist/index.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts"],"sourcesContent":["import \"reflect-metadata\";\nexport * from \"./lib\";\nexport * from \"./utils\";\nexport * from \"./service-adapters\";\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,OAAO;","names":[]}
+ {"version":3,"sources":["../src/index.ts"],"sourcesContent":["import \"reflect-metadata\";\nexport * from \"./lib\";\nexport * from \"./utils\";\nexport * from \"./service-adapters\";\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,OAAO;","names":[]}
package/dist/lib/index.d.ts CHANGED
@@ -1,4 +1,4 @@
- export { i as CommonConfig, d as CopilotRequestContextProperties, a as CopilotRuntime, C as CopilotRuntimeConstructorParams, e as CreateCopilotRuntimeServerOptions, G as GraphQLContext, b as addCustomHeaderPlugin, h as buildSchema, c as copilotKitEndpoint, g as createContext, f as flattenToolCallsNoDuplicates, j as getCommonConfig, l as langGraphPlatformEndpoint, r as resolveEndpointType } from '../copilot-runtime-15bfc4f4.js';
+ export { i as CommonConfig, q as CopilotLoggerHooks, s as CopilotLoggingConfig, d as CopilotRequestContextProperties, a as CopilotRuntime, C as CopilotRuntimeConstructorParams, k as CopilotRuntimeLogger, e as CreateCopilotRuntimeServerOptions, G as GraphQLContext, p as LogLLMErrorData, n as LogLLMRequestData, o as LogLLMResponseData, L as LogLevel, b as addCustomHeaderPlugin, h as buildSchema, c as copilotKitEndpoint, g as createContext, m as createLogger, f as flattenToolCallsNoDuplicates, j as getCommonConfig, l as langGraphPlatformEndpoint, r as resolveEndpointType } from '../copilot-runtime-5103c7e7.js';
  export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-fb9aa3ab.js';
  export { CopilotRuntimeServerInstance, config, copilotRuntimeNextJSAppRouterEndpoint, copilotRuntimeNextJSPagesRouterEndpoint } from './integrations/index.js';
  export { copilotRuntimeNodeHttpEndpoint } from './integrations/node-http/index.js';
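The rebuilt lib/index.d.ts now surfaces the logging API: CopilotLoggerHooks, CopilotLoggingConfig, CopilotRuntimeLogger, LogLevel, LogLLMRequestData, LogLLMResponseData, LogLLMErrorData, and createLogger. A hedged reconstruction of the config shape, inferred from how the runtime hunks further down consume it (the published .d.ts is authoritative):

  // Inferred from this.logging usage in copilot-runtime.ts below; payload types are assumptions.
  interface CopilotLoggerHooks {
    logRequest: (data: Record<string, unknown>) => void | Promise<void>;
    logResponse: (data: Record<string, unknown>) => void | Promise<void>;
    logError: (data: Record<string, unknown>) => void | Promise<void>;
  }

  interface CopilotLoggingConfig {
    enabled: boolean;
    progressive?: boolean; // when true, each streamed text chunk is logged as it arrives
    logger: CopilotLoggerHooks;
  }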
package/dist/lib/index.js CHANGED
@@ -44,7 +44,7 @@ var require_package = __commonJS({
  publishConfig: {
  access: "public"
  },
- version: "1.6.0",
+ version: "1.7.0-next.1",
  sideEffects: false,
  main: "./dist/index.js",
  module: "./dist/index.mjs",
@@ -142,6 +142,7 @@ __export(lib_exports, {
  copilotRuntimeNodeExpressEndpoint: () => copilotRuntimeNodeExpressEndpoint,
  copilotRuntimeNodeHttpEndpoint: () => copilotRuntimeNodeHttpEndpoint,
  createContext: () => createContext,
+ createLogger: () => createLogger,
  flattenToolCallsNoDuplicates: () => flattenToolCallsNoDuplicates,
  getCommonConfig: () => getCommonConfig,
  langGraphPlatformEndpoint: () => langGraphPlatformEndpoint,
@@ -2301,7 +2302,7 @@ async function streamEvents(controller, args) {
  let emitIntermediateStateUntilEnd = null;
  let shouldExit = false;
  let externalRunId = null;
- const streamResponse2 = client.runs.stream(threadId, assistantId, payload);
+ const streamResponse = client.runs.stream(threadId, assistantId, payload);
  const emit = /* @__PURE__ */ __name((message) => controller.enqueue(new TextEncoder().encode(message)), "emit");
  let latestStateValues = {};
  let updatedState = state;
@@ -2311,7 +2312,7 @@ async function streamEvents(controller, args) {
  telemetry_client_default.capture("oss.runtime.agent_execution_stream_started", {
  hashedLgcKey: streamInfo.hashedLgcKey
  });
- for await (const chunk of streamResponse2) {
+ for await (const chunk of streamResponse) {
  if (![
  "events",
  "values",
@@ -2726,6 +2727,54 @@ __name(getSchemaKeys, "getSchemaKeys");
 
  // src/lib/runtime/remote-action-constructors.ts
  var import_shared11 = require("@copilotkit/shared");
+
+ // src/lib/streaming.ts
+ async function writeJsonLineResponseToEventStream(response, eventStream$) {
+ const reader = response.getReader();
+ const decoder = new TextDecoder();
+ let buffer = [];
+ function flushBuffer() {
+ const currentBuffer = buffer.join("");
+ if (currentBuffer.trim().length === 0) {
+ return;
+ }
+ const parts = currentBuffer.split("\n");
+ if (parts.length === 0) {
+ return;
+ }
+ const lastPartIsComplete = currentBuffer.endsWith("\n");
+ buffer = [];
+ if (!lastPartIsComplete) {
+ buffer.push(parts.pop());
+ }
+ parts.map((part) => part.trim()).filter((part) => part != "").forEach((part) => {
+ eventStream$.next(JSON.parse(part));
+ });
+ }
+ __name(flushBuffer, "flushBuffer");
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (!done) {
+ buffer.push(decoder.decode(value, {
+ stream: true
+ }));
+ }
+ flushBuffer();
+ if (done) {
+ break;
+ }
+ }
+ } catch (error) {
+ console.error("Error in stream", error);
+ eventStream$.error(error);
+ return;
+ }
+ eventStream$.complete();
+ }
+ __name(writeJsonLineResponseToEventStream, "writeJsonLineResponseToEventStream");
+
+ // src/lib/runtime/remote-action-constructors.ts
  var import_shared12 = require("@copilotkit/shared");
  var import_shared13 = require("@copilotkit/shared");
  function constructLGCRemoteAction({ endpoint, graphqlContext, logger: logger2, messages, agentStates }) {
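The new src/lib/streaming.ts helper shown above consumes the remote endpoint's response as JSON Lines: chunks are decoded and buffered, complete newline-terminated lines are JSON.parsed and pushed into the event stream, and a trailing partial line is carried over until the next chunk completes it. A standalone sketch of that buffering rule (names are illustrative, not from the package):

  // Returns the unparsed tail to prepend to the next chunk.
  function parseJsonLinesChunk(carry: string, chunk: string, emit: (event: unknown) => void): string {
    const text = carry + chunk;
    const parts = text.split("\n");
    // If the chunk did not end with a newline, the last part is still incomplete.
    const tail = text.endsWith("\n") ? "" : parts.pop() ?? "";
    for (const part of parts) {
      const line = part.trim();
      if (line !== "") emit(JSON.parse(line));
    }
    return tail;
  }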
@@ -2735,7 +2784,7 @@ function constructLGCRemoteAction({ endpoint, graphqlContext, logger: logger2, m
  parameters: [],
  handler: async (_args) => {
  },
- langGraphAgentHandler: async ({ name, actionInputsWithoutAgents, threadId, nodeName, additionalMessages = [], metaEvents }) => {
+ remoteAgentHandler: async ({ name, actionInputsWithoutAgents, threadId, nodeName, additionalMessages = [], metaEvents }) => {
  logger2.debug({
  actionName: agent.name
  }, "Executing LangGraph Platform agent");
@@ -2779,7 +2828,7 @@ function constructLGCRemoteAction({ endpoint, graphqlContext, logger: logger2, m
  metaEvents
  });
  const eventSource = new RemoteLangGraphEventSource();
- streamResponse(response, eventSource.eventStream$);
+ writeJsonLineResponseToEventStream(response, eventSource.eventStream$);
  return eventSource.processLangGraphEvents();
  } catch (error) {
  logger2.error({
@@ -2796,6 +2845,11 @@ function constructLGCRemoteAction({ endpoint, graphqlContext, logger: logger2, m
  ];
  }
  __name(constructLGCRemoteAction, "constructLGCRemoteAction");
+ var RemoteAgentType;
+ (function(RemoteAgentType2) {
+ RemoteAgentType2["LangGraph"] = "langgraph";
+ RemoteAgentType2["CrewAI"] = "crewai";
+ })(RemoteAgentType || (RemoteAgentType = {}));
  function constructRemoteActions({ json, url, onBeforeRequest, graphqlContext, logger: logger2, messages, agentStates }) {
  const totalAgents = Array.isArray(json["agents"]) ? json["agents"].length : 0;
  const actions = json["actions"].map((action) => ({
@@ -2865,7 +2919,7 @@ function constructRemoteActions({ json, url, onBeforeRequest, graphqlContext, lo
  parameters: [],
  handler: async (_args) => {
  },
- langGraphAgentHandler: async ({ name, actionInputsWithoutAgents, threadId, nodeName, additionalMessages = [], metaEvents }) => {
+ remoteAgentHandler: async ({ name, actionInputsWithoutAgents, threadId, nodeName, additionalMessages = [], metaEvents }) => {
  logger2.debug({
  actionName: agent.name
  }, "Executing remote agent");
@@ -2925,9 +2979,17 @@ function constructRemoteActions({ json, url, onBeforeRequest, graphqlContext, lo
  isRemoteEndpoint: true
  });
  }
- const eventSource = new RemoteLangGraphEventSource();
- streamResponse(response.body, eventSource.eventStream$);
- return eventSource.processLangGraphEvents();
+ if (agent.type === "langgraph") {
+ const eventSource = new RemoteLangGraphEventSource();
+ writeJsonLineResponseToEventStream(response.body, eventSource.eventStream$);
+ return eventSource.processLangGraphEvents();
+ } else if (agent.type === "crewai") {
+ const eventStream$ = new RuntimeEventSubject();
+ writeJsonLineResponseToEventStream(response.body, eventStream$);
+ return eventStream$;
+ } else {
+ throw new Error("Unsupported agent type");
+ }
  } catch (error) {
  if (error instanceof import_shared11.CopilotKitError) {
  throw error;
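With agents now typed (see the RemoteAgentType enum above), constructRemoteActions picks an event pipeline per agent: "langgraph" responses still go through RemoteLangGraphEventSource.processLangGraphEvents(), "crewai" responses are forwarded as raw runtime events through a RuntimeEventSubject, and any other type throws. A compact sketch of that dispatch (the agent metadata shape is an assumption; only the type values come from this diff):

  type RemoteAgent = { name: string; type: "langgraph" | "crewai" };

  function pickEventPipeline(agent: RemoteAgent): "langgraph-events" | "raw-runtime-events" {
    if (agent.type === "langgraph") return "langgraph-events";   // RemoteLangGraphEventSource
    if (agent.type === "crewai") return "raw-runtime-events";    // RuntimeEventSubject passthrough
    throw new Error("Unsupported agent type");
  }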
@@ -2945,50 +3007,6 @@ function constructRemoteActions({ json, url, onBeforeRequest, graphqlContext, lo
  ];
  }
  __name(constructRemoteActions, "constructRemoteActions");
- async function streamResponse(response, eventStream$) {
- const reader = response.getReader();
- const decoder = new TextDecoder();
- let buffer = [];
- function flushBuffer() {
- const currentBuffer = buffer.join("");
- if (currentBuffer.trim().length === 0) {
- return;
- }
- const parts = currentBuffer.split("\n");
- if (parts.length === 0) {
- return;
- }
- const lastPartIsComplete = currentBuffer.endsWith("\n");
- buffer = [];
- if (!lastPartIsComplete) {
- buffer.push(parts.pop());
- }
- parts.map((part) => part.trim()).filter((part) => part != "").forEach((part) => {
- eventStream$.next(JSON.parse(part));
- });
- }
- __name(flushBuffer, "flushBuffer");
- try {
- while (true) {
- const { done, value } = await reader.read();
- if (!done) {
- buffer.push(decoder.decode(value, {
- stream: true
- }));
- }
- flushBuffer();
- if (done) {
- break;
- }
- }
- } catch (error) {
- console.error("Error in stream", error);
- eventStream$.error(error);
- return;
- }
- eventStream$.complete();
- }
- __name(streamResponse, "streamResponse");
  function createHeaders(onBeforeRequest, graphqlContext) {
  const headers = {
  "Content-Type": "application/json"
@@ -3012,13 +3030,13 @@ var EndpointType;
  EndpointType2["CopilotKit"] = "copilotKit";
  EndpointType2["LangGraphPlatform"] = "langgraph-platform";
  })(EndpointType || (EndpointType = {}));
- function isLangGraphAgentAction(action) {
+ function isRemoteAgentAction(action) {
  if (!action) {
  return false;
  }
- return typeof action.langGraphAgentHandler === "function";
+ return typeof action.remoteAgentHandler === "function";
  }
- __name(isLangGraphAgentAction, "isLangGraphAgentAction");
+ __name(isRemoteAgentAction, "isRemoteAgentAction");
  async function fetchRemoteInfo({ url, onBeforeRequest, graphqlContext, logger: logger2, frontendUrl }) {
  logger2.debug({
  url
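The agent check is duck-typed and simply renamed here: any server-side action exposing a remoteAgentHandler function is treated as a remote agent (previously langGraphAgentHandler). A typed equivalent for reference (type names are illustrative, not from the published .d.ts):

  type RemoteAgentAction = { name: string; remoteAgentHandler: (args: unknown) => Promise<unknown> };

  function isRemoteAgentAction(action: unknown): action is RemoteAgentAction {
    return !!action && typeof (action as RemoteAgentAction).remoteAgentHandler === "function";
  }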
@@ -3522,7 +3540,7 @@ async function executeAction(eventStream$, guardrailsResult$, action, actionArgu
  return;
  }
  }
- if (isLangGraphAgentAction(action)) {
+ if (isRemoteAgentAction(action)) {
  const result = `${action.name} agent started`;
  const agentExecution = (0, import_class_transformer.plainToInstance)(ActionExecutionMessage, {
  id: actionExecutionId,
@@ -3543,7 +3561,7 @@ async function executeAction(eventStream$, guardrailsResult$, action, actionArgu
  actionName: action.name,
  result
  });
- const stream = await action.langGraphAgentHandler({
+ const stream = await action.remoteAgentHandler({
  name: action.name,
  threadId,
  actionInputsWithoutAgents,
@@ -3655,6 +3673,7 @@ var CopilotRuntime = class {
  onBeforeRequest;
  onAfterRequest;
  delegateAgentProcessingToServiceAdapter;
+ logging;
  constructor(params) {
  var _a, _b;
  if ((params == null ? void 0 : params.actions) && (params == null ? void 0 : params.remoteEndpoints)) {
@@ -3671,11 +3690,14 @@ var CopilotRuntime = class {
  this.onBeforeRequest = (_a = params == null ? void 0 : params.middleware) == null ? void 0 : _a.onBeforeRequest;
  this.onAfterRequest = (_b = params == null ? void 0 : params.middleware) == null ? void 0 : _b.onAfterRequest;
  this.delegateAgentProcessingToServiceAdapter = (params == null ? void 0 : params.delegateAgentProcessingToServiceAdapter) || false;
+ this.logging = params == null ? void 0 : params.logging;
  }
  async processRuntimeRequest(request) {
- var _a;
+ var _a, _b, _c, _d, _e;
  const { serviceAdapter, messages: rawMessages, actions: clientSideActionsInput, threadId, runId, outputMessagesPromise, graphqlContext, forwardedParameters, url, extensions, agentSession, agentStates } = request;
  const eventSource = new RuntimeEventSource();
+ const requestStartTime = Date.now();
+ const streamedChunks = [];
  try {
  if (agentSession && !this.delegateAgentProcessingToServiceAdapter) {
  return await this.processAgentRequest(request);
@@ -3690,6 +3712,23 @@ please use an LLM adapter instead.`
  const messages = rawMessages.filter((message) => !message.agentStateMessage);
  const inputMessages = convertGqlInputToMessages(messages);
  const serverSideActions = await this.getServerSideActions(request);
+ if ((_a = this.logging) == null ? void 0 : _a.enabled) {
+ try {
+ const requestData = {
+ threadId,
+ runId,
+ model: forwardedParameters == null ? void 0 : forwardedParameters.model,
+ messages: inputMessages,
+ actions: clientSideActionsInput,
+ forwardedParameters,
+ timestamp: requestStartTime,
+ provider: this.detectProvider(serviceAdapter)
+ };
+ await this.logging.logger.logRequest(requestData);
+ } catch (error) {
+ console.error("Error logging LLM request:", error);
+ }
+ }
  const serverSideActionsInput = serverSideActions.map((action) => ({
  name: action.name,
  description: action.description,
@@ -3702,7 +3741,7 @@ please use an LLM adapter instead.`
  (action) => action.available !== ActionInputAvailability.remote
  )
  ]);
- await ((_a = this.onBeforeRequest) == null ? void 0 : _a.call(this, {
+ await ((_b = this.onBeforeRequest) == null ? void 0 : _b.call(this, {
  threadId,
  runId,
  inputMessages,
@@ -3733,6 +3772,69 @@ please use an LLM adapter instead.`
  });
  }).catch((_error) => {
  });
+ if ((_c = this.logging) == null ? void 0 : _c.enabled) {
+ try {
+ outputMessagesPromise.then((outputMessages) => {
+ var _a2;
+ const responseData = {
+ threadId: result.threadId,
+ runId: result.runId,
+ model: forwardedParameters == null ? void 0 : forwardedParameters.model,
+ // Use collected chunks for progressive mode or outputMessages for regular mode
+ output: this.logging.progressive ? streamedChunks : outputMessages,
+ latency: Date.now() - requestStartTime,
+ timestamp: Date.now(),
+ provider: this.detectProvider(serviceAdapter),
+ // Indicate this is the final response
+ isFinalResponse: true
+ };
+ try {
+ (_a2 = this.logging) == null ? void 0 : _a2.logger.logResponse(responseData);
+ } catch (logError) {
+ console.error("Error logging LLM response:", logError);
+ }
+ }).catch((error) => {
+ console.error("Failed to get output messages for logging:", error);
+ });
+ } catch (error) {
+ console.error("Error setting up logging for LLM response:", error);
+ }
+ }
+ if (((_d = this.logging) == null ? void 0 : _d.enabled) && this.logging.progressive) {
+ const originalStream = eventSource.stream.bind(eventSource);
+ eventSource.stream = async (callback) => {
+ await originalStream(async (eventStream$) => {
+ eventStream$.subscribe({
+ next: (event) => {
+ if (event.type === RuntimeEventTypes.TextMessageContent) {
+ streamedChunks.push(event.content);
+ try {
+ const progressiveData = {
+ threadId: threadId || "",
+ runId,
+ model: forwardedParameters == null ? void 0 : forwardedParameters.model,
+ output: event.content,
+ latency: Date.now() - requestStartTime,
+ timestamp: Date.now(),
+ provider: this.detectProvider(serviceAdapter),
+ isProgressiveChunk: true
+ };
+ Promise.resolve().then(() => {
+ var _a2;
+ (_a2 = this.logging) == null ? void 0 : _a2.logger.logResponse(progressiveData);
+ }).catch((error) => {
+ console.error("Error in progressive logging:", error);
+ });
+ } catch (error) {
+ console.error("Error preparing progressive log data:", error);
+ }
+ }
+ }
+ });
+ await callback(eventStream$);
+ });
+ };
+ }
  return {
  threadId: nonEmptyThreadId,
  runId: result.runId,
@@ -3745,6 +3847,22 @@ please use an LLM adapter instead.`
  extensions: result.extensions
  };
  } catch (error) {
+ if ((_e = this.logging) == null ? void 0 : _e.enabled) {
+ try {
+ const errorData = {
+ threadId,
+ runId,
+ model: forwardedParameters == null ? void 0 : forwardedParameters.model,
+ error: error instanceof Error ? error : String(error),
+ timestamp: Date.now(),
+ latency: Date.now() - requestStartTime,
+ provider: this.detectProvider(serviceAdapter)
+ };
+ await this.logging.logger.logError(errorData);
+ } catch (logError) {
+ console.error("Error logging LLM error:", logError);
+ }
+ }
  if (error instanceof import_shared18.CopilotKitError) {
  throw error;
  }
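Taken together, the logging hunks wire up a request/response/error trio: logRequest fires once server-side actions are resolved, logResponse fires when outputMessagesPromise settles (or per TextMessageContent chunk when progressive is true), and logError fires on failure, each wrapped in try/catch so logging can never break the request. A hedged usage sketch based on the fields these hunks pass to the hooks:

  import { CopilotRuntime } from "@copilotkit/runtime";

  const runtime = new CopilotRuntime({
    logging: {
      enabled: true,
      progressive: true, // also log each streamed text chunk
      logger: {
        logRequest: (data) => console.log("LLM request", data.threadId, data.model),
        logResponse: (data) =>
          console.log(data.isProgressiveChunk ? "chunk" : "final response", data.latency, "ms"),
        logError: (data) => console.error("LLM error", data.error),
      },
    },
  });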
@@ -3754,7 +3872,6 @@ please use an LLM adapter instead.`
  }
  }
  async discoverAgentsFromEndpoints(graphqlContext) {
- const headers = createHeaders(null, graphqlContext);
  const agents = this.remoteEndpointDefinitions.reduce(async (acc, endpoint) => {
  const agents2 = await acc;
  if (endpoint.type === EndpointType.LangGraphPlatform) {
@@ -3780,11 +3897,12 @@ please use an LLM adapter instead.`
  ...endpointAgents
  ];
  }
+ const cpkEndpoint = endpoint;
  const fetchUrl = `${endpoint.url}/info`;
  try {
  const response = await fetch(fetchUrl, {
  method: "POST",
- headers,
+ headers: createHeaders(cpkEndpoint.onBeforeRequest, graphqlContext),
  body: JSON.stringify({
  properties: graphqlContext.properties
  })
@@ -3830,7 +3948,6 @@ please use an LLM adapter instead.`
  if (!agentWithEndpoint) {
  throw new Error("Agent not found");
  }
- const headers = createHeaders(null, graphqlContext);
  if (agentWithEndpoint.endpoint.type === EndpointType.LangGraphPlatform) {
  const propertyHeaders = graphqlContext.properties.authorization ? {
  authorization: `Bearer ${graphqlContext.properties.authorization}`
@@ -3865,11 +3982,12 @@ please use an LLM adapter instead.`
  };
  }
  } else if (agentWithEndpoint.endpoint.type === EndpointType.CopilotKit || !("type" in agentWithEndpoint.endpoint)) {
- const fetchUrl = `${agentWithEndpoint.endpoint.url}/agents/state`;
+ const cpkEndpoint = agentWithEndpoint.endpoint;
+ const fetchUrl = `${cpkEndpoint.url}/agents/state`;
  try {
  const response = await fetch(fetchUrl, {
  method: "POST",
- headers,
+ headers: createHeaders(cpkEndpoint.onBeforeRequest, graphqlContext),
  body: JSON.stringify({
  properties: graphqlContext.properties,
  threadId,
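Both agent discovery (/info) and agent state fetches (/agents/state) now build their headers from the specific CopilotKit endpoint's own onBeforeRequest hook instead of a single createHeaders(null, ...) computed up front, so per-endpoint auth headers reach these requests too. A hedged sketch of an endpoint that benefits from this (the onBeforeRequest signature is an assumption inferred from the createHeaders usage above):

  import { CopilotRuntime, copilotKitEndpoint } from "@copilotkit/runtime";

  const runtime = new CopilotRuntime({
    remoteEndpoints: [
      copilotKitEndpoint({
        url: "http://localhost:8000/copilotkit",
        onBeforeRequest: ({ ctx }) => ({
          headers: { Authorization: `Bearer ${ctx.properties.token ?? ""}` },
        }),
      }),
    ],
  });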
@@ -3914,7 +4032,7 @@ please use an LLM adapter instead.`
  const threadId = threadIdFromRequest ?? agentSession.threadId;
  const serverSideActions = await this.getServerSideActions(request);
  const messages = convertGqlInputToMessages(rawMessages);
- const currentAgent = serverSideActions.find((action) => action.name === agentName && isLangGraphAgentAction(action));
+ const currentAgent = serverSideActions.find((action) => action.name === agentName && isRemoteAgentAction(action));
  if (!currentAgent) {
  throw new import_shared18.CopilotKitAgentDiscoveryError({
  agentName
@@ -3922,8 +4040,8 @@ please use an LLM adapter instead.`
  }
  const availableActionsForCurrentAgent = serverSideActions.filter((action) => (
  // Case 1: Keep all regular (non-agent) actions
- !isLangGraphAgentAction(action) || // Case 2: For agent actions, keep all except self (prevent infinite loops)
- isLangGraphAgentAction(action) && action.name !== agentName
+ !isRemoteAgentAction(action) || // Case 2: For agent actions, keep all except self (prevent infinite loops)
+ isRemoteAgentAction(action) && action.name !== agentName
  )).map((action) => ({
  name: action.name,
  description: action.description,
@@ -3941,7 +4059,7 @@ please use an LLM adapter instead.`
  }));
  try {
  const eventSource = new RuntimeEventSource();
- const stream = await currentAgent.langGraphAgentHandler({
+ const stream = await currentAgent.remoteAgentHandler({
  name: agentName,
  threadId,
  nodeName,
@@ -4015,6 +4133,21 @@ please use an LLM adapter instead.`
  ...remoteActions
  ];
  }
+ // Add helper method to detect provider
+ detectProvider(serviceAdapter) {
+ const adapterName = serviceAdapter.constructor.name;
+ if (adapterName.includes("OpenAI"))
+ return "openai";
+ if (adapterName.includes("Anthropic"))
+ return "anthropic";
+ if (adapterName.includes("Google"))
+ return "google";
+ if (adapterName.includes("Groq"))
+ return "groq";
+ if (adapterName.includes("LangChain"))
+ return "langchain";
+ return void 0;
+ }
  };
  __name(CopilotRuntime, "CopilotRuntime");
  function flattenToolCallsNoDuplicates(toolsByPriority) {
@@ -5034,6 +5167,9 @@ var CopilotResolver = class {
  resolveOutputMessagesPromise = resolve;
  rejectOutputMessagesPromise = reject;
  });
+ if (copilotCloudPublicApiKey) {
+ ctx.properties["copilotCloudPublicApiKey"] = copilotCloudPublicApiKey;
+ }
  logger2.debug("Processing");
  const { eventSource, threadId = (0, import_shared19.randomId)(), runId, serverSideActions, actionInputsWithoutAgents, extensions } = await copilotRuntime.processRuntimeRequest({
  serviceAdapter,
@@ -5822,6 +5958,7 @@ __name(copilotRuntimeNestEndpoint, "copilotRuntimeNestEndpoint");
  copilotRuntimeNodeExpressEndpoint,
  copilotRuntimeNodeHttpEndpoint,
  createContext,
+ createLogger,
  flattenToolCallsNoDuplicates,
  getCommonConfig,
  langGraphPlatformEndpoint,