@copilotkit/runtime 1.7.0-next.0 → 1.7.0-next.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/README.md +75 -0
- package/dist/{chunk-PH24IU7T.mjs → chunk-D6YNY2XB.mjs} +2 -2
- package/dist/chunk-PTC5JN3P.mjs +1 -0
- package/dist/{chunk-34Y5DNNJ.mjs → chunk-QTRO3GPV.mjs} +120 -5
- package/dist/{chunk-34Y5DNNJ.mjs.map → chunk-QTRO3GPV.mjs.map} +1 -1
- package/dist/{chunk-2BN7NZNC.mjs → chunk-QZ6X33MR.mjs} +2 -2
- package/dist/{chunk-ZYFN76KV.mjs → chunk-RQS3BGAT.mjs} +2 -2
- package/dist/{copilot-runtime-15bfc4f4.d.ts → copilot-runtime-5103c7e7.d.ts} +66 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +120 -4
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +7 -5
- package/dist/index.mjs.map +1 -1
- package/dist/lib/index.d.ts +1 -1
- package/dist/lib/index.js +120 -4
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/index.mjs +7 -5
- package/dist/lib/integrations/index.d.ts +2 -2
- package/dist/lib/integrations/index.js +1 -1
- package/dist/lib/integrations/index.js.map +1 -1
- package/dist/lib/integrations/index.mjs +4 -4
- package/dist/lib/integrations/nest/index.d.ts +1 -1
- package/dist/lib/integrations/nest/index.js +1 -1
- package/dist/lib/integrations/nest/index.js.map +1 -1
- package/dist/lib/integrations/nest/index.mjs +2 -2
- package/dist/lib/integrations/node-express/index.d.ts +1 -1
- package/dist/lib/integrations/node-express/index.js +1 -1
- package/dist/lib/integrations/node-express/index.js.map +1 -1
- package/dist/lib/integrations/node-express/index.mjs +2 -2
- package/dist/lib/integrations/node-http/index.d.ts +1 -1
- package/dist/lib/integrations/node-http/index.js +1 -1
- package/dist/lib/integrations/node-http/index.js.map +1 -1
- package/dist/lib/integrations/node-http/index.mjs +1 -1
- package/package.json +2 -2
- package/src/lib/index.ts +1 -0
- package/src/lib/logger.ts +48 -0
- package/src/lib/runtime/copilot-runtime.ts +162 -2
- package/dist/chunk-DFOKBSIS.mjs +0 -1
- /package/dist/{chunk-PH24IU7T.mjs.map → chunk-D6YNY2XB.mjs.map} +0 -0
- /package/dist/{chunk-DFOKBSIS.mjs.map → chunk-PTC5JN3P.mjs.map} +0 -0
- /package/dist/{chunk-2BN7NZNC.mjs.map → chunk-QZ6X33MR.mjs.map} +0 -0
- /package/dist/{chunk-ZYFN76KV.mjs.map → chunk-RQS3BGAT.mjs.map} +0 -0
package/CHANGELOG.md
CHANGED
package/README.md
CHANGED
@@ -44,3 +44,78 @@
 # Documentation
 
 To get started with CopilotKit, please check out the [documentation](https://docs.copilotkit.ai).
+
+## LangFuse Logging Integration
+
+CopilotKit now supports LangFuse logging integration to help you monitor, analyze, and debug your LLM requests and responses.
+
+### Setup
+
+To enable LangFuse logging, you can configure it when initializing the CopilotRuntime:
+
+```typescript
+import { CopilotRuntime, OpenAIAdapter } from "@copilotkit/runtime";
+import { LangfuseClient } from "langfuse";
+
+// Initialize your LangFuse client
+const langfuse = new LangfuseClient({
+  publicKey: process.env.LANGFUSE_PUBLIC_KEY!,
+  secretKey: process.env.LANGFUSE_SECRET_KEY!,
+  baseUrl: process.env.LANGFUSE_BASE_URL,
+});
+
+// Create a CopilotRuntime with LangFuse logging enabled
+const runtime = new CopilotRuntime({
+  adapter: new OpenAIAdapter({ apiKey: process.env.OPENAI_API_KEY }),
+  logging: {
+    enabled: true,
+    progressive: true, // Set to false for buffered logging
+    logger: {
+      logRequest: (data) => langfuse.trace({ name: "LLM Request", input: data }),
+      logResponse: (data) => langfuse.trace({ name: "LLM Response", output: data }),
+      logError: (errorData) => langfuse.trace({ name: "LLM Error", metadata: errorData }),
+    },
+  },
+});
+```
+
+### Configuration Options
+
+The logging configuration accepts the following options:
+
+- `enabled` (boolean): Enable or disable logging (default: false)
+- `progressive` (boolean): When true, logs each chunk as it's streamed. When false, logs the complete response (default: true)
+- `logger` (object): Contains callback functions for logging:
+  - `logRequest`: Called when an LLM request is made
+  - `logResponse`: Called when an LLM response is received
+  - `logError`: Called when an error occurs during an LLM request
+
+### Custom Logging Integrations
+
+You can integrate with any logging service by implementing the logger interface:
+
+```typescript
+const runtime = new CopilotRuntime({
+  adapter: new OpenAIAdapter({ apiKey: "YOUR_API_KEY" }),
+  logging: {
+    enabled: true,
+    progressive: false,
+    logger: {
+      logRequest: (data) => {
+        // Implement your custom logging logic
+        console.log("LLM Request:", JSON.stringify(data));
+      },
+      logResponse: (data) => {
+        // Implement your custom logging logic
+        console.log("LLM Response:", JSON.stringify(data));
+      },
+      logError: (error) => {
+        // Implement your custom error logging
+        console.error("LLM Error:", error);
+      },
+    },
+  },
+});
+```
+
+This allows you to send your logs to any system or service that you prefer.
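Read together, the options above imply the following logger contract. This is a sketch for orientation only; the type names (`CopilotLogger`, `LoggingConfig`) are assumptions for illustration, not necessarily identifiers the package exports:

```typescript
// Sketch of the logging contract implied by the README section above.
// Type names are illustrative assumptions; check the package's own exports.
interface CopilotLogger {
  logRequest: (data: unknown) => void | Promise<void>;    // fired when an LLM request is made
  logResponse: (data: unknown) => void | Promise<void>;   // fired per chunk (progressive) or once (buffered)
  logError: (errorData: unknown) => void | Promise<void>; // fired when an LLM request fails
}

interface LoggingConfig {
  enabled?: boolean;     // default: false
  progressive?: boolean; // default: true (log each streamed chunk)
  logger: CopilotLogger;
}
```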
package/dist/{chunk-PH24IU7T.mjs → chunk-D6YNY2XB.mjs}
CHANGED

@@ -2,7 +2,7 @@ import {
   getCommonConfig,
   getRuntimeInstanceTelemetryInfo,
   telemetry_client_default
-} from "./chunk-34Y5DNNJ.mjs";
+} from "./chunk-QTRO3GPV.mjs";
 import {
   __name
 } from "./chunk-FHD4JECV.mjs";
@@ -77,4 +77,4 @@ export {
   config,
   copilotRuntimeNextJSPagesRouterEndpoint
 };
-//# sourceMappingURL=chunk-PH24IU7T.mjs.map
+//# sourceMappingURL=chunk-D6YNY2XB.mjs.map
package/dist/chunk-PTC5JN3P.mjs
CHANGED

@@ -0,0 +1 @@
+//# sourceMappingURL=chunk-PTC5JN3P.mjs.map
package/dist/{chunk-34Y5DNNJ.mjs → chunk-QTRO3GPV.mjs}
CHANGED

@@ -39,7 +39,7 @@ var require_package = __commonJS({
       publishConfig: {
         access: "public"
       },
-      version: "1.7.0-next.0",
+      version: "1.7.0-next.1",
       sideEffects: false,
       main: "./dist/index.js",
       module: "./dist/index.mjs",
@@ -2712,6 +2712,7 @@ var CopilotRuntime = class {
   onBeforeRequest;
   onAfterRequest;
   delegateAgentProcessingToServiceAdapter;
+  logging;
   constructor(params) {
     var _a, _b;
     if ((params == null ? void 0 : params.actions) && (params == null ? void 0 : params.remoteEndpoints)) {
@@ -2728,11 +2729,14 @@
     this.onBeforeRequest = (_a = params == null ? void 0 : params.middleware) == null ? void 0 : _a.onBeforeRequest;
     this.onAfterRequest = (_b = params == null ? void 0 : params.middleware) == null ? void 0 : _b.onAfterRequest;
     this.delegateAgentProcessingToServiceAdapter = (params == null ? void 0 : params.delegateAgentProcessingToServiceAdapter) || false;
+    this.logging = params == null ? void 0 : params.logging;
   }
   async processRuntimeRequest(request) {
-    var _a;
+    var _a, _b, _c, _d, _e;
     const { serviceAdapter, messages: rawMessages, actions: clientSideActionsInput, threadId, runId, outputMessagesPromise, graphqlContext, forwardedParameters, url, extensions, agentSession, agentStates } = request;
     const eventSource = new RuntimeEventSource();
+    const requestStartTime = Date.now();
+    const streamedChunks = [];
     try {
       if (agentSession && !this.delegateAgentProcessingToServiceAdapter) {
         return await this.processAgentRequest(request);
@@ -2747,6 +2751,23 @@ please use an LLM adapter instead.`
       const messages = rawMessages.filter((message) => !message.agentStateMessage);
       const inputMessages = convertGqlInputToMessages(messages);
       const serverSideActions = await this.getServerSideActions(request);
+      if ((_a = this.logging) == null ? void 0 : _a.enabled) {
+        try {
+          const requestData = {
+            threadId,
+            runId,
+            model: forwardedParameters == null ? void 0 : forwardedParameters.model,
+            messages: inputMessages,
+            actions: clientSideActionsInput,
+            forwardedParameters,
+            timestamp: requestStartTime,
+            provider: this.detectProvider(serviceAdapter)
+          };
+          await this.logging.logger.logRequest(requestData);
+        } catch (error) {
+          console.error("Error logging LLM request:", error);
+        }
+      }
       const serverSideActionsInput = serverSideActions.map((action) => ({
         name: action.name,
         description: action.description,
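For custom loggers, the payload handed to `logRequest` can be read directly off the `requestData` literal above. A hedged TypeScript shape follows; the type name `LLMRequestData` is a hypothetical label and the field types are inferred from the diff, not verified against the package:

```typescript
// Shape of the object passed to logger.logRequest, read from requestData above.
// The type name is hypothetical; field types are inferred, not verified.
type LLMRequestData = {
  threadId?: string;
  runId?: string;
  model?: string;                // forwardedParameters?.model
  messages: unknown[];           // converted GraphQL input messages
  actions: unknown[];            // client-side action inputs
  forwardedParameters?: unknown;
  timestamp: number;             // requestStartTime, ms since epoch
  provider?: string;             // detectProvider(serviceAdapter); may be undefined
};
```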
@@ -2759,7 +2780,7 @@ please use an LLM adapter instead.`
           (action) => action.available !== ActionInputAvailability.remote
         )
       ]);
-      await ((_a = this.onBeforeRequest) == null ? void 0 : _a.call(this, {
+      await ((_b = this.onBeforeRequest) == null ? void 0 : _b.call(this, {
        threadId,
        runId,
        inputMessages,
@@ -2790,6 +2811,69 @@ please use an LLM adapter instead.`
         });
       }).catch((_error) => {
       });
+      if ((_c = this.logging) == null ? void 0 : _c.enabled) {
+        try {
+          outputMessagesPromise.then((outputMessages) => {
+            var _a2;
+            const responseData = {
+              threadId: result.threadId,
+              runId: result.runId,
+              model: forwardedParameters == null ? void 0 : forwardedParameters.model,
+              // Use collected chunks for progressive mode or outputMessages for regular mode
+              output: this.logging.progressive ? streamedChunks : outputMessages,
+              latency: Date.now() - requestStartTime,
+              timestamp: Date.now(),
+              provider: this.detectProvider(serviceAdapter),
+              // Indicate this is the final response
+              isFinalResponse: true
+            };
+            try {
+              (_a2 = this.logging) == null ? void 0 : _a2.logger.logResponse(responseData);
+            } catch (logError) {
+              console.error("Error logging LLM response:", logError);
+            }
+          }).catch((error) => {
+            console.error("Failed to get output messages for logging:", error);
+          });
+        } catch (error) {
+          console.error("Error setting up logging for LLM response:", error);
+        }
+      }
+      if (((_d = this.logging) == null ? void 0 : _d.enabled) && this.logging.progressive) {
+        const originalStream = eventSource.stream.bind(eventSource);
+        eventSource.stream = async (callback) => {
+          await originalStream(async (eventStream$) => {
+            eventStream$.subscribe({
+              next: (event) => {
+                if (event.type === RuntimeEventTypes.TextMessageContent) {
+                  streamedChunks.push(event.content);
+                  try {
+                    const progressiveData = {
+                      threadId: threadId || "",
+                      runId,
+                      model: forwardedParameters == null ? void 0 : forwardedParameters.model,
+                      output: event.content,
+                      latency: Date.now() - requestStartTime,
+                      timestamp: Date.now(),
+                      provider: this.detectProvider(serviceAdapter),
+                      isProgressiveChunk: true
+                    };
+                    Promise.resolve().then(() => {
+                      var _a2;
+                      (_a2 = this.logging) == null ? void 0 : _a2.logger.logResponse(progressiveData);
+                    }).catch((error) => {
+                      console.error("Error in progressive logging:", error);
+                    });
+                  } catch (error) {
+                    console.error("Error preparing progressive log data:", error);
+                  }
+                }
+              }
+            });
+            await callback(eventStream$);
+          });
+        };
+      }
       return {
         threadId: nonEmptyThreadId,
         runId: result.runId,
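The progressive path above decorates `eventSource.stream` so each `TextMessageContent` event is recorded before events are forwarded unchanged. Below is a minimal, self-contained sketch of that wrap-and-observe pattern; the `StreamSource` type and event shape are invented for illustration (the real runtime passes an RxJS-style observable, not an array):

```typescript
// Minimal sketch of the wrap-and-observe pattern used above.
// StreamSource and the event shape are invented for illustration.
type StreamEvent = { type: string; content?: string };
type StreamSource = {
  stream: (cb: (events: StreamEvent[]) => Promise<void>) => Promise<void>;
};

function tapTextChunks(source: StreamSource, onChunk: (chunk: string) => void): void {
  const originalStream = source.stream.bind(source);
  source.stream = async (callback) => {
    await originalStream(async (events) => {
      for (const event of events) {
        // Observe text chunks without consuming or altering them
        if (event.type === "TextMessageContent" && event.content) onChunk(event.content);
      }
      await callback(events); // forward to the original consumer unchanged
    });
  };
}
```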
@@ -2802,6 +2886,22 @@ please use an LLM adapter instead.`
         extensions: result.extensions
       };
     } catch (error) {
+      if ((_e = this.logging) == null ? void 0 : _e.enabled) {
+        try {
+          const errorData = {
+            threadId,
+            runId,
+            model: forwardedParameters == null ? void 0 : forwardedParameters.model,
+            error: error instanceof Error ? error : String(error),
+            timestamp: Date.now(),
+            latency: Date.now() - requestStartTime,
+            provider: this.detectProvider(serviceAdapter)
+          };
+          await this.logging.logger.logError(errorData);
+        } catch (logError) {
+          console.error("Error logging LLM error:", logError);
+        }
+      }
       if (error instanceof CopilotKitError3) {
         throw error;
       }
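The error path mirrors the request path. A hedged shape for the `errorData` object above (again, the type name is a hypothetical label):

```typescript
// Shape of the object passed to logger.logError, read from errorData above.
// The type name is hypothetical; field types are inferred.
type LLMErrorData = {
  threadId?: string;
  runId?: string;
  model?: string;
  error: Error | string; // Error instance when available, otherwise stringified
  timestamp: number;
  latency: number;       // ms from requestStartTime to the failure
  provider?: string;
};
```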
@@ -2887,7 +2987,6 @@ please use an LLM adapter instead.`
     if (!agentWithEndpoint) {
       throw new Error("Agent not found");
     }
-    const headers = createHeaders(null, graphqlContext);
     if (agentWithEndpoint.endpoint.type === EndpointType.LangGraphPlatform) {
       const propertyHeaders = graphqlContext.properties.authorization ? {
         authorization: `Bearer ${graphqlContext.properties.authorization}`
@@ -3073,6 +3172,21 @@ please use an LLM adapter instead.`
       ...remoteActions
     ];
   }
+  // Add helper method to detect provider
+  detectProvider(serviceAdapter) {
+    const adapterName = serviceAdapter.constructor.name;
+    if (adapterName.includes("OpenAI"))
+      return "openai";
+    if (adapterName.includes("Anthropic"))
+      return "anthropic";
+    if (adapterName.includes("Google"))
+      return "google";
+    if (adapterName.includes("Groq"))
+      return "groq";
+    if (adapterName.includes("LangChain"))
+      return "langchain";
+    return void 0;
+  }
 };
 __name(CopilotRuntime, "CopilotRuntime");
 function flattenToolCallsNoDuplicates(toolsByPriority) {
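Note that `detectProvider` keys off `serviceAdapter.constructor.name`, so the returned tag depends on the adapter's class name surviving to runtime; under minifiers that mangle class names, or for adapters matching none of the patterns, it returns `undefined`. A usage sketch with hypothetical stand-in classes:

```typescript
// Stand-in adapter classes (hypothetical) to show the name-based lookup.
class OpenAIAdapter {}
class MyCustomAdapter {}

function detectProvider(serviceAdapter: object): string | undefined {
  const adapterName = serviceAdapter.constructor.name;
  if (adapterName.includes("OpenAI")) return "openai";
  if (adapterName.includes("Anthropic")) return "anthropic";
  // ...remaining patterns as in the diff above
  return undefined;
}

console.log(detectProvider(new OpenAIAdapter()));   // "openai"
console.log(detectProvider(new MyCustomAdapter())); // undefined
```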
@@ -4320,10 +4434,11 @@ export {
   copilotKitEndpoint,
   langGraphPlatformEndpoint,
   resolveEndpointType,
+  createLogger,
   addCustomHeaderPlugin,
   createContext,
   buildSchema,
   getCommonConfig,
   copilotRuntimeNodeHttpEndpoint
 };
-//# sourceMappingURL=chunk-34Y5DNNJ.mjs.map
+//# sourceMappingURL=chunk-QTRO3GPV.mjs.map