@copilotkit/runtime 1.7.0-next.1 → 1.7.1-next.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +21 -0
- package/dist/{chunk-RQS3BGAT.mjs → chunk-5PFBYGDX.mjs} +2 -2
- package/dist/{chunk-QTRO3GPV.mjs → chunk-ANTSN7W3.mjs} +136 -20
- package/dist/chunk-ANTSN7W3.mjs.map +1 -0
- package/dist/{chunk-QZ6X33MR.mjs → chunk-LOGYVH7X.mjs} +2 -2
- package/dist/{chunk-D6YNY2XB.mjs → chunk-SJPSYQ4P.mjs} +2 -2
- package/dist/{copilot-runtime-5103c7e7.d.ts → copilot-runtime-eb953402.d.ts} +78 -46
- package/dist/index.d.ts +1 -1
- package/dist/index.js +135 -19
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +4 -4
- package/dist/lib/index.d.ts +1 -1
- package/dist/lib/index.js +135 -19
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/index.mjs +4 -4
- package/dist/lib/integrations/index.d.ts +2 -2
- package/dist/lib/integrations/index.js +2 -2
- package/dist/lib/integrations/index.js.map +1 -1
- package/dist/lib/integrations/index.mjs +4 -4
- package/dist/lib/integrations/nest/index.d.ts +1 -1
- package/dist/lib/integrations/nest/index.js +2 -2
- package/dist/lib/integrations/nest/index.js.map +1 -1
- package/dist/lib/integrations/nest/index.mjs +2 -2
- package/dist/lib/integrations/node-express/index.d.ts +1 -1
- package/dist/lib/integrations/node-express/index.js +2 -2
- package/dist/lib/integrations/node-express/index.js.map +1 -1
- package/dist/lib/integrations/node-express/index.mjs +2 -2
- package/dist/lib/integrations/node-http/index.d.ts +1 -1
- package/dist/lib/integrations/node-http/index.js +2 -2
- package/dist/lib/integrations/node-http/index.js.map +1 -1
- package/dist/lib/integrations/node-http/index.mjs +1 -1
- package/package.json +2 -2
- package/src/graphql/resolvers/copilot.resolver.ts +1 -1
- package/src/lib/logger.ts +0 -48
- package/src/lib/observability.ts +73 -0
- package/src/lib/runtime/copilot-runtime.ts +182 -22
- package/dist/chunk-QTRO3GPV.mjs.map +0 -1
- /package/dist/{chunk-RQS3BGAT.mjs.map → chunk-5PFBYGDX.mjs.map} +0 -0
- /package/dist/{chunk-QZ6X33MR.mjs.map → chunk-LOGYVH7X.mjs.map} +0 -0
- /package/dist/{chunk-D6YNY2XB.mjs.map → chunk-SJPSYQ4P.mjs.map} +0 -0
package/dist/index.mjs
CHANGED
@@ -3,13 +3,13 @@ import {
   config,
   copilotRuntimeNextJSAppRouterEndpoint,
   copilotRuntimeNextJSPagesRouterEndpoint
-} from "./chunk-D6YNY2XB.mjs";
+} from "./chunk-SJPSYQ4P.mjs";
 import {
   copilotRuntimeNestEndpoint
-} from "./chunk-QZ6X33MR.mjs";
+} from "./chunk-LOGYVH7X.mjs";
 import {
   copilotRuntimeNodeExpressEndpoint
-} from "./chunk-RQS3BGAT.mjs";
+} from "./chunk-5PFBYGDX.mjs";
 import {
   CopilotRuntime,
   addCustomHeaderPlugin,
@@ -22,7 +22,7 @@ import {
   getCommonConfig,
   langGraphPlatformEndpoint,
   resolveEndpointType
-} from "./chunk-QTRO3GPV.mjs";
+} from "./chunk-ANTSN7W3.mjs";
 import {
   AnthropicAdapter,
   EmptyAdapter,
package/dist/lib/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-export { i as CommonConfig,
+export { i as CommonConfig, d as CopilotRequestContextProperties, a as CopilotRuntime, C as CopilotRuntimeConstructorParams, k as CopilotRuntimeLogger, e as CreateCopilotRuntimeServerOptions, G as GraphQLContext, L as LogLevel, b as addCustomHeaderPlugin, h as buildSchema, c as copilotKitEndpoint, g as createContext, m as createLogger, f as flattenToolCallsNoDuplicates, j as getCommonConfig, l as langGraphPlatformEndpoint, r as resolveEndpointType } from '../copilot-runtime-eb953402.js';
 export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-fb9aa3ab.js';
 export { CopilotRuntimeServerInstance, config, copilotRuntimeNextJSAppRouterEndpoint, copilotRuntimeNextJSPagesRouterEndpoint } from './integrations/index.js';
 export { copilotRuntimeNodeHttpEndpoint } from './integrations/node-http/index.js';
package/dist/lib/index.js
CHANGED
@@ -44,7 +44,7 @@ var require_package = __commonJS({
       publishConfig: {
         access: "public"
       },
-      version: "1.7.0-next.1",
+      version: "1.7.1-next.0",
       sideEffects: false,
       main: "./dist/index.js",
       module: "./dist/index.mjs",
@@ -3673,7 +3673,7 @@ var CopilotRuntime = class {
   onBeforeRequest;
   onAfterRequest;
   delegateAgentProcessingToServiceAdapter;
-
+  observability;
   constructor(params) {
     var _a, _b;
     if ((params == null ? void 0 : params.actions) && (params == null ? void 0 : params.remoteEndpoints)) {
@@ -3690,11 +3690,11 @@
     this.onBeforeRequest = (_a = params == null ? void 0 : params.middleware) == null ? void 0 : _a.onBeforeRequest;
    this.onAfterRequest = (_b = params == null ? void 0 : params.middleware) == null ? void 0 : _b.onAfterRequest;
    this.delegateAgentProcessingToServiceAdapter = (params == null ? void 0 : params.delegateAgentProcessingToServiceAdapter) || false;
-    this.
+    this.observability = params == null ? void 0 : params.observability_c;
  }
  async processRuntimeRequest(request) {
    var _a, _b, _c, _d, _e;
-    const { serviceAdapter, messages: rawMessages, actions: clientSideActionsInput, threadId, runId, outputMessagesPromise, graphqlContext, forwardedParameters, url, extensions, agentSession, agentStates } = request;
+    const { serviceAdapter, messages: rawMessages, actions: clientSideActionsInput, threadId, runId, outputMessagesPromise, graphqlContext, forwardedParameters, url, extensions, agentSession, agentStates, publicApiKey } = request;
    const eventSource = new RuntimeEventSource();
    const requestStartTime = Date.now();
    const streamedChunks = [];
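The hunk above swaps the runtime's removed logging field for a new observability field sourced from params.observability_c, and the hunks that follow only invoke its hooks when the request carries a publicApiKey. A minimal sketch of how that option might be wired up follows; the config shape (enabled, progressive, and the hooks handleRequest / handleResponse / handleError) is inferred from the bundled output in this diff and the newly added src/lib/observability.ts, not from published docs, so treat the exact field names and types as assumptions.

import { CopilotRuntime } from "@copilotkit/runtime";

// Hypothetical observability config; the enabled/progressive flags and the
// handleRequest/handleResponse/handleError hook names appear in the bundled
// code in this diff, but the exported TypeScript types may differ.
const observability_c = {
  enabled: true,
  progressive: true, // stream one handleResponse call per text chunk instead of a single final payload
  hooks: {
    handleRequest: async (data: any) => {
      console.log("request", data.threadId, data.model, data.provider);
    },
    handleResponse: (data: any) => {
      console.log(data.isProgressiveChunk ? "chunk" : "final", data.output);
    },
    handleError: async (data: any) => {
      console.error("error", data.error);
    },
  },
};

// Hooks are skipped unless the incoming request resolves a publicApiKey
// (see the CopilotResolver change at the end of this diff).
const runtime = new CopilotRuntime({ observability_c });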
@@ -3712,7 +3712,7 @@ please use an LLM adapter instead.`
     const messages = rawMessages.filter((message) => !message.agentStateMessage);
     const inputMessages = convertGqlInputToMessages(messages);
     const serverSideActions = await this.getServerSideActions(request);
-    if ((_a = this.
+    if (((_a = this.observability) == null ? void 0 : _a.enabled) && publicApiKey) {
       try {
         const requestData = {
           threadId,
@@ -3724,7 +3724,7 @@
           timestamp: requestStartTime,
           provider: this.detectProvider(serviceAdapter)
         };
-        await this.
+        await this.observability.hooks.handleRequest(requestData);
       } catch (error) {
         console.error("Error logging LLM request:", error);
       }
@@ -3772,16 +3772,15 @@
       });
     }).catch((_error) => {
     });
-    if ((_c = this.
+    if (((_c = this.observability) == null ? void 0 : _c.enabled) && publicApiKey) {
       try {
         outputMessagesPromise.then((outputMessages) => {
-          var _a2;
           const responseData = {
             threadId: result.threadId,
             runId: result.runId,
             model: forwardedParameters == null ? void 0 : forwardedParameters.model,
             // Use collected chunks for progressive mode or outputMessages for regular mode
-            output: this.
+            output: this.observability.progressive ? streamedChunks : outputMessages,
             latency: Date.now() - requestStartTime,
             timestamp: Date.now(),
             provider: this.detectProvider(serviceAdapter),
@@ -3789,7 +3788,7 @@
             isFinalResponse: true
           };
           try {
-
+            this.observability.hooks.handleResponse(responseData);
           } catch (logError) {
             console.error("Error logging LLM response:", logError);
           }
@@ -3800,7 +3799,7 @@
        console.error("Error setting up logging for LLM response:", error);
      }
    }
-    if (((_d = this.
+    if (((_d = this.observability) == null ? void 0 : _d.enabled) && this.observability.progressive && publicApiKey) {
      const originalStream = eventSource.stream.bind(eventSource);
      eventSource.stream = async (callback) => {
        await originalStream(async (eventStream$) => {
@@ -3820,8 +3819,7 @@
             isProgressiveChunk: true
           };
           Promise.resolve().then(() => {
-
-            (_a2 = this.logging) == null ? void 0 : _a2.logger.logResponse(progressiveData);
+            this.observability.hooks.handleResponse(progressiveData);
           }).catch((error) => {
             console.error("Error in progressive logging:", error);
           });
@@ -3847,7 +3845,7 @@
       extensions: result.extensions
     };
   } catch (error) {
-    if ((_e = this.
+    if (((_e = this.observability) == null ? void 0 : _e.enabled) && publicApiKey) {
      try {
        const errorData = {
          threadId,
@@ -3858,7 +3856,7 @@
          latency: Date.now() - requestStartTime,
          provider: this.detectProvider(serviceAdapter)
        };
-        await this.
+        await this.observability.hooks.handleError(errorData);
      } catch (logError) {
        console.error("Error logging LLM error:", logError);
      }
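Across the hunks above, every response payload handed to the hooks carries either isProgressiveChunk: true (one streamed text chunk, when progressive mode is on) or isFinalResponse: true (the collected chunks or the full output messages), alongside latency, timestamp, model, and provider. Below is a hedged sketch of a handleResponse hook consuming those fields; the field names mirror the payloads constructed in this diff, not a documented interface.

// Hypothetical handleResponse hook branching on the flags seen above.
function handleResponse(data: any): void {
  if (data.isProgressiveChunk) {
    process.stdout.write(String(data.output)); // a single streamed text chunk
    return;
  }
  if (data.isFinalResponse) {
    const size = Array.isArray(data.output) ? `${data.output.length} item(s)` : typeof data.output;
    console.log(`thread ${data.threadId} finished in ${data.latency}ms (${size})`);
  }
}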
@@ -4026,9 +4024,11 @@ please use an LLM adapter instead.`
     }
   }
   async processAgentRequest(request) {
-    var _a;
-    const { messages: rawMessages, outputMessagesPromise, graphqlContext, agentSession, threadId: threadIdFromRequest, metaEvents } = request;
+    var _a, _b, _c, _d, _e;
+    const { messages: rawMessages, outputMessagesPromise, graphqlContext, agentSession, threadId: threadIdFromRequest, metaEvents, publicApiKey, forwardedParameters } = request;
     const { agentName, nodeName } = agentSession;
+    const requestStartTime = Date.now();
+    const streamedChunks = [];
     const threadId = threadIdFromRequest ?? agentSession.threadId;
     const serverSideActions = await this.getServerSideActions(request);
     const messages = convertGqlInputToMessages(rawMessages);
@@ -4051,7 +4051,26 @@
       ...availableActionsForCurrentAgent,
       ...request.actions
     ]);
-
+    if (((_a = this.observability) == null ? void 0 : _a.enabled) && publicApiKey) {
+      try {
+        const requestData = {
+          threadId,
+          runId: void 0,
+          model: forwardedParameters == null ? void 0 : forwardedParameters.model,
+          messages,
+          actions: allAvailableActions,
+          forwardedParameters,
+          timestamp: requestStartTime,
+          provider: "agent",
+          agentName,
+          nodeName
+        };
+        await this.observability.hooks.handleRequest(requestData);
+      } catch (error) {
+        console.error("Error logging agent request:", error);
+      }
+    }
+    await ((_b = this.onBeforeRequest) == null ? void 0 : _b.call(this, {
       threadId,
       runId: void 0,
       inputMessages: messages,
@@ -4066,17 +4085,96 @@
       metaEvents,
       actionInputsWithoutAgents: allAvailableActions
     });
+    if (((_c = this.observability) == null ? void 0 : _c.enabled) && this.observability.progressive && publicApiKey) {
+      const originalStream = eventSource.stream.bind(eventSource);
+      eventSource.stream = async (callback) => {
+        await originalStream(async (eventStream$) => {
+          eventStream$.subscribe({
+            next: (event) => {
+              if (event.type === RuntimeEventTypes.TextMessageContent) {
+                streamedChunks.push(event.content);
+                try {
+                  const progressiveData = {
+                    threadId: threadId || "",
+                    runId: void 0,
+                    model: forwardedParameters == null ? void 0 : forwardedParameters.model,
+                    output: event.content,
+                    latency: Date.now() - requestStartTime,
+                    timestamp: Date.now(),
+                    provider: "agent",
+                    isProgressiveChunk: true,
+                    agentName,
+                    nodeName
+                  };
+                  Promise.resolve().then(() => {
+                    this.observability.hooks.handleResponse(progressiveData);
+                  }).catch((error) => {
+                    console.error("Error in progressive agent logging:", error);
+                  });
+                } catch (error) {
+                  console.error("Error preparing progressive agent log data:", error);
+                }
+              }
+            }
+          });
+          await callback(eventStream$);
+        });
+      };
+    }
     eventSource.stream(async (eventStream$) => {
       (0, import_rxjs3.from)(stream).subscribe({
         next: (event) => eventStream$.next(event),
         error: (err) => {
+          var _a2;
           console.error("Error in stream", err);
+          if (((_a2 = this.observability) == null ? void 0 : _a2.enabled) && publicApiKey) {
+            try {
+              const errorData = {
+                threadId,
+                runId: void 0,
+                model: forwardedParameters == null ? void 0 : forwardedParameters.model,
+                error: err instanceof Error ? err : String(err),
+                timestamp: Date.now(),
+                latency: Date.now() - requestStartTime,
+                provider: "agent",
+                agentName,
+                nodeName
+              };
+              this.observability.hooks.handleError(errorData);
+            } catch (logError) {
+              console.error("Error logging agent error:", logError);
+            }
+          }
           eventStream$.error(err);
           eventStream$.complete();
         },
         complete: () => eventStream$.complete()
       });
     });
+    if (((_d = this.observability) == null ? void 0 : _d.enabled) && publicApiKey) {
+      outputMessagesPromise.then((outputMessages) => {
+        const responseData = {
+          threadId,
+          runId: void 0,
+          model: forwardedParameters == null ? void 0 : forwardedParameters.model,
+          // Use collected chunks for progressive mode or outputMessages for regular mode
+          output: this.observability.progressive ? streamedChunks : outputMessages,
+          latency: Date.now() - requestStartTime,
+          timestamp: Date.now(),
+          provider: "agent",
+          isFinalResponse: true,
+          agentName,
+          nodeName
+        };
+        try {
+          this.observability.hooks.handleResponse(responseData);
+        } catch (logError) {
+          console.error("Error logging agent response:", logError);
+        }
+      }).catch((error) => {
+        console.error("Failed to get output messages for agent logging:", error);
+      });
+    }
     outputMessagesPromise.then((outputMessages) => {
       var _a2;
       (_a2 = this.onAfterRequest) == null ? void 0 : _a2.call(this, {
@@ -4096,6 +4194,24 @@
       actionInputsWithoutAgents: allAvailableActions
     };
   } catch (error) {
+    if (((_e = this.observability) == null ? void 0 : _e.enabled) && publicApiKey) {
+      try {
+        const errorData = {
+          threadId,
+          runId: void 0,
+          model: forwardedParameters == null ? void 0 : forwardedParameters.model,
+          error: error instanceof Error ? error : String(error),
+          timestamp: Date.now(),
+          latency: Date.now() - requestStartTime,
+          provider: "agent",
+          agentName,
+          nodeName
+        };
+        await this.observability.hooks.handleError(errorData);
+      } catch (logError) {
+        console.error("Error logging agent error:", logError);
+      }
+    }
     console.error("Error getting response:", error);
     throw error;
   }
|
|
|
5177
5293
|
actions: data.frontend.actions.filter((action) => action.available !== ActionInputAvailability.disabled),
|
|
5178
5294
|
threadId: data.threadId,
|
|
5179
5295
|
runId: data.runId,
|
|
5180
|
-
publicApiKey:
|
|
5296
|
+
publicApiKey: copilotCloudPublicApiKey,
|
|
5181
5297
|
outputMessagesPromise,
|
|
5182
5298
|
graphqlContext: ctx,
|
|
5183
5299
|
forwardedParameters: data.forwardedParameters,
|