@copilotkit/runtime 1.7.0 → 1.7.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/dist/{chunk-7X5R75DH.mjs → chunk-I32ZVLQF.mjs} +2 -2
- package/dist/{chunk-66YYMUU2.mjs → chunk-IJGSJSXO.mjs} +136 -20
- package/dist/chunk-IJGSJSXO.mjs.map +1 -0
- package/dist/{chunk-BYQ7M4HT.mjs → chunk-UIQ6VQVO.mjs} +2 -2
- package/dist/{chunk-KFTZCVAE.mjs → chunk-ZRT6C4KZ.mjs} +2 -2
- package/dist/{copilot-runtime-5103c7e7.d.ts → copilot-runtime-eb953402.d.ts} +78 -46
- package/dist/index.d.ts +1 -1
- package/dist/index.js +135 -19
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +4 -4
- package/dist/lib/index.d.ts +1 -1
- package/dist/lib/index.js +135 -19
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/index.mjs +4 -4
- package/dist/lib/integrations/index.d.ts +2 -2
- package/dist/lib/integrations/index.js +2 -2
- package/dist/lib/integrations/index.js.map +1 -1
- package/dist/lib/integrations/index.mjs +4 -4
- package/dist/lib/integrations/nest/index.d.ts +1 -1
- package/dist/lib/integrations/nest/index.js +2 -2
- package/dist/lib/integrations/nest/index.js.map +1 -1
- package/dist/lib/integrations/nest/index.mjs +2 -2
- package/dist/lib/integrations/node-express/index.d.ts +1 -1
- package/dist/lib/integrations/node-express/index.js +2 -2
- package/dist/lib/integrations/node-express/index.js.map +1 -1
- package/dist/lib/integrations/node-express/index.mjs +2 -2
- package/dist/lib/integrations/node-http/index.d.ts +1 -1
- package/dist/lib/integrations/node-http/index.js +2 -2
- package/dist/lib/integrations/node-http/index.js.map +1 -1
- package/dist/lib/integrations/node-http/index.mjs +1 -1
- package/package.json +2 -2
- package/src/graphql/resolvers/copilot.resolver.ts +1 -1
- package/src/lib/logger.ts +0 -48
- package/src/lib/observability.ts +73 -0
- package/src/lib/runtime/copilot-runtime.ts +182 -22
- package/dist/chunk-66YYMUU2.mjs.map +0 -1
- /package/dist/{chunk-7X5R75DH.mjs.map → chunk-I32ZVLQF.mjs.map} +0 -0
- /package/dist/{chunk-BYQ7M4HT.mjs.map → chunk-UIQ6VQVO.mjs.map} +0 -0
- /package/dist/{chunk-KFTZCVAE.mjs.map → chunk-ZRT6C4KZ.mjs.map} +0 -0
package/package.json
CHANGED
@@ -9,7 +9,7 @@
   "publishConfig": {
     "access": "public"
   },
-  "version": "1.7.0",
+  "version": "1.7.1",
   "sideEffects": false,
   "main": "./dist/index.js",
   "module": "./dist/index.mjs",
@@ -59,7 +59,7 @@
     "rxjs": "^7.8.1",
     "type-graphql": "2.0.0-rc.1",
     "zod": "^3.23.3",
-    "@copilotkit/shared": "1.7.0"
+    "@copilotkit/shared": "1.7.1"
   },
   "keywords": [
     "copilotkit",
package/src/graphql/resolvers/copilot.resolver.ts
CHANGED

@@ -216,7 +216,7 @@ export class CopilotResolver {
        ),
        threadId: data.threadId,
        runId: data.runId,
-       publicApiKey:
+       publicApiKey: copilotCloudPublicApiKey,
        outputMessagesPromise,
        graphqlContext: ctx,
        forwardedParameters: data.forwardedParameters,
package/src/lib/logger.ts
CHANGED
@@ -26,51 +26,3 @@ export function createLogger(options?: { level?: LogLevel; component?: string })
     return logger;
   }
 }
-
-// LangFuse Logging Integration
-export interface LogLLMRequestData {
-  threadId?: string;
-  runId?: string;
-  model?: string;
-  messages: any[];
-  actions?: any[];
-  forwardedParameters?: any;
-  timestamp: number;
-  provider?: string;
-  [key: string]: any;
-}
-
-export interface LogLLMResponseData {
-  threadId: string;
-  runId?: string;
-  model?: string;
-  output: any;
-  latency: number;
-  timestamp: number;
-  provider?: string;
-  isProgressiveChunk?: boolean;
-  isFinalResponse?: boolean;
-  [key: string]: any;
-}
-
-export interface LogLLMErrorData {
-  threadId?: string;
-  runId?: string;
-  model?: string;
-  error: Error | string;
-  timestamp: number;
-  provider?: string;
-  [key: string]: any;
-}
-
-export interface CopilotLoggerHooks {
-  logRequest: (data: LogLLMRequestData) => void | Promise<void>;
-  logResponse: (data: LogLLMResponseData) => void | Promise<void>;
-  logError: (data: LogLLMErrorData) => void | Promise<void>;
-}
-
-export interface CopilotLoggingConfig {
-  enabled: boolean;
-  progressive: boolean;
-  logger: CopilotLoggerHooks;
-}
package/src/lib/observability.ts
ADDED

@@ -0,0 +1,73 @@
+export interface LLMRequestData {
+  threadId?: string;
+  runId?: string;
+  model?: string;
+  messages: any[];
+  actions?: any[];
+  forwardedParameters?: any;
+  timestamp: number;
+  provider?: string;
+  [key: string]: any;
+}
+
+export interface LLMResponseData {
+  threadId: string;
+  runId?: string;
+  model?: string;
+  output: any;
+  latency: number;
+  timestamp: number;
+  provider?: string;
+  isProgressiveChunk?: boolean;
+  isFinalResponse?: boolean;
+  [key: string]: any;
+}
+
+export interface LLMErrorData {
+  threadId?: string;
+  runId?: string;
+  model?: string;
+  error: Error | string;
+  timestamp: number;
+  provider?: string;
+  [key: string]: any;
+}
+
+export interface CopilotObservabilityHooks {
+  handleRequest: (data: LLMRequestData) => void | Promise<void>;
+  handleResponse: (data: LLMResponseData) => void | Promise<void>;
+  handleError: (data: LLMErrorData) => void | Promise<void>;
+}
+
+/**
+ * Configuration for CopilotKit logging functionality.
+ *
+ * @remarks
+ * Custom logging handlers require a valid CopilotKit public API key.
+ * Sign up at https://docs.copilotkit.ai/quickstart#get-a-copilot-cloud-public-api-key to get your key.
+ */
+export interface CopilotObservabilityConfig {
+  /**
+   * Enable or disable logging functionality.
+   *
+   * @default false
+   */
+  enabled: boolean;
+
+  /**
+   * Controls whether logs are streamed progressively or buffered.
+   * - When true: Each token and update is logged as it's generated (real-time)
+   * - When false: Complete responses are logged after completion (batched)
+   *
+   * @default true
+   */
+  progressive: boolean;
+
+  /**
+   * Custom observability hooks for request, response, and error events.
+   *
+   * @remarks
+   * Using custom observability hooks requires a valid CopilotKit public API key.
+   */
+  hooks: CopilotObservabilityHooks;
+}
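For orientation, here is a minimal sketch of a config satisfying the new interfaces. It assumes these types are re-exported from the package root (the "@copilotkit/runtime" import path is an assumption), and the console-backed hooks are placeholders for a real observability backend:

```ts
import type {
  CopilotObservabilityConfig,
  LLMRequestData,
  LLMResponseData,
  LLMErrorData,
} from "@copilotkit/runtime"; // import path is an assumption

export const observability: CopilotObservabilityConfig = {
  enabled: true,
  // false = buffer and log complete responses; true = log each streamed chunk
  progressive: false,
  hooks: {
    handleRequest: (data: LLMRequestData) => {
      console.log("LLM request", { threadId: data.threadId, model: data.model });
    },
    handleResponse: (data: LLMResponseData) => {
      console.log(`LLM response after ${data.latency}ms`, data.output);
    },
    handleError: (data: LLMErrorData) => {
      console.error("LLM error", data.error);
    },
  },
};
```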
package/src/lib/runtime/copilot-runtime.ts
CHANGED

@@ -62,11 +62,11 @@ import { Client as LangGraphClient } from "@langchain/langgraph-sdk";
 import { langchainMessagesToCopilotKit } from "./remote-lg-action";
 import { MetaEventInput } from "../../graphql/inputs/meta-event.input";
 import {
-  CopilotLoggingConfig,
-  LogLLMRequestData,
-  LogLLMResponseData,
-  LogLLMErrorData,
-} from "../logger";
+  CopilotObservabilityConfig,
+  LLMRequestData,
+  LLMResponseData,
+  LLMErrorData,
+} from "../observability";

 interface CopilotRuntimeRequest {
   serviceAdapter: CopilotServiceAdapter;
@@ -187,8 +187,14 @@ export interface CopilotRuntimeConstructorParams<T extends Parameter[] | [] = []
   delegateAgentProcessingToServiceAdapter?: boolean;

   /**
-   * Configuration for LLM request/response logging
+   * Configuration for LLM request/response logging.
+   * Requires publicApiKey from CopilotKit component to be set:
    *
+   * ```tsx
+   * <CopilotKit publicApiKey="ck_pub_..." />
+   * ```
+   *
+   * Example logging config:
    * ```ts
    * logging: {
    *   enabled: true, // Enable or disable logging
@@ -201,7 +207,7 @@ export interface CopilotRuntimeConstructorParams<T extends Parameter[] | [] = []
   * }
   * ```
   */
-  logging?: CopilotLoggingConfig;
+  observability_c?: CopilotObservabilityConfig;
 }

 export class CopilotRuntime<const T extends Parameter[] | [] = []> {
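A hedged sketch of wiring the renamed option into the constructor (hook bodies are placeholders; note the runtime only invokes them when the request carries a publicApiKey):

```ts
import { CopilotRuntime } from "@copilotkit/runtime";

// observability_c replaces the former `logging` constructor option.
const runtime = new CopilotRuntime({
  observability_c: {
    enabled: true,
    progressive: true,
    hooks: {
      handleRequest: (data) => console.log("request", data.threadId),
      handleResponse: (data) => console.log("response", data.latency),
      handleError: (data) => console.error("error", data.error),
    },
  },
});
```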
@@ -211,7 +217,7 @@ export class CopilotRuntime<const T extends Parameter[] | [] = []> {
   private onBeforeRequest?: OnBeforeRequestHandler;
   private onAfterRequest?: OnAfterRequestHandler;
   private delegateAgentProcessingToServiceAdapter: boolean;
-  private logging?: CopilotLoggingConfig;
+  private observability?: CopilotObservabilityConfig;

   constructor(params?: CopilotRuntimeConstructorParams<T>) {
     // Do not register actions if endpoints are set
@@ -233,7 +239,7 @@ export class CopilotRuntime<const T extends Parameter[] | [] = []> {
     this.onAfterRequest = params?.middleware?.onAfterRequest;
     this.delegateAgentProcessingToServiceAdapter =
       params?.delegateAgentProcessingToServiceAdapter || false;
-    this.logging = params?.logging;
+    this.observability = params?.observability_c;
   }

   async processRuntimeRequest(request: CopilotRuntimeRequest): Promise<CopilotRuntimeResponse> {
@@ -250,6 +256,7 @@ export class CopilotRuntime<const T extends Parameter[] | [] = []> {
       extensions,
       agentSession,
       agentStates,
+      publicApiKey,
     } = request;

     const eventSource = new RuntimeEventSource();
@@ -275,9 +282,9 @@ please use an LLM adapter instead.`,
     const serverSideActions = await this.getServerSideActions(request);

     // Log LLM request if logging is enabled
-    if (this.logging?.enabled) {
+    if (this.observability?.enabled && publicApiKey) {
       try {
-        const requestData: LogLLMRequestData = {
+        const requestData: LLMRequestData = {
           threadId,
           runId,
           model: forwardedParameters?.model,
@@ -288,7 +295,7 @@ please use an LLM adapter instead.`,
           provider: this.detectProvider(serviceAdapter),
         };

-        await this.logging.logger.logRequest(requestData);
+        await this.observability.hooks.handleRequest(requestData);
       } catch (error) {
         console.error("Error logging LLM request:", error);
       }
@@ -346,16 +353,16 @@ please use an LLM adapter instead.`,
       .catch((_error) => {});

     // After getting the response, log it if logging is enabled
-    if (this.logging?.enabled) {
+    if (this.observability?.enabled && publicApiKey) {
       try {
         outputMessagesPromise
           .then((outputMessages) => {
-            const responseData: LogLLMResponseData = {
+            const responseData: LLMResponseData = {
              threadId: result.threadId,
              runId: result.runId,
              model: forwardedParameters?.model,
              // Use collected chunks for progressive mode or outputMessages for regular mode
-             output: this.logging.progressive ? streamedChunks : outputMessages,
+             output: this.observability.progressive ? streamedChunks : outputMessages,
              latency: Date.now() - requestStartTime,
              timestamp: Date.now(),
              provider: this.detectProvider(serviceAdapter),
@@ -364,7 +371,7 @@ please use an LLM adapter instead.`,
            };

            try {
-             this.logging.logger.logResponse(responseData);
+             this.observability.hooks.handleResponse(responseData);
            } catch (logError) {
              console.error("Error logging LLM response:", logError);
            }
@@ -378,7 +385,7 @@ please use an LLM adapter instead.`,
     }

     // Add progressive logging if enabled
-    if (this.logging?.enabled && this.logging.progressive) {
+    if (this.observability?.enabled && this.observability.progressive && publicApiKey) {
       // Keep reference to original stream function
       const originalStream = eventSource.stream.bind(eventSource);

@@ -395,7 +402,7 @@ please use an LLM adapter instead.`,

           // Log each chunk separately for progressive mode
           try {
-            const progressiveData: LogLLMResponseData = {
+            const progressiveData: LLMResponseData = {
               threadId: threadId || "",
               runId,
               model: forwardedParameters?.model,
@@ -409,7 +416,7 @@ please use an LLM adapter instead.`,
           // Use Promise to handle async logger without awaiting
           Promise.resolve()
             .then(() => {
-              this.logging.logger.logResponse(progressiveData);
+              this.observability.hooks.handleResponse(progressiveData);
             })
             .catch((error) => {
               console.error("Error in progressive logging:", error);
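To illustrate what a consumer sees in each mode: with progressive enabled, handleResponse fires once per content chunk with isProgressiveChunk set, and the final call (or the only call in buffered mode) carries the accumulated output. A sketch of a hook that branches on those flags (the import path is an assumption):

```ts
import type { LLMResponseData } from "@copilotkit/runtime"; // path is an assumption

function handleResponse(data: LLMResponseData): void {
  if (data.isProgressiveChunk) {
    // Progressive mode: called once per streamed token/update.
    process.stdout.write(String(data.output));
  } else {
    // Buffered mode or the final call: `output` is the complete response
    // (or the collected chunks when progressive mode was on).
    console.log(`\ncompleted after ${data.latency}ms`);
  }
}
```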
@@ -444,9 +451,9 @@ please use an LLM adapter instead.`,
       };
     } catch (error) {
       // Log error if logging is enabled
-      if (this.logging?.enabled) {
+      if (this.observability?.enabled && publicApiKey) {
         try {
-          const errorData: LogLLMErrorData = {
+          const errorData: LLMErrorData = {
             threadId,
             runId,
             model: forwardedParameters?.model,
@@ -456,7 +463,7 @@ please use an LLM adapter instead.`,
             provider: this.detectProvider(serviceAdapter),
           };

-          await this.logging.logger.logError(errorData);
+          await this.observability.hooks.handleError(errorData);
         } catch (logError) {
           console.error("Error logging LLM error:", logError);
         }
@@ -643,9 +650,16 @@ please use an LLM adapter instead.`,
       agentSession,
       threadId: threadIdFromRequest,
       metaEvents,
+      publicApiKey,
+      forwardedParameters,
     } = request;
     const { agentName, nodeName } = agentSession;

+    // Track request start time for observability
+    const requestStartTime = Date.now();
+    // For storing streamed chunks if progressive logging is enabled
+    const streamedChunks: any[] = [];
+
     // for backwards compatibility, deal with the case when no threadId is provided
     const threadId = threadIdFromRequest ?? agentSession.threadId;

@@ -683,12 +697,35 @@ please use an LLM adapter instead.`,
       ...request.actions,
     ]);

+    // Log agent request if observability is enabled
+    if (this.observability?.enabled && publicApiKey) {
+      try {
+        const requestData: LLMRequestData = {
+          threadId,
+          runId: undefined,
+          model: forwardedParameters?.model,
+          messages,
+          actions: allAvailableActions,
+          forwardedParameters,
+          timestamp: requestStartTime,
+          provider: "agent",
+          agentName, // Add agent-specific context
+          nodeName,
+        };
+
+        await this.observability.hooks.handleRequest(requestData);
+      } catch (error) {
+        console.error("Error logging agent request:", error);
+      }
+    }
+
     await this.onBeforeRequest?.({
       threadId,
       runId: undefined,
       inputMessages: messages,
       properties: graphqlContext.properties,
     });
+
     try {
       const eventSource = new RuntimeEventSource();
       const stream = await currentAgent.remoteAgentHandler({
@@ -699,11 +736,84 @@ please use an LLM adapter instead.`,
         actionInputsWithoutAgents: allAvailableActions,
       });

+      // Add progressive observability if enabled
+      if (this.observability?.enabled && this.observability.progressive && publicApiKey) {
+        // Wrap the stream function to intercept events for observability without changing core logic
+        const originalStream = eventSource.stream.bind(eventSource);
+
+        eventSource.stream = async (callback) => {
+          await originalStream(async (eventStream$) => {
+            // Create subscription to capture streaming events
+            eventStream$.subscribe({
+              next: (event) => {
+                // Only log content chunks
+                if (event.type === RuntimeEventTypes.TextMessageContent) {
+                  // Store the chunk
+                  streamedChunks.push(event.content);
+
+                  // Log each chunk separately for progressive mode
+                  try {
+                    const progressiveData: LLMResponseData = {
+                      threadId: threadId || "",
+                      runId: undefined,
+                      model: forwardedParameters?.model,
+                      output: event.content,
+                      latency: Date.now() - requestStartTime,
+                      timestamp: Date.now(),
+                      provider: "agent",
+                      isProgressiveChunk: true,
+                      agentName,
+                      nodeName,
+                    };
+
+                    // Use Promise to handle async logger without awaiting
+                    Promise.resolve()
+                      .then(() => {
+                        this.observability.hooks.handleResponse(progressiveData);
+                      })
+                      .catch((error) => {
+                        console.error("Error in progressive agent logging:", error);
+                      });
+                  } catch (error) {
+                    console.error("Error preparing progressive agent log data:", error);
+                  }
+                }
+              },
+            });
+
+            // Call the original callback with the event stream
+            await callback(eventStream$);
+          });
+        };
+      }
+
       eventSource.stream(async (eventStream$) => {
         from(stream).subscribe({
           next: (event) => eventStream$.next(event),
           error: (err) => {
             console.error("Error in stream", err);
+
+            // Log error with observability if enabled
+            if (this.observability?.enabled && publicApiKey) {
+              try {
+                const errorData: LLMErrorData = {
+                  threadId,
+                  runId: undefined,
+                  model: forwardedParameters?.model,
+                  error: err instanceof Error ? err : String(err),
+                  timestamp: Date.now(),
+                  latency: Date.now() - requestStartTime,
+                  provider: "agent",
+                  agentName,
+                  nodeName,
+                };
+
+                this.observability.hooks.handleError(errorData);
+              } catch (logError) {
+                console.error("Error logging agent error:", logError);
+              }
+            }
+
             eventStream$.error(err);
             eventStream$.complete();
           },
@@ -711,6 +821,35 @@ please use an LLM adapter instead.`,
         });
       });

+      // Log final agent response when outputs are available
+      if (this.observability?.enabled && publicApiKey) {
+        outputMessagesPromise
+          .then((outputMessages) => {
+            const responseData: LLMResponseData = {
+              threadId,
+              runId: undefined,
+              model: forwardedParameters?.model,
+              // Use collected chunks for progressive mode or outputMessages for regular mode
+              output: this.observability.progressive ? streamedChunks : outputMessages,
+              latency: Date.now() - requestStartTime,
+              timestamp: Date.now(),
+              provider: "agent",
+              isFinalResponse: true,
+              agentName,
+              nodeName,
+            };
+
+            try {
+              this.observability.hooks.handleResponse(responseData);
+            } catch (logError) {
+              console.error("Error logging agent response:", logError);
+            }
+          })
+          .catch((error) => {
+            console.error("Failed to get output messages for agent logging:", error);
+          });
+      }
+
       outputMessagesPromise
         .then((outputMessages) => {
           this.onAfterRequest?.({
@@ -731,6 +870,27 @@ please use an LLM adapter instead.`,
         actionInputsWithoutAgents: allAvailableActions,
       };
     } catch (error) {
+      // Log error with observability if enabled
+      if (this.observability?.enabled && publicApiKey) {
+        try {
+          const errorData: LLMErrorData = {
+            threadId,
+            runId: undefined,
+            model: forwardedParameters?.model,
+            error: error instanceof Error ? error : String(error),
+            timestamp: Date.now(),
+            latency: Date.now() - requestStartTime,
+            provider: "agent",
+            agentName,
+            nodeName,
+          };
+
+          await this.observability.hooks.handleError(errorData);
+        } catch (logError) {
+          console.error("Error logging agent error:", logError);
+        }
+      }
+
       console.error("Error getting response:", error);
       throw error;
     }