@librechat/agents 1.4.2 → 1.4.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/common/enum.cjs +17 -0
- package/dist/cjs/common/enum.cjs.map +1 -1
- package/dist/cjs/graphs/Graph.cjs +2 -2
- package/dist/cjs/graphs/Graph.cjs.map +1 -1
- package/dist/cjs/main.cjs +9 -0
- package/dist/cjs/main.cjs.map +1 -1
- package/dist/cjs/run.cjs +2 -1
- package/dist/cjs/run.cjs.map +1 -1
- package/dist/cjs/stream.cjs +137 -9
- package/dist/cjs/stream.cjs.map +1 -1
- package/dist/esm/common/enum.mjs +18 -1
- package/dist/esm/common/enum.mjs.map +1 -1
- package/dist/esm/graphs/Graph.mjs +2 -2
- package/dist/esm/graphs/Graph.mjs.map +1 -1
- package/dist/esm/main.mjs +2 -2
- package/dist/esm/run.mjs +2 -1
- package/dist/esm/run.mjs.map +1 -1
- package/dist/esm/stream.mjs +138 -11
- package/dist/esm/stream.mjs.map +1 -1
- package/dist/types/common/enum.d.ts +14 -0
- package/dist/types/scripts/abort.d.ts +1 -0
- package/dist/types/scripts/content.d.ts +1 -0
- package/dist/types/stream.d.ts +11 -0
- package/dist/types/types/graph.d.ts +7 -0
- package/dist/types/types/stream.d.ts +2 -2
- package/dist/types/types/tools.d.ts +14 -0
- package/package.json +8 -7
- package/src/common/enum.ts +17 -0
- package/src/graphs/Graph.ts +3 -3
- package/src/run.ts +2 -1
- package/src/scripts/abort.ts +136 -0
- package/src/scripts/content.ts +118 -0
- package/src/stream.ts +179 -11
- package/src/types/graph.ts +8 -0
- package/src/types/stream.ts +2 -2
- package/src/types/tools.ts +17 -1
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@librechat/agents",
-  "version": "1.4.2",
+  "version": "1.4.4",
   "main": "./dist/cjs/main.cjs",
   "module": "./dist/esm/main.mjs",
   "types": "./dist/types/index.d.ts",
@@ -39,7 +39,9 @@
     "clean": "node ./config/clean.js",
     "script": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/cli.ts",
     "bun:cli": "bun -r dotenv/config ./src/scripts/cli.ts --provider 'bedrock' --name 'Jo' --location 'New York, NY'",
-    "start:cli": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/cli.ts --provider '
+    "start:cli": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/cli.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
+    "content": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/content.ts --provider 'bedrock' --name 'Jo' --location 'New York, NY'",
+    "abort": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/abort.ts --provider 'bedrock' --name 'Jo' --location 'New York, NY'",
     "start:cli2": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/cli2.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
     "script2": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/proto/example_test.ts",
     "script3": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/proto/example_test_anthropic.ts",
@@ -61,10 +63,10 @@
     "@aws-crypto/sha256-js": "^5.2.0",
     "@aws-sdk/credential-provider-node": "^3.613.0",
     "@aws-sdk/types": "^3.609.0",
-    "@langchain/anthropic": "^0.2.
-    "@langchain/aws": "^0.0.
+    "@langchain/anthropic": "^0.2.15",
+    "@langchain/aws": "^0.0.10",
     "@langchain/community": "^0.2.20",
-    "@langchain/core": "^0.2.
+    "@langchain/core": "^0.2.31",
     "@langchain/google-vertexai": "^0.0.20",
     "@langchain/langgraph": "^0.0.31",
     "@langchain/mistralai": "^0.0.26",
@@ -74,10 +76,9 @@
     "@smithy/util-utf8": "^2.0.0",
     "dotenv": "^16.4.5",
     "langchain": "^0.2.10",
-    "nanoid": "^3.3.
+    "nanoid": "^3.3.7"
   },
   "resolutions": {
-    "@langchain/core": "0.2.18",
     "@smithy/eventstream-codec": "^2.2.0",
     "@smithy/protocol-http": "^3.0.6",
     "@smithy/signature-v4": "^2.0.10",
package/src/common/enum.ts
CHANGED
@@ -97,6 +97,23 @@ export enum StepTypes {
   MESSAGE_CREATION = 'message_creation'
 }
 
+export enum ContentTypes {
+  TEXT = 'text',
+  TOOL_CALL = 'tool_call',
+  IMAGE_FILE = 'image_file',
+  IMAGE_URL = 'image_url',
+  ERROR = 'error',
+}
+
+export enum ToolCallTypes {
+  FUNCTION = 'function',
+  RETRIEVAL = 'retrieval',
+  FILE_SEARCH = 'file_search',
+  CODE_INTERPRETER = 'code_interpreter',
+  /* Agents Tool Call */
+  TOOL_CALL = 'tool_call',
+}
+
 export enum Callback {
   TOOL_ERROR = 'handleToolError',
   TOOL_START = 'handleToolStart',
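
Note (not part of the diff): the new ContentTypes and ToolCallTypes enums give consumers stable discriminators for aggregated content parts. A minimal sketch of narrowing on them, assuming the enums are re-exported from the package entry point (the added lines in dist/cjs/main.cjs suggest new top-level exports, but the diff does not show them) and using hypothetical part shapes:

import { ContentTypes } from '@librechat/agents'; // assumption: re-exported from the package root

// Hypothetical part shapes for illustration; the package's own types live under src/types.
type TextPart = { type: ContentTypes.TEXT; text: string };
type ToolCallPart = { type: ContentTypes.TOOL_CALL; tool_call: { name: string; args: string } };

function describePart(part: TextPart | ToolCallPart): string {
  // The string values mirror the enum members added in src/common/enum.ts
  if (part.type === ContentTypes.TEXT) {
    return `text: ${part.text}`;
  }
  return `tool call: ${part.tool_call.name}`;
}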
package/src/graphs/Graph.ts
CHANGED
@@ -85,7 +85,7 @@ export class StandardGraph extends Graph<
     provider,
     clientOptions,
     instructions,
-    additional_instructions,
+    additional_instructions = '',
   } : {
     runId?: string;
     provider: Providers;
@@ -104,7 +104,7 @@ export class StandardGraph extends Graph<
     this.graphState = this.createGraphState();
     this.boundModel = this.initializeModel();
 
-    let finalInstructions = instructions;
+    let finalInstructions = instructions ?? '';
     if (additional_instructions) {
       finalInstructions = finalInstructions ? `${finalInstructions}\n\n${additional_instructions}` : additional_instructions;
     }
@@ -244,7 +244,7 @@ export class StandardGraph extends Graph<
 
   createCallModel() {
     return async (state: t.BaseGraphState, config?: RunnableConfig): Promise<Partial<t.BaseGraphState>> => {
-      const { provider } = (config?.configurable as t.GraphConfig) ?? {} ;
+      const { provider } = (config?.configurable as t.GraphConfig | undefined) ?? {} ;
      if (!config || !provider) {
        throw new Error(`No ${config ? 'provider' : 'config'} provided`);
      }
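
Note (not part of the diff): the Graph.ts change defaults both instruction fields so finalInstructions is always a string. A standalone sketch of the resulting merge behavior; the helper name is invented for illustration, StandardGraph does this inline in its constructor:

// Illustrative helper mirroring the instruction-merging logic shown above; not a package export.
function mergeInstructions(instructions?: string, additional_instructions = ''): string {
  let finalInstructions = instructions ?? '';
  if (additional_instructions) {
    finalInstructions = finalInstructions
      ? `${finalInstructions}\n\n${additional_instructions}`
      : additional_instructions;
  }
  return finalInstructions;
}

console.log(mergeInstructions(undefined, 'The user is located in New York, NY.'));
// -> 'The user is located in New York, NY.'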
package/src/run.ts
CHANGED
@@ -90,7 +90,8 @@ export class Run<T extends t.BaseGraphState> {
       const { data, name, metadata, ...info } = event;
 
       let eventName: t.EventName = info.event;
-
+      const isDoubleCallProvider = provider === Providers.ANTHROPIC || provider === Providers.BEDROCK;
+      if (hasTools && isDoubleCallProvider && eventName === GraphEvents.CHAT_MODEL_STREAM) {
         /* Skipping CHAT_MODEL_STREAM event for Anthropic due to double-call edge case */
         continue;
       }
package/src/scripts/abort.ts
ADDED
@@ -0,0 +1,136 @@
+/* eslint-disable no-console */
+// src/scripts/cli.ts
+import { config } from 'dotenv';
+config();
+import { HumanMessage, BaseMessage } from '@langchain/core/messages';
+import { TavilySearchResults } from '@langchain/community/tools/tavily_search';
+import type * as t from '@/types';
+import { ChatModelStreamHandler, createContentAggregator } from '@/stream';
+import { ToolEndHandler } from '@/events';
+
+import { getArgs } from '@/scripts/args';
+import { Run } from '@/run';
+import { GraphEvents, Callback } from '@/common';
+import { getLLMConfig } from '@/utils/llmConfig';
+
+const conversationHistory: BaseMessage[] = [];
+
+async function testStandardStreaming(): Promise<void> {
+  const { userName, location, provider, currentDate } = await getArgs();
+  const { contentParts, aggregateContent } = createContentAggregator();
+  const controller = new AbortController();
+
+  const customHandlers = {
+    [GraphEvents.TOOL_END]: new ToolEndHandler(),
+    [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
+    [GraphEvents.ON_RUN_STEP_COMPLETED]: {
+      handle: (event: GraphEvents.ON_RUN_STEP_COMPLETED, data: t.StreamEventData): void => {
+        console.log('====== ON_RUN_STEP_COMPLETED ======');
+        aggregateContent({ event, data: data as unknown as { result: t.ToolEndEvent } });
+      }
+    },
+    [GraphEvents.ON_RUN_STEP]: {
+      handle: (event: GraphEvents.ON_RUN_STEP, data: t.StreamEventData): void => {
+        console.log('====== ON_RUN_STEP ======');
+        console.dir(data, { depth: null });
+        aggregateContent({ event, data: data as t.RunStep });
+      }
+    },
+    [GraphEvents.ON_RUN_STEP_DELTA]: {
+      handle: (event: GraphEvents.ON_RUN_STEP_DELTA, data: t.StreamEventData): void => {
+        console.log('====== ON_RUN_STEP_DELTA ======');
+        console.dir(data, { depth: null });
+        aggregateContent({ event, data: data as t.RunStepDeltaEvent });
+      }
+    },
+    [GraphEvents.ON_MESSAGE_DELTA]: {
+      handle: (event: GraphEvents.ON_MESSAGE_DELTA, data: t.StreamEventData): void => {
+        console.log('====== ON_MESSAGE_DELTA ======');
+        console.dir(data, { depth: null });
+        aggregateContent({ event, data: data as t.MessageDeltaEvent });
+      }
+    },
+    [GraphEvents.TOOL_START]: {
+      handle: (_event: string, data: t.StreamEventData, metadata?: Record<string, unknown>): void => {
+        console.log('====== TOOL_START ======');
+      }
+    },
+  };
+
+  const llmConfig = getLLMConfig(provider);
+
+  const run = await Run.create<t.IState>({
+    graphConfig: {
+      type: 'standard',
+      llmConfig,
+      tools: [new TavilySearchResults()],
+      instructions: 'You are a friendly AI assistant. Always address the user by their name.',
+      additional_instructions: `The user's name is ${userName} and they are located in ${location}.`,
+    },
+    customHandlers,
+  });
+
+  const config = {
+    configurable: {
+      provider,
+      thread_id: 'conversation-num-1',
+    },
+    signal: controller.signal,
+    streamMode: 'values',
+    version: 'v2' as const,
+  };
+
+  console.log('Test 1: Weather query (content parts test)');
+
+  const userMessage = `
+  Make a search for the weather in ${location} today, which is ${currentDate}.
+  Make sure to always refer to me by name, which is ${userName}.
+  After giving me a thorough summary, tell me a joke about the weather forecast we went over.
+  `;
+
+  conversationHistory.push(new HumanMessage(userMessage));
+
+  const inputs = {
+    messages: conversationHistory,
+  };
+
+  // Set a timeout to abort the operation after 5 seconds
+  setTimeout(() => {
+    controller.abort();
+    console.log('Operation aborted');
+    console.log('Current content parts:');
+    console.dir(contentParts, { depth: null });
+  }, 8000);
+
+  try {
+    const finalContentParts = await run.processStream(inputs, config);
+    const finalMessages = run.getRunMessages();
+    if (finalMessages) {
+      conversationHistory.push(...finalMessages);
+      console.dir(conversationHistory, { depth: null });
+    }
+    console.dir(finalContentParts, { depth: null });
+  } catch (error) {
+    if ((error as Error)?.name === 'AbortError') {
+      console.log('Operation was aborted');
+    } else {
+      console.error('An error occurred:', error);
+    }
+  }
+
+  console.log('\n\n====================\n\n');
+  console.dir(contentParts, { depth: null });
+}
+
+process.on('unhandledRejection', (reason, promise) => {
+  console.error('Unhandled Rejection at:', promise, 'reason:', reason);
+  console.log('Conversation history:');
+  process.exit(1);
+});
+
+testStandardStreaming().catch((err) => {
+  console.error(err);
+  console.log('Conversation history:');
+  console.dir(conversationHistory, { depth: null });
+  process.exit(1);
+});
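
Note (not part of the diff): the new abort script feeds an AbortController signal into the stream config and cancels the run from a timer. The cancellation pattern itself is plain Node; a minimal, self-contained sketch with an invented helper name, not LibreChat-specific:

// Generic AbortController-plus-timeout wrapper; any API that accepts an AbortSignal
// (such as the `signal` field passed to run.processStream above) can be cancelled this way.
async function withTimeout<T>(
  work: (signal: AbortSignal) => Promise<T>,
  ms: number,
): Promise<T | undefined> {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), ms);
  try {
    return await work(controller.signal);
  } catch (error) {
    if ((error as Error)?.name === 'AbortError') {
      console.log('Operation was aborted');
      return undefined;
    }
    throw error;
  } finally {
    clearTimeout(timer);
  }
}

// Example: abort a fetch after 8 seconds, matching the script's 8000 ms timer.
void withTimeout((signal) => fetch('https://example.com', { signal }), 8000);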
package/src/scripts/content.ts
ADDED
@@ -0,0 +1,118 @@
+/* eslint-disable no-console */
+// src/scripts/cli.ts
+import { config } from 'dotenv';
+config();
+import { HumanMessage, BaseMessage } from '@langchain/core/messages';
+import { TavilySearchResults } from '@langchain/community/tools/tavily_search';
+import type * as t from '@/types';
+import { ChatModelStreamHandler, createContentAggregator } from '@/stream';
+import { ToolEndHandler } from '@/events';
+
+
+import { getArgs } from '@/scripts/args';
+import { Run } from '@/run';
+import { GraphEvents, Callback } from '@/common';
+import { getLLMConfig } from '@/utils/llmConfig';
+
+const conversationHistory: BaseMessage[] = [];
+async function testStandardStreaming(): Promise<void> {
+  const { userName, location, provider, currentDate } = await getArgs();
+  const { contentParts, aggregateContent } = createContentAggregator();
+  const customHandlers = {
+    [GraphEvents.TOOL_END]: new ToolEndHandler(),
+    // [GraphEvents.CHAT_MODEL_END]: new ModelEndHandler(),
+    [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
+    [GraphEvents.ON_RUN_STEP_COMPLETED]: {
+      handle: (event: GraphEvents.ON_RUN_STEP_COMPLETED, data: t.StreamEventData): void => {
+        console.log('====== ON_RUN_STEP_COMPLETED ======');
+        // console.dir(data, { depth: null });
+        aggregateContent({ event, data: data as unknown as { result: t.ToolEndEvent } });
+      }
+    },
+    [GraphEvents.ON_RUN_STEP]: {
+      handle: (event: GraphEvents.ON_RUN_STEP, data: t.StreamEventData): void => {
+        console.log('====== ON_RUN_STEP ======');
+        console.dir(data, { depth: null });
+        aggregateContent({ event, data: data as t.RunStep });
+      }
+    },
+    [GraphEvents.ON_RUN_STEP_DELTA]: {
+      handle: (event: GraphEvents.ON_RUN_STEP_DELTA, data: t.StreamEventData): void => {
+        console.log('====== ON_RUN_STEP_DELTA ======');
+        console.dir(data, { depth: null });
+        aggregateContent({ event, data: data as t.RunStepDeltaEvent });
+      }
+    },
+    [GraphEvents.ON_MESSAGE_DELTA]: {
+      handle: (event: GraphEvents.ON_MESSAGE_DELTA, data: t.StreamEventData): void => {
+        console.log('====== ON_MESSAGE_DELTA ======');
+        console.dir(data, { depth: null });
+        aggregateContent({ event, data: data as t.MessageDeltaEvent });
+      }
+    },
+    [GraphEvents.TOOL_START]: {
+      handle: (_event: string, data: t.StreamEventData, metadata?: Record<string, unknown>): void => {
+        console.log('====== TOOL_START ======');
+        // console.dir(data, { depth: null });
+      }
+    },
+  };
+
+  const llmConfig = getLLMConfig(provider);
+
+  const run = await Run.create<t.IState>({
+    graphConfig: {
+      type: 'standard',
+      llmConfig,
+      tools: [new TavilySearchResults()],
+      instructions: 'You are a friendly AI assistant. Always address the user by their name.',
+      additional_instructions: `The user's name is ${userName} and they are located in ${location}.`,
+    },
+    customHandlers,
+  });
+
+  const config = {
+    configurable: {
+      provider,
+      thread_id: 'conversation-num-1',
+    },
+    streamMode: 'values',
+    version: 'v2' as const,
+  };
+
+  console.log('Test 1: Weather query (content parts test)');
+
+  const userMessage = `
+  Make a search for the weather in ${location} today, which is ${currentDate}.
+  Make sure to always refer to me by name, which is ${userName}.
+  After giving me a thorough summary, tell me a joke about the weather forecast we went over.
+  `;
+
+  conversationHistory.push(new HumanMessage(userMessage));
+
+  const inputs = {
+    messages: conversationHistory,
+  };
+  const finalContentParts = await run.processStream(inputs, config);
+  const finalMessages = run.getRunMessages();
+  if (finalMessages) {
+    conversationHistory.push(...finalMessages);
+    console.dir(conversationHistory, { depth: null });
+  }
+  console.dir(finalContentParts, { depth: null });
+  console.log('\n\n====================\n\n');
+  console.dir(contentParts, { depth: null });
+}
+
+process.on('unhandledRejection', (reason, promise) => {
+  console.error('Unhandled Rejection at:', promise, 'reason:', reason);
+  console.log('Conversation history:');
+  process.exit(1);
+});
+
+testStandardStreaming().catch((err) => {
+  console.error(err);
+  console.log('Conversation history:');
+  console.dir(conversationHistory, { depth: null });
+  process.exit(1);
+});
package/src/stream.ts
CHANGED
@@ -4,16 +4,25 @@ import type { AIMessageChunk } from '@langchain/core/messages';
 import type { ToolCall } from '@langchain/core/messages/tool';
 import type { Graph } from '@/graphs';
 import type * as t from '@/types';
-import { StepTypes } from '@/common';
+import { StepTypes, ContentTypes, GraphEvents, ToolCallTypes } from '@/common';
+
+function getNonEmptyValue(possibleValues: string[]): string | undefined {
+  for (const value of possibleValues) {
+    if (value && value.trim() !== '') {
+      return value;
+    }
+  }
+  return undefined;
+}
 
 const getMessageId = (stepKey: string, graph: Graph<t.BaseGraphState>): string | undefined => {
   const messageId = graph.messageIdsByStepKey.get(stepKey);
-  if (messageId) {
+  if (messageId != null && messageId) {
     return;
   }
 
   const prelimMessageId = graph.prelimMessageIdsByStepKey.get(stepKey);
-  if (prelimMessageId) {
+  if (prelimMessageId != null && prelimMessageId) {
     graph.prelimMessageIdsByStepKey.delete(stepKey);
     graph.messageIdsByStepKey.set(stepKey, prelimMessageId);
     return prelimMessageId;
@@ -31,7 +40,7 @@ export class ChatModelStreamHandler implements t.EventHandler {
     }
 
     const chunk = data.chunk as AIMessageChunk;
-    const content = chunk
+    const content = chunk.content;
 
     if (!graph.config) {
       throw new Error('Config not found in graph');
@@ -42,8 +51,8 @@ export class ChatModelStreamHandler implements t.EventHandler {
       return;
     }
 
-    const hasToolCalls = chunk.tool_calls && chunk.tool_calls.length > 0;
-    const hasToolCallChunks = chunk.tool_call_chunks && chunk.tool_call_chunks.length > 0;
+    const hasToolCalls = (chunk.tool_calls && chunk.tool_calls.length > 0) ?? false;
+    const hasToolCallChunks = (chunk.tool_call_chunks && chunk.tool_call_chunks.length > 0) ?? false;
 
     if (hasToolCalls && chunk.tool_calls?.every((tc) => tc.id)) {
       const tool_calls: ToolCall[] = [];
@@ -64,7 +73,7 @@ export class ChatModelStreamHandler implements t.EventHandler {
 
     const isEmptyContent = !content || !content.length;
     const isEmptyChunk = isEmptyContent && !hasToolCallChunks;
-    if (isEmptyChunk && chunk.id && chunk.id
+    if (isEmptyChunk && chunk.id && chunk.id.startsWith('msg')) {
       if (graph.messageIdsByStepKey.has(chunk.id)) {
         return;
       } else if (graph.prelimMessageIdsByStepKey.has(chunk.id)) {
@@ -92,7 +101,7 @@ export class ChatModelStreamHandler implements t.EventHandler {
       return;
     }
 
-    const message_id = getMessageId(stepKey, graph);
+    const message_id = getMessageId(stepKey, graph) ?? '';
     if (message_id) {
       graph.dispatchRunStep(stepKey, {
         type: StepTypes.MESSAGE_CREATION,
@@ -127,7 +136,7 @@ hasToolCallChunks: ${hasToolCallChunks}
     /* Note: tool call chunks may have non-empty content that matches the current tool chunk generation */
     if (typeof content === 'string' && runStep.type === StepTypes.TOOL_CALLS) {
       return;
-    } else if (hasToolCallChunks && chunk.tool_call_chunks?.some((tc) => tc.args === content)) {
+    } else if (hasToolCallChunks && (chunk.tool_call_chunks?.some((tc) => tc.args === content) ?? false)) {
      return;
    } else if (typeof content === 'string') {
      graph.dispatchMessageDelta(stepId, {
@@ -136,10 +145,169 @@ hasToolCallChunks: ${hasToolCallChunks}
           text: content,
         }],
       });
-    } else if (content
+    } else if (content.every((c) => c.type?.startsWith('text'))) {
       graph.dispatchMessageDelta(stepId, {
         content,
       });
     }
   }
-}
+}
+
+export type ContentAggregatorResult = {
+  contentParts: Array<t.MessageContentComplex | undefined>;
+  aggregateContent: ({ event, data }: {
+    event: GraphEvents;
+    data: t.RunStep | t.MessageDeltaEvent | t.RunStepDeltaEvent | {
+      result: t.ToolEndEvent;
+    };
+  }) => void
+};
+
+export function createContentAggregator(): ContentAggregatorResult {
+  const contentParts: Array<t.MessageContentComplex | undefined> = [];
+  const stepMap = new Map<string, t.RunStep>();
+  const toolCallIdMap = new Map<string, string>();
+
+  const updateContent = (
+    index: number,
+    contentPart: t.MessageContentComplex,
+    finalUpdate = false,
+  ): void => {
+    const partType = contentPart.type ?? '';
+    if (!partType) {
+      console.warn('No content type found in content part');
+      return;
+    }
+
+    if (!contentParts[index]) {
+      contentParts[index] = { type: partType };
+    }
+
+    if (
+      partType.startsWith(ContentTypes.TEXT) &&
+      ContentTypes.TEXT in contentPart &&
+      typeof contentPart.text === 'string'
+    ) {
+      const currentContent = contentParts[index] as { type: ContentTypes.TEXT; text: string };
+      contentParts[index] = {
+        type: ContentTypes.TEXT,
+        text: (currentContent.text || '') + contentPart.text,
+      };
+    } else if (partType === ContentTypes.IMAGE_URL && 'image_url' in contentPart) {
+      const currentContent = contentParts[index] as { type: 'image_url'; image_url: string };
+      contentParts[index] = {
+        ...currentContent,
+      };
+    } else if (partType === ContentTypes.TOOL_CALL && 'tool_call' in contentPart) {
+      const existingContent = contentParts[index] as Omit<t.ToolCallContent, 'tool_call'> & { tool_call?: ToolCall } | undefined;
+
+      const args = finalUpdate
+        ? contentPart.tool_call.args
+        : (existingContent?.tool_call?.args || '') + (contentPart.tool_call.args ?? '');
+
+      const id = getNonEmptyValue([contentPart.tool_call.id, existingContent?.tool_call?.id]) ?? '';
+      const name =
+        getNonEmptyValue([contentPart.tool_call.name, existingContent?.tool_call?.name]) ?? '';
+
+      const newToolCall: ToolCall & t.PartMetadata = {
+        id,
+        name,
+        args,
+        type: ToolCallTypes.TOOL_CALL,
+      };
+
+      if (finalUpdate) {
+        newToolCall.progress = 1;
+        newToolCall.output = contentPart.tool_call.output;
+      }
+
+      contentParts[index] = {
+        type: ContentTypes.TOOL_CALL,
+        tool_call: newToolCall,
+      };
+    }
+  };
+
+  const aggregateContent = ({ event, data }: {
+    event: GraphEvents;
+    data: t.RunStep | t.MessageDeltaEvent | t.RunStepDeltaEvent | { result: t.ToolEndEvent };
+  }): void => {
+
+    if (event === GraphEvents.ON_RUN_STEP) {
+      const runStep = data as t.RunStep;
+      stepMap.set(runStep.id, runStep);
+
+      // Store tool call IDs if present
+      if (runStep.stepDetails.type === StepTypes.TOOL_CALLS) {
+        runStep.stepDetails.tool_calls.forEach((toolCall) => {
+          const toolCallId = toolCall.id ?? '';
+          if ('id' in toolCall && toolCallId) {
+            toolCallIdMap.set(runStep.id, toolCallId);
+          }
+        });
+      }
+    } else if (event === GraphEvents.ON_MESSAGE_DELTA) {
+      const messageDelta = data as t.MessageDeltaEvent;
+      const runStep = stepMap.get(messageDelta.id);
+      if (!runStep) {
+        console.warn('No run step or runId found for message delta event');
+        return;
+      }
+
+      if (messageDelta.delta.content) {
+        const contentPart = Array.isArray(messageDelta.delta.content)
+          ? messageDelta.delta.content[0]
+          : messageDelta.delta.content;
+
+        updateContent(runStep.index, contentPart);
+      }
+    } else if (event === GraphEvents.ON_RUN_STEP_DELTA) {
+      const runStepDelta = data as t.RunStepDeltaEvent;
+      const runStep = stepMap.get(runStepDelta.id);
+      if (!runStep) {
+        console.warn('No run step or runId found for run step delta event');
+        return;
+      }
+
+      if (
+        runStepDelta.delta.type === StepTypes.TOOL_CALLS &&
+        runStepDelta.delta.tool_calls
+      ) {
+
+        runStepDelta.delta.tool_calls.forEach((toolCallDelta) => {
+          const toolCallId = toolCallIdMap.get(runStepDelta.id) ?? '';
+
+          const contentPart: t.MessageContentComplex = {
+            type: ContentTypes.TOOL_CALL,
+            tool_call: {
+              name: toolCallDelta.name ?? '',
+              args: toolCallDelta.args ?? '',
+              id: toolCallId,
+            },
+          };
+
+          updateContent(runStep.index, contentPart);
+        });
+      }
+    } else if (event === GraphEvents.ON_RUN_STEP_COMPLETED) {
+      const { result } = data as unknown as { result: t.ToolEndEvent };
+
+      const { id: stepId } = result;
+
+      const runStep = stepMap.get(stepId);
+      if (!runStep) {
+        console.warn('No run step or runId found for completed tool call event');
+        return;
+      }
+
+      const contentPart: t.MessageContentComplex = {
+        type: ContentTypes.TOOL_CALL,
+        tool_call: result.tool_call,
+      };
+
+      updateContent(runStep.index, contentPart, true);
+    }
+  };
+
+  return { contentParts, aggregateContent };
+}
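
Note (not part of the diff): createContentAggregator returns a live contentParts array plus an aggregateContent callback, and each delta is folded into the part at its run step's index. The text branch reduces to string concatenation; a standalone illustration with local types (not the package's own):

// Standalone sketch of the TEXT branch of updateContent: deltas for the same index
// are concatenated into one text part.
type TextContentPart = { type: 'text'; text: string };

function appendTextDelta(parts: Array<TextContentPart | undefined>, index: number, delta: string): void {
  const current = parts[index];
  parts[index] = { type: 'text', text: (current?.text ?? '') + delta };
}

const parts: Array<TextContentPart | undefined> = [];
appendTextDelta(parts, 0, 'Hello, ');
appendTextDelta(parts, 0, 'world.');
console.log(parts[0]); // { type: 'text', text: 'Hello, world.' }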
package/src/types/graph.ts
CHANGED
@@ -123,4 +123,12 @@ export type StreamEvent = {
 export type GraphConfig = {
   provider: string;
   thread_id?: string;
+};
+
+export type PartMetadata = {
+  progress?: number;
+  asset_pointer?: string;
+  status?: string;
+  action?: boolean;
+  output?: string;
 };
package/src/types/stream.ts
CHANGED
@@ -112,8 +112,8 @@ export type ToolCallsDetails = {
 };
 
 export type ToolCallDelta = {
-  type: StepTypes
-  tool_calls
+  type: StepTypes;
+  tool_calls?: ToolCallChunk[]; // #new
 };
 
 export type AgentToolCall = {
package/src/types/tools.ts
CHANGED
@@ -2,6 +2,7 @@
 import type { RunnableToolLike } from '@langchain/core/runnables';
 import type { StructuredToolInterface } from '@langchain/core/tools';
 import type { ToolCall } from '@langchain/core/messages/tool';
+import { ContentTypes } from '@/common';
 
 /** Replacement type for `import type { ToolCall } from '@langchain/core/messages/tool'` in order to have stringified args typed */
 export type CustomToolCall = {
@@ -10,6 +11,7 @@ export type CustomToolCall = {
   args: string | Record<string, any>;
   id?: string;
   type?: 'tool_call';
+  output?: string;
 }
 
 export type GenericTool = StructuredToolInterface | RunnableToolLike;
@@ -28,4 +30,18 @@ export type ToolNodeOptions = {
   loadRuntimeTools?: ToolRefGenerator;
 };
 
-export type ToolNodeConstructorParams = ToolRefs & ToolNodeOptions;
+export type ToolNodeConstructorParams = ToolRefs & ToolNodeOptions;
+
+export type ToolEndEvent = {
+  /** The Step Id of the Tool Call */
+  id: string;
+  /** The Completed Tool Call */
+  tool_call: ToolCall;
+  /** The content index of the tool call */
+  index: number;
+};
+
+export type ToolCallContent = {
+  type: ContentTypes.TOOL_CALL;
+  tool_call: ToolCall;
+};
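
Note (not part of the diff): ToolEndEvent pairs a completed ToolCall with its run-step id and content index so the aggregator can place the final result. A hypothetical example of the shapes, using local mirrors of the new types and invented field values:

import type { ToolCall } from '@langchain/core/messages/tool';

// Local mirrors for illustration; the package declares these in src/types/tools.ts.
type ToolEndEvent = { id: string; tool_call: ToolCall; index: number };
type ToolCallContent = { type: 'tool_call'; tool_call: ToolCall };

const completedCall: ToolCall = {
  name: 'tavily_search_results_json', // illustrative tool name
  args: { input: 'weather in New York, NY' },
  id: 'call_123',
};

const toolEnd: ToolEndEvent = { id: 'step_abc', tool_call: completedCall, index: 1 };
const contentPart: ToolCallContent = { type: 'tool_call', tool_call: toolEnd.tool_call };
console.log(contentPart.tool_call.name, toolEnd.index);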