@librechat/agents 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/cjs/common/enum.cjs +108 -0
- package/dist/cjs/common/enum.cjs.map +1 -0
- package/dist/cjs/events.cjs +104 -0
- package/dist/cjs/events.cjs.map +1 -0
- package/dist/cjs/graphs/Graph.cjs +313 -0
- package/dist/cjs/graphs/Graph.cjs.map +1 -0
- package/dist/cjs/llm/providers.cjs +30 -0
- package/dist/cjs/llm/providers.cjs.map +1 -0
- package/dist/cjs/main.cjs +59 -0
- package/dist/cjs/main.cjs.map +1 -0
- package/dist/cjs/messages.cjs +195 -0
- package/dist/cjs/messages.cjs.map +1 -0
- package/dist/cjs/run.cjs +106 -0
- package/dist/cjs/run.cjs.map +1 -0
- package/dist/cjs/stream.cjs +133 -0
- package/dist/cjs/stream.cjs.map +1 -0
- package/dist/cjs/tools/ToolNode.cjs +80 -0
- package/dist/cjs/tools/ToolNode.cjs.map +1 -0
- package/dist/cjs/utils/graph.cjs +16 -0
- package/dist/cjs/utils/graph.cjs.map +1 -0
- package/dist/cjs/utils/run.cjs +59 -0
- package/dist/cjs/utils/run.cjs.map +1 -0
- package/dist/esm/common/enum.mjs +108 -0
- package/dist/esm/common/enum.mjs.map +1 -0
- package/dist/esm/events.mjs +97 -0
- package/dist/esm/events.mjs.map +1 -0
- package/dist/esm/graphs/Graph.mjs +310 -0
- package/dist/esm/graphs/Graph.mjs.map +1 -0
- package/dist/esm/llm/providers.mjs +27 -0
- package/dist/esm/llm/providers.mjs.map +1 -0
- package/dist/esm/main.mjs +9 -0
- package/dist/esm/main.mjs.map +1 -0
- package/dist/esm/messages.mjs +190 -0
- package/dist/esm/messages.mjs.map +1 -0
- package/dist/esm/run.mjs +104 -0
- package/dist/esm/run.mjs.map +1 -0
- package/dist/esm/stream.mjs +131 -0
- package/dist/esm/stream.mjs.map +1 -0
- package/dist/esm/tools/ToolNode.mjs +77 -0
- package/dist/esm/tools/ToolNode.mjs.map +1 -0
- package/dist/esm/utils/graph.mjs +13 -0
- package/dist/esm/utils/graph.mjs.map +1 -0
- package/dist/esm/utils/run.mjs +57 -0
- package/dist/esm/utils/run.mjs.map +1 -0
- package/dist/types/common/enum.d.ts +79 -0
- package/dist/types/common/index.d.ts +1 -0
- package/dist/types/events.d.ts +22 -0
- package/dist/types/graphs/Graph.d.ts +86 -0
- package/dist/types/graphs/index.d.ts +1 -0
- package/dist/types/index.d.ts +8 -0
- package/dist/types/llm/providers.d.ts +4 -0
- package/dist/types/messages.d.ts +10 -0
- package/dist/types/prompts/collab.d.ts +1 -0
- package/dist/types/prompts/index.d.ts +2 -0
- package/dist/types/prompts/taskmanager.d.ts +41 -0
- package/dist/types/run.d.ts +21 -0
- package/dist/types/scripts/args.d.ts +6 -0
- package/dist/types/scripts/cli.d.ts +1 -0
- package/dist/types/scripts/cli2.d.ts +1 -0
- package/dist/types/scripts/cli3.d.ts +1 -0
- package/dist/types/scripts/cli4.d.ts +1 -0
- package/dist/types/scripts/cli5.d.ts +1 -0
- package/dist/types/scripts/empty_input.d.ts +1 -0
- package/dist/types/stream.d.ts +5 -0
- package/dist/types/tools/ToolNode.d.ts +15 -0
- package/dist/types/tools/example.d.ts +26 -0
- package/dist/types/types/graph.d.ts +108 -0
- package/dist/types/types/index.d.ts +5 -0
- package/dist/types/types/llm.d.ts +25 -0
- package/dist/types/types/run.d.ts +53 -0
- package/dist/types/types/stream.d.ts +134 -0
- package/dist/types/types/tools.d.ts +24 -0
- package/dist/types/utils/graph.d.ts +2 -0
- package/dist/types/utils/index.d.ts +2 -0
- package/dist/types/utils/llmConfig.d.ts +2 -0
- package/dist/types/utils/logging.d.ts +1 -0
- package/dist/types/utils/run.d.ts +20 -0
- package/package.json +142 -0
- package/src/common/enum.ts +121 -0
- package/src/common/index.ts +2 -0
- package/src/events.ts +110 -0
- package/src/graphs/Graph.ts +416 -0
- package/src/graphs/index.ts +1 -0
- package/src/index.ts +15 -0
- package/src/llm/providers.ts +27 -0
- package/src/messages.ts +210 -0
- package/src/prompts/collab.ts +6 -0
- package/src/prompts/index.ts +2 -0
- package/src/prompts/taskmanager.ts +61 -0
- package/src/proto/CollabGraph.ts +269 -0
- package/src/proto/TaskManager.ts +243 -0
- package/src/proto/collab.ts +200 -0
- package/src/proto/collab_design.ts +184 -0
- package/src/proto/collab_design_v2.ts +224 -0
- package/src/proto/collab_design_v3.ts +255 -0
- package/src/proto/collab_design_v4.ts +220 -0
- package/src/proto/collab_design_v5.ts +251 -0
- package/src/proto/collab_graph.ts +181 -0
- package/src/proto/collab_original.ts +123 -0
- package/src/proto/example.ts +93 -0
- package/src/proto/example_new.ts +68 -0
- package/src/proto/example_old.ts +201 -0
- package/src/proto/example_test.ts +152 -0
- package/src/proto/example_test_anthropic.ts +100 -0
- package/src/proto/log_stream.ts +202 -0
- package/src/proto/main_collab_community_event.ts +133 -0
- package/src/proto/main_collab_design_v2.ts +96 -0
- package/src/proto/main_collab_design_v4.ts +100 -0
- package/src/proto/main_collab_design_v5.ts +135 -0
- package/src/proto/main_collab_global_analysis.ts +122 -0
- package/src/proto/main_collab_hackathon_event.ts +153 -0
- package/src/proto/main_collab_space_mission.ts +153 -0
- package/src/proto/main_philosophy.ts +210 -0
- package/src/proto/original_script.ts +126 -0
- package/src/proto/standard.ts +100 -0
- package/src/proto/stream.ts +56 -0
- package/src/proto/tasks.ts +118 -0
- package/src/proto/tools/global_analysis_tools.ts +86 -0
- package/src/proto/tools/space_mission_tools.ts +60 -0
- package/src/proto/vertexai.ts +54 -0
- package/src/run.ts +132 -0
- package/src/scripts/args.ts +42 -0
- package/src/scripts/cli.ts +166 -0
- package/src/scripts/cli2.ts +124 -0
- package/src/scripts/cli3.ts +175 -0
- package/src/scripts/cli4.ts +182 -0
- package/src/scripts/cli5.ts +182 -0
- package/src/scripts/empty_input.ts +136 -0
- package/src/stream.ts +145 -0
- package/src/tools/ToolNode.ts +108 -0
- package/src/tools/example.ts +52 -0
- package/src/types/graph.ts +126 -0
- package/src/types/index.ts +6 -0
- package/src/types/llm.ts +38 -0
- package/src/types/run.ts +56 -0
- package/src/types/stream.ts +174 -0
- package/src/types/tools.ts +31 -0
- package/src/utils/graph.ts +11 -0
- package/src/utils/index.ts +2 -0
- package/src/utils/llmConfig.ts +50 -0
- package/src/utils/logging.ts +48 -0
- package/src/utils/run.ts +91 -0
package/src/run.ts
ADDED
|
@@ -0,0 +1,132 @@
|
|
|
1
|
+
// src/run.ts
|
|
2
|
+
import type { BaseCallbackHandler, CallbackHandlerMethods } from '@langchain/core/callbacks/base';
|
|
3
|
+
import type { BaseMessage, MessageContentComplex } from '@langchain/core/messages';
|
|
4
|
+
import type { RunnableConfig } from '@langchain/core/runnables';
|
|
5
|
+
import type { ClientCallbacks, SystemCallbacks } from '@/graphs/Graph';
|
|
6
|
+
import type * as t from '@/types';
|
|
7
|
+
import { GraphEvents, Providers, Callback } from '@/common';
|
|
8
|
+
import { StandardGraph } from '@/graphs/Graph';
|
|
9
|
+
import { HandlerRegistry } from '@/events';
|
|
10
|
+
|
|
11
|
+
/**
 * Orchestrates a single agent run: builds a compiled graph workflow from a
 * `RunConfig`, wires event handlers, and streams events from the workflow,
 * dispatching each one to a registered handler.
 *
 * Instances must be obtained via the static async `Run.create()` factory;
 * the constructor is intentionally private.
 */
export class Run<T extends t.BaseGraphState> {
  // Compiled workflow; only set when graphConfig.type is 'standard' (or omitted).
  graphRunnable?: t.CompiledWorkflow<T, Partial<T>, string>;
  // private collab!: CollabGraph;
  // private taskManager!: TaskManager;
  // Maps event names to handlers; seeded from config.customHandlers.
  private handlerRegistry: HandlerRegistry;
  // Set as a side effect of createStandardGraph(); undefined until then.
  private Graph: StandardGraph | undefined;
  provider: Providers | undefined;
  run_id: string | undefined;

  /**
   * Registers any custom handlers, then (for the 'standard'/default graph
   * type) builds the graph and attaches the handler registry to it.
   *
   * NOTE: order matters here — createStandardGraph() assigns this.Graph as a
   * side effect, which the subsequent `if (this.Graph)` check relies on.
   */
  private constructor(config: t.RunConfig) {
    const handlerRegistry = new HandlerRegistry();

    if (config.customHandlers) {
      for (const [eventType, handler] of Object.entries(config.customHandlers)) {
        handlerRegistry.register(eventType, handler);
      }
    }

    this.handlerRegistry = handlerRegistry;

    // 'standard' is also the fallback when no graph type is specified.
    if (config.graphConfig.type === 'standard' || !config.graphConfig.type) {
      this.provider = config.graphConfig.llmConfig.provider;
      this.graphRunnable = this.createStandardGraph(config.graphConfig) as unknown as t.CompiledWorkflow<T, Partial<T>, string>;
      if (this.Graph) {
        this.Graph.handlerRegistry = handlerRegistry;
      }
    }
  }

  /**
   * Builds a StandardGraph from the config, stores it on this.Graph
   * (side effect consumed by the constructor), and returns its compiled
   * workflow.
   */
  private createStandardGraph(config: t.StandardGraphConfig): t.CompiledWorkflow<t.IState, Partial<t.IState>, string> {
    const { runId, llmConfig, instructions, additional_instructions, tools = [] } = config;
    // Everything in llmConfig besides `provider` is passed through as client options.
    const { provider, ...clientOptions } = llmConfig;

    const standardGraph = new StandardGraph({
      runId,
      tools,
      provider,
      instructions,
      clientOptions,
      additional_instructions,
    });
    this.Graph = standardGraph;
    return standardGraph.createWorkflow();
  }

  /**
   * Async factory — the single supported way to construct a Run.
   * (Currently does no awaiting; async keeps the signature stable if
   * initialization ever becomes asynchronous.)
   */
  static async create<T extends t.BaseGraphState>(config: t.RunConfig): Promise<Run<T>> {
    return new Run<T>(config);
  }

  /**
   * Returns the messages produced by the latest run.
   * @throws if the graph was never initialized (i.e. Run.create was bypassed
   *         or the graph type was unsupported).
   */
  getRunMessages(): BaseMessage[] | undefined {
    if (!this.Graph) {
      throw new Error('Graph not initialized. Make sure to use Run.create() to instantiate the Run.');
    }
    return this.Graph.getRunMessages();
  }

  /**
   * Streams events from the compiled workflow for the given inputs and
   * dispatches each event to its registered handler.
   *
   * @param inputs          graph state input (e.g. the message history)
   * @param config          runnable config; `version` selects the streamEvents schema
   * @param clientCallbacks optional tool lifecycle callbacks, merged into config.callbacks
   * @returns the aggregated content parts from the graph, if any
   * @throws if the workflow or graph was never initialized
   *
   * NOTE(review): when clientCallbacks are provided, the caller's `config`
   * object is mutated in place (config.callbacks is reassigned). Callers that
   * reuse the same config object across multiple processStream calls will
   * therefore keep the merged callbacks on later calls — presumably relied
   * upon by the CLI scripts; confirm before changing.
   */
  async processStream(
    inputs: t.IState,
    config: Partial<RunnableConfig> & { version: 'v1' | 'v2' },
    clientCallbacks?: ClientCallbacks,
  ): Promise<MessageContentComplex[] | undefined> {
    if (!this.graphRunnable) {
      throw new Error('Run not initialized. Make sure to use Run.create() to instantiate the Run.');
    }
    if (!this.Graph) {
      throw new Error('Graph not initialized. Make sure to use Run.create() to instantiate the Run.');
    }

    // Clear any state accumulated by a previous run on the same instance.
    this.Graph.resetValues();
    const provider = this.Graph.provider;
    const hasTools = this.Graph.tools ? this.Graph.tools.length > 0 : false;
    if (clientCallbacks) {
      /* TODO: conflicts with callback manager */
      const callbacks = config.callbacks as (BaseCallbackHandler | CallbackHandlerMethods)[] || [];
      config.callbacks = callbacks.concat(this.getCallbacks(clientCallbacks));
    }
    const stream = this.graphRunnable.streamEvents(inputs, config);

    for await (const event of stream) {
      const { data, name, metadata, ...info } = event;

      let eventName: t.EventName = info.event;
      if (hasTools && provider === Providers.ANTHROPIC && eventName === GraphEvents.CHAT_MODEL_STREAM) {
        /* Skipping CHAT_MODEL_STREAM event for Anthropic due to double-call edge case */
        continue;
      }

      // Custom events carry their real event name in `name`, not `info.event`.
      if (eventName && eventName === GraphEvents.ON_CUSTOM_EVENT) {
        eventName = name;
      }

      // console.log(`Event: ${event.event} | Executing Event: ${eventName}`);

      // Unhandled events are silently ignored by design.
      const handler = this.handlerRegistry.getHandler(eventName);
      if (handler) {
        handler.handle(eventName, data, metadata, this.Graph);
      }
    }

    return this.Graph.getContentParts();
  }

  /**
   * Wraps a client callback so that, when invoked by the system, it receives
   * the current Graph as its first argument. No-ops if the client did not
   * provide a callback for `key` or the graph is missing.
   */
  private createSystemCallback<K extends keyof ClientCallbacks>(
    clientCallbacks: ClientCallbacks,
    key: K
  ): SystemCallbacks[K] {
    return ((...args: unknown[]) => {
      const clientCallback = clientCallbacks[key];
      if (clientCallback && this.Graph) {
        (clientCallback as (...args: unknown[]) => void)(this.Graph, ...args);
      }
    }) as SystemCallbacks[K];
  }

  /**
   * Adapts the client's tool callbacks (error/start/end) into the
   * graph-aware system callback shape.
   */
  getCallbacks(clientCallbacks: ClientCallbacks): SystemCallbacks {
    return {
      [Callback.TOOL_ERROR]: this.createSystemCallback(clientCallbacks, Callback.TOOL_ERROR),
      [Callback.TOOL_START]: this.createSystemCallback(clientCallbacks, Callback.TOOL_START),
      [Callback.TOOL_END]: this.createSystemCallback(clientCallbacks, Callback.TOOL_END),
    };
  }
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
// src/scripts/args.ts
|
|
2
|
+
import yargs from 'yargs';
|
|
3
|
+
import { hideBin } from 'yargs/helpers';
|
|
4
|
+
|
|
5
|
+
export async function getArgs(): Promise<{ userName: string; location: string; provider: string; currentDate: string; }> {
|
|
6
|
+
const argv = yargs(hideBin(process.argv))
|
|
7
|
+
.option('name', {
|
|
8
|
+
alias: 'n',
|
|
9
|
+
type: 'string',
|
|
10
|
+
description: 'User name',
|
|
11
|
+
default: 'Jo'
|
|
12
|
+
})
|
|
13
|
+
.option('location', {
|
|
14
|
+
alias: 'l',
|
|
15
|
+
type: 'string',
|
|
16
|
+
description: 'User location',
|
|
17
|
+
default: 'New York'
|
|
18
|
+
})
|
|
19
|
+
.option('provider', {
|
|
20
|
+
alias: 'p',
|
|
21
|
+
type: 'string',
|
|
22
|
+
description: 'LLM provider',
|
|
23
|
+
choices: ['openAI', 'anthropic', 'mistralai', 'vertexai', 'aws'],
|
|
24
|
+
default: 'openAI'
|
|
25
|
+
})
|
|
26
|
+
.help()
|
|
27
|
+
.alias('help', 'h')
|
|
28
|
+
.argv;
|
|
29
|
+
|
|
30
|
+
const args = await argv;
|
|
31
|
+
const userName = args.name as string;
|
|
32
|
+
const location = args.location as string;
|
|
33
|
+
const provider = args.provider as string;
|
|
34
|
+
const currentDate = new Date().toLocaleString();
|
|
35
|
+
|
|
36
|
+
return {
|
|
37
|
+
userName,
|
|
38
|
+
location,
|
|
39
|
+
provider,
|
|
40
|
+
currentDate
|
|
41
|
+
};
|
|
42
|
+
}
|
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
/* eslint-disable no-console */
|
|
2
|
+
// src/scripts/cli.ts
|
|
3
|
+
import { config } from 'dotenv';
|
|
4
|
+
config();
|
|
5
|
+
import { HumanMessage, BaseMessage } from '@langchain/core/messages';
|
|
6
|
+
import { TavilySearchResults } from '@langchain/community/tools/tavily_search';
|
|
7
|
+
import type * as t from '@/types';
|
|
8
|
+
import { ModelEndHandler, ToolEndHandler } from '@/events';
|
|
9
|
+
import { ChatModelStreamHandler } from '@/stream';
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
import { getArgs } from '@/scripts/args';
|
|
13
|
+
import { Run } from '@/run';
|
|
14
|
+
import { GraphEvents, Callback } from '@/common';
|
|
15
|
+
import { getLLMConfig } from '@/utils/llmConfig';
|
|
16
|
+
|
|
17
|
+
const conversationHistory: BaseMessage[] = [];
|
|
18
|
+
async function testStandardStreaming(): Promise<void> {
|
|
19
|
+
const { userName, location, provider, currentDate } = await getArgs();
|
|
20
|
+
const customHandlers = {
|
|
21
|
+
[GraphEvents.TOOL_END]: new ToolEndHandler(),
|
|
22
|
+
[GraphEvents.CHAT_MODEL_END]: new ModelEndHandler(),
|
|
23
|
+
[GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
|
|
24
|
+
[GraphEvents.ON_RUN_STEP_COMPLETED]: {
|
|
25
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
26
|
+
console.log('====== ON_RUN_STEP_COMPLETED ======');
|
|
27
|
+
console.dir(data, { depth: null });
|
|
28
|
+
}
|
|
29
|
+
},
|
|
30
|
+
[GraphEvents.ON_RUN_STEP]: {
|
|
31
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
32
|
+
console.log('====== ON_RUN_STEP ======');
|
|
33
|
+
console.dir(data, { depth: null });
|
|
34
|
+
}
|
|
35
|
+
},
|
|
36
|
+
[GraphEvents.ON_RUN_STEP_DELTA]: {
|
|
37
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
38
|
+
console.log('====== ON_RUN_STEP_DELTA ======');
|
|
39
|
+
console.dir(data, { depth: null });
|
|
40
|
+
}
|
|
41
|
+
},
|
|
42
|
+
[GraphEvents.ON_MESSAGE_DELTA]: {
|
|
43
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
44
|
+
console.log('====== ON_MESSAGE_DELTA ======');
|
|
45
|
+
console.dir(data, { depth: null });
|
|
46
|
+
}
|
|
47
|
+
},
|
|
48
|
+
[GraphEvents.TOOL_START]: {
|
|
49
|
+
handle: (_event: string, data: t.StreamEventData, metadata?: Record<string, unknown>): void => {
|
|
50
|
+
console.log('====== TOOL_START ======');
|
|
51
|
+
console.dir(data, { depth: null });
|
|
52
|
+
}
|
|
53
|
+
},
|
|
54
|
+
// [GraphEvents.LLM_STREAM]: new LLMStreamHandler(),
|
|
55
|
+
// [GraphEvents.LLM_START]: {
|
|
56
|
+
// handle: (_event: string, data: t.StreamEventData): void => {
|
|
57
|
+
// console.log('====== LLM_START ======');
|
|
58
|
+
// console.dir(data, { depth: null });
|
|
59
|
+
// }
|
|
60
|
+
// },
|
|
61
|
+
// [GraphEvents.LLM_END]: {
|
|
62
|
+
// handle: (_event: string, data: t.StreamEventData): void => {
|
|
63
|
+
// console.log('====== LLM_END ======');
|
|
64
|
+
// console.dir(data, { depth: null });
|
|
65
|
+
// }
|
|
66
|
+
// },
|
|
67
|
+
/*
|
|
68
|
+
[GraphEvents.CHAIN_START]: {
|
|
69
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
70
|
+
console.log('====== CHAIN_START ======');
|
|
71
|
+
// console.dir(data, { depth: null });
|
|
72
|
+
}
|
|
73
|
+
},
|
|
74
|
+
[GraphEvents.CHAIN_END]: {
|
|
75
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
76
|
+
console.log('====== CHAIN_END ======');
|
|
77
|
+
// console.dir(data, { depth: null });
|
|
78
|
+
}
|
|
79
|
+
},
|
|
80
|
+
*/
|
|
81
|
+
// [GraphEvents.CHAT_MODEL_START]: {
|
|
82
|
+
// handle: (_event: string, _data: t.StreamEventData): void => {
|
|
83
|
+
// console.log('====== CHAT_MODEL_START ======');
|
|
84
|
+
// console.dir(_data, { depth: null });
|
|
85
|
+
// // Intentionally left empty
|
|
86
|
+
// }
|
|
87
|
+
// },
|
|
88
|
+
};
|
|
89
|
+
|
|
90
|
+
const llmConfig = getLLMConfig(provider);
|
|
91
|
+
|
|
92
|
+
const run = await Run.create<t.IState>({
|
|
93
|
+
graphConfig: {
|
|
94
|
+
type: 'standard',
|
|
95
|
+
llmConfig,
|
|
96
|
+
tools: [new TavilySearchResults()],
|
|
97
|
+
instructions: 'You are a friendly AI assistant. Always address the user by their name.',
|
|
98
|
+
additional_instructions: `The user's name is ${userName} and they are located in ${location}.`,
|
|
99
|
+
},
|
|
100
|
+
customHandlers,
|
|
101
|
+
});
|
|
102
|
+
|
|
103
|
+
const config = {
|
|
104
|
+
configurable: {
|
|
105
|
+
provider,
|
|
106
|
+
thread_id: 'conversation-num-1',
|
|
107
|
+
},
|
|
108
|
+
streamMode: 'values',
|
|
109
|
+
version: 'v2' as const,
|
|
110
|
+
};
|
|
111
|
+
|
|
112
|
+
console.log(' Test 1: Initial greeting');
|
|
113
|
+
|
|
114
|
+
conversationHistory.push(new HumanMessage(`Hi I'm ${userName}.`));
|
|
115
|
+
let inputs = {
|
|
116
|
+
messages: conversationHistory,
|
|
117
|
+
};
|
|
118
|
+
const contentParts = await run.processStream(inputs, config,
|
|
119
|
+
// {
|
|
120
|
+
// [Callback.TOOL_START]: (graph, ...args) => {
|
|
121
|
+
// console.log('TOOL_START callback');
|
|
122
|
+
// },
|
|
123
|
+
// [Callback.TOOL_END]: (graph, ...args) => {
|
|
124
|
+
// console.log('TOOL_END callback');
|
|
125
|
+
// },
|
|
126
|
+
// }
|
|
127
|
+
);
|
|
128
|
+
const finalMessages = run.getRunMessages();
|
|
129
|
+
if (finalMessages) {
|
|
130
|
+
conversationHistory.push(...finalMessages);
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
console.log(' Test 2: Weather query');
|
|
134
|
+
|
|
135
|
+
const userMessage = `
|
|
136
|
+
Make a search for the weather in ${location} today, which is ${currentDate}.
|
|
137
|
+
Make sure to always refer to me by name.
|
|
138
|
+
After giving me a thorough summary, tell me a joke about the weather forecast we went over.
|
|
139
|
+
`;
|
|
140
|
+
|
|
141
|
+
conversationHistory.push(new HumanMessage(userMessage));
|
|
142
|
+
|
|
143
|
+
inputs = {
|
|
144
|
+
messages: conversationHistory,
|
|
145
|
+
};
|
|
146
|
+
const contentParts2 = await run.processStream(inputs, config);
|
|
147
|
+
const finalMessages2 = run.getRunMessages();
|
|
148
|
+
if (finalMessages2) {
|
|
149
|
+
conversationHistory.push(...finalMessages2);
|
|
150
|
+
console.dir(conversationHistory, { depth: null });
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
process.on('unhandledRejection', (reason, promise) => {
|
|
155
|
+
console.error('Unhandled Rejection at:', promise, 'reason:', reason);
|
|
156
|
+
console.log('Conversation history:');
|
|
157
|
+
console.dir(conversationHistory, { depth: null });
|
|
158
|
+
process.exit(1);
|
|
159
|
+
});
|
|
160
|
+
|
|
161
|
+
testStandardStreaming().catch((err) => {
|
|
162
|
+
console.error(err);
|
|
163
|
+
console.log('Conversation history:');
|
|
164
|
+
console.dir(conversationHistory, { depth: null });
|
|
165
|
+
process.exit(1);
|
|
166
|
+
});
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
/* eslint-disable no-console */
|
|
2
|
+
// src/scripts/cli2.ts
|
|
3
|
+
import { config } from 'dotenv';
|
|
4
|
+
config();
|
|
5
|
+
import { HumanMessage, BaseMessage } from '@langchain/core/messages';
|
|
6
|
+
import { TavilySearchResults } from '@langchain/community/tools/tavily_search';
|
|
7
|
+
import type * as t from '@/types';
|
|
8
|
+
import { ChatModelStreamHandler } from '@/stream';
|
|
9
|
+
import { TestLLMStreamHandler } from '@/events';
|
|
10
|
+
|
|
11
|
+
import { getArgs } from '@/scripts/args';
|
|
12
|
+
import { Run } from '@/run';
|
|
13
|
+
import { GraphEvents } from '@/common';
|
|
14
|
+
import { getLLMConfig } from '@/utils/llmConfig';
|
|
15
|
+
|
|
16
|
+
const conversationHistory: BaseMessage[] = [];
|
|
17
|
+
|
|
18
|
+
async function executePersonalizedQuerySuite(): Promise<void> {
|
|
19
|
+
const { userName, location, provider, currentDate } = await getArgs();
|
|
20
|
+
|
|
21
|
+
const customHandlers = {
|
|
22
|
+
[GraphEvents.LLM_STREAM]: new TestLLMStreamHandler(),
|
|
23
|
+
[GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
|
|
24
|
+
[GraphEvents.LLM_START]: {
|
|
25
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
26
|
+
console.log('====== LLM_START ======');
|
|
27
|
+
console.dir(data, { depth: null });
|
|
28
|
+
}
|
|
29
|
+
},
|
|
30
|
+
[GraphEvents.LLM_END]: {
|
|
31
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
32
|
+
console.log('====== LLM_END ======');
|
|
33
|
+
console.dir(data, { depth: null });
|
|
34
|
+
}
|
|
35
|
+
},
|
|
36
|
+
[GraphEvents.CHAT_MODEL_START]: {
|
|
37
|
+
handle: (_event: string, _data: t.StreamEventData): void => {
|
|
38
|
+
console.log('====== CHAT_MODEL_START ======');
|
|
39
|
+
console.dir(_data, { depth: null });
|
|
40
|
+
}
|
|
41
|
+
},
|
|
42
|
+
[GraphEvents.CHAT_MODEL_END]: {
|
|
43
|
+
handle: (_event: string, _data: t.StreamEventData): void => {
|
|
44
|
+
console.log('====== CHAT_MODEL_END ======');
|
|
45
|
+
console.dir(_data, { depth: null });
|
|
46
|
+
}
|
|
47
|
+
},
|
|
48
|
+
[GraphEvents.TOOL_START]: {
|
|
49
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
50
|
+
console.log('====== TOOL_START ======');
|
|
51
|
+
console.dir(data, { depth: null });
|
|
52
|
+
}
|
|
53
|
+
},
|
|
54
|
+
[GraphEvents.TOOL_END]: {
|
|
55
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
56
|
+
console.log('====== TOOL_END ======');
|
|
57
|
+
console.dir(data, { depth: null });
|
|
58
|
+
}
|
|
59
|
+
},
|
|
60
|
+
};
|
|
61
|
+
|
|
62
|
+
const llmConfig = getLLMConfig(provider);
|
|
63
|
+
|
|
64
|
+
const run = await Run.create<t.IState>({
|
|
65
|
+
graphConfig: {
|
|
66
|
+
type: 'standard',
|
|
67
|
+
llmConfig,
|
|
68
|
+
tools: [new TavilySearchResults()],
|
|
69
|
+
},
|
|
70
|
+
customHandlers,
|
|
71
|
+
});
|
|
72
|
+
|
|
73
|
+
const sessionConfig = {
|
|
74
|
+
configurable: {
|
|
75
|
+
provider,
|
|
76
|
+
thread_id: `${userName}-session-${Date.now()}`,
|
|
77
|
+
instructions: `You are a knowledgeable and friendly AI assistant. Tailor your responses to ${userName}'s interests in ${location}.`,
|
|
78
|
+
additional_instructions: `Ensure each topic is thoroughly researched. Today is ${currentDate}. Maintain a warm, personalized tone throughout.`
|
|
79
|
+
},
|
|
80
|
+
streamMode: 'values',
|
|
81
|
+
version: 'v2' as const,
|
|
82
|
+
};
|
|
83
|
+
|
|
84
|
+
console.log(`Initiating personalized query suite for ${userName}`);
|
|
85
|
+
|
|
86
|
+
const queryTopics = [
|
|
87
|
+
{ task: "current weather", description: "Provide a detailed weather forecast" },
|
|
88
|
+
{ task: "popular tourist attraction", description: "Describe a notable sight" },
|
|
89
|
+
{ task: "upcoming events", description: "List major events or festivals this week" },
|
|
90
|
+
// { task: "famous local dish", description: "Share a recipe for a regional specialty" },
|
|
91
|
+
// { task: "local humor", description: "Tell a joke related to the area or findings" }
|
|
92
|
+
];
|
|
93
|
+
|
|
94
|
+
const userPrompt = `
|
|
95
|
+
Greetings! I'm ${userName}, currently in ${location}. Today's date is ${currentDate}.
|
|
96
|
+
I'm seeking information on various aspects of ${location}. Please address the following:
|
|
97
|
+
|
|
98
|
+
${queryTopics.map((topic, index) => `${index + 1}. ${topic.description} in ${location}.`).join('\n ')}
|
|
99
|
+
|
|
100
|
+
For each topic, conduct a separate search to ensure accuracy and depth.
|
|
101
|
+
In your response, please address me as ${userName} and maintain a friendly, informative tone.
|
|
102
|
+
`;
|
|
103
|
+
|
|
104
|
+
conversationHistory.push(new HumanMessage(userPrompt));
|
|
105
|
+
|
|
106
|
+
const runInput = {
|
|
107
|
+
messages: conversationHistory,
|
|
108
|
+
};
|
|
109
|
+
|
|
110
|
+
const contentParts = await run.processStream(runInput, sessionConfig);
|
|
111
|
+
const finalMessages = run.getRunMessages();
|
|
112
|
+
if (finalMessages) {
|
|
113
|
+
conversationHistory.push(...finalMessages);
|
|
114
|
+
console.log("AI Assistant's Response:");
|
|
115
|
+
console.dir(conversationHistory, { depth: null });
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
executePersonalizedQuerySuite().catch((error) => {
|
|
120
|
+
console.error("An error occurred during the query suite execution:", error);
|
|
121
|
+
console.log("Final conversation state:");
|
|
122
|
+
console.dir(conversationHistory, { depth: null });
|
|
123
|
+
process.exit(1);
|
|
124
|
+
});
|
|
@@ -0,0 +1,175 @@
|
|
|
1
|
+
/* eslint-disable no-console */
|
|
2
|
+
// src/scripts/cli.ts
|
|
3
|
+
import { config } from 'dotenv';
|
|
4
|
+
config();
|
|
5
|
+
import { z } from "zod";
|
|
6
|
+
import { HumanMessage, BaseMessage } from '@langchain/core/messages';
|
|
7
|
+
import type { RunnableConfig } from '@langchain/core/runnables';
|
|
8
|
+
import { TavilySearchResults } from '@langchain/community/tools/tavily_search';
|
|
9
|
+
import { tool } from "@langchain/core/tools";
|
|
10
|
+
import type * as t from '@/types';
|
|
11
|
+
import { ModelEndHandler, ToolEndHandler } from '@/events';
|
|
12
|
+
import { ChatModelStreamHandler } from '@/stream';
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
import { getArgs } from '@/scripts/args';
|
|
16
|
+
import { Run } from '@/run';
|
|
17
|
+
import { GraphEvents, Callback } from '@/common';
|
|
18
|
+
import { getLLMConfig } from '@/utils/llmConfig';
|
|
19
|
+
|
|
20
|
+
const conversationHistory: BaseMessage[] = [];
|
|
21
|
+
async function testStandardStreaming(): Promise<void> {
|
|
22
|
+
const { userName, location, provider, currentDate } = await getArgs();
|
|
23
|
+
const customHandlers = {
|
|
24
|
+
[GraphEvents.TOOL_END]: new ToolEndHandler(),
|
|
25
|
+
[GraphEvents.CHAT_MODEL_END]: new ModelEndHandler(),
|
|
26
|
+
[GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
|
|
27
|
+
[GraphEvents.ON_RUN_STEP]: {
|
|
28
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
29
|
+
console.log('====== ON_RUN_STEP ======');
|
|
30
|
+
console.dir(data, { depth: null });
|
|
31
|
+
}
|
|
32
|
+
},
|
|
33
|
+
[GraphEvents.ON_RUN_STEP_DELTA]: {
|
|
34
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
35
|
+
console.log('====== ON_RUN_STEP_DELTA ======');
|
|
36
|
+
console.dir(data, { depth: null });
|
|
37
|
+
}
|
|
38
|
+
},
|
|
39
|
+
[GraphEvents.ON_MESSAGE_DELTA]: {
|
|
40
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
41
|
+
console.log('====== ON_MESSAGE_DELTA ======');
|
|
42
|
+
console.dir(data, { depth: null });
|
|
43
|
+
}
|
|
44
|
+
},
|
|
45
|
+
[GraphEvents.TOOL_START]: {
|
|
46
|
+
handle: (_event: string, data: t.StreamEventData, metadata?: Record<string, unknown>): void => {
|
|
47
|
+
console.log('====== TOOL_START ======');
|
|
48
|
+
console.dir(data, { depth: null });
|
|
49
|
+
}
|
|
50
|
+
},
|
|
51
|
+
// [GraphEvents.LLM_STREAM]: new LLMStreamHandler(),
|
|
52
|
+
// [GraphEvents.LLM_START]: {
|
|
53
|
+
// handle: (_event: string, data: t.StreamEventData): void => {
|
|
54
|
+
// console.log('====== LLM_START ======');
|
|
55
|
+
// console.dir(data, { depth: null });
|
|
56
|
+
// }
|
|
57
|
+
// },
|
|
58
|
+
// [GraphEvents.LLM_END]: {
|
|
59
|
+
// handle: (_event: string, data: t.StreamEventData): void => {
|
|
60
|
+
// console.log('====== LLM_END ======');
|
|
61
|
+
// console.dir(data, { depth: null });
|
|
62
|
+
// }
|
|
63
|
+
// },
|
|
64
|
+
/*
|
|
65
|
+
[GraphEvents.CHAIN_START]: {
|
|
66
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
67
|
+
console.log('====== CHAIN_START ======');
|
|
68
|
+
// console.dir(data, { depth: null });
|
|
69
|
+
}
|
|
70
|
+
},
|
|
71
|
+
[GraphEvents.CHAIN_END]: {
|
|
72
|
+
handle: (_event: string, data: t.StreamEventData): void => {
|
|
73
|
+
console.log('====== CHAIN_END ======');
|
|
74
|
+
// console.dir(data, { depth: null });
|
|
75
|
+
}
|
|
76
|
+
},
|
|
77
|
+
*/
|
|
78
|
+
// [GraphEvents.CHAT_MODEL_START]: {
|
|
79
|
+
// handle: (_event: string, _data: t.StreamEventData): void => {
|
|
80
|
+
// console.log('====== CHAT_MODEL_START ======');
|
|
81
|
+
// console.dir(_data, { depth: null });
|
|
82
|
+
// // Intentionally left empty
|
|
83
|
+
// }
|
|
84
|
+
// },
|
|
85
|
+
};
|
|
86
|
+
|
|
87
|
+
const llmConfig = getLLMConfig(provider);
|
|
88
|
+
|
|
89
|
+
const getWeather = tool(async ({ location }) => {
|
|
90
|
+
if (location === "SAN FRANCISCO") {
|
|
91
|
+
return "It's 60 degrees and foggy";
|
|
92
|
+
} else if (location.toLowerCase() === "san francisco") {
|
|
93
|
+
throw new Error("Input queries must be all capitals");
|
|
94
|
+
} else {
|
|
95
|
+
throw new Error("Invalid input.");
|
|
96
|
+
}
|
|
97
|
+
}, {
|
|
98
|
+
name: "get_weather",
|
|
99
|
+
description: "Call to get the current weather",
|
|
100
|
+
schema: z.object({
|
|
101
|
+
location: z.string(),
|
|
102
|
+
}),
|
|
103
|
+
});
|
|
104
|
+
|
|
105
|
+
const run = await Run.create<t.IState>({
|
|
106
|
+
graphConfig: {
|
|
107
|
+
type: 'standard',
|
|
108
|
+
llmConfig,
|
|
109
|
+
tools: [getWeather],
|
|
110
|
+
// tools: [new TavilySearchResults()],
|
|
111
|
+
},
|
|
112
|
+
customHandlers,
|
|
113
|
+
});
|
|
114
|
+
|
|
115
|
+
const config = {
|
|
116
|
+
configurable: {
|
|
117
|
+
provider,
|
|
118
|
+
thread_id: 'conversation-num-1',
|
|
119
|
+
instructions: 'You are a friendly AI assistant. Always address the user by their name.',
|
|
120
|
+
additional_instructions: `The user's name is ${userName} and they are located in ${location}.`
|
|
121
|
+
},
|
|
122
|
+
streamMode: 'values',
|
|
123
|
+
version: 'v2' as const,
|
|
124
|
+
};
|
|
125
|
+
|
|
126
|
+
console.log(' Test 1: Initial greeting');
|
|
127
|
+
|
|
128
|
+
conversationHistory.push(new HumanMessage(`Hi I'm ${userName}.`));
|
|
129
|
+
let inputs = {
|
|
130
|
+
messages: conversationHistory,
|
|
131
|
+
};
|
|
132
|
+
const contentParts = await run.processStream(inputs, config, {
|
|
133
|
+
[Callback.TOOL_ERROR]: (graph, error, toolId) => {
|
|
134
|
+
console.error(`Tool ${toolId} failed with error: ${error.message}`);
|
|
135
|
+
},
|
|
136
|
+
});
|
|
137
|
+
const finalMessages = run.getRunMessages();
|
|
138
|
+
if (finalMessages) {
|
|
139
|
+
conversationHistory.push(...finalMessages);
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
console.log(' Test 2: Weather query');
|
|
143
|
+
|
|
144
|
+
const userMessage = `
|
|
145
|
+
Make a search for the weather in ${location} today, which is ${currentDate}.
|
|
146
|
+
Make sure to always refer to me by name.
|
|
147
|
+
After giving me a thorough summary, tell me a joke about the weather forecast we went over.
|
|
148
|
+
`;
|
|
149
|
+
|
|
150
|
+
conversationHistory.push(new HumanMessage(userMessage));
|
|
151
|
+
|
|
152
|
+
inputs = {
|
|
153
|
+
messages: conversationHistory,
|
|
154
|
+
};
|
|
155
|
+
const contentParts2 = await run.processStream(inputs, config);
|
|
156
|
+
const finalMessages2 = run.getRunMessages();
|
|
157
|
+
if (finalMessages2) {
|
|
158
|
+
conversationHistory.push(...finalMessages2);
|
|
159
|
+
console.dir(conversationHistory, { depth: null });
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
process.on('unhandledRejection', (reason, promise) => {
|
|
164
|
+
console.error('Unhandled Rejection at:', promise, 'reason:', reason);
|
|
165
|
+
console.log('Conversation history:');
|
|
166
|
+
console.dir(conversationHistory, { depth: null });
|
|
167
|
+
process.exit(1);
|
|
168
|
+
});
|
|
169
|
+
|
|
170
|
+
testStandardStreaming().catch((err) => {
|
|
171
|
+
console.error(err);
|
|
172
|
+
console.log('Conversation history:');
|
|
173
|
+
console.dir(conversationHistory, { depth: null });
|
|
174
|
+
process.exit(1);
|
|
175
|
+
});
|