@librechat/agents 2.4.322 → 3.0.0-rc10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/agents/AgentContext.cjs +218 -0
- package/dist/cjs/agents/AgentContext.cjs.map +1 -0
- package/dist/cjs/common/enum.cjs +15 -5
- package/dist/cjs/common/enum.cjs.map +1 -1
- package/dist/cjs/events.cjs +10 -6
- package/dist/cjs/events.cjs.map +1 -1
- package/dist/cjs/graphs/Graph.cjs +309 -213
- package/dist/cjs/graphs/Graph.cjs.map +1 -1
- package/dist/cjs/graphs/MultiAgentGraph.cjs +507 -0
- package/dist/cjs/graphs/MultiAgentGraph.cjs.map +1 -0
- package/dist/cjs/llm/anthropic/index.cjs +54 -9
- package/dist/cjs/llm/anthropic/index.cjs.map +1 -1
- package/dist/cjs/llm/anthropic/types.cjs.map +1 -1
- package/dist/cjs/llm/anthropic/utils/message_inputs.cjs +52 -6
- package/dist/cjs/llm/anthropic/utils/message_inputs.cjs.map +1 -1
- package/dist/cjs/llm/anthropic/utils/message_outputs.cjs +22 -2
- package/dist/cjs/llm/anthropic/utils/message_outputs.cjs.map +1 -1
- package/dist/cjs/llm/anthropic/utils/tools.cjs +29 -0
- package/dist/cjs/llm/anthropic/utils/tools.cjs.map +1 -0
- package/dist/cjs/llm/google/index.cjs +144 -0
- package/dist/cjs/llm/google/index.cjs.map +1 -0
- package/dist/cjs/llm/google/utils/common.cjs +477 -0
- package/dist/cjs/llm/google/utils/common.cjs.map +1 -0
- package/dist/cjs/llm/ollama/index.cjs +67 -0
- package/dist/cjs/llm/ollama/index.cjs.map +1 -0
- package/dist/cjs/llm/ollama/utils.cjs +158 -0
- package/dist/cjs/llm/ollama/utils.cjs.map +1 -0
- package/dist/cjs/llm/openai/index.cjs +422 -3
- package/dist/cjs/llm/openai/index.cjs.map +1 -1
- package/dist/cjs/llm/openai/utils/index.cjs +672 -0
- package/dist/cjs/llm/openai/utils/index.cjs.map +1 -0
- package/dist/cjs/llm/providers.cjs +15 -15
- package/dist/cjs/llm/providers.cjs.map +1 -1
- package/dist/cjs/llm/text.cjs +14 -3
- package/dist/cjs/llm/text.cjs.map +1 -1
- package/dist/cjs/llm/vertexai/index.cjs +330 -0
- package/dist/cjs/llm/vertexai/index.cjs.map +1 -0
- package/dist/cjs/main.cjs +11 -0
- package/dist/cjs/main.cjs.map +1 -1
- package/dist/cjs/run.cjs +137 -85
- package/dist/cjs/run.cjs.map +1 -1
- package/dist/cjs/stream.cjs +86 -52
- package/dist/cjs/stream.cjs.map +1 -1
- package/dist/cjs/tools/ToolNode.cjs +10 -4
- package/dist/cjs/tools/ToolNode.cjs.map +1 -1
- package/dist/cjs/tools/handlers.cjs +119 -13
- package/dist/cjs/tools/handlers.cjs.map +1 -1
- package/dist/cjs/tools/search/anthropic.cjs +40 -0
- package/dist/cjs/tools/search/anthropic.cjs.map +1 -0
- package/dist/cjs/tools/search/firecrawl.cjs +55 -9
- package/dist/cjs/tools/search/firecrawl.cjs.map +1 -1
- package/dist/cjs/tools/search/format.cjs +6 -6
- package/dist/cjs/tools/search/format.cjs.map +1 -1
- package/dist/cjs/tools/search/rerankers.cjs +7 -29
- package/dist/cjs/tools/search/rerankers.cjs.map +1 -1
- package/dist/cjs/tools/search/search.cjs +86 -16
- package/dist/cjs/tools/search/search.cjs.map +1 -1
- package/dist/cjs/tools/search/tool.cjs +4 -2
- package/dist/cjs/tools/search/tool.cjs.map +1 -1
- package/dist/cjs/tools/search/utils.cjs +1 -1
- package/dist/cjs/tools/search/utils.cjs.map +1 -1
- package/dist/cjs/utils/events.cjs +31 -0
- package/dist/cjs/utils/events.cjs.map +1 -0
- package/dist/cjs/utils/title.cjs +57 -21
- package/dist/cjs/utils/title.cjs.map +1 -1
- package/dist/cjs/utils/tokens.cjs +54 -7
- package/dist/cjs/utils/tokens.cjs.map +1 -1
- package/dist/esm/agents/AgentContext.mjs +216 -0
- package/dist/esm/agents/AgentContext.mjs.map +1 -0
- package/dist/esm/common/enum.mjs +16 -6
- package/dist/esm/common/enum.mjs.map +1 -1
- package/dist/esm/events.mjs +10 -6
- package/dist/esm/events.mjs.map +1 -1
- package/dist/esm/graphs/Graph.mjs +311 -215
- package/dist/esm/graphs/Graph.mjs.map +1 -1
- package/dist/esm/graphs/MultiAgentGraph.mjs +505 -0
- package/dist/esm/graphs/MultiAgentGraph.mjs.map +1 -0
- package/dist/esm/llm/anthropic/index.mjs +54 -9
- package/dist/esm/llm/anthropic/index.mjs.map +1 -1
- package/dist/esm/llm/anthropic/types.mjs.map +1 -1
- package/dist/esm/llm/anthropic/utils/message_inputs.mjs +52 -6
- package/dist/esm/llm/anthropic/utils/message_inputs.mjs.map +1 -1
- package/dist/esm/llm/anthropic/utils/message_outputs.mjs +22 -2
- package/dist/esm/llm/anthropic/utils/message_outputs.mjs.map +1 -1
- package/dist/esm/llm/anthropic/utils/tools.mjs +27 -0
- package/dist/esm/llm/anthropic/utils/tools.mjs.map +1 -0
- package/dist/esm/llm/google/index.mjs +142 -0
- package/dist/esm/llm/google/index.mjs.map +1 -0
- package/dist/esm/llm/google/utils/common.mjs +471 -0
- package/dist/esm/llm/google/utils/common.mjs.map +1 -0
- package/dist/esm/llm/ollama/index.mjs +65 -0
- package/dist/esm/llm/ollama/index.mjs.map +1 -0
- package/dist/esm/llm/ollama/utils.mjs +155 -0
- package/dist/esm/llm/ollama/utils.mjs.map +1 -0
- package/dist/esm/llm/openai/index.mjs +421 -4
- package/dist/esm/llm/openai/index.mjs.map +1 -1
- package/dist/esm/llm/openai/utils/index.mjs +666 -0
- package/dist/esm/llm/openai/utils/index.mjs.map +1 -0
- package/dist/esm/llm/providers.mjs +5 -5
- package/dist/esm/llm/providers.mjs.map +1 -1
- package/dist/esm/llm/text.mjs +14 -3
- package/dist/esm/llm/text.mjs.map +1 -1
- package/dist/esm/llm/vertexai/index.mjs +328 -0
- package/dist/esm/llm/vertexai/index.mjs.map +1 -0
- package/dist/esm/main.mjs +6 -5
- package/dist/esm/main.mjs.map +1 -1
- package/dist/esm/run.mjs +138 -87
- package/dist/esm/run.mjs.map +1 -1
- package/dist/esm/stream.mjs +88 -55
- package/dist/esm/stream.mjs.map +1 -1
- package/dist/esm/tools/ToolNode.mjs +10 -4
- package/dist/esm/tools/ToolNode.mjs.map +1 -1
- package/dist/esm/tools/handlers.mjs +119 -15
- package/dist/esm/tools/handlers.mjs.map +1 -1
- package/dist/esm/tools/search/anthropic.mjs +37 -0
- package/dist/esm/tools/search/anthropic.mjs.map +1 -0
- package/dist/esm/tools/search/firecrawl.mjs +55 -9
- package/dist/esm/tools/search/firecrawl.mjs.map +1 -1
- package/dist/esm/tools/search/format.mjs +7 -7
- package/dist/esm/tools/search/format.mjs.map +1 -1
- package/dist/esm/tools/search/rerankers.mjs +7 -29
- package/dist/esm/tools/search/rerankers.mjs.map +1 -1
- package/dist/esm/tools/search/search.mjs +86 -16
- package/dist/esm/tools/search/search.mjs.map +1 -1
- package/dist/esm/tools/search/tool.mjs +4 -2
- package/dist/esm/tools/search/tool.mjs.map +1 -1
- package/dist/esm/tools/search/utils.mjs +1 -1
- package/dist/esm/tools/search/utils.mjs.map +1 -1
- package/dist/esm/utils/events.mjs +29 -0
- package/dist/esm/utils/events.mjs.map +1 -0
- package/dist/esm/utils/title.mjs +57 -22
- package/dist/esm/utils/title.mjs.map +1 -1
- package/dist/esm/utils/tokens.mjs +54 -8
- package/dist/esm/utils/tokens.mjs.map +1 -1
- package/dist/types/agents/AgentContext.d.ts +91 -0
- package/dist/types/common/enum.d.ts +17 -7
- package/dist/types/events.d.ts +5 -4
- package/dist/types/graphs/Graph.d.ts +64 -67
- package/dist/types/graphs/MultiAgentGraph.d.ts +47 -0
- package/dist/types/graphs/index.d.ts +1 -0
- package/dist/types/llm/anthropic/index.d.ts +11 -0
- package/dist/types/llm/anthropic/types.d.ts +9 -3
- package/dist/types/llm/anthropic/utils/message_inputs.d.ts +1 -1
- package/dist/types/llm/anthropic/utils/output_parsers.d.ts +4 -4
- package/dist/types/llm/anthropic/utils/tools.d.ts +3 -0
- package/dist/types/llm/google/index.d.ts +13 -0
- package/dist/types/llm/google/types.d.ts +32 -0
- package/dist/types/llm/google/utils/common.d.ts +19 -0
- package/dist/types/llm/google/utils/tools.d.ts +10 -0
- package/dist/types/llm/google/utils/zod_to_genai_parameters.d.ts +14 -0
- package/dist/types/llm/ollama/index.d.ts +7 -0
- package/dist/types/llm/ollama/utils.d.ts +7 -0
- package/dist/types/llm/openai/index.d.ts +82 -3
- package/dist/types/llm/openai/types.d.ts +10 -0
- package/dist/types/llm/openai/utils/index.d.ts +20 -0
- package/dist/types/llm/text.d.ts +1 -1
- package/dist/types/llm/vertexai/index.d.ts +293 -0
- package/dist/types/messages/reducer.d.ts +9 -0
- package/dist/types/run.d.ts +19 -12
- package/dist/types/stream.d.ts +10 -3
- package/dist/types/tools/CodeExecutor.d.ts +2 -2
- package/dist/types/tools/ToolNode.d.ts +1 -1
- package/dist/types/tools/handlers.d.ts +17 -4
- package/dist/types/tools/search/anthropic.d.ts +16 -0
- package/dist/types/tools/search/firecrawl.d.ts +15 -0
- package/dist/types/tools/search/rerankers.d.ts +0 -1
- package/dist/types/tools/search/types.d.ts +30 -9
- package/dist/types/types/graph.d.ts +129 -15
- package/dist/types/types/llm.d.ts +25 -10
- package/dist/types/types/run.d.ts +50 -8
- package/dist/types/types/stream.d.ts +16 -2
- package/dist/types/types/tools.d.ts +1 -1
- package/dist/types/utils/events.d.ts +6 -0
- package/dist/types/utils/title.d.ts +2 -1
- package/dist/types/utils/tokens.d.ts +24 -0
- package/package.json +41 -17
- package/src/agents/AgentContext.ts +315 -0
- package/src/common/enum.ts +15 -5
- package/src/events.ts +24 -13
- package/src/graphs/Graph.ts +495 -313
- package/src/graphs/MultiAgentGraph.ts +598 -0
- package/src/graphs/index.ts +2 -1
- package/src/llm/anthropic/Jacob_Lee_Resume_2023.pdf +0 -0
- package/src/llm/anthropic/index.ts +78 -13
- package/src/llm/anthropic/llm.spec.ts +491 -115
- package/src/llm/anthropic/types.ts +39 -3
- package/src/llm/anthropic/utils/message_inputs.ts +67 -11
- package/src/llm/anthropic/utils/message_outputs.ts +21 -2
- package/src/llm/anthropic/utils/output_parsers.ts +25 -6
- package/src/llm/anthropic/utils/tools.ts +29 -0
- package/src/llm/google/index.ts +218 -0
- package/src/llm/google/types.ts +43 -0
- package/src/llm/google/utils/common.ts +646 -0
- package/src/llm/google/utils/tools.ts +160 -0
- package/src/llm/google/utils/zod_to_genai_parameters.ts +86 -0
- package/src/llm/ollama/index.ts +89 -0
- package/src/llm/ollama/utils.ts +193 -0
- package/src/llm/openai/index.ts +641 -14
- package/src/llm/openai/types.ts +24 -0
- package/src/llm/openai/utils/index.ts +912 -0
- package/src/llm/openai/utils/isReasoningModel.test.ts +90 -0
- package/src/llm/providers.ts +10 -9
- package/src/llm/text.ts +26 -7
- package/src/llm/vertexai/index.ts +360 -0
- package/src/messages/reducer.ts +80 -0
- package/src/run.ts +196 -116
- package/src/scripts/ant_web_search.ts +158 -0
- package/src/scripts/args.ts +12 -8
- package/src/scripts/cli4.ts +29 -21
- package/src/scripts/cli5.ts +29 -21
- package/src/scripts/code_exec.ts +54 -23
- package/src/scripts/code_exec_files.ts +48 -17
- package/src/scripts/code_exec_simple.ts +46 -27
- package/src/scripts/handoff-test.ts +135 -0
- package/src/scripts/image.ts +52 -20
- package/src/scripts/multi-agent-chain.ts +278 -0
- package/src/scripts/multi-agent-conditional.ts +220 -0
- package/src/scripts/multi-agent-document-review-chain.ts +197 -0
- package/src/scripts/multi-agent-hybrid-flow.ts +310 -0
- package/src/scripts/multi-agent-parallel.ts +341 -0
- package/src/scripts/multi-agent-sequence.ts +212 -0
- package/src/scripts/multi-agent-supervisor.ts +362 -0
- package/src/scripts/multi-agent-test.ts +186 -0
- package/src/scripts/search.ts +1 -9
- package/src/scripts/simple.ts +25 -10
- package/src/scripts/test-custom-prompt-key.ts +145 -0
- package/src/scripts/test-handoff-input.ts +170 -0
- package/src/scripts/test-multi-agent-list-handoff.ts +261 -0
- package/src/scripts/test-tools-before-handoff.ts +233 -0
- package/src/scripts/tools.ts +48 -18
- package/src/specs/anthropic.simple.test.ts +150 -34
- package/src/specs/azure.simple.test.ts +325 -0
- package/src/specs/openai.simple.test.ts +140 -33
- package/src/specs/openrouter.simple.test.ts +107 -0
- package/src/specs/prune.test.ts +4 -9
- package/src/specs/reasoning.test.ts +80 -44
- package/src/specs/token-memoization.test.ts +39 -0
- package/src/stream.test.ts +94 -0
- package/src/stream.ts +143 -61
- package/src/tools/ToolNode.ts +21 -7
- package/src/tools/handlers.ts +192 -18
- package/src/tools/search/anthropic.ts +51 -0
- package/src/tools/search/firecrawl.ts +69 -20
- package/src/tools/search/format.ts +6 -8
- package/src/tools/search/rerankers.ts +7 -40
- package/src/tools/search/search.ts +97 -16
- package/src/tools/search/tool.ts +5 -2
- package/src/tools/search/types.ts +30 -10
- package/src/tools/search/utils.ts +1 -1
- package/src/types/graph.ts +318 -103
- package/src/types/llm.ts +26 -12
- package/src/types/run.ts +56 -13
- package/src/types/stream.ts +22 -1
- package/src/types/tools.ts +16 -10
- package/src/utils/events.ts +32 -0
- package/src/utils/llmConfig.ts +19 -7
- package/src/utils/title.ts +104 -30
- package/src/utils/tokens.ts +69 -10
- package/dist/types/scripts/abort.d.ts +0 -1
- package/dist/types/scripts/args.d.ts +0 -6
- package/dist/types/scripts/caching.d.ts +0 -1
- package/dist/types/scripts/cli.d.ts +0 -1
- package/dist/types/scripts/cli2.d.ts +0 -1
- package/dist/types/scripts/cli3.d.ts +0 -1
- package/dist/types/scripts/cli4.d.ts +0 -1
- package/dist/types/scripts/cli5.d.ts +0 -1
- package/dist/types/scripts/code_exec.d.ts +0 -1
- package/dist/types/scripts/code_exec_files.d.ts +0 -1
- package/dist/types/scripts/code_exec_simple.d.ts +0 -1
- package/dist/types/scripts/content.d.ts +0 -1
- package/dist/types/scripts/empty_input.d.ts +0 -1
- package/dist/types/scripts/image.d.ts +0 -1
- package/dist/types/scripts/memory.d.ts +0 -1
- package/dist/types/scripts/search.d.ts +0 -1
- package/dist/types/scripts/simple.d.ts +0 -1
- package/dist/types/scripts/stream.d.ts +0 -1
- package/dist/types/scripts/thinking.d.ts +0 -1
- package/dist/types/scripts/tools.d.ts +0 -1
- package/dist/types/specs/spec.utils.d.ts +0 -1
package/src/graphs/Graph.ts
CHANGED
@@ -4,37 +4,44 @@ import { nanoid } from 'nanoid';
 import { concat } from '@langchain/core/utils/stream';
 import { ToolNode } from '@langchain/langgraph/prebuilt';
 import { ChatVertexAI } from '@langchain/google-vertexai';
-import { START, END, StateGraph } from '@langchain/langgraph';
-import { Runnable, RunnableConfig } from '@langchain/core/runnables';
-import { dispatchCustomEvent } from '@langchain/core/callbacks/dispatch';
 import {
-
+  START,
+  END,
+  Command,
+  StateGraph,
+  Annotation,
+  messagesStateReducer,
+} from '@langchain/langgraph';
+import {
+  Runnable,
+  RunnableConfig,
+  RunnableLambda,
+} from '@langchain/core/runnables';
+import {
   ToolMessage,
   SystemMessage,
+  AIMessageChunk,
 } from '@langchain/core/messages';
 import type {
-  BaseMessage,
   BaseMessageFields,
   UsageMetadata,
+  BaseMessage,
 } from '@langchain/core/messages';
+import type { ToolCall } from '@langchain/core/messages/tool';
 import type * as t from '@/types';
 import {
-  Providers,
-  GraphEvents,
   GraphNodeKeys,
-  StepTypes,
-  Callback,
   ContentTypes,
+  GraphEvents,
+  Providers,
+  StepTypes,
 } from '@/common';
-import type { ToolCall } from '@langchain/core/messages/tool';
-import { getChatModelClass, manualToolStreamProviders } from '@/llm/providers';
-import { ToolNode as CustomToolNode, toolsCondition } from '@/tools/ToolNode';
 import {
-
+  formatAnthropicArtifactContent,
+  convertMessagesToContent,
   modifyDeltaProperties,
   formatArtifactPayload,
-
-  formatAnthropicArtifactContent,
+  createPruneMessages,
 } from '@/messages';
 import {
   resetIfNotEmpty,
@@ -43,38 +50,37 @@ import {
   joinKeys,
   sleep,
 } from '@/utils';
+import { getChatModelClass, manualToolStreamProviders } from '@/llm/providers';
+import { ToolNode as CustomToolNode, toolsCondition } from '@/tools/ToolNode';
 import { ChatOpenAI, AzureChatOpenAI } from '@/llm/openai';
+import { safeDispatchCustomEvent } from '@/utils/events';
+import { AgentContext } from '@/agents/AgentContext';
 import { createFakeStreamingLLM } from '@/llm/fake';
 import { HandlerRegistry } from '@/events';
 
 const { AGENT, TOOLS } = GraphNodeKeys;
-export type GraphNode = GraphNodeKeys | typeof START;
-export type ClientCallback<T extends unknown[]> = (
-  graph: StandardGraph,
-  ...args: T
-) => void;
-export type ClientCallbacks = {
-  [Callback.TOOL_ERROR]?: ClientCallback<[Error, string]>;
-  [Callback.TOOL_START]?: ClientCallback<unknown[]>;
-  [Callback.TOOL_END]?: ClientCallback<unknown[]>;
-};
-export type SystemCallbacks = {
-  [K in keyof ClientCallbacks]: ClientCallbacks[K] extends ClientCallback<
-    infer Args
-  >
-    ? (...args: Args) => void
-    : never;
-};
 
 export abstract class Graph<
   T extends t.BaseGraphState = t.BaseGraphState,
-
-  TNodeName extends string = string,
+  _TNodeName extends string = string,
 > {
   abstract resetValues(): void;
-  abstract
-
-
+  abstract initializeTools({
+    currentTools,
+    currentToolMap,
+  }: {
+    currentTools?: t.GraphTools;
+    currentToolMap?: t.ToolMap;
+  }): CustomToolNode<T> | ToolNode<T>;
+  abstract initializeModel({
+    currentModel,
+    tools,
+    clientOptions,
+  }: {
+    currentModel?: t.ChatModel;
+    tools?: t.GraphTools;
+    clientOptions?: t.ClientOptions;
+  }): Runnable;
   abstract getRunMessages(): BaseMessage[] | undefined;
   abstract getContentParts(): t.MessageContentComplex[] | undefined;
   abstract generateStepId(stepKey: string): [string, number];
@@ -85,28 +91,32 @@ export abstract class Graph<
   abstract checkKeyList(keyList: (string | number | undefined)[]): boolean;
   abstract getStepIdByKey(stepKey: string, index?: number): string;
   abstract getRunStep(stepId: string): t.RunStep | undefined;
-  abstract dispatchRunStep(
-
-
+  abstract dispatchRunStep(
+    stepKey: string,
+    stepDetails: t.StepDetails
+  ): Promise<string>;
+  abstract dispatchRunStepDelta(
+    id: string,
+    delta: t.ToolCallDelta
+  ): Promise<void>;
+  abstract dispatchMessageDelta(
+    id: string,
+    delta: t.MessageDelta
+  ): Promise<void>;
   abstract dispatchReasoningDelta(
     stepId: string,
     delta: t.ReasoningDelta
-  ): void
+  ): Promise<void>;
   abstract handleToolCallCompleted(
     data: t.ToolEndData,
-    metadata?: Record<string, unknown
-
-
-
-
-
-
-
-  lastToken?: string;
-  tokenTypeSwitch?: 'reasoning' | 'content';
-  reasoningKey: 'reasoning_content' | 'reasoning' = 'reasoning_content';
-  currentTokenType: ContentTypes.TEXT | ContentTypes.THINK | 'think_and_text' =
-    ContentTypes.TEXT;
+    metadata?: Record<string, unknown>,
+    omitOutput?: boolean
+  ): Promise<void>;
+
+  abstract createCallModel(
+    agentId?: string,
+    currentModel?: t.ChatModel
+  ): (state: T, config?: RunnableConfig) => Promise<Partial<T>>;
   messageStepHasToolCalls: Map<string, boolean> = new Map();
   messageIdsByStepKey: Map<string, string> = new Map();
   prelimMessageIdsByStepKey: Map<string, string> = new Map();
@@ -115,95 +125,52 @@
   stepKeyIds: Map<string, string[]> = new Map<string, string[]>();
   contentIndexMap: Map<string, number> = new Map();
   toolCallStepIds: Map<string, string> = new Map();
-  currentUsage: Partial<UsageMetadata> | undefined;
-  indexTokenCountMap: Record<string, number | undefined> = {};
-  maxContextTokens: number | undefined;
-  pruneMessages?: ReturnType<typeof createPruneMessages>;
-  /** The amount of time that should pass before another consecutive API call */
-  streamBuffer: number | undefined;
-  tokenCounter?: t.TokenCounter;
   signal?: AbortSignal;
+  /** Set of invoked tool call IDs from non-message run steps completed mid-run, if any */
+  invokedToolIds?: Set<string>;
+  handlerRegistry: HandlerRegistry | undefined;
 }
 
-export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
-
-
-
-  /** The last recorded timestamp that a stream API call was invoked */
-  lastStreamCall: number | undefined;
-  handlerRegistry: HandlerRegistry | undefined;
-  systemMessage: SystemMessage | undefined;
+export class StandardGraph extends Graph<t.BaseGraphState, t.GraphNode> {
+  overrideModel?: t.ChatModel;
+  /** Optional compile options passed into workflow.compile() */
+  compileOptions?: t.CompileOptions | undefined;
   messages: BaseMessage[] = [];
   runId: string | undefined;
-  tools?: t.GenericTool[];
-  toolMap?: t.ToolMap;
   startIndex: number = 0;
-  provider: Providers;
-  toolEnd: boolean;
   signal?: AbortSignal;
+  /** Map of agent contexts by agent ID */
+  agentContexts: Map<string, AgentContext> = new Map();
+  /** Default agent ID to use */
+  defaultAgentId: string;
 
   constructor({
+    // parent-level graph inputs
     runId,
-    tools,
     signal,
-
-
-
-    instructions,
-    reasoningKey,
-    clientOptions,
-    toolEnd = false,
-    additional_instructions = '',
+    agents,
+    tokenCounter,
+    indexTokenCountMap,
   }: t.StandardGraphInput) {
     super();
     this.runId = runId;
-    this.tools = tools;
    this.signal = signal;
-    this.toolEnd = toolEnd;
-    this.toolMap = toolMap;
-    this.provider = provider;
-    this.streamBuffer = streamBuffer;
-    this.clientOptions = clientOptions;
-    this.graphState = this.createGraphState();
-    this.boundModel = this.initializeModel();
-    if (reasoningKey) {
-      this.reasoningKey = reasoningKey;
-    }
 
-
-
-    if (additional_instructions) {
-      finalInstructions =
-        finalInstructions != null && finalInstructions
-          ? `${finalInstructions}\n\n${additional_instructions}`
-          : additional_instructions;
+    if (agents.length === 0) {
+      throw new Error('At least one agent configuration is required');
    }
 
-
-
-
-
-
-
-      ).clientOptions?.defaultHeaders?.['anthropic-beta']?.includes(
-        'prompt-caching'
-      ) ??
-        false)
-    ) {
-      finalInstructions = {
-        content: [
-          {
-            type: 'text',
-            text: instructions,
-            cache_control: { type: 'ephemeral' },
-          },
-        ],
-      };
-    }
+    for (const agentConfig of agents) {
+      const agentContext = AgentContext.fromConfig(
+        agentConfig,
+        tokenCounter,
+        indexTokenCountMap
+      );
 
-
-    this.systemMessage = new SystemMessage(finalInstructions);
+      this.agentContexts.set(agentConfig.agentId, agentContext);
    }
+
+    this.defaultAgentId = agents[0].agentId;
  }
 
  /* Init */
@@ -222,23 +189,17 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
      new Map()
    );
    this.messageStepHasToolCalls = resetIfNotEmpty(
-      this.
+      this.messageStepHasToolCalls,
      new Map()
    );
    this.prelimMessageIdsByStepKey = resetIfNotEmpty(
      this.prelimMessageIdsByStepKey,
      new Map()
    );
-    this.
-
-
-
-    this.lastToken = resetIfNotEmpty(this.lastToken, undefined);
-    this.tokenTypeSwitch = resetIfNotEmpty(this.tokenTypeSwitch, undefined);
-    this.indexTokenCountMap = resetIfNotEmpty(this.indexTokenCountMap, {});
-    this.currentUsage = resetIfNotEmpty(this.currentUsage, undefined);
-    this.tokenCounter = resetIfNotEmpty(this.tokenCounter, undefined);
-    this.maxContextTokens = resetIfNotEmpty(this.maxContextTokens, undefined);
+    this.invokedToolIds = resetIfNotEmpty(this.invokedToolIds, undefined);
+    for (const context of this.agentContexts.values()) {
+      context.reset();
+    }
  }
 
  /* Run Step Processing */
@@ -251,6 +212,33 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
    return undefined;
  }
 
+  getAgentContext(metadata: Record<string, unknown> | undefined): AgentContext {
+    if (!metadata) {
+      throw new Error('No metadata provided to retrieve agent context');
+    }
+
+    const currentNode = metadata.langgraph_node as string;
+    if (!currentNode) {
+      throw new Error(
+        'No langgraph_node in metadata to retrieve agent context'
+      );
+    }
+
+    let agentId: string | undefined;
+    if (currentNode.startsWith(AGENT)) {
+      agentId = currentNode.substring(AGENT.length);
+    } else if (currentNode.startsWith(TOOLS)) {
+      agentId = currentNode.substring(TOOLS.length);
+    }
+
+    const agentContext = this.agentContexts.get(agentId ?? '');
+    if (!agentContext) {
+      throw new Error(`No agent context found for agent ID ${agentId}`);
+    }
+
+    return agentContext;
+  }
+
  getStepKey(metadata: Record<string, unknown> | undefined): string {
    if (!metadata) return '';
 
@@ -304,13 +292,19 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
      metadata.langgraph_step as number,
      metadata.checkpoint_ns as string,
    ];
+
+    const agentContext = this.getAgentContext(metadata);
    if (
-
-
+      agentContext.currentTokenType === ContentTypes.THINK ||
+      agentContext.currentTokenType === 'think_and_text'
    ) {
      keyList.push('reasoning');
    }
 
+    if (this.invokedToolIds != null && this.invokedToolIds.size > 0) {
+      keyList.push(this.invokedToolIds.size + '');
+    }
+
    return keyList;
  }
 
@@ -330,87 +324,126 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
 
  /* Graph */
 
-
-
-
-
-
-
-
-
+  createSystemRunnable({
+    provider,
+    clientOptions,
+    instructions,
+    additional_instructions,
+  }: {
+    provider?: Providers;
+    clientOptions?: t.ClientOptions;
+    instructions?: string;
+    additional_instructions?: string;
+  }): t.SystemRunnable | undefined {
+    let finalInstructions: string | BaseMessageFields | undefined =
+      instructions;
+    if (additional_instructions != null && additional_instructions !== '') {
+      finalInstructions =
+        finalInstructions != null && finalInstructions
+          ? `${finalInstructions}\n\n${additional_instructions}`
+          : additional_instructions;
+    }
 
-
-
-
-
-
-
-
-
-
+    if (
+      finalInstructions != null &&
+      finalInstructions &&
+      provider === Providers.ANTHROPIC &&
+      ((
+        (clientOptions as t.AnthropicClientOptions).clientOptions
+          ?.defaultHeaders as Record<string, string> | undefined
+      )?.['anthropic-beta']?.includes('prompt-caching') ??
+        false)
+    ) {
+      finalInstructions = {
+        content: [
+          {
+            type: 'text',
+            text: instructions,
+            cache_control: { type: 'ephemeral' },
+          },
+        ],
+      };
+    }
+
+    if (finalInstructions != null && finalInstructions !== '') {
+      const systemMessage = new SystemMessage(finalInstructions);
+      return RunnableLambda.from((messages: BaseMessage[]) => {
+        return [systemMessage, ...messages];
+      }).withConfig({ runName: 'prompt' });
+    }
  }
 
-  initializeTools(
-
-
+  initializeTools({
+    currentTools,
+    currentToolMap,
+  }: {
+    currentTools?: t.GraphTools;
+    currentToolMap?: t.ToolMap;
+  }): CustomToolNode<t.BaseGraphState> | ToolNode<t.BaseGraphState> {
    return new CustomToolNode<t.BaseGraphState>({
-      tools:
-      toolMap:
+      tools: (currentTools as t.GenericTool[] | undefined) ?? [],
+      toolMap: currentToolMap,
      toolCallStepIds: this.toolCallStepIds,
      errorHandler: (data, metadata) =>
        StandardGraph.handleToolCallErrorStatic(this, data, metadata),
    });
  }
 
-  initializeModel(
-
-
+  initializeModel({
+    provider,
+    tools,
+    clientOptions,
+  }: {
+    provider: Providers;
+    tools?: t.GraphTools;
+    clientOptions?: t.ClientOptions;
+  }): Runnable {
+    const ChatModelClass = getChatModelClass(provider);
+    const model = new ChatModelClass(clientOptions ?? {});
 
    if (
-      isOpenAILike(
+      isOpenAILike(provider) &&
      (model instanceof ChatOpenAI || model instanceof AzureChatOpenAI)
    ) {
-      model.temperature = (
+      model.temperature = (clientOptions as t.OpenAIClientOptions)
        .temperature as number;
-      model.topP = (
-      model.frequencyPenalty = (
+      model.topP = (clientOptions as t.OpenAIClientOptions).topP as number;
+      model.frequencyPenalty = (clientOptions as t.OpenAIClientOptions)
        .frequencyPenalty as number;
-      model.presencePenalty = (
+      model.presencePenalty = (clientOptions as t.OpenAIClientOptions)
        .presencePenalty as number;
-      model.n = (
+      model.n = (clientOptions as t.OpenAIClientOptions).n as number;
    } else if (
-
+      provider === Providers.VERTEXAI &&
      model instanceof ChatVertexAI
    ) {
-      model.temperature = (
+      model.temperature = (clientOptions as t.VertexAIClientOptions)
        .temperature as number;
-      model.topP = (
-
-      model.
-        .topK as number;
-      model.topLogprobs = (this.clientOptions as t.VertexAIClientOptions)
+      model.topP = (clientOptions as t.VertexAIClientOptions).topP as number;
+      model.topK = (clientOptions as t.VertexAIClientOptions).topK as number;
+      model.topLogprobs = (clientOptions as t.VertexAIClientOptions)
        .topLogprobs as number;
-      model.frequencyPenalty = (
+      model.frequencyPenalty = (clientOptions as t.VertexAIClientOptions)
        .frequencyPenalty as number;
-      model.presencePenalty = (
+      model.presencePenalty = (clientOptions as t.VertexAIClientOptions)
        .presencePenalty as number;
-      model.maxOutputTokens = (
+      model.maxOutputTokens = (clientOptions as t.VertexAIClientOptions)
        .maxOutputTokens as number;
    }
 
-    if (!
+    if (!tools || tools.length === 0) {
      return model as unknown as Runnable;
    }
 
-    return (model as t.ModelWithTools).bindTools(
+    return (model as t.ModelWithTools).bindTools(tools);
  }
+
  overrideTestModel(
    responses: string[],
    sleep?: number,
    toolCalls?: ToolCall[]
  ): void {
-    this.
+    this.overrideModel = createFakeStreamingLLM({
      responses,
      sleep,
      toolCalls,
@@ -418,42 +451,84 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  }
 
  getNewModel({
-
-
+    provider,
+    clientOptions,
  }: {
+    provider: Providers;
    clientOptions?: t.ClientOptions;
-    omitOriginalOptions?: Set<string>;
  }): t.ChatModelInstance {
-    const ChatModelClass = getChatModelClass(
-
-      ? Object.fromEntries(
-        Object.entries(this.clientOptions).filter(
-          ([key]) => !omitOriginalOptions.has(key)
-        )
-      )
-      : this.clientOptions;
-    const options = Object.assign(_options, clientOptions);
-    return new ChatModelClass(options);
+    const ChatModelClass = getChatModelClass(provider);
+    return new ChatModelClass(clientOptions ?? {});
  }
 
-
+  getUsageMetadata(
+    finalMessage?: BaseMessage
+  ): Partial<UsageMetadata> | undefined {
    if (
      finalMessage &&
      'usage_metadata' in finalMessage &&
      finalMessage.usage_metadata != null
    ) {
-
+      return finalMessage.usage_metadata as Partial<UsageMetadata>;
    }
  }
 
-
+  /** Execute model invocation with streaming support */
+  private async attemptInvoke(
+    {
+      currentModel,
+      finalMessages,
+      provider,
+      tools,
+    }: {
+      currentModel?: t.ChatModel;
+      finalMessages: BaseMessage[];
+      provider: Providers;
+      tools?: t.GraphTools;
+    },
+    config?: RunnableConfig
+  ): Promise<Partial<t.BaseGraphState>> {
+    const model = this.overrideModel ?? currentModel;
+    if (!model) {
+      throw new Error('No model found');
+    }
+
+    if ((tools?.length ?? 0) > 0 && manualToolStreamProviders.has(provider)) {
+      if (!model.stream) {
+        throw new Error('Model does not support stream');
+      }
+      const stream = await model.stream(finalMessages, config);
+      let finalChunk: AIMessageChunk | undefined;
+      for await (const chunk of stream) {
+        await safeDispatchCustomEvent(
+          GraphEvents.CHAT_MODEL_STREAM,
+          { chunk, emitted: true },
+          config
+        );
+        finalChunk = finalChunk ? concat(finalChunk, chunk) : chunk;
+      }
+      finalChunk = modifyDeltaProperties(provider, finalChunk);
+      return { messages: [finalChunk as AIMessageChunk] };
+    } else {
+      const finalMessage = await model.invoke(finalMessages, config);
+      if ((finalMessage.tool_calls?.length ?? 0) > 0) {
+        finalMessage.tool_calls = finalMessage.tool_calls?.filter(
+          (tool_call: ToolCall) => !!tool_call.name
+        );
+      }
+      return { messages: [finalMessage] };
+    }
+  }
+
+  cleanupSignalListener(currentModel?: t.ChatModel): void {
    if (!this.signal) {
      return;
    }
-
+    const model = this.overrideModel ?? currentModel;
+    if (!model) {
      return;
    }
-    const client = (
+    const client = (model as ChatOpenAI | undefined)?.exposedClient;
    if (!client?.abortHandler) {
      return;
    }
@@ -461,18 +536,30 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
    client.abortHandler = undefined;
  }
 
-  createCallModel() {
+  createCallModel(agentId = 'default', currentModel?: t.ChatModel) {
    return async (
      state: t.BaseGraphState,
      config?: RunnableConfig
    ): Promise<Partial<t.BaseGraphState>> => {
-
-
-
+      /**
+       * Get agent context - it must exist by this point
+       */
+      const agentContext = this.agentContexts.get(agentId);
+      if (!agentContext) {
+        throw new Error(`Agent context not found for agentId: ${agentId}`);
+      }
+
+      const model = this.overrideModel ?? currentModel;
+      if (!model) {
        throw new Error('No Graph model found');
      }
-      if (!config
-        throw new Error(
+      if (!config) {
+        throw new Error('No config provided');
+      }
+
+      // Ensure token calculations are complete before proceeding
+      if (agentContext.tokenCalculationPromise) {
+        await agentContext.tokenCalculationPromise;
      }
      if (!config.signal) {
        config.signal = this.signal;
@@ -482,35 +569,35 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
 
      let messagesToUse = messages;
      if (
-        !
-
-
-
+        !agentContext.pruneMessages &&
+        agentContext.tokenCounter &&
+        agentContext.maxContextTokens != null &&
+        agentContext.indexTokenCountMap[0] != null
      ) {
        const isAnthropicWithThinking =
-          (
-            (
+          (agentContext.provider === Providers.ANTHROPIC &&
+            (agentContext.clientOptions as t.AnthropicClientOptions).thinking !=
              null) ||
-          (
-            (
+          (agentContext.provider === Providers.BEDROCK &&
+            (agentContext.clientOptions as t.BedrockAnthropicInput)
              .additionalModelRequestFields?.['thinking'] != null);
 
-
-          provider: this.provider,
-          indexTokenCountMap: this.indexTokenCountMap,
-          maxTokens: this.maxContextTokens,
-          tokenCounter: this.tokenCounter,
+        agentContext.pruneMessages = createPruneMessages({
          startIndex: this.startIndex,
+          provider: agentContext.provider,
+          tokenCounter: agentContext.tokenCounter,
+          maxTokens: agentContext.maxContextTokens,
          thinkingEnabled: isAnthropicWithThinking,
+          indexTokenCountMap: agentContext.indexTokenCountMap,
        });
      }
-      if (
-        const { context, indexTokenCountMap } =
+      if (agentContext.pruneMessages) {
+        const { context, indexTokenCountMap } = agentContext.pruneMessages({
          messages,
-          usageMetadata:
+          usageMetadata: agentContext.currentUsage,
          // startOnMessageType: 'human',
        });
-
+        agentContext.indexTokenCountMap = indexTokenCountMap;
        messagesToUse = context;
      }
 
@@ -525,7 +612,7 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
          : null;
 
      if (
-        provider === Providers.BEDROCK &&
+        agentContext.provider === Providers.BEDROCK &&
        lastMessageX instanceof AIMessageChunk &&
        lastMessageY instanceof ToolMessage &&
        typeof lastMessageX.content === 'string'
@@ -535,92 +622,165 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
 
      const isLatestToolMessage = lastMessageY instanceof ToolMessage;
 
-      if (
+      if (
+        isLatestToolMessage &&
+        agentContext.provider === Providers.ANTHROPIC
+      ) {
        formatAnthropicArtifactContent(finalMessages);
      } else if (
        isLatestToolMessage &&
-        (isOpenAILike(provider) ||
+        (isOpenAILike(agentContext.provider) ||
+          isGoogleLike(agentContext.provider))
      ) {
        formatArtifactPayload(finalMessages);
      }
 
-      if (
-
-
+      if (
+        agentContext.lastStreamCall != null &&
+        agentContext.streamBuffer != null
+      ) {
+        const timeSinceLastCall = Date.now() - agentContext.lastStreamCall;
+        if (timeSinceLastCall < agentContext.streamBuffer) {
          const timeToWait =
-            Math.ceil((
+            Math.ceil((agentContext.streamBuffer - timeSinceLastCall) / 1000) *
+            1000;
          await sleep(timeToWait);
        }
      }
 
-
+      agentContext.lastStreamCall = Date.now();
 
-      let result: Partial<t.BaseGraphState
-
-      (
-
-
-
-
-
-
-
-      }
-
+      let result: Partial<t.BaseGraphState> | undefined;
+      const fallbacks =
+        (agentContext.clientOptions as t.LLMConfig | undefined)?.fallbacks ??
+        [];
+      try {
+        result = await this.attemptInvoke(
+          {
+            currentModel: model,
+            finalMessages,
+            provider: agentContext.provider,
+            tools: agentContext.tools,
+          },
+          config
+        );
+      } catch (primaryError) {
+        let lastError: unknown = primaryError;
+        for (const fb of fallbacks) {
+          try {
+            let model = this.getNewModel({
+              provider: fb.provider,
+              clientOptions: fb.clientOptions,
+            });
+            const bindableTools = agentContext.tools;
+            model = (
+              !bindableTools || bindableTools.length === 0
+                ? model
+                : model.bindTools(bindableTools)
+            ) as t.ChatModelInstance;
+            result = await this.attemptInvoke(
+              {
+                currentModel: model,
+                finalMessages,
+                provider: fb.provider,
+                tools: agentContext.tools,
+              },
+              config
+            );
+            lastError = undefined;
+            break;
+          } catch (e) {
+            lastError = e;
+            continue;
          }
        }
-
-
-        result = { messages: [finalChunk as AIMessageChunk] };
-      } else {
-        const finalMessage = (await this.boundModel.invoke(
-          finalMessages,
-          config
-        )) as AIMessageChunk;
-        if ((finalMessage.tool_calls?.length ?? 0) > 0) {
-          finalMessage.tool_calls = finalMessage.tool_calls?.filter(
-            (tool_call) => {
-              if (!tool_call.name) {
-                return false;
-              }
-              return true;
-            }
-          );
+        if (lastError !== undefined) {
+          throw lastError;
        }
-        result = { messages: [finalMessage] };
      }
 
-
+      if (!result) {
+        throw new Error('No result after model invocation');
+      }
+      agentContext.currentUsage = this.getUsageMetadata(result.messages?.[0]);
      this.cleanupSignalListener();
      return result;
    };
  }
 
-
+  createAgentNode(agentId: string): t.CompiledAgentWorfklow {
+    const agentContext = this.agentContexts.get(agentId);
+    if (!agentContext) {
+      throw new Error(`Agent context not found for agentId: ${agentId}`);
+    }
+
+    let currentModel = this.initializeModel({
+      tools: agentContext.tools,
+      provider: agentContext.provider,
+      clientOptions: agentContext.clientOptions,
+    });
+
+    if (agentContext.systemRunnable) {
+      currentModel = agentContext.systemRunnable.pipe(currentModel);
+    }
+
+    const agentNode = `${AGENT}${agentId}` as const;
+    const toolNode = `${TOOLS}${agentId}` as const;
+
    const routeMessage = (
      state: t.BaseGraphState,
      config?: RunnableConfig
    ): string => {
      this.config = config;
-
-      // if (!lastMessage?.tool_calls?.length) {
-      //   return END;
-      // }
-      // return TOOLS;
-      return toolsCondition(state);
+      return toolsCondition(state, toolNode, this.invokedToolIds);
    };
 
-    const
-
-
-
-
-
-
+    const StateAnnotation = Annotation.Root({
+      messages: Annotation<BaseMessage[]>({
+        reducer: messagesStateReducer,
+        default: () => [],
+      }),
+    });
+
+    const workflow = new StateGraph(StateAnnotation)
+      .addNode(agentNode, this.createCallModel(agentId, currentModel))
+      .addNode(
+        toolNode,
+        this.initializeTools({
+          currentTools: agentContext.tools,
+          currentToolMap: agentContext.toolMap,
+        })
+      )
+      .addEdge(START, agentNode)
+      .addConditionalEdges(agentNode, routeMessage)
+      .addEdge(toolNode, agentContext.toolEnd ? END : agentNode);
 
+    // Cast to unknown to avoid tight coupling to external types; options are opt-in
+    return workflow.compile(this.compileOptions as unknown as never);
+  }
 
-
+  createWorkflow(): t.CompiledStateWorkflow {
+    /** Use the default (first) agent for now */
+    const agentNode = this.createAgentNode(this.defaultAgentId);
+    const StateAnnotation = Annotation.Root({
+      messages: Annotation<BaseMessage[]>({
+        reducer: (a, b) => {
+          if (!a.length) {
+            this.startIndex = a.length + b.length;
+          }
+          const result = messagesStateReducer(a, b);
+          this.messages = result;
+          return result;
+        },
+        default: () => [],
+      }),
+    });
+    const workflow = new StateGraph(StateAnnotation)
+      .addNode(this.defaultAgentId, agentNode, { ends: [END] })
+      .addEdge(START, this.defaultAgentId)
+      .compile();
+
+    return workflow;
  }
 
  /* Dispatchers */
@@ -628,7 +788,10 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
  /**
   * Dispatches a run step to the client, returns the step ID
   */
-  dispatchRunStep(
+  async dispatchRunStep(
+    stepKey: string,
+    stepDetails: t.StepDetails
+  ): Promise<string> {
    if (!this.config) {
      throw new Error('No config provided');
    }
@@ -660,14 +823,19 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
 
    this.contentData.push(runStep);
    this.contentIndexMap.set(stepId, runStep.index);
-
+    await safeDispatchCustomEvent(
+      GraphEvents.ON_RUN_STEP,
+      runStep,
+      this.config
+    );
    return stepId;
  }
 
-  handleToolCallCompleted(
+  async handleToolCallCompleted(
    data: t.ToolEndData,
-    metadata?: Record<string, unknown
-
+    metadata?: Record<string, unknown>,
+    omitOutput?: boolean
+  ): Promise<void> {
    if (!this.config) {
      throw new Error('No config provided');
    }
@@ -676,7 +844,11 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
      return;
    }
 
-    const { input, output } = data;
+    const { input, output: _output } = data;
+    if ((_output as Command | undefined)?.lg_name === 'Command') {
+      return;
+    }
+    const output = _output as ToolMessage;
    const { tool_call_id } = output;
    const stepId = this.toolCallStepIds.get(tool_call_id) ?? '';
    if (!stepId) {
@@ -688,41 +860,45 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
      throw new Error(`No run step found for stepId ${stepId}`);
    }
 
+    const dispatchedOutput =
+      typeof output.content === 'string'
+        ? output.content
+        : JSON.stringify(output.content);
+
    const args = typeof input === 'string' ? input : input.input;
    const tool_call = {
      args: typeof args === 'string' ? args : JSON.stringify(args),
      name: output.name ?? '',
      id: output.tool_call_id,
-      output:
-        typeof output.content === 'string'
-          ? output.content
-          : JSON.stringify(output.content),
+      output: omitOutput === true ? '' : dispatchedOutput,
      progress: 1,
    };
 
-    this.handlerRegistry
-      GraphEvents.ON_RUN_STEP_COMPLETED
-
-
-
-
-
-
-
-
-
+    await this.handlerRegistry
+      ?.getHandler(GraphEvents.ON_RUN_STEP_COMPLETED)
+      ?.handle(
+        GraphEvents.ON_RUN_STEP_COMPLETED,
+        {
+          result: {
+            id: stepId,
+            index: runStep.index,
+            type: 'tool_call',
+            tool_call,
+          } as t.ToolCompleteEvent,
+        },
+        metadata,
+        this
+      );
  }
  /**
   * Static version of handleToolCallError to avoid creating strong references
   * that prevent garbage collection
   */
-  static handleToolCallErrorStatic(
+  static async handleToolCallErrorStatic(
    graph: StandardGraph,
    data: t.ToolErrorData,
    metadata?: Record<string, unknown>
-  ): void {
+  ): Promise<void> {
    if (!graph.config) {
      throw new Error('No config provided');
    }
@@ -752,7 +928,7 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
      progress: 1,
    };
 
-    graph.handlerRegistry
+    await graph.handlerRegistry
      ?.getHandler(GraphEvents.ON_RUN_STEP_COMPLETED)
      ?.handle(
        GraphEvents.ON_RUN_STEP_COMPLETED,
@@ -773,14 +949,17 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
   * Instance method that delegates to the static method
   * Kept for backward compatibility
   */
-  handleToolCallError(
+  async handleToolCallError(
    data: t.ToolErrorData,
    metadata?: Record<string, unknown>
-  ): void {
-    StandardGraph.handleToolCallErrorStatic(this, data, metadata);
+  ): Promise<void> {
+    await StandardGraph.handleToolCallErrorStatic(this, data, metadata);
  }
 
-  dispatchRunStepDelta(
+  async dispatchRunStepDelta(
+    id: string,
+    delta: t.ToolCallDelta
+  ): Promise<void> {
    if (!this.config) {
      throw new Error('No config provided');
    } else if (!id) {
@@ -790,14 +969,14 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
      id,
      delta,
    };
-
+    await safeDispatchCustomEvent(
      GraphEvents.ON_RUN_STEP_DELTA,
      runStepDelta,
      this.config
    );
  }
 
-  dispatchMessageDelta(id: string, delta: t.MessageDelta): void {
+  async dispatchMessageDelta(id: string, delta: t.MessageDelta): Promise<void> {
    if (!this.config) {
      throw new Error('No config provided');
    }
@@ -805,14 +984,17 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
      id,
      delta,
    };
-
+    await safeDispatchCustomEvent(
      GraphEvents.ON_MESSAGE_DELTA,
      messageDelta,
      this.config
    );
  }
 
-  dispatchReasoningDelta = (
+  dispatchReasoningDelta = async (
+    stepId: string,
+    delta: t.ReasoningDelta
+  ): Promise<void> => {
    if (!this.config) {
      throw new Error('No config provided');
    }
@@ -820,7 +1002,7 @@ export class StandardGraph extends Graph<t.BaseGraphState, GraphNode> {
      id: stepId,
      delta,
    };
-
+    await safeDispatchCustomEvent(
      GraphEvents.ON_REASONING_DELTA,
      reasoningDelta,
      this.config