@librechat/agents 1.9.94 → 1.9.96
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/common/enum.cjs +3 -0
- package/dist/cjs/common/enum.cjs.map +1 -1
- package/dist/cjs/graphs/Graph.cjs.map +1 -1
- package/dist/cjs/main.cjs +3 -0
- package/dist/cjs/main.cjs.map +1 -1
- package/dist/cjs/splitStream.cjs +175 -0
- package/dist/cjs/splitStream.cjs.map +1 -0
- package/dist/cjs/stream.cjs +24 -0
- package/dist/cjs/stream.cjs.map +1 -1
- package/dist/esm/common/enum.mjs +3 -0
- package/dist/esm/common/enum.mjs.map +1 -1
- package/dist/esm/graphs/Graph.mjs.map +1 -1
- package/dist/esm/main.mjs +1 -0
- package/dist/esm/main.mjs.map +1 -1
- package/dist/esm/splitStream.mjs +172 -0
- package/dist/esm/splitStream.mjs.map +1 -0
- package/dist/esm/stream.mjs +24 -0
- package/dist/esm/stream.mjs.map +1 -1
- package/dist/types/common/enum.d.ts +3 -0
- package/dist/types/graphs/Graph.d.ts +1 -1
- package/dist/types/index.d.ts +1 -0
- package/dist/types/mockStream.d.ts +32 -0
- package/dist/types/splitStream.d.ts +35 -0
- package/dist/types/types/stream.d.ts +54 -4
- package/package.json +11 -11
- package/src/common/enum.ts +3 -0
- package/src/graphs/Graph.ts +1 -1
- package/src/index.ts +1 -0
- package/src/mockStream.ts +99 -0
- package/src/splitStream.test.ts +539 -0
- package/src/splitStream.ts +193 -0
- package/src/stream.ts +26 -0
- package/src/types/stream.ts +47 -4
- package/src/utils/llmConfig.ts +3 -1
package/dist/types/splitStream.d.ts
ADDED
@@ -0,0 +1,35 @@
+import type * as t from '@/types';
+import { ContentTypes } from '@/common';
+export declare const SEPARATORS: string[];
+export declare class SplitStreamHandler {
+    private inCodeBlock;
+    private inThinkBlock;
+    private accumulate;
+    tokens: string[];
+    lastToken: string;
+    reasoningTokens: string[];
+    currentStepId?: string;
+    currentMessageId?: string;
+    currentType?: ContentTypes.TEXT | ContentTypes.THINK;
+    currentLength: number;
+    reasoningKey: 'reasoning_content' | 'reasoning';
+    currentIndex: number;
+    blockThreshold: number;
+    /** The run ID AKA the Message ID associated with the complete generation */
+    runId: string;
+    handlers?: t.SplitStreamHandlers;
+    constructor({ runId, handlers, accumulate, reasoningKey, blockThreshold, }: {
+        runId: string;
+        accumulate?: boolean;
+        handlers: t.SplitStreamHandlers;
+        blockThreshold?: number;
+        reasoningKey?: 'reasoning_content' | 'reasoning';
+    });
+    getMessageId: () => string | undefined;
+    createMessageStep: (type?: ContentTypes.TEXT | ContentTypes.THINK) => [string, string];
+    dispatchRunStep: (stepId: string, stepDetails: t.StepDetails) => void;
+    dispatchMessageDelta: (stepId: string, delta: t.MessageDelta) => void;
+    dispatchReasoningDelta: (stepId: string, delta: t.ReasoningDelta) => void;
+    handleContent: (content: string, stepId: string, _type: ContentTypes.TEXT | ContentTypes.THINK) => void;
+    handle(chunk?: t.CustomChunk): void;
+}
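The declaration above pairs with the commented-out testStream() example at the end of package/src/mockStream.ts further down in this diff. A minimal sketch of that flow, assuming the package's internal @/ path aliases (the '@/mockStream' specifier in particular is an assumption) and a placeholder runId; none of this is prescribed by the declaration itself:

import { SplitStreamHandler } from '@/splitStream';
import { GraphEvents } from '@/common';
import { createMockStream } from '@/mockStream';

async function demoSplitStream(): Promise<void> {
  const handler = new SplitStreamHandler({
    runId: 'run_abc123', // placeholder; normally the message ID of the full generation
    handlers: {
      // each callback receives an { event, data } payload (see SplitStreamHandlers below)
      [GraphEvents.ON_RUN_STEP]: ({ event, data }) => console.dir({ event, data }, { depth: null }),
      [GraphEvents.ON_MESSAGE_DELTA]: ({ data }) => console.dir(data, { depth: null }),
      [GraphEvents.ON_REASONING_DELTA]: ({ data }) => console.dir(data, { depth: null }),
    },
  });

  // createMockStream() (added in mockStream.ts below) returns an async generator factory
  const stream = createMockStream({ reasoningText: 'Thinking it through...', streamRate: 5 })();
  for await (const chunk of stream) {
    handler.handle(chunk);
  }
}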
package/dist/types/types/stream.d.ts
CHANGED
@@ -1,7 +1,8 @@
+import type OpenAITypes from 'openai';
 import type { MessageContentImageUrl, MessageContentText, ToolMessage, BaseMessage } from '@langchain/core/messages';
 import type { ToolCall, ToolCallChunk } from '@langchain/core/messages/tool';
 import type { LLMResult, Generation } from '@langchain/core/outputs';
-import { StepTypes, ContentTypes } from '@/common/enum';
+import { StepTypes, ContentTypes, GraphEvents } from '@/common/enum';
 export type HandleLLMEnd = (output: LLMResult, runId: string, parentRunId?: string, tags?: string[]) => void;
 export type MetadataAggregatorResult = {
     handleLLMEnd: HandleLLMEnd;
@@ -36,7 +37,7 @@ export type RunStep = {
     index: number;
     stepIndex?: number;
     stepDetails: StepDetails;
-    usage
+    usage?: null | {};
 };
 /**
  * Represents a run step delta i.e. any changed fields on a run step during
@@ -139,16 +140,65 @@ export interface MessageDelta {
      */
     tool_call_ids?: string[];
 }
+/**
+ * Represents a reasoning delta i.e. any changed fields on a message during
+ * streaming.
+ */
+export interface ReasoningDeltaEvent {
+    /**
+     * The identifier of the message, which can be referenced in API endpoints.
+     */
+    id: string;
+    /**
+     * The delta containing the fields that have changed.
+     */
+    delta: ReasoningDelta;
+}
+/**
+ * The reasoning delta containing the fields that have changed on the Message.
+ */
+export interface ReasoningDelta {
+    /**
+     * The content of the message in array of text and/or images.
+     */
+    content?: MessageContentComplex[];
+}
 export type MessageDeltaUpdate = {
     type: ContentTypes.TEXT;
     text: string;
     tool_call_ids?: string[];
 };
-export type
+export type ReasoningDeltaUpdate = {
+    type: ContentTypes.THINK;
+    think: string;
+};
+export type ContentType = 'text' | 'image_url' | 'tool_call' | 'think' | string;
 export type MessageContentComplex = (MessageContentText | MessageContentImageUrl | (Record<string, any> & {
-    type?: 'text' | 'image_url' | string;
+    type?: 'text' | 'image_url' | 'think' | string;
 }) | (Record<string, any> & {
     type?: never;
 })) & {
     tool_call_ids?: string[];
 };
+export type CustomChunk = Partial<OpenAITypes.ChatCompletionChunk> & {
+    choices?: Partial<Array<Partial<OpenAITypes.Chat.Completions.ChatCompletionChunk.Choice> & {
+        delta?: Partial<OpenAITypes.Chat.Completions.ChatCompletionChunk.Choice.Delta> & {
+            reasoning?: string | null;
+            reasoning_content?: string | null;
+        };
+    }>>;
+};
+export type SplitStreamHandlers = Partial<{
+    [GraphEvents.ON_RUN_STEP]: ({ event, data }: {
+        event: GraphEvents;
+        data: RunStep;
+    }) => void;
+    [GraphEvents.ON_MESSAGE_DELTA]: ({ event, data }: {
+        event: GraphEvents;
+        data: MessageDeltaEvent;
+    }) => void;
+    [GraphEvents.ON_REASONING_DELTA]: ({ event, data }: {
+        event: GraphEvents;
+        data: ReasoningDeltaEvent;
+    }) => void;
+}>;
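The new CustomChunk type loosens the OpenAI chunk shape so a delta may carry reasoning or reasoning_content alongside content, and SplitStreamHandlers names the callbacks the split-stream handler dispatches to. A rough sketch of chunks that satisfy CustomChunk; the field values are illustrative, but the shape mirrors what mockStream.ts below yields:

import type * as t from '@/types';

// a reasoning-bearing chunk, as emitted by providers that stream "thinking" tokens
const reasoningChunk: t.CustomChunk = {
  choices: [{
    finish_reason: null,
    index: 0,
    delta: { reasoning_content: 'First, weigh the options. ' },
  }],
};

// an ordinary content chunk
const contentChunk: t.CustomChunk = {
  choices: [{
    finish_reason: null,
    index: 0,
    delta: { content: 'Hello, ' },
  }],
};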
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@librechat/agents",
-  "version": "1.9.94",
+  "version": "1.9.96",
   "main": "./dist/cjs/main.cjs",
   "module": "./dist/esm/main.mjs",
   "types": "./dist/types/index.d.ts",
@@ -70,16 +70,16 @@
     "@aws-crypto/sha256-js": "^5.2.0",
     "@aws-sdk/credential-provider-node": "^3.613.0",
     "@aws-sdk/types": "^3.609.0",
-    "@langchain/anthropic": "^0.3.
+    "@langchain/anthropic": "^0.3.12",
     "@langchain/aws": "^0.1.3",
-    "@langchain/community": "^0.3.
-    "@langchain/core": "^0.3.
-    "@langchain/google-genai": "^0.1.
-    "@langchain/google-vertexai": "^0.1.
-    "@langchain/langgraph": "^0.2.
+    "@langchain/community": "^0.3.27",
+    "@langchain/core": "^0.3.36",
+    "@langchain/google-genai": "^0.1.7",
+    "@langchain/google-vertexai": "^0.1.8",
+    "@langchain/langgraph": "^0.2.41",
     "@langchain/mistralai": "^0.0.26",
-    "@langchain/ollama": "^0.1.
-    "@langchain/openai": "^0.
+    "@langchain/ollama": "^0.1.5",
+    "@langchain/openai": "^0.4.2",
     "@smithy/eventstream-codec": "^2.2.0",
     "@smithy/protocol-http": "^3.0.6",
     "@smithy/signature-v4": "^2.0.10",
@@ -108,7 +108,7 @@
     "@types/babel__generator": "^7.6.8",
     "@types/babel__template": "^7.4.4",
     "@types/istanbul-lib-report": "^3.0.3",
-    "@types/jest": "^29.5.
+    "@types/jest": "^29.5.14",
     "@types/node": "^20.14.11",
     "@types/rollup": "^0.54.0",
     "@types/yargs-parser": "^21.0.3",
@@ -132,7 +132,7 @@
     "rollup-plugin-cleaner": "^1.0.0",
     "rollup-plugin-obfuscator": "^1.1.0",
     "rollup-plugin-visualizer": "^5.12.0",
-    "ts-jest": "^29.2.
+    "ts-jest": "^29.2.5",
     "ts-node": "^10.9.2",
     "ts-node-dev": "^2.0.0",
     "tsc-alias": "^1.8.10",
package/src/common/enum.ts
CHANGED
@@ -15,6 +15,8 @@ export enum GraphEvents {
   ON_RUN_STEP_COMPLETED = 'on_run_step_completed',
   /** [Custom] Delta events for messages */
   ON_MESSAGE_DELTA = 'on_message_delta',
+  /** [Custom] Reasoning Delta events for messages */
+  ON_REASONING_DELTA = 'on_reasoning_delta',
 
   /* Official Events */
 
@@ -101,6 +103,7 @@ export enum StepTypes {
 
 export enum ContentTypes {
   TEXT = 'text',
+  THINK = 'think',
   TOOL_CALL = 'tool_call',
   IMAGE_FILE = 'image_file',
   IMAGE_URL = 'image_url',
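Combined with the stream types above, the new ON_REASONING_DELTA event and THINK content type suggest a dispatch payload along the following lines. This is only an inference from the ReasoningDeltaEvent, ReasoningDelta, and ReasoningDeltaUpdate declarations (the handler implementation is not shown in this section); it assumes ReasoningDeltaEvent is re-exported via @/types, and the id value is a placeholder:

import { ContentTypes, GraphEvents } from '@/common';
import type * as t from '@/types';

const reasoningDispatch: { event: GraphEvents; data: t.ReasoningDeltaEvent } = {
  event: GraphEvents.ON_REASONING_DELTA,
  data: {
    id: 'step_abc123', // placeholder step/message identifier
    delta: {
      content: [{ type: ContentTypes.THINK, think: 'Weighing both options before answering... ' }],
    },
  },
};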
package/src/graphs/Graph.ts
CHANGED
@@ -76,7 +76,7 @@ export class StandardGraph extends Graph<
   GraphNode
 > {
   private graphState: t.GraphStateChannels<t.BaseGraphState>;
-
+  clientOptions: t.ClientOptions;
   boundModel: Runnable;
   /** The last recorded timestamp that a stream API call was invoked */
   lastStreamCall: number | undefined;
package/src/mockStream.ts
ADDED
@@ -0,0 +1,99 @@
+/* eslint-disable no-console */
+// import { nanoid } from 'nanoid';
+import type OpenAITypes from 'openai';
+import type * as t from '@/types';
+// import { SplitStreamHandler } from '@/splitStream';
+// import { GraphEvents } from '@/common';
+import { sleep } from '@/utils';
+
+const choiceProps: OpenAITypes.Chat.Completions.ChatCompletionChunk.Choice = { finish_reason: null, index: 0, delta: {} };
+const reasoningSplitRegex = /(?<=\s+)|(?=\s+)/;
+const contentSplitRegex = /(?<=<\/?think>)|(?=<\/?think>)|(?<=\s+)|(?=\s+)/;
+export const createMockStream = (options: {
+  text?: string;
+  reasoningText?: string;
+  streamRate?: number;
+  reasoningKey?: 'reasoning' | 'reasoning_content';
+} = {}) => {
+  const {
+    text,
+    reasoningText,
+    streamRate = 25,
+    reasoningKey = 'reasoning_content'
+  } = options;
+
+  return async function* mockOpenAIStream(): AsyncGenerator<t.CustomChunk> {
+    const content = text ?? `Here's a sample message that includes code:
+\`\`\`python
+def hello_world():
+    print("Hello, World!")
+    # This is a long code block
+    # That shouldn't be split
+    return True
+\`\`\`
+Now we're back to regular text. This is a very long sentence that should probably be split at some point because it exceeds our threshold and contains multiple natural breaking points. Let's see how it handles this case properly.
+
+Here's another code block:
+\`\`\`javascript
+console.log("Another test");
+// More code here
+\`\`\`
+And finally some more regular text to test our splitting logic.`;
+
+    if (reasoningText != null && reasoningText) {
+      // Split reasoning text into "token-like" chunks
+      const reasoningTokens = reasoningText.split(reasoningSplitRegex);
+      for (const token of reasoningTokens) {
+        yield {
+          choices: [{
+            ...choiceProps,
+            delta: {
+              [reasoningKey]: token,
+            },
+          }]
+        };
+        await sleep(streamRate);
+      }
+    }
+
+    // Split main content into "token-like" chunks
+    const tokens = content.split(contentSplitRegex);
+    for (const token of tokens) {
+      yield {
+        choices: [{
+          ...choiceProps,
+          delta: {
+            content: token
+          }
+        }]
+      };
+      await sleep(streamRate);
+    }
+  };
+};
+
+/**
+(async function testStream(): Promise<void> {
+  const runId = nanoid();
+
+  const streamHandler = new SplitStreamHandler({
+    runId,
+    handlers: {
+      [GraphEvents.ON_RUN_STEP]: (data): void => {
+        console.dir(data, { depth: null });
+      },
+      [GraphEvents.ON_MESSAGE_DELTA]: (): void => {
+        // console.dir(data, { depth: null });
+      },
+    },
+  });
+  const stream = createMockStream({
+    reasoningText: 'This is a test reasoning text.',
+    streamRate: 5,
+  })();
+
+  for await (const chunk of stream) {
+    streamHandler.handle(chunk);
+  }
+})();
+*/