@librechat/agents 1.9.0 → 1.9.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -98,18 +98,30 @@ class CustomAnthropic extends anthropic.ChatAnthropicMessages {
                 const generationChunk = createGenerationChunk(token, chunk);
                 yield generationChunk;
                 await runManager?.handleLLMNewToken(token, undefined, undefined, undefined, undefined, { chunk: generationChunk });
-                continue;
             }
-            const textStream = new text.TextStream(token, { delay: this._lc_stream_delay });
-            const generator = textStream.generateText();
-            let textResult = await generator.next();
-            while (textResult.done !== true) {
-                const currentToken = textResult.value;
-                const newChunk = cloneChunk(currentToken, tokenType, chunk);
-                const generationChunk = createGenerationChunk(currentToken, newChunk);
-                yield generationChunk;
-                await runManager?.handleLLMNewToken(token, undefined, undefined, undefined, undefined, { chunk: generationChunk });
-                textResult = await generator.next();
+            else {
+                const textStream = new text.TextStream(token, { delay: this._lc_stream_delay });
+                const generator = textStream.generateText();
+                let result = await generator.next();
+                while (result.done !== true) {
+                    const currentToken = result.value;
+                    const newChunk = cloneChunk(currentToken, tokenType, chunk);
+                    const generationChunk = createGenerationChunk(currentToken, newChunk);
+                    yield generationChunk;
+                    await runManager?.handleLLMNewToken(token, undefined, undefined, undefined, undefined, { chunk: generationChunk });
+                    result = await generator.next();
+                }
+            }
+        }
+        /** Hack to force langgraph to continue */
+        if (options.signal?.aborted !== true) {
+            try {
+                stream.controller.abort();
+            }
+            catch (e) {
+                console.warn('Error aborting stream after data stream', e);
+                console.error(e);
+                // Ignore
             }
         }
     }
@@ -1 +1 @@
-
{"version":3,"file":"llm.cjs","sources":["../../../../src/llm/anthropic/llm.ts"],"sourcesContent":["import { AIMessageChunk } from '@langchain/core/messages';\nimport { ChatAnthropicMessages } from '@langchain/anthropic';\nimport { ChatGenerationChunk } from '@langchain/core/outputs';\nimport type { BaseMessage, MessageContentComplex } from '@langchain/core/messages';\nimport type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';\nimport type { AnthropicInput } from '@langchain/anthropic';\nimport type { AnthropicMessageCreateParams } from '@/llm/anthropic/types';\nimport { _makeMessageChunkFromAnthropicEvent } from './utils/message_outputs';\nimport { _convertMessagesToAnthropicPayload } from './utils/message_inputs';\nimport { TextStream } from '@/llm/text';\n\nfunction _toolsInParams(params: AnthropicMessageCreateParams): boolean {\n return !!(params.tools && params.tools.length > 0);\n}\n\nfunction extractToken(chunk: AIMessageChunk): [string, 'string' | 'input' | 'content'] | [undefined] {\n if (typeof chunk.content === 'string') {\n return [chunk.content, 'string'];\n } else if (\n Array.isArray(chunk.content) &&\n chunk.content.length >= 1 &&\n 'input' in chunk.content[0]\n ) {\n return typeof chunk.content[0].input === 'string'\n ? [chunk.content[0].input, 'input']\n : [JSON.stringify(chunk.content[0].input), 'input'];\n } else if (\n Array.isArray(chunk.content) &&\n chunk.content.length >= 1 &&\n 'text' in chunk.content[0]\n ) {\n return [chunk.content[0].text, 'content'];\n }\n return [undefined];\n}\n\nfunction cloneChunk(text: string, tokenType: string, chunk: AIMessageChunk): AIMessageChunk {\n if (tokenType === 'string') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: text }));\n } else if (tokenType === 'input') {\n return chunk;\n }\n const content = chunk.content[0] as MessageContentComplex;\n if (tokenType === 'content' && content.type === 'text') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));\n } else if (tokenType === 'content' && content.type === 'text_delta') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));\n }\n\n return chunk;\n}\n\nexport type CustomAnthropicInput = AnthropicInput & { _lc_stream_delay?: number };\n\nexport class CustomAnthropic extends ChatAnthropicMessages {\n _lc_stream_delay: number;\n constructor(fields: CustomAnthropicInput) {\n super(fields);\n this._lc_stream_delay = fields._lc_stream_delay ?? 25;\n }\n\n async *_streamResponseChunks(\n messages: BaseMessage[],\n options: this['ParsedCallOptions'],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n const params = this.invocationParams(options);\n const formattedMessages = _convertMessagesToAnthropicPayload(messages);\n const coerceContentToString = !_toolsInParams({\n ...params,\n ...formattedMessages,\n stream: false,\n });\n\n const stream = await this.createStreamWithRetry(\n {\n ...params,\n ...formattedMessages,\n stream: true,\n },\n {\n headers: options.headers,\n }\n );\n\n for await (const data of stream) {\n if (options.signal?.aborted === true) {\n stream.controller.abort();\n throw new Error('AbortError: User aborted the request.');\n }\n const shouldStreamUsage = this.streamUsage ?? 
options.streamUsage;\n const result = _makeMessageChunkFromAnthropicEvent(data, {\n streamUsage: shouldStreamUsage,\n coerceContentToString,\n });\n if (!result) continue;\n\n const { chunk } = result;\n\n // Extract the text content token for text field and runManager.\n const [token = '', tokenType] = extractToken(chunk);\n const createGenerationChunk = (text: string, incomingChunk: AIMessageChunk): ChatGenerationChunk => {\n return new ChatGenerationChunk({\n message: new AIMessageChunk({\n // Just yield chunk as it is and tool_use will be concat by BaseChatModel._generateUncached().\n content: incomingChunk.content,\n additional_kwargs: incomingChunk.additional_kwargs,\n tool_call_chunks: incomingChunk.tool_call_chunks,\n usage_metadata: shouldStreamUsage ? incomingChunk.usage_metadata : undefined,\n response_metadata: incomingChunk.response_metadata,\n id: incomingChunk.id,\n }),\n text,\n });\n };\n\n if (!tokenType || tokenType === 'input') {\n const generationChunk = createGenerationChunk(token, chunk);\n yield generationChunk;\n await runManager?.handleLLMNewToken(\n token,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n continue;\n }\n\n const textStream = new TextStream(token, { delay: this._lc_stream_delay });\n const generator = textStream.generateText();\n let textResult = await generator.next();\n\n while (textResult.done !== true) {\n const currentToken = textResult.value;\n const newChunk = cloneChunk(currentToken, tokenType, chunk);\n const generationChunk = createGenerationChunk(currentToken, newChunk);\n yield generationChunk;\n\n await runManager?.handleLLMNewToken(\n token,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n\n textResult = await generator.next();\n }\n }\n 
}\n}\n"],"names":["AIMessageChunk","ChatAnthropicMessages","messages","_convertMessagesToAnthropicPayload","_makeMessageChunkFromAnthropicEvent","ChatGenerationChunk","TextStream"],"mappings":";;;;;;;;;AAWA,SAAS,cAAc,CAAC,MAAoC,EAAA;AAC1D,IAAA,OAAO,CAAC,EAAE,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,YAAY,CAAC,KAAqB,EAAA;AACzC,IAAA,IAAI,OAAO,KAAK,CAAC,OAAO,KAAK,QAAQ,EAAE;AACrC,QAAA,OAAO,CAAC,KAAK,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;KAClC;AAAM,SAAA,IACL,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;AAC5B,QAAA,KAAK,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC;QACzB,OAAO,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAC3B;QACA,OAAO,OAAO,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,KAAK,QAAQ;AAC/C,cAAE,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,OAAO,CAAC;AACnC,cAAE,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC,CAAC;KACvD;AAAM,SAAA,IACL,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;AAC5B,QAAA,KAAK,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC;QACzB,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAC1B;AACA,QAAA,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;KAC3C;IACD,OAAO,CAAC,SAAS,CAAC,CAAC;AACrB,CAAC;AAED,SAAS,UAAU,CAAC,IAAY,EAAE,SAAiB,EAAE,KAAqB,EAAA;AACxE,IAAA,IAAI,SAAS,KAAK,QAAQ,EAAE;AAC1B,QAAA,OAAO,IAAIA,uBAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;KACxE;AAAM,SAAA,IAAI,SAAS,KAAK,OAAO,EAAE;AAChC,QAAA,OAAO,KAAK,CAAC;KACd;IACD,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC,CAA0B,CAAC;IAC1D,IAAI,SAAS,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,EAAE;AACtD,QAAA,OAAO,IAAIA,uBAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC1G;SAAM,IAAI,SAAS,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,YAAY,EAAE;AACnE,QAAA,OAAO,IAAIA,uBAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC1G;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAIK,MAAO,eAAgB,SAAQC,+BAAqB,CAAA;AACxD,IAAA,gBAAgB,CAAS;AACzB,IAAA,WAAA,CAAY,MAA4B,EAAA;QACtC,KAAK,CAAC,MAAM,CAAC,CAAC;QACd,IAAI,CAAC,gBAAgB,GAAG,MAAM,CAAC,gBAAgB,IAAI,EAAE,CAAC;KACvD;IAED,OAAO,qBAAqB,CAC1BC,UAAuB,EACvB,OAAkC,EAClC,UAAqC,EAAA;QAErC,MAAM,MAAM,GAAG,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC,CAAC;AAC9C,QAAA,MAAM,iBAAiB,GAAGC,iDAAkC,CAACD,UAAQ,CAAC,CAAC;AACvE,QAAA,MAAM,qBAAqB,GAAG,CAAC,cAAc,CAAC;AAC5C,YAAA,GAAG,MAAM;AACT,YAAA,GAAG,iBAAiB;AACpB,YAAA,MAAM,EAAE,KAAK;AACd,SAAA,CAAC,CAAC;AAEH,QAAA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,qBAAqB,CAC7C;AACE,YAAA,GAAG,MAAM;AACT,YAAA,GAAG,iBAAiB;AACpB,YAAA,MAAM,EAAE,IAAI;SACb,EACD;YACE,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,SAAA,CACF,CAAC;AAEF,QAAA,WAAW,MAAM,IAAI,IAAI,MAAM,EAAE;YAC/B,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,KAAK,IAAI,EAAE;AACpC,gBAAA,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;AAC1B,gBAAA,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,MAAM,iBAAiB,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC;AAClE,YAAA,MAAM,MAAM,GAAGE,mDAAmC,CAAC,IAAI,EAAE;AACvD,gBAAA,WAAW,EAAE,iBAAiB;gBAC9B,qBAAqB;AACtB,aAAA,CAAC,CAAC;AACH,YAAA,IAAI,CAAC,MAAM;gBAAE,SAAS;AAEtB,YAAA,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,CAAC;;AAGzB,YAAA,MAAM,CAAC,KAAK,GAAG,EAAE,EAAE,SAAS,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;AACpD,YAAA,MAAM,qBAAqB,GAAG,CAAC,IAAY,EAAE,aAA6B,KAAyB;gBACjG,OAAO,IAAIC,2BAAmB,CAAC;oBAC7B,OAAO,EAAE,IAAIL,uBAAc,CAAC;;wBAE1B,OAAO,EAAE,aAAa,CAAC,OAAO;wBAC9B,iBAAiB,EAAE,aAAa,CAAC,iBAAiB;wBAClD,gBAAgB,EAAE,aAAa,CAAC,gBAAgB;wBAChD,cAAc,EAAE,iBAAiB,GAAG,aAAa,CAAC,cAAc,GAAG,SAAS;wBAC5E,iBAAiB,EAAE,aAAa,CAAC,iBAAiB;wBAClD,EAAE,EAAE,aAAa,CAAC,E
AAE;qBACrB,CAAC;oBACF,IAAI;AACL,iBAAA,CAAC,CAAC;AACL,aAAC,CAAC;AAEF,YAAA,IAAI,CAAC,SAAS,IAAI,SAAS,KAAK,OAAO,EAAE;gBACvC,MAAM,eAAe,GAAG,qBAAqB,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC5D,gBAAA,MAAM,eAAe,CAAC;gBACtB,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,EACL,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,eAAe,EAAE,CAC3B,CAAC;gBACF,SAAS;aACV;AAED,YAAA,MAAM,UAAU,GAAG,IAAIM,eAAU,CAAC,KAAK,EAAE,EAAE,KAAK,EAAE,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;AAC3E,YAAA,MAAM,SAAS,GAAG,UAAU,CAAC,YAAY,EAAE,CAAC;AAC5C,YAAA,IAAI,UAAU,GAAG,MAAM,SAAS,CAAC,IAAI,EAAE,CAAC;AAExC,YAAA,OAAO,UAAU,CAAC,IAAI,KAAK,IAAI,EAAE;AAC/B,gBAAA,MAAM,YAAY,GAAG,UAAU,CAAC,KAAK,CAAC;gBACtC,MAAM,QAAQ,GAAG,UAAU,CAAC,YAAY,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC;gBAC5D,MAAM,eAAe,GAAG,qBAAqB,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;AACtE,gBAAA,MAAM,eAAe,CAAC;gBAEtB,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,EACL,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,eAAe,EAAE,CAC3B,CAAC;AAEF,gBAAA,UAAU,GAAG,MAAM,SAAS,CAAC,IAAI,EAAE,CAAC;aACrC;SACF;KACF;AACF;;;;"}
+
{"version":3,"file":"llm.cjs","sources":["../../../../src/llm/anthropic/llm.ts"],"sourcesContent":["import { AIMessageChunk } from '@langchain/core/messages';\nimport { ChatAnthropicMessages } from '@langchain/anthropic';\nimport { ChatGenerationChunk } from '@langchain/core/outputs';\nimport type { BaseMessage, MessageContentComplex } from '@langchain/core/messages';\nimport type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';\nimport type { AnthropicInput } from '@langchain/anthropic';\nimport type { AnthropicMessageCreateParams } from '@/llm/anthropic/types';\nimport { _makeMessageChunkFromAnthropicEvent } from './utils/message_outputs';\nimport { _convertMessagesToAnthropicPayload } from './utils/message_inputs';\nimport { TextStream } from '@/llm/text';\n\nfunction _toolsInParams(params: AnthropicMessageCreateParams): boolean {\n return !!(params.tools && params.tools.length > 0);\n}\n\nfunction extractToken(chunk: AIMessageChunk): [string, 'string' | 'input' | 'content'] | [undefined] {\n if (typeof chunk.content === 'string') {\n return [chunk.content, 'string'];\n } else if (\n Array.isArray(chunk.content) &&\n chunk.content.length >= 1 &&\n 'input' in chunk.content[0]\n ) {\n return typeof chunk.content[0].input === 'string'\n ? [chunk.content[0].input, 'input']\n : [JSON.stringify(chunk.content[0].input), 'input'];\n } else if (\n Array.isArray(chunk.content) &&\n chunk.content.length >= 1 &&\n 'text' in chunk.content[0]\n ) {\n return [chunk.content[0].text, 'content'];\n }\n return [undefined];\n}\n\nfunction cloneChunk(text: string, tokenType: string, chunk: AIMessageChunk): AIMessageChunk {\n if (tokenType === 'string') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: text }));\n } else if (tokenType === 'input') {\n return chunk;\n }\n const content = chunk.content[0] as MessageContentComplex;\n if (tokenType === 'content' && content.type === 'text') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));\n } else if (tokenType === 'content' && content.type === 'text_delta') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));\n }\n\n return chunk;\n}\n\nexport type CustomAnthropicInput = AnthropicInput & { _lc_stream_delay?: number };\n\nexport class CustomAnthropic extends ChatAnthropicMessages {\n _lc_stream_delay: number;\n constructor(fields: CustomAnthropicInput) {\n super(fields);\n this._lc_stream_delay = fields._lc_stream_delay ?? 25;\n }\n\n async *_streamResponseChunks(\n messages: BaseMessage[],\n options: this['ParsedCallOptions'],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n const params = this.invocationParams(options);\n const formattedMessages = _convertMessagesToAnthropicPayload(messages);\n const coerceContentToString = !_toolsInParams({\n ...params,\n ...formattedMessages,\n stream: false,\n });\n\n const stream = await this.createStreamWithRetry(\n {\n ...params,\n ...formattedMessages,\n stream: true,\n },\n {\n headers: options.headers,\n }\n );\n\n for await (const data of stream) {\n if (options.signal?.aborted === true) {\n stream.controller.abort();\n throw new Error('AbortError: User aborted the request.');\n }\n const shouldStreamUsage = this.streamUsage ?? 
options.streamUsage;\n const result = _makeMessageChunkFromAnthropicEvent(data, {\n streamUsage: shouldStreamUsage,\n coerceContentToString,\n });\n if (!result) continue;\n\n const { chunk } = result;\n\n // Extract the text content token for text field and runManager.\n const [token = '', tokenType] = extractToken(chunk);\n const createGenerationChunk = (text: string, incomingChunk: AIMessageChunk): ChatGenerationChunk => {\n return new ChatGenerationChunk({\n message: new AIMessageChunk({\n // Just yield chunk as it is and tool_use will be concat by BaseChatModel._generateUncached().\n content: incomingChunk.content,\n additional_kwargs: incomingChunk.additional_kwargs,\n tool_call_chunks: incomingChunk.tool_call_chunks,\n usage_metadata: shouldStreamUsage ? incomingChunk.usage_metadata : undefined,\n response_metadata: incomingChunk.response_metadata,\n id: incomingChunk.id,\n }),\n text,\n });\n };\n\n if (!tokenType || tokenType === 'input') {\n const generationChunk = createGenerationChunk(token, chunk);\n yield generationChunk;\n await runManager?.handleLLMNewToken(\n token,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n } else {\n const textStream = new TextStream(token, { delay: this._lc_stream_delay });\n const generator = textStream.generateText();\n let result = await generator.next();\n\n while (result.done !== true) {\n const currentToken = result.value;\n const newChunk = cloneChunk(currentToken, tokenType, chunk);\n const generationChunk = createGenerationChunk(currentToken, newChunk);\n yield generationChunk;\n\n await runManager?.handleLLMNewToken(\n token,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n\n result = await generator.next();\n }\n }\n }\n\n /** Hack to force langgraph to continue */\n if (options.signal?.aborted !== true) {\n try {\n stream.controller.abort();\n } catch (e) {\n console.warn('Error aborting stream after data stream', e);\n console.error(e);\n // Ignore\n }\n }\n 
}\n}\n"],"names":["AIMessageChunk","ChatAnthropicMessages","messages","_convertMessagesToAnthropicPayload","_makeMessageChunkFromAnthropicEvent","ChatGenerationChunk","TextStream"],"mappings":";;;;;;;;;AAWA,SAAS,cAAc,CAAC,MAAoC,EAAA;AAC1D,IAAA,OAAO,CAAC,EAAE,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,YAAY,CAAC,KAAqB,EAAA;AACzC,IAAA,IAAI,OAAO,KAAK,CAAC,OAAO,KAAK,QAAQ,EAAE;AACrC,QAAA,OAAO,CAAC,KAAK,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;KAClC;AAAM,SAAA,IACL,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;AAC5B,QAAA,KAAK,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC;QACzB,OAAO,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAC3B;QACA,OAAO,OAAO,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,KAAK,QAAQ;AAC/C,cAAE,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,OAAO,CAAC;AACnC,cAAE,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC,CAAC;KACvD;AAAM,SAAA,IACL,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;AAC5B,QAAA,KAAK,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC;QACzB,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAC1B;AACA,QAAA,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;KAC3C;IACD,OAAO,CAAC,SAAS,CAAC,CAAC;AACrB,CAAC;AAED,SAAS,UAAU,CAAC,IAAY,EAAE,SAAiB,EAAE,KAAqB,EAAA;AACxE,IAAA,IAAI,SAAS,KAAK,QAAQ,EAAE;AAC1B,QAAA,OAAO,IAAIA,uBAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;KACxE;AAAM,SAAA,IAAI,SAAS,KAAK,OAAO,EAAE;AAChC,QAAA,OAAO,KAAK,CAAC;KACd;IACD,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC,CAA0B,CAAC;IAC1D,IAAI,SAAS,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,EAAE;AACtD,QAAA,OAAO,IAAIA,uBAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC1G;SAAM,IAAI,SAAS,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,YAAY,EAAE;AACnE,QAAA,OAAO,IAAIA,uBAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC1G;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAIK,MAAO,eAAgB,SAAQC,+BAAqB,CAAA;AACxD,IAAA,gBAAgB,CAAS;AACzB,IAAA,WAAA,CAAY,MAA4B,EAAA;QACtC,KAAK,CAAC,MAAM,CAAC,CAAC;QACd,IAAI,CAAC,gBAAgB,GAAG,MAAM,CAAC,gBAAgB,IAAI,EAAE,CAAC;KACvD;IAED,OAAO,qBAAqB,CAC1BC,UAAuB,EACvB,OAAkC,EAClC,UAAqC,EAAA;QAErC,MAAM,MAAM,GAAG,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC,CAAC;AAC9C,QAAA,MAAM,iBAAiB,GAAGC,iDAAkC,CAACD,UAAQ,CAAC,CAAC;AACvE,QAAA,MAAM,qBAAqB,GAAG,CAAC,cAAc,CAAC;AAC5C,YAAA,GAAG,MAAM;AACT,YAAA,GAAG,iBAAiB;AACpB,YAAA,MAAM,EAAE,KAAK;AACd,SAAA,CAAC,CAAC;AAEH,QAAA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,qBAAqB,CAC7C;AACE,YAAA,GAAG,MAAM;AACT,YAAA,GAAG,iBAAiB;AACpB,YAAA,MAAM,EAAE,IAAI;SACb,EACD;YACE,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,SAAA,CACF,CAAC;AAEF,QAAA,WAAW,MAAM,IAAI,IAAI,MAAM,EAAE;YAC/B,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,KAAK,IAAI,EAAE;AACpC,gBAAA,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;AAC1B,gBAAA,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,MAAM,iBAAiB,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC;AAClE,YAAA,MAAM,MAAM,GAAGE,mDAAmC,CAAC,IAAI,EAAE;AACvD,gBAAA,WAAW,EAAE,iBAAiB;gBAC9B,qBAAqB;AACtB,aAAA,CAAC,CAAC;AACH,YAAA,IAAI,CAAC,MAAM;gBAAE,SAAS;AAEtB,YAAA,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,CAAC;;AAGzB,YAAA,MAAM,CAAC,KAAK,GAAG,EAAE,EAAE,SAAS,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;AACpD,YAAA,MAAM,qBAAqB,GAAG,CAAC,IAAY,EAAE,aAA6B,KAAyB;gBACjG,OAAO,IAAIC,2BAAmB,CAAC;oBAC7B,OAAO,EAAE,IAAIL,uBAAc,CAAC;;wBAE1B,OAAO,EAAE,aAAa,CAAC,OAAO;wBAC9B,iBAAiB,EAAE,aAAa,CAAC,iBAAiB;wBAClD,gBAAgB,EAAE,aAAa,CAAC,gBAAgB;wBAChD,cAAc,EAAE,iBAAiB,GAAG,aAAa,CAAC,cAAc,GAAG,SAAS;wBAC5E,iBAAiB,EAAE,aAAa,CAAC,iBAAiB;wBAClD,EAAE,EAAE,aAAa,CAAC,E
AAE;qBACrB,CAAC;oBACF,IAAI;AACL,iBAAA,CAAC,CAAC;AACL,aAAC,CAAC;AAEF,YAAA,IAAI,CAAC,SAAS,IAAI,SAAS,KAAK,OAAO,EAAE;gBACvC,MAAM,eAAe,GAAG,qBAAqB,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC5D,gBAAA,MAAM,eAAe,CAAC;gBACtB,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,EACL,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,eAAe,EAAE,CAC3B,CAAC;aACH;iBAAM;AACL,gBAAA,MAAM,UAAU,GAAG,IAAIM,eAAU,CAAC,KAAK,EAAE,EAAE,KAAK,EAAE,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;AAC3E,gBAAA,MAAM,SAAS,GAAG,UAAU,CAAC,YAAY,EAAE,CAAC;AAC5C,gBAAA,IAAI,MAAM,GAAG,MAAM,SAAS,CAAC,IAAI,EAAE,CAAC;AAEpC,gBAAA,OAAO,MAAM,CAAC,IAAI,KAAK,IAAI,EAAE;AAC3B,oBAAA,MAAM,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC;oBAClC,MAAM,QAAQ,GAAG,UAAU,CAAC,YAAY,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC;oBAC5D,MAAM,eAAe,GAAG,qBAAqB,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;AACtE,oBAAA,MAAM,eAAe,CAAC;oBAEtB,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,EACL,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,eAAe,EAAE,CAC3B,CAAC;AAEF,oBAAA,MAAM,GAAG,MAAM,SAAS,CAAC,IAAI,EAAE,CAAC;iBACjC;aACF;SACF;;QAGD,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,KAAK,IAAI,EAAE;AACpC,YAAA,IAAI;AACF,gBAAA,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;aAC3B;YAAC,OAAO,CAAC,EAAE;AACV,gBAAA,OAAO,CAAC,IAAI,CAAC,yCAAyC,EAAE,CAAC,CAAC,CAAC;AAC3D,gBAAA,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;;aAElB;SACF;KACF;AACF;;;;"}
@@ -96,18 +96,30 @@ class CustomAnthropic extends ChatAnthropicMessages {
                 const generationChunk = createGenerationChunk(token, chunk);
                 yield generationChunk;
                 await runManager?.handleLLMNewToken(token, undefined, undefined, undefined, undefined, { chunk: generationChunk });
-                continue;
             }
-            const textStream = new TextStream(token, { delay: this._lc_stream_delay });
-            const generator = textStream.generateText();
-            let textResult = await generator.next();
-            while (textResult.done !== true) {
-                const currentToken = textResult.value;
-                const newChunk = cloneChunk(currentToken, tokenType, chunk);
-                const generationChunk = createGenerationChunk(currentToken, newChunk);
-                yield generationChunk;
-                await runManager?.handleLLMNewToken(token, undefined, undefined, undefined, undefined, { chunk: generationChunk });
-                textResult = await generator.next();
+            else {
+                const textStream = new TextStream(token, { delay: this._lc_stream_delay });
+                const generator = textStream.generateText();
+                let result = await generator.next();
+                while (result.done !== true) {
+                    const currentToken = result.value;
+                    const newChunk = cloneChunk(currentToken, tokenType, chunk);
+                    const generationChunk = createGenerationChunk(currentToken, newChunk);
+                    yield generationChunk;
+                    await runManager?.handleLLMNewToken(token, undefined, undefined, undefined, undefined, { chunk: generationChunk });
+                    result = await generator.next();
+                }
+            }
+        }
+        /** Hack to force langgraph to continue */
+        if (options.signal?.aborted !== true) {
+            try {
+                stream.controller.abort();
+            }
+            catch (e) {
+                console.warn('Error aborting stream after data stream', e);
+                console.error(e);
+                // Ignore
             }
         }
     }
@@ -1 +1 @@
-
{"version":3,"file":"llm.mjs","sources":["../../../../src/llm/anthropic/llm.ts"],"sourcesContent":["import { AIMessageChunk } from '@langchain/core/messages';\nimport { ChatAnthropicMessages } from '@langchain/anthropic';\nimport { ChatGenerationChunk } from '@langchain/core/outputs';\nimport type { BaseMessage, MessageContentComplex } from '@langchain/core/messages';\nimport type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';\nimport type { AnthropicInput } from '@langchain/anthropic';\nimport type { AnthropicMessageCreateParams } from '@/llm/anthropic/types';\nimport { _makeMessageChunkFromAnthropicEvent } from './utils/message_outputs';\nimport { _convertMessagesToAnthropicPayload } from './utils/message_inputs';\nimport { TextStream } from '@/llm/text';\n\nfunction _toolsInParams(params: AnthropicMessageCreateParams): boolean {\n return !!(params.tools && params.tools.length > 0);\n}\n\nfunction extractToken(chunk: AIMessageChunk): [string, 'string' | 'input' | 'content'] | [undefined] {\n if (typeof chunk.content === 'string') {\n return [chunk.content, 'string'];\n } else if (\n Array.isArray(chunk.content) &&\n chunk.content.length >= 1 &&\n 'input' in chunk.content[0]\n ) {\n return typeof chunk.content[0].input === 'string'\n ? [chunk.content[0].input, 'input']\n : [JSON.stringify(chunk.content[0].input), 'input'];\n } else if (\n Array.isArray(chunk.content) &&\n chunk.content.length >= 1 &&\n 'text' in chunk.content[0]\n ) {\n return [chunk.content[0].text, 'content'];\n }\n return [undefined];\n}\n\nfunction cloneChunk(text: string, tokenType: string, chunk: AIMessageChunk): AIMessageChunk {\n if (tokenType === 'string') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: text }));\n } else if (tokenType === 'input') {\n return chunk;\n }\n const content = chunk.content[0] as MessageContentComplex;\n if (tokenType === 'content' && content.type === 'text') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));\n } else if (tokenType === 'content' && content.type === 'text_delta') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));\n }\n\n return chunk;\n}\n\nexport type CustomAnthropicInput = AnthropicInput & { _lc_stream_delay?: number };\n\nexport class CustomAnthropic extends ChatAnthropicMessages {\n _lc_stream_delay: number;\n constructor(fields: CustomAnthropicInput) {\n super(fields);\n this._lc_stream_delay = fields._lc_stream_delay ?? 25;\n }\n\n async *_streamResponseChunks(\n messages: BaseMessage[],\n options: this['ParsedCallOptions'],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n const params = this.invocationParams(options);\n const formattedMessages = _convertMessagesToAnthropicPayload(messages);\n const coerceContentToString = !_toolsInParams({\n ...params,\n ...formattedMessages,\n stream: false,\n });\n\n const stream = await this.createStreamWithRetry(\n {\n ...params,\n ...formattedMessages,\n stream: true,\n },\n {\n headers: options.headers,\n }\n );\n\n for await (const data of stream) {\n if (options.signal?.aborted === true) {\n stream.controller.abort();\n throw new Error('AbortError: User aborted the request.');\n }\n const shouldStreamUsage = this.streamUsage ?? 
options.streamUsage;\n const result = _makeMessageChunkFromAnthropicEvent(data, {\n streamUsage: shouldStreamUsage,\n coerceContentToString,\n });\n if (!result) continue;\n\n const { chunk } = result;\n\n // Extract the text content token for text field and runManager.\n const [token = '', tokenType] = extractToken(chunk);\n const createGenerationChunk = (text: string, incomingChunk: AIMessageChunk): ChatGenerationChunk => {\n return new ChatGenerationChunk({\n message: new AIMessageChunk({\n // Just yield chunk as it is and tool_use will be concat by BaseChatModel._generateUncached().\n content: incomingChunk.content,\n additional_kwargs: incomingChunk.additional_kwargs,\n tool_call_chunks: incomingChunk.tool_call_chunks,\n usage_metadata: shouldStreamUsage ? incomingChunk.usage_metadata : undefined,\n response_metadata: incomingChunk.response_metadata,\n id: incomingChunk.id,\n }),\n text,\n });\n };\n\n if (!tokenType || tokenType === 'input') {\n const generationChunk = createGenerationChunk(token, chunk);\n yield generationChunk;\n await runManager?.handleLLMNewToken(\n token,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n continue;\n }\n\n const textStream = new TextStream(token, { delay: this._lc_stream_delay });\n const generator = textStream.generateText();\n let textResult = await generator.next();\n\n while (textResult.done !== true) {\n const currentToken = textResult.value;\n const newChunk = cloneChunk(currentToken, tokenType, chunk);\n const generationChunk = createGenerationChunk(currentToken, newChunk);\n yield generationChunk;\n\n await runManager?.handleLLMNewToken(\n token,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n\n textResult = await generator.next();\n }\n }\n 
}\n}\n"],"names":[],"mappings":";;;;;;;AAWA,SAAS,cAAc,CAAC,MAAoC,EAAA;AAC1D,IAAA,OAAO,CAAC,EAAE,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,YAAY,CAAC,KAAqB,EAAA;AACzC,IAAA,IAAI,OAAO,KAAK,CAAC,OAAO,KAAK,QAAQ,EAAE;AACrC,QAAA,OAAO,CAAC,KAAK,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;KAClC;AAAM,SAAA,IACL,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;AAC5B,QAAA,KAAK,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC;QACzB,OAAO,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAC3B;QACA,OAAO,OAAO,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,KAAK,QAAQ;AAC/C,cAAE,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,OAAO,CAAC;AACnC,cAAE,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC,CAAC;KACvD;AAAM,SAAA,IACL,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;AAC5B,QAAA,KAAK,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC;QACzB,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAC1B;AACA,QAAA,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;KAC3C;IACD,OAAO,CAAC,SAAS,CAAC,CAAC;AACrB,CAAC;AAED,SAAS,UAAU,CAAC,IAAY,EAAE,SAAiB,EAAE,KAAqB,EAAA;AACxE,IAAA,IAAI,SAAS,KAAK,QAAQ,EAAE;AAC1B,QAAA,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;KACxE;AAAM,SAAA,IAAI,SAAS,KAAK,OAAO,EAAE;AAChC,QAAA,OAAO,KAAK,CAAC;KACd;IACD,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC,CAA0B,CAAC;IAC1D,IAAI,SAAS,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,EAAE;AACtD,QAAA,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC1G;SAAM,IAAI,SAAS,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,YAAY,EAAE;AACnE,QAAA,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC1G;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAIK,MAAO,eAAgB,SAAQ,qBAAqB,CAAA;AACxD,IAAA,gBAAgB,CAAS;AACzB,IAAA,WAAA,CAAY,MAA4B,EAAA;QACtC,KAAK,CAAC,MAAM,CAAC,CAAC;QACd,IAAI,CAAC,gBAAgB,GAAG,MAAM,CAAC,gBAAgB,IAAI,EAAE,CAAC;KACvD;IAED,OAAO,qBAAqB,CAC1B,QAAuB,EACvB,OAAkC,EAClC,UAAqC,EAAA;QAErC,MAAM,MAAM,GAAG,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC,CAAC;AAC9C,QAAA,MAAM,iBAAiB,GAAG,kCAAkC,CAAC,QAAQ,CAAC,CAAC;AACvE,QAAA,MAAM,qBAAqB,GAAG,CAAC,cAAc,CAAC;AAC5C,YAAA,GAAG,MAAM;AACT,YAAA,GAAG,iBAAiB;AACpB,YAAA,MAAM,EAAE,KAAK;AACd,SAAA,CAAC,CAAC;AAEH,QAAA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,qBAAqB,CAC7C;AACE,YAAA,GAAG,MAAM;AACT,YAAA,GAAG,iBAAiB;AACpB,YAAA,MAAM,EAAE,IAAI;SACb,EACD;YACE,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,SAAA,CACF,CAAC;AAEF,QAAA,WAAW,MAAM,IAAI,IAAI,MAAM,EAAE;YAC/B,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,KAAK,IAAI,EAAE;AACpC,gBAAA,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;AAC1B,gBAAA,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,MAAM,iBAAiB,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC;AAClE,YAAA,MAAM,MAAM,GAAG,mCAAmC,CAAC,IAAI,EAAE;AACvD,gBAAA,WAAW,EAAE,iBAAiB;gBAC9B,qBAAqB;AACtB,aAAA,CAAC,CAAC;AACH,YAAA,IAAI,CAAC,MAAM;gBAAE,SAAS;AAEtB,YAAA,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,CAAC;;AAGzB,YAAA,MAAM,CAAC,KAAK,GAAG,EAAE,EAAE,SAAS,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;AACpD,YAAA,MAAM,qBAAqB,GAAG,CAAC,IAAY,EAAE,aAA6B,KAAyB;gBACjG,OAAO,IAAI,mBAAmB,CAAC;oBAC7B,OAAO,EAAE,IAAI,cAAc,CAAC;;wBAE1B,OAAO,EAAE,aAAa,CAAC,OAAO;wBAC9B,iBAAiB,EAAE,aAAa,CAAC,iBAAiB;wBAClD,gBAAgB,EAAE,aAAa,CAAC,gBAAgB;wBAChD,cAAc,EAAE,iBAAiB,GAAG,aAAa,CAAC,cAAc,GAAG,SAAS;wBAC5E,iBAAiB,EAAE,aAAa,CAAC,iBAAiB;wBAClD,EAAE,EAAE,aAAa,CAAC,EAAE;qBACrB,CAAC;oBACF,IAAI;AACL,iBAAA,CAAC,CAAC;AACL,aAAC,CAAC;AAEF,YAAA,IAAI,CAAC,SAAS,IAAI,SAAS,KAAK,OAAO,EAAE;gBACvC,MAAM,eAAe,GAAG,qBAAqB,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC5D
,gBAAA,MAAM,eAAe,CAAC;gBACtB,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,EACL,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,eAAe,EAAE,CAC3B,CAAC;gBACF,SAAS;aACV;AAED,YAAA,MAAM,UAAU,GAAG,IAAI,UAAU,CAAC,KAAK,EAAE,EAAE,KAAK,EAAE,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;AAC3E,YAAA,MAAM,SAAS,GAAG,UAAU,CAAC,YAAY,EAAE,CAAC;AAC5C,YAAA,IAAI,UAAU,GAAG,MAAM,SAAS,CAAC,IAAI,EAAE,CAAC;AAExC,YAAA,OAAO,UAAU,CAAC,IAAI,KAAK,IAAI,EAAE;AAC/B,gBAAA,MAAM,YAAY,GAAG,UAAU,CAAC,KAAK,CAAC;gBACtC,MAAM,QAAQ,GAAG,UAAU,CAAC,YAAY,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC;gBAC5D,MAAM,eAAe,GAAG,qBAAqB,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;AACtE,gBAAA,MAAM,eAAe,CAAC;gBAEtB,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,EACL,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,eAAe,EAAE,CAC3B,CAAC;AAEF,gBAAA,UAAU,GAAG,MAAM,SAAS,CAAC,IAAI,EAAE,CAAC;aACrC;SACF;KACF;AACF;;;;"}
+
{"version":3,"file":"llm.mjs","sources":["../../../../src/llm/anthropic/llm.ts"],"sourcesContent":["import { AIMessageChunk } from '@langchain/core/messages';\nimport { ChatAnthropicMessages } from '@langchain/anthropic';\nimport { ChatGenerationChunk } from '@langchain/core/outputs';\nimport type { BaseMessage, MessageContentComplex } from '@langchain/core/messages';\nimport type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';\nimport type { AnthropicInput } from '@langchain/anthropic';\nimport type { AnthropicMessageCreateParams } from '@/llm/anthropic/types';\nimport { _makeMessageChunkFromAnthropicEvent } from './utils/message_outputs';\nimport { _convertMessagesToAnthropicPayload } from './utils/message_inputs';\nimport { TextStream } from '@/llm/text';\n\nfunction _toolsInParams(params: AnthropicMessageCreateParams): boolean {\n return !!(params.tools && params.tools.length > 0);\n}\n\nfunction extractToken(chunk: AIMessageChunk): [string, 'string' | 'input' | 'content'] | [undefined] {\n if (typeof chunk.content === 'string') {\n return [chunk.content, 'string'];\n } else if (\n Array.isArray(chunk.content) &&\n chunk.content.length >= 1 &&\n 'input' in chunk.content[0]\n ) {\n return typeof chunk.content[0].input === 'string'\n ? [chunk.content[0].input, 'input']\n : [JSON.stringify(chunk.content[0].input), 'input'];\n } else if (\n Array.isArray(chunk.content) &&\n chunk.content.length >= 1 &&\n 'text' in chunk.content[0]\n ) {\n return [chunk.content[0].text, 'content'];\n }\n return [undefined];\n}\n\nfunction cloneChunk(text: string, tokenType: string, chunk: AIMessageChunk): AIMessageChunk {\n if (tokenType === 'string') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: text }));\n } else if (tokenType === 'input') {\n return chunk;\n }\n const content = chunk.content[0] as MessageContentComplex;\n if (tokenType === 'content' && content.type === 'text') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));\n } else if (tokenType === 'content' && content.type === 'text_delta') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));\n }\n\n return chunk;\n}\n\nexport type CustomAnthropicInput = AnthropicInput & { _lc_stream_delay?: number };\n\nexport class CustomAnthropic extends ChatAnthropicMessages {\n _lc_stream_delay: number;\n constructor(fields: CustomAnthropicInput) {\n super(fields);\n this._lc_stream_delay = fields._lc_stream_delay ?? 25;\n }\n\n async *_streamResponseChunks(\n messages: BaseMessage[],\n options: this['ParsedCallOptions'],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n const params = this.invocationParams(options);\n const formattedMessages = _convertMessagesToAnthropicPayload(messages);\n const coerceContentToString = !_toolsInParams({\n ...params,\n ...formattedMessages,\n stream: false,\n });\n\n const stream = await this.createStreamWithRetry(\n {\n ...params,\n ...formattedMessages,\n stream: true,\n },\n {\n headers: options.headers,\n }\n );\n\n for await (const data of stream) {\n if (options.signal?.aborted === true) {\n stream.controller.abort();\n throw new Error('AbortError: User aborted the request.');\n }\n const shouldStreamUsage = this.streamUsage ?? 
options.streamUsage;\n const result = _makeMessageChunkFromAnthropicEvent(data, {\n streamUsage: shouldStreamUsage,\n coerceContentToString,\n });\n if (!result) continue;\n\n const { chunk } = result;\n\n // Extract the text content token for text field and runManager.\n const [token = '', tokenType] = extractToken(chunk);\n const createGenerationChunk = (text: string, incomingChunk: AIMessageChunk): ChatGenerationChunk => {\n return new ChatGenerationChunk({\n message: new AIMessageChunk({\n // Just yield chunk as it is and tool_use will be concat by BaseChatModel._generateUncached().\n content: incomingChunk.content,\n additional_kwargs: incomingChunk.additional_kwargs,\n tool_call_chunks: incomingChunk.tool_call_chunks,\n usage_metadata: shouldStreamUsage ? incomingChunk.usage_metadata : undefined,\n response_metadata: incomingChunk.response_metadata,\n id: incomingChunk.id,\n }),\n text,\n });\n };\n\n if (!tokenType || tokenType === 'input') {\n const generationChunk = createGenerationChunk(token, chunk);\n yield generationChunk;\n await runManager?.handleLLMNewToken(\n token,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n } else {\n const textStream = new TextStream(token, { delay: this._lc_stream_delay });\n const generator = textStream.generateText();\n let result = await generator.next();\n\n while (result.done !== true) {\n const currentToken = result.value;\n const newChunk = cloneChunk(currentToken, tokenType, chunk);\n const generationChunk = createGenerationChunk(currentToken, newChunk);\n yield generationChunk;\n\n await runManager?.handleLLMNewToken(\n token,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n\n result = await generator.next();\n }\n }\n }\n\n /** Hack to force langgraph to continue */\n if (options.signal?.aborted !== true) {\n try {\n stream.controller.abort();\n } catch (e) {\n console.warn('Error aborting stream after data stream', e);\n console.error(e);\n // Ignore\n }\n }\n 
}\n}\n"],"names":[],"mappings":";;;;;;;AAWA,SAAS,cAAc,CAAC,MAAoC,EAAA;AAC1D,IAAA,OAAO,CAAC,EAAE,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,YAAY,CAAC,KAAqB,EAAA;AACzC,IAAA,IAAI,OAAO,KAAK,CAAC,OAAO,KAAK,QAAQ,EAAE;AACrC,QAAA,OAAO,CAAC,KAAK,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;KAClC;AAAM,SAAA,IACL,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;AAC5B,QAAA,KAAK,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC;QACzB,OAAO,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAC3B;QACA,OAAO,OAAO,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,KAAK,QAAQ;AAC/C,cAAE,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,OAAO,CAAC;AACnC,cAAE,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC,CAAC;KACvD;AAAM,SAAA,IACL,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;AAC5B,QAAA,KAAK,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC;QACzB,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAC1B;AACA,QAAA,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;KAC3C;IACD,OAAO,CAAC,SAAS,CAAC,CAAC;AACrB,CAAC;AAED,SAAS,UAAU,CAAC,IAAY,EAAE,SAAiB,EAAE,KAAqB,EAAA;AACxE,IAAA,IAAI,SAAS,KAAK,QAAQ,EAAE;AAC1B,QAAA,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;KACxE;AAAM,SAAA,IAAI,SAAS,KAAK,OAAO,EAAE;AAChC,QAAA,OAAO,KAAK,CAAC;KACd;IACD,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC,CAA0B,CAAC;IAC1D,IAAI,SAAS,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,EAAE;AACtD,QAAA,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC1G;SAAM,IAAI,SAAS,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,YAAY,EAAE;AACnE,QAAA,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC1G;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAIK,MAAO,eAAgB,SAAQ,qBAAqB,CAAA;AACxD,IAAA,gBAAgB,CAAS;AACzB,IAAA,WAAA,CAAY,MAA4B,EAAA;QACtC,KAAK,CAAC,MAAM,CAAC,CAAC;QACd,IAAI,CAAC,gBAAgB,GAAG,MAAM,CAAC,gBAAgB,IAAI,EAAE,CAAC;KACvD;IAED,OAAO,qBAAqB,CAC1B,QAAuB,EACvB,OAAkC,EAClC,UAAqC,EAAA;QAErC,MAAM,MAAM,GAAG,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC,CAAC;AAC9C,QAAA,MAAM,iBAAiB,GAAG,kCAAkC,CAAC,QAAQ,CAAC,CAAC;AACvE,QAAA,MAAM,qBAAqB,GAAG,CAAC,cAAc,CAAC;AAC5C,YAAA,GAAG,MAAM;AACT,YAAA,GAAG,iBAAiB;AACpB,YAAA,MAAM,EAAE,KAAK;AACd,SAAA,CAAC,CAAC;AAEH,QAAA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,qBAAqB,CAC7C;AACE,YAAA,GAAG,MAAM;AACT,YAAA,GAAG,iBAAiB;AACpB,YAAA,MAAM,EAAE,IAAI;SACb,EACD;YACE,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,SAAA,CACF,CAAC;AAEF,QAAA,WAAW,MAAM,IAAI,IAAI,MAAM,EAAE;YAC/B,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,KAAK,IAAI,EAAE;AACpC,gBAAA,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;AAC1B,gBAAA,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,MAAM,iBAAiB,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC;AAClE,YAAA,MAAM,MAAM,GAAG,mCAAmC,CAAC,IAAI,EAAE;AACvD,gBAAA,WAAW,EAAE,iBAAiB;gBAC9B,qBAAqB;AACtB,aAAA,CAAC,CAAC;AACH,YAAA,IAAI,CAAC,MAAM;gBAAE,SAAS;AAEtB,YAAA,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,CAAC;;AAGzB,YAAA,MAAM,CAAC,KAAK,GAAG,EAAE,EAAE,SAAS,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;AACpD,YAAA,MAAM,qBAAqB,GAAG,CAAC,IAAY,EAAE,aAA6B,KAAyB;gBACjG,OAAO,IAAI,mBAAmB,CAAC;oBAC7B,OAAO,EAAE,IAAI,cAAc,CAAC;;wBAE1B,OAAO,EAAE,aAAa,CAAC,OAAO;wBAC9B,iBAAiB,EAAE,aAAa,CAAC,iBAAiB;wBAClD,gBAAgB,EAAE,aAAa,CAAC,gBAAgB;wBAChD,cAAc,EAAE,iBAAiB,GAAG,aAAa,CAAC,cAAc,GAAG,SAAS;wBAC5E,iBAAiB,EAAE,aAAa,CAAC,iBAAiB;wBAClD,EAAE,EAAE,aAAa,CAAC,EAAE;qBACrB,CAAC;oBACF,IAAI;AACL,iBAAA,CAAC,CAAC;AACL,aAAC,CAAC;AAEF,YAAA,IAAI,CAAC,SAAS,IAAI,SAAS,KAAK,OAAO,EAAE;gBACvC,MAAM,eAAe,GAAG,qBAAqB,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC5D
,gBAAA,MAAM,eAAe,CAAC;gBACtB,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,EACL,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,eAAe,EAAE,CAC3B,CAAC;aACH;iBAAM;AACL,gBAAA,MAAM,UAAU,GAAG,IAAI,UAAU,CAAC,KAAK,EAAE,EAAE,KAAK,EAAE,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;AAC3E,gBAAA,MAAM,SAAS,GAAG,UAAU,CAAC,YAAY,EAAE,CAAC;AAC5C,gBAAA,IAAI,MAAM,GAAG,MAAM,SAAS,CAAC,IAAI,EAAE,CAAC;AAEpC,gBAAA,OAAO,MAAM,CAAC,IAAI,KAAK,IAAI,EAAE;AAC3B,oBAAA,MAAM,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC;oBAClC,MAAM,QAAQ,GAAG,UAAU,CAAC,YAAY,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC;oBAC5D,MAAM,eAAe,GAAG,qBAAqB,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;AACtE,oBAAA,MAAM,eAAe,CAAC;oBAEtB,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,EACL,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,eAAe,EAAE,CAC3B,CAAC;AAEF,oBAAA,MAAM,GAAG,MAAM,SAAS,CAAC,IAAI,EAAE,CAAC;iBACjC;aACF;SACF;;QAGD,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,KAAK,IAAI,EAAE;AACpC,YAAA,IAAI;AACF,gBAAA,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;aAC3B;YAAC,OAAO,CAAC,EAAE;AACV,gBAAA,OAAO,CAAC,IAAI,CAAC,yCAAyC,EAAE,CAAC,CAAC,CAAC;AAC3D,gBAAA,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;;aAElB;SACF;KACF;AACF;;;;"}
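Note: both compiled hunks above route text deltas through TextStream from @/llm/text, whose implementation is not part of this diff. As a rough, hypothetical sketch only, mirroring the call shape used in the diff (new TextStream(token, { delay }) and generateText() yielding string pieces), with the chunk size and sleep helper assumed, the delayed re-streaming could look like this in TypeScript:

class DelayedTextStream {
  // Hypothetical stand-in, not the package's actual TextStream.
  constructor(
    private text: string,
    private options: { delay?: number; chunkSize?: number } = {}
  ) {}

  async *generateText(): AsyncGenerator<string> {
    const delay = this.options.delay ?? 25; // 25 ms matches the CustomAnthropic default
    const size = this.options.chunkSize ?? 8; // assumed split size
    for (let i = 0; i < this.text.length; i += size) {
      yield this.text.slice(i, i + size);
      await new Promise((resolve) => setTimeout(resolve, delay));
    }
  }
}

// Consumed the same way the new else-branch drives TextStream:
async function demo(): Promise<void> {
  const generator = new DelayedTextStream('Hello from the delayed stream', { delay: 10 }).generateText();
  let result = await generator.next();
  while (result.done !== true) {
    process.stdout.write(result.value);
    result = await generator.next();
  }
}

demo();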
package/package.json CHANGED
package/src/llm/anthropic/llm.ts CHANGED
@@ -125,29 +125,39 @@ export class CustomAnthropic extends ChatAnthropicMessages {
           undefined,
           { chunk: generationChunk }
         );
-        continue;
+      } else {
+        const textStream = new TextStream(token, { delay: this._lc_stream_delay });
+        const generator = textStream.generateText();
+        let result = await generator.next();
+
+        while (result.done !== true) {
+          const currentToken = result.value;
+          const newChunk = cloneChunk(currentToken, tokenType, chunk);
+          const generationChunk = createGenerationChunk(currentToken, newChunk);
+          yield generationChunk;
+
+          await runManager?.handleLLMNewToken(
+            token,
+            undefined,
+            undefined,
+            undefined,
+            undefined,
+            { chunk: generationChunk }
+          );
+
+          result = await generator.next();
+        }
       }
+    }
 
-      const textStream = new TextStream(token, { delay: this._lc_stream_delay });
-      const generator = textStream.generateText();
-      let textResult = await generator.next();
-
-      while (textResult.done !== true) {
-        const currentToken = textResult.value;
-        const newChunk = cloneChunk(currentToken, tokenType, chunk);
-        const generationChunk = createGenerationChunk(currentToken, newChunk);
-        yield generationChunk;
-
-        await runManager?.handleLLMNewToken(
-          token,
-          undefined,
-          undefined,
-          undefined,
-          undefined,
-          { chunk: generationChunk }
-        );
-
-        textResult = await generator.next();
-      }
+    /** Hack to force langgraph to continue */
+    if (options.signal?.aborted !== true) {
+      try {
+        stream.controller.abort();
+      } catch (e) {
+        console.warn('Error aborting stream after data stream', e);
+        console.error(e);
+        // Ignore
+      }
     }
   }
 }
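For context on how the patched method is reached, here is a minimal consumer sketch. It assumes CustomAnthropic is exported by the package (the import path below is a placeholder, not taken from this diff) and that the usual @langchain/anthropic fields (apiKey, model) apply; the model id is likewise a placeholder:

import { HumanMessage } from '@langchain/core/messages';
import { CustomAnthropic } from '@librechat/agents'; // placeholder import path

async function main(): Promise<void> {
  const llm = new CustomAnthropic({
    apiKey: process.env.ANTHROPIC_API_KEY, // standard AnthropicInput field
    model: 'claude-3-5-sonnet-latest', // placeholder model id
    _lc_stream_delay: 10, // lower than the 25 ms default for snappier re-streamed text
  });

  // .stream() eventually drives _streamResponseChunks(), where the new else-branch
  // re-streams text through TextStream and, as of 1.9.2, the trailing block aborts
  // the underlying Anthropic stream so langgraph can continue.
  const stream = await llm.stream([new HumanMessage('Say hello in five words.')]);
  for await (const chunk of stream) {
    process.stdout.write(typeof chunk.content === 'string' ? chunk.content : '');
  }
}

main();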