@librechat/agents 2.4.22 → 2.4.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/llm/anthropic/index.cjs +1 -1
- package/dist/cjs/llm/anthropic/index.cjs.map +1 -1
- package/dist/cjs/llm/anthropic/types.cjs +50 -0
- package/dist/cjs/llm/anthropic/types.cjs.map +1 -0
- package/dist/cjs/llm/anthropic/utils/message_inputs.cjs +227 -21
- package/dist/cjs/llm/anthropic/utils/message_inputs.cjs.map +1 -1
- package/dist/cjs/llm/anthropic/utils/message_outputs.cjs +1 -0
- package/dist/cjs/llm/anthropic/utils/message_outputs.cjs.map +1 -1
- package/dist/cjs/llm/openai/index.cjs.map +1 -1
- package/dist/cjs/run.cjs.map +1 -1
- package/dist/esm/llm/anthropic/index.mjs +1 -1
- package/dist/esm/llm/anthropic/index.mjs.map +1 -1
- package/dist/esm/llm/anthropic/types.mjs +48 -0
- package/dist/esm/llm/anthropic/types.mjs.map +1 -0
- package/dist/esm/llm/anthropic/utils/message_inputs.mjs +228 -22
- package/dist/esm/llm/anthropic/utils/message_inputs.mjs.map +1 -1
- package/dist/esm/llm/anthropic/utils/message_outputs.mjs +1 -0
- package/dist/esm/llm/anthropic/utils/message_outputs.mjs.map +1 -1
- package/dist/esm/llm/openai/index.mjs.map +1 -1
- package/dist/esm/run.mjs.map +1 -1
- package/dist/types/llm/anthropic/index.d.ts +3 -4
- package/dist/types/llm/anthropic/types.d.ts +4 -35
- package/dist/types/llm/anthropic/utils/message_inputs.d.ts +2 -2
- package/dist/types/llm/anthropic/utils/message_outputs.d.ts +1 -3
- package/dist/types/llm/anthropic/utils/output_parsers.d.ts +22 -0
- package/dist/types/llm/openai/index.d.ts +3 -2
- package/dist/types/tools/example.d.ts +21 -3
- package/package.json +9 -9
- package/src/llm/anthropic/index.ts +6 -5
- package/src/llm/anthropic/llm.spec.ts +176 -179
- package/src/llm/anthropic/types.ts +64 -39
- package/src/llm/anthropic/utils/message_inputs.ts +275 -37
- package/src/llm/anthropic/utils/message_outputs.ts +4 -21
- package/src/llm/anthropic/utils/output_parsers.ts +114 -0
- package/src/llm/openai/index.ts +7 -6
- package/src/run.ts +1 -1
package/dist/esm/run.mjs.map
CHANGED
@@ -1 +1 @@
(Regenerated minified sourcemap; the full JSON is not reproduced here. The only change in the embedded `src/run.ts` source is a type assertion in the tool-token-counting loop of `processStream`: `(tool.schema as t.ZodObjectAny).describe(tool.description ?? '')` replaces `tool.schema.describe(tool.description ?? '')`, plus the corresponding `mappings` update.)
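For context, a minimal sketch of the loop that change touches: `processStream` in `src/run.ts` counts each tool's JSON schema toward the instruction token budget, and the schema is now asserted to a Zod object type before `.describe()` is called. The `get_weather` tool and the rough length-based counter below are illustrative stand-ins for the package's real tools and `createTokenCounter()`.

```typescript
import { zodToJsonSchema } from 'zod-to-json-schema';
import { SystemMessage } from '@langchain/core/messages';
import { z } from 'zod';

// Loosely-typed stand-in for the graph's tools; in the package the schema
// arrives without a concrete Zod type, hence the assertion before `.describe()`.
interface GenericTool {
  name: string;
  description?: string;
  schema?: unknown;
}

const tools: GenericTool[] = [
  {
    name: 'get_weather',
    description: 'Look up the weather for a city',
    schema: z.object({ city: z.string() }),
  },
];

// Hypothetical counter; the package builds a real tokenizer-backed one.
const tokenCounter = (msg: SystemMessage): number =>
  Math.ceil(JSON.stringify(msg.content).length / 4);

const toolTokens = tools.reduce((acc, tool) => {
  if (!tool.schema) return acc;
  const jsonSchema = zodToJsonSchema(
    (tool.schema as z.ZodObject<any>).describe(tool.description ?? ''),
    tool.name
  );
  return acc + tokenCounter(new SystemMessage(JSON.stringify(jsonSchema)));
}, 0);

console.log(`tokens reserved for tool schemas: ${toolTokens}`);
```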

package/dist/types/llm/anthropic/index.d.ts
CHANGED

@@ -1,10 +1,9 @@
 import { ChatAnthropicMessages } from '@langchain/anthropic';
 import { ChatGenerationChunk } from '@langchain/core/outputs';
 import type { BaseChatModelParams } from '@langchain/core/language_models/chat_models';
-import type { BaseMessage } from '@langchain/core/messages';
+import type { BaseMessage, UsageMetadata } from '@langchain/core/messages';
 import type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';
 import type { AnthropicInput } from '@langchain/anthropic';
-import type { AnthropicStreamUsage } from '@/llm/anthropic/types';
 export type CustomAnthropicInput = AnthropicInput & {
     _lc_stream_delay?: number;
 } & BaseChatModelParams;
@@ -17,9 +16,9 @@ export declare class CustomAnthropic extends ChatAnthropicMessages {
    constructor(fields?: CustomAnthropicInput);
    /**
     * Get stream usage as returned by this client's API response.
-    * @returns
+    * @returns The stream usage object.
     */
-   getStreamUsage():
+   getStreamUsage(): UsageMetadata | undefined;
    resetTokenEvents(): void;
    private createGenerationChunk;
    _streamResponseChunks(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
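Callers can now rely on the typed accessor. A minimal consumption sketch, assuming `CustomAnthropic` is reachable from your import path (the diff only shows the declaration file) and using standard `AnthropicInput` constructor fields:

```typescript
import type { UsageMetadata } from '@langchain/core/messages';
// Import path is an assumption; point it at wherever your build exposes CustomAnthropic.
import { CustomAnthropic } from '@librechat/agents';

const model = new CustomAnthropic({
  model: 'claude-3-5-sonnet-latest', // any Anthropic model id
  apiKey: process.env.ANTHROPIC_API_KEY,
});

// Where stream usage is available, it is now typed as LangChain's UsageMetadata
// instead of a looser, package-local shape.
const usage: UsageMetadata | undefined = model.getStreamUsage();
if (usage) {
  console.log(usage.input_tokens, usage.output_tokens, usage.total_tokens);
}
```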

package/dist/types/llm/anthropic/types.d.ts
CHANGED

@@ -1,5 +1,4 @@
 import Anthropic from '@anthropic-ai/sdk';
-import type { Tool as AnthropicTool } from '@anthropic-ai/sdk/resources';
 import { BindToolsInput } from '@langchain/core/language_models/chat_models';
 export type AnthropicToolResponse = {
     type: 'tool_use';
@@ -7,19 +6,21 @@ export type AnthropicToolResponse = {
     name: string;
     input: Record<string, any>;
 };
+export type AnthropicStreamUsage = Anthropic.Usage;
 export type AnthropicMessageParam = Anthropic.MessageParam;
 export type AnthropicMessageDeltaEvent = Anthropic.MessageDeltaEvent;
 export type AnthropicMessageStartEvent = Anthropic.MessageStartEvent;
 export type AnthropicMessageResponse = Anthropic.ContentBlock | AnthropicToolResponse;
 export type AnthropicMessageCreateParams = Anthropic.MessageCreateParamsNonStreaming;
 export type AnthropicStreamingMessageCreateParams = Anthropic.MessageCreateParamsStreaming;
+export type AnthropicThinkingConfigParam = Anthropic.ThinkingConfigParam;
 export type AnthropicMessageStreamEvent = Anthropic.MessageStreamEvent;
 export type AnthropicRequestOptions = Anthropic.RequestOptions;
 export type AnthropicToolChoice = {
     type: 'tool';
     name: string;
 } | 'any' | 'auto' | 'none' | string;
-export type ChatAnthropicToolType =
+export type ChatAnthropicToolType = Anthropic.Messages.Tool | BindToolsInput;
 export type AnthropicTextBlockParam = Anthropic.Messages.TextBlockParam;
 export type AnthropicImageBlockParam = Anthropic.Messages.ImageBlockParam;
 export type AnthropicToolUseBlockParam = Anthropic.Messages.ToolUseBlockParam;
@@ -27,36 +28,4 @@ export type AnthropicToolResultBlockParam = Anthropic.Messages.ToolResultBlockPa
 export type AnthropicDocumentBlockParam = Anthropic.Messages.DocumentBlockParam;
 export type AnthropicThinkingBlockParam = Anthropic.Messages.ThinkingBlockParam;
 export type AnthropicRedactedThinkingBlockParam = Anthropic.Messages.RedactedThinkingBlockParam;
-/**
- * Stream usage information for Anthropic API calls
- * @see https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching#pricing
- */
-export interface AnthropicStreamUsage {
-    /**
-     * The number of input tokens used in the request
-     */
-    input_tokens: number;
-    /**
-     * The number of cache creation input tokens used (write operations)
-     */
-    cache_creation_input_tokens?: number;
-    /**
-     * The number of cache input tokens used (read operations)
-     */
-    cache_read_input_tokens?: number;
-    /**
-     * The number of output tokens generated in the response
-     */
-    output_tokens: number;
-    /**
-     * The total number of tokens generated in the response
-     */
-    total_tokens: number;
-    /**
-     * Details about input token usage
-     */
-    input_token_details?: {
-        cache_creation: number;
-        cache_read: number;
-    };
-}
+export declare function isAnthropicImageBlockParam(block: unknown): block is AnthropicImageBlockParam;
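A short sketch of the new `isAnthropicImageBlockParam` guard narrowing an unknown content block; the import path is an assumption, since the diff only shows the declaration:

```typescript
// Import path is an assumption; adjust to your build's exports.
import { isAnthropicImageBlockParam, type AnthropicImageBlockParam } from '@librechat/agents';

// Returns the first image block from an arbitrary list of content blocks.
function firstImageBlock(blocks: unknown[]): AnthropicImageBlockParam | undefined {
  for (const block of blocks) {
    if (isAnthropicImageBlockParam(block)) {
      // Narrowed to Anthropic.Messages.ImageBlockParam here.
      return block;
    }
  }
  return undefined;
}
```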

package/dist/types/llm/anthropic/utils/message_inputs.d.ts
CHANGED

@@ -1,9 +1,9 @@
 /**
  * This util file contains functions for converting LangChain messages to Anthropic messages.
  */
-import { BaseMessage } from '@langchain/core/messages';
+import { type BaseMessage } from '@langchain/core/messages';
 import { ToolCall } from '@langchain/core/messages/tool';
-import type {
+import type { AnthropicMessageCreateParams, AnthropicToolResponse } from '@/llm/anthropic/types';
 export declare function _convertLangChainToolCallToAnthropic(toolCall: ToolCall): AnthropicToolResponse;
 /**
  * Formats messages as a prompt for the model.
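For reference, `_convertLangChainToolCallToAnthropic` maps LangChain's `ToolCall` shape onto the `tool_use`-style `AnthropicToolResponse` from `types.d.ts`. A sketch of the two shapes with hypothetical values (the function is underscore-prefixed, so treat it as internal):

```typescript
import type { ToolCall } from '@langchain/core/messages/tool';

// LangChain-side input shape:
const toolCall: ToolCall = {
  name: 'get_weather',
  args: { city: 'Berlin' },
  id: 'toolu_01ABC',
  type: 'tool_call',
};

// Anthropic-side output is a tool_use block, per AnthropicToolResponse:
// { type: 'tool_use', name: 'get_weather', input: { city: 'Berlin' }, ... }
console.log(toolCall.name, toolCall.args);
```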

package/dist/types/llm/anthropic/utils/message_outputs.d.ts
CHANGED

@@ -3,10 +3,8 @@
  */
 import Anthropic from '@anthropic-ai/sdk';
 import { AIMessageChunk } from '@langchain/core/messages';
-import { ToolCall } from '@langchain/core/messages/tool';
 import { ChatGeneration } from '@langchain/core/outputs';
-import { AnthropicMessageResponse } from '../types
-export declare function extractToolCalls(content: Record<string, any>[]): ToolCall[];
+import { AnthropicMessageResponse } from '../types';
 export declare function _makeMessageChunkFromAnthropicEvent(data: Anthropic.Messages.RawMessageStreamEvent, fields: {
     streamUsage: boolean;
     coerceContentToString: boolean;

package/dist/types/llm/anthropic/utils/output_parsers.d.ts
ADDED

@@ -0,0 +1,22 @@
+import { z } from 'zod';
+import { BaseLLMOutputParser } from '@langchain/core/output_parsers';
+import { JsonOutputKeyToolsParserParams } from '@langchain/core/output_parsers/openai_tools';
+import { ChatGeneration } from '@langchain/core/outputs';
+import { ToolCall } from '@langchain/core/messages/tool';
+interface AnthropicToolsOutputParserParams<T extends Record<string, any>> extends JsonOutputKeyToolsParserParams<T> {
+}
+export declare class AnthropicToolsOutputParser<T extends Record<string, any> = Record<string, any>> extends BaseLLMOutputParser<T> {
+    static lc_name(): string;
+    lc_namespace: string[];
+    returnId: boolean;
+    /** The type of tool calls to return. */
+    keyName: string;
+    /** Whether to return only the first tool call. */
+    returnSingle: boolean;
+    zodSchema?: z.ZodType<T>;
+    constructor(params: AnthropicToolsOutputParserParams<T>);
+    protected _validateResult(result: unknown): Promise<T>;
+    parseResult(generations: ChatGeneration[]): Promise<T>;
+}
+export declare function extractToolCalls(content: Record<string, any>[]): ToolCall[];
+export {};
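A usage sketch for the new parser, following the `JsonOutputKeyToolsParserParams` fields it inherits (`keyName`, `returnSingle`, `zodSchema`); the import path is an assumption, since only the declaration file is shown:

```typescript
import { z } from 'zod';
// Import path is an assumption; adjust to your build's exports.
import { AnthropicToolsOutputParser } from '@librechat/agents';

const weatherSchema = z.object({ city: z.string(), unit: z.enum(['c', 'f']) });

const parser = new AnthropicToolsOutputParser<z.infer<typeof weatherSchema>>({
  keyName: 'get_weather',   // return calls to this tool
  returnSingle: true,       // only the first matching call
  zodSchema: weatherSchema, // validate the parsed arguments
});

// Typical use: feed a bound model's ChatGeneration output through the parser,
// e.g. `await parser.parseResult(generations)`.
```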

package/dist/types/llm/openai/index.d.ts
CHANGED

@@ -2,6 +2,7 @@ import { AzureOpenAI as AzureOpenAIClient } from 'openai';
 import { ChatXAI as OriginalChatXAI } from '@langchain/xai';
 import { ChatDeepSeek as OriginalChatDeepSeek } from '@langchain/deepseek';
 import { OpenAIClient, ChatOpenAI as OriginalChatOpenAI, AzureChatOpenAI as OriginalAzureChatOpenAI } from '@langchain/openai';
+import type { OpenAICoreRequestOptions } from 'node_modules/@langchain/deepseek/node_modules/@langchain/openai';
 import type * as t from '@langchain/openai';
 export declare class CustomOpenAIClient extends OpenAIClient {
     abortHandler?: () => void;
@@ -21,9 +22,9 @@ export declare class AzureChatOpenAI extends OriginalAzureChatOpenAI {
 }
 export declare class ChatDeepSeek extends OriginalChatDeepSeek {
     get exposedClient(): CustomOpenAIClient;
-    protected _getClientOptions(options?:
+    protected _getClientOptions(options?: OpenAICoreRequestOptions): OpenAICoreRequestOptions;
 }
 export declare class ChatXAI extends OriginalChatXAI {
     get exposedClient(): CustomOpenAIClient;
-    protected _getClientOptions(options?:
+    protected _getClientOptions(options?: OpenAICoreRequestOptions): OpenAICoreRequestOptions;
 }

package/dist/types/tools/example.d.ts
CHANGED

@@ -7,14 +7,22 @@ export declare const fetchRandomImageTool: DynamicStructuredTool<z.ZodObject<{
     input?: string | undefined;
 }, {
     input?: string | undefined;
-}
+}>, {
+    input?: string | undefined;
+}, {
+    input?: string | undefined;
+}>;
 export declare const fetchRandomImageURL: DynamicStructuredTool<z.ZodObject<{
     input: z.ZodOptional<z.ZodString>;
 }, "strip", z.ZodTypeAny, {
     input?: string | undefined;
 }, {
     input?: string | undefined;
-}
+}>, {
+    input?: string | undefined;
+}, {
+    input?: string | undefined;
+}>;
 export declare const chartTool: DynamicStructuredTool<z.ZodObject<{
     data: z.ZodArray<z.ZodObject<{
         label: z.ZodString;
@@ -36,5 +44,15 @@ export declare const chartTool: DynamicStructuredTool<z.ZodObject<{
         value: number;
         label: string;
     }[];
-}
+}>, {
+    data: {
+        value: number;
+        label: string;
+    }[];
+}, {
+    data: {
+        value: number;
+        label: string;
+    }[];
+}>;
 export declare const tavilyTool: TavilySearchResults;
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@librechat/agents",
-  "version": "2.4.22",
+  "version": "2.4.30",
   "main": "./dist/cjs/main.cjs",
   "module": "./dist/esm/main.mjs",
   "types": "./dist/types/index.d.ts",
@@ -71,17 +71,17 @@
     "format": "prettier --write ."
   },
   "dependencies": {
-    "@langchain/anthropic": "^0.3.
-    "@langchain/aws": "^0.1.
-    "@langchain/community": "^0.3.
-    "@langchain/core": "^0.3.
+    "@langchain/anthropic": "^0.3.20",
+    "@langchain/aws": "^0.1.9",
+    "@langchain/community": "^0.3.42",
+    "@langchain/core": "^0.3.51",
     "@langchain/deepseek": "^0.0.1",
-    "@langchain/google-genai": "^0.2.
-    "@langchain/google-vertexai": "^0.2.
-    "@langchain/langgraph": "^0.2.
+    "@langchain/google-genai": "^0.2.5",
+    "@langchain/google-vertexai": "^0.2.5",
+    "@langchain/langgraph": "^0.2.67",
     "@langchain/mistralai": "^0.2.0",
     "@langchain/ollama": "^0.2.0",
-    "@langchain/openai": "^0.5.
+    "@langchain/openai": "^0.5.10",
     "@langchain/xai": "^0.0.2",
     "dotenv": "^16.4.7",
     "https-proxy-agent": "^7.0.6",

package/src/llm/anthropic/index.ts
CHANGED

@@ -4,6 +4,7 @@ import { ChatGenerationChunk } from '@langchain/core/outputs';
 import type { BaseChatModelParams } from '@langchain/core/language_models/chat_models';
 import type {
   BaseMessage,
+  UsageMetadata,
   MessageContentComplex,
 } from '@langchain/core/messages';
 import type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';
@@ -132,9 +133,9 @@ export class CustomAnthropic extends ChatAnthropicMessages {
 
   /**
    * Get stream usage as returned by this client's API response.
-   * @returns
+   * @returns The stream usage object.
    */
-  getStreamUsage():
+  getStreamUsage(): UsageMetadata | undefined {
     if (this.emitted_usage === true) {
       return;
     }
@@ -147,7 +148,7 @@ export class CustomAnthropic extends ChatAnthropicMessages {
     if (!outputUsage) {
       return;
     }
-    const totalUsage:
+    const totalUsage: UsageMetadata = {
       input_tokens: inputUsage?.input_tokens ?? 0,
       output_tokens: outputUsage.output_tokens ?? 0,
       total_tokens:
@@ -184,7 +185,7 @@ export class CustomAnthropic extends ChatAnthropicMessages {
     token?: string;
     chunk: AIMessageChunk;
     shouldStreamUsage: boolean;
-    usageMetadata?:
+    usageMetadata?: UsageMetadata;
   }): ChatGenerationChunk {
     const usage_metadata = shouldStreamUsage
       ? (usageMetadata ?? chunk.usage_metadata)
@@ -245,7 +246,7 @@ export class CustomAnthropic extends ChatAnthropicMessages {
       this.message_delta = data as AnthropicMessageDeltaEvent;
     }
 
-    let usageMetadata:
+    let usageMetadata: UsageMetadata | undefined;
     if (this.tools_in_params !== true && this.emitted_usage !== true) {
       usageMetadata = this.getStreamUsage();
     }
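The hunks above show `getStreamUsage()` assembling a typed `UsageMetadata` object from the usage reported by Anthropic's `message_start` and `message_delta` stream events. A minimal sketch of that aggregation; the hunk only shows the start of the `total_tokens` expression, so the simple input-plus-output total below may omit details the real implementation adds:

```typescript
import type { UsageMetadata } from '@langchain/core/messages';
import type Anthropic from '@anthropic-ai/sdk';

// Combine input-side usage (from `message_start`) and output-side usage
// (from `message_delta`) into LangChain's UsageMetadata shape.
function toUsageMetadata(
  inputUsage: Anthropic.Usage | undefined,
  outputUsage: { output_tokens?: number } // mirrors the delta event's usage field
): UsageMetadata {
  const input_tokens = inputUsage?.input_tokens ?? 0;
  const output_tokens = outputUsage.output_tokens ?? 0;
  return {
    input_tokens,
    output_tokens,
    total_tokens: input_tokens + output_tokens,
  };
}
```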