@librechat/agents 2.2.8 → 2.2.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
- {"version":3,"file":"Graph.mjs","sources":["../../../src/graphs/Graph.ts"],"sourcesContent":["// src/graphs/Graph.ts\nimport { nanoid } from 'nanoid';\nimport { concat } from '@langchain/core/utils/stream';\nimport { ToolNode } from '@langchain/langgraph/prebuilt';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { START, END, StateGraph } from '@langchain/langgraph';\nimport { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';\nimport { Runnable, RunnableConfig } from '@langchain/core/runnables';\nimport { dispatchCustomEvent } from '@langchain/core/callbacks/dispatch';\nimport { AIMessageChunk, ToolMessage, SystemMessage } from '@langchain/core/messages';\nimport type { BaseMessage, BaseMessageFields, UsageMetadata } from '@langchain/core/messages';\nimport type * as t from '@/types';\nimport { Providers, GraphEvents, GraphNodeKeys, StepTypes, Callback, ContentTypes } from '@/common';\nimport type { ToolCall } from '@langchain/core/messages/tool';\nimport { getChatModelClass, manualToolStreamProviders } from '@/llm/providers';\nimport { ToolNode as CustomToolNode, toolsCondition } from '@/tools/ToolNode';\nimport {\n createPruneMessages,\n modifyDeltaProperties,\n formatArtifactPayload,\n convertMessagesToContent,\n formatAnthropicArtifactContent,\n} from '@/messages';\nimport { resetIfNotEmpty, isOpenAILike, isGoogleLike, joinKeys, sleep } from '@/utils';\nimport { createFakeStreamingLLM } from '@/llm/fake';\nimport { HandlerRegistry } from '@/events';\n\nconst { AGENT, TOOLS } = GraphNodeKeys;\nexport type GraphNode = GraphNodeKeys | typeof START;\nexport type ClientCallback<T extends unknown[]> = (graph: StandardGraph, ...args: T) => void;\nexport type ClientCallbacks = {\n [Callback.TOOL_ERROR]?: ClientCallback<[Error, string]>;\n [Callback.TOOL_START]?: ClientCallback<unknown[]>;\n [Callback.TOOL_END]?: ClientCallback<unknown[]>;\n}\nexport type SystemCallbacks = {\n [K in keyof ClientCallbacks]: ClientCallbacks[K] extends ClientCallback<infer Args>\n ? 
(...args: Args) => void\n : never;\n};\n\nexport abstract class Graph<\n T extends t.BaseGraphState = t.BaseGraphState,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n TNodeName extends string = string,\n> {\n abstract resetValues(): void;\n abstract createGraphState(): t.GraphStateChannels<T>;\n abstract initializeTools(): CustomToolNode<T> | ToolNode<T>;\n abstract initializeModel(): Runnable;\n abstract getRunMessages(): BaseMessage[] | undefined;\n abstract getContentParts(): t.MessageContentComplex[] | undefined;\n abstract generateStepId(stepKey: string): [string, number];\n abstract getKeyList(metadata: Record<string, unknown> | undefined): (string | number | undefined)[];\n abstract getStepKey(metadata: Record<string, unknown> | undefined): string;\n abstract checkKeyList(keyList: (string | number | undefined)[]): boolean;\n abstract getStepIdByKey(stepKey: string, index?: number): string\n abstract getRunStep(stepId: string): t.RunStep | undefined;\n abstract dispatchRunStep(stepKey: string, stepDetails: t.StepDetails): string;\n abstract dispatchRunStepDelta(id: string, delta: t.ToolCallDelta): void;\n abstract dispatchMessageDelta(id: string, delta: t.MessageDelta): void;\n abstract dispatchReasoningDelta(stepId: string, delta: t.ReasoningDelta): void;\n abstract handleToolCallCompleted(data: t.ToolEndData, metadata?: Record<string, unknown>): void;\n\n abstract createCallModel(): (state: T, config?: RunnableConfig) => Promise<Partial<T>>;\n abstract createWorkflow(): t.CompiledWorkflow<T>;\n lastToken?: string;\n tokenTypeSwitch?: 'reasoning' | 'content';\n reasoningKey: 'reasoning_content' | 'reasoning' = 'reasoning_content';\n currentTokenType: ContentTypes.TEXT | ContentTypes.THINK = ContentTypes.TEXT;\n messageStepHasToolCalls: Map<string, boolean> = new Map();\n messageIdsByStepKey: Map<string, string> = new Map();\n prelimMessageIdsByStepKey: Map<string, string> = new Map();\n config: RunnableConfig | undefined;\n contentData: t.RunStep[] = [];\n stepKeyIds: Map<string, string[]> = new Map<string, string[]>();\n contentIndexMap: Map<string, number> = new Map();\n toolCallStepIds: Map<string, string> = new Map();\n currentUsage: Partial<UsageMetadata> | undefined;\n indexTokenCountMap: Record<string, number> = {};\n maxContextTokens: number | undefined;\n pruneMessages?: ReturnType<typeof createPruneMessages>;\n /** The amount of time that should pass before another consecutive API call */\n streamBuffer: number | undefined;\n tokenCounter?: t.TokenCounter;\n signal?: AbortSignal;\n}\n\nexport class StandardGraph extends Graph<\n t.BaseGraphState,\n GraphNode\n> {\n private graphState: t.GraphStateChannels<t.BaseGraphState>;\n clientOptions: t.ClientOptions;\n boundModel: Runnable;\n /** The last recorded timestamp that a stream API call was invoked */\n lastStreamCall: number | undefined;\n handlerRegistry: HandlerRegistry | undefined;\n systemMessage: SystemMessage | undefined;\n messages: BaseMessage[] = [];\n runId: string | undefined;\n tools?: t.GenericTool[];\n toolMap?: t.ToolMap;\n startIndex: number = 0;\n provider: Providers;\n toolEnd: boolean;\n signal: AbortSignal | undefined;\n\n constructor({\n runId,\n tools,\n signal,\n toolMap,\n provider,\n streamBuffer,\n instructions,\n reasoningKey,\n clientOptions,\n toolEnd = false,\n additional_instructions = '',\n } : t.StandardGraphInput) {\n super();\n this.runId = runId;\n this.tools = tools;\n this.signal = signal;\n this.toolEnd = toolEnd;\n this.toolMap = toolMap;\n this.provider = 
provider;\n this.streamBuffer = streamBuffer;\n this.clientOptions = clientOptions;\n this.graphState = this.createGraphState();\n this.boundModel = this.initializeModel();\n if (reasoningKey) {\n this.reasoningKey = reasoningKey;\n }\n\n let finalInstructions: string | BaseMessageFields = instructions ?? '';\n if (additional_instructions) {\n finalInstructions = finalInstructions ? `${finalInstructions}\\n\\n${additional_instructions}` : additional_instructions;\n }\n\n if (finalInstructions && provider === Providers.ANTHROPIC && (clientOptions as t.AnthropicClientOptions)?.clientOptions?.defaultHeaders?.['anthropic-beta']?.includes('prompt-caching')) {\n finalInstructions = {\n content: [\n {\n type: \"text\",\n text: instructions,\n cache_control: { type: \"ephemeral\" },\n },\n ],\n };\n }\n\n if (finalInstructions) {\n this.systemMessage = new SystemMessage(finalInstructions);\n }\n }\n\n /* Init */\n\n resetValues(keepContent?: boolean): void {\n this.messages = [];\n this.config = resetIfNotEmpty(this.config, undefined);\n if (keepContent !== true) {\n this.contentData = resetIfNotEmpty(this.contentData, []);\n this.contentIndexMap = resetIfNotEmpty(this.contentIndexMap, new Map());\n }\n this.stepKeyIds = resetIfNotEmpty(this.stepKeyIds, new Map());\n this.toolCallStepIds = resetIfNotEmpty(this.toolCallStepIds, new Map());\n this.messageIdsByStepKey = resetIfNotEmpty(this.messageIdsByStepKey, new Map());\n this.messageStepHasToolCalls = resetIfNotEmpty(this.prelimMessageIdsByStepKey, new Map());\n this.prelimMessageIdsByStepKey = resetIfNotEmpty(this.prelimMessageIdsByStepKey, new Map());\n this.currentTokenType = resetIfNotEmpty(this.currentTokenType, ContentTypes.TEXT);\n this.lastToken = resetIfNotEmpty(this.lastToken, undefined);\n this.tokenTypeSwitch = resetIfNotEmpty(this.tokenTypeSwitch, undefined);\n this.indexTokenCountMap = resetIfNotEmpty(this.indexTokenCountMap, {});\n this.currentUsage = resetIfNotEmpty(this.currentUsage, undefined);\n this.tokenCounter = resetIfNotEmpty(this.tokenCounter, undefined);\n this.maxContextTokens = resetIfNotEmpty(this.maxContextTokens, undefined);\n }\n\n /* Run Step Processing */\n\n getRunStep(stepId: string): t.RunStep | undefined {\n const index = this.contentIndexMap.get(stepId);\n if (index !== undefined) {\n return this.contentData[index];\n }\n return undefined;\n }\n\n getStepKey(metadata: Record<string, unknown> | undefined): string {\n if (!metadata) return '';\n\n const keyList = this.getKeyList(metadata);\n if (this.checkKeyList(keyList)) {\n throw new Error('Missing metadata');\n }\n\n return joinKeys(keyList);\n }\n\n getStepIdByKey(stepKey: string, index?: number): string {\n const stepIds = this.stepKeyIds.get(stepKey);\n if (!stepIds) {\n throw new Error(`No step IDs found for stepKey ${stepKey}`);\n }\n\n if (index === undefined) {\n return stepIds[stepIds.length - 1];\n }\n\n return stepIds[index];\n }\n\n generateStepId(stepKey: string): [string, number] {\n const stepIds = this.stepKeyIds.get(stepKey);\n let newStepId: string | undefined;\n let stepIndex = 0;\n if (stepIds) {\n stepIndex = stepIds.length;\n newStepId = `step_${nanoid()}`;\n stepIds.push(newStepId);\n this.stepKeyIds.set(stepKey, stepIds);\n } else {\n newStepId = `step_${nanoid()}`;\n this.stepKeyIds.set(stepKey, [newStepId]);\n }\n\n return [newStepId, stepIndex];\n }\n\n getKeyList(metadata: Record<string, unknown> | undefined): (string | number | undefined)[] {\n if (!metadata) return [];\n\n const keyList = [\n metadata.run_id as string,\n 
metadata.thread_id as string,\n metadata.langgraph_node as string,\n metadata.langgraph_step as number,\n metadata.checkpoint_ns as string,\n ];\n if (this.currentTokenType === ContentTypes.THINK) {\n keyList.push('reasoning');\n }\n\n return keyList;\n }\n\n checkKeyList(keyList: (string | number | undefined)[]): boolean {\n return keyList.some((key) => key === undefined);\n }\n\n /* Misc.*/\n\n getRunMessages(): BaseMessage[] | undefined {\n return this.messages.slice(this.startIndex);\n }\n\n getContentParts(): t.MessageContentComplex[] | undefined {\n return convertMessagesToContent(this.messages.slice(this.startIndex));\n }\n\n /* Graph */\n\n createGraphState(): t.GraphStateChannels<t.BaseGraphState> {\n return {\n messages: {\n value: (x: BaseMessage[], y: BaseMessage[]): BaseMessage[] => {\n if (!x.length) {\n if (this.systemMessage) {\n x.push(this.systemMessage);\n }\n\n this.startIndex = x.length + y.length;\n }\n const current = x.concat(y);\n this.messages = current;\n return current;\n },\n default: () => [],\n },\n };\n }\n\n initializeTools(): CustomToolNode<t.BaseGraphState> | ToolNode<t.BaseGraphState> {\n // return new ToolNode<t.BaseGraphState>(this.tools);\n return new CustomToolNode<t.BaseGraphState>({\n tools: this.tools || [],\n toolMap: this.toolMap,\n toolCallStepIds: this.toolCallStepIds,\n errorHandler: this.handleToolCallError.bind(this),\n });\n }\n\n initializeModel(): Runnable {\n const ChatModelClass = getChatModelClass(this.provider);\n const model = new ChatModelClass(this.clientOptions);\n\n if (isOpenAILike(this.provider) && (model instanceof ChatOpenAI || model instanceof AzureChatOpenAI)) {\n model.temperature = (this.clientOptions as t.OpenAIClientOptions).temperature as number;\n model.topP = (this.clientOptions as t.OpenAIClientOptions).topP as number;\n model.frequencyPenalty = (this.clientOptions as t.OpenAIClientOptions).frequencyPenalty as number;\n model.presencePenalty = (this.clientOptions as t.OpenAIClientOptions).presencePenalty as number;\n model.n = (this.clientOptions as t.OpenAIClientOptions).n as number;\n } else if (this.provider === Providers.VERTEXAI && model instanceof ChatVertexAI) {\n model.temperature = (this.clientOptions as t.VertexAIClientOptions).temperature as number;\n model.topP = (this.clientOptions as t.VertexAIClientOptions).topP as number;\n model.topK = (this.clientOptions as t.VertexAIClientOptions).topK as number;\n model.topLogprobs = (this.clientOptions as t.VertexAIClientOptions).topLogprobs as number;\n model.frequencyPenalty = (this.clientOptions as t.VertexAIClientOptions).frequencyPenalty as number;\n model.presencePenalty = (this.clientOptions as t.VertexAIClientOptions).presencePenalty as number;\n model.maxOutputTokens = (this.clientOptions as t.VertexAIClientOptions).maxOutputTokens as number;\n }\n\n if (!this.tools || this.tools.length === 0) {\n return model as unknown as Runnable;\n }\n\n return (model as t.ModelWithTools).bindTools(this.tools);\n }\n overrideTestModel(responses: string[], sleep?: number, toolCalls?: ToolCall[]): void {\n this.boundModel = createFakeStreamingLLM({\n responses,\n sleep,\n toolCalls,\n });\n }\n\n getNewModel({\n clientOptions = {},\n omitOriginalOptions,\n } : {\n clientOptions?: t.ClientOptions;\n omitOriginalOptions?: string[]\n }): t.ChatModelInstance {\n const ChatModelClass = getChatModelClass(this.provider);\n const _options = omitOriginalOptions ? 
Object.fromEntries(\n Object.entries(this.clientOptions).filter(([key]) => !omitOriginalOptions.includes(key)),\n ) : this.clientOptions;\n const options = Object.assign(_options, clientOptions);\n return new ChatModelClass(options);\n }\n\n storeUsageMetadata(finalMessage?: BaseMessage): void {\n if (finalMessage && 'usage_metadata' in finalMessage && finalMessage.usage_metadata) {\n this.currentUsage = finalMessage.usage_metadata as Partial<UsageMetadata>;\n }\n }\n\n createCallModel() {\n return async (state: t.BaseGraphState, config?: RunnableConfig): Promise<Partial<t.BaseGraphState>> => {\n const { provider = '' } = (config?.configurable as t.GraphConfig | undefined) ?? {} ;\n if (!config || !provider) {\n throw new Error(`No ${config ? 'provider' : 'config'} provided`);\n }\n if (!config.signal) {\n config.signal = this.signal;\n }\n this.config = config;\n const { messages } = state;\n\n let messagesToUse = messages;\n if (!this.pruneMessages && this.tokenCounter && this.maxContextTokens && this.indexTokenCountMap[0] != null) {\n this.pruneMessages = createPruneMessages({\n indexTokenCountMap: this.indexTokenCountMap,\n maxTokens: this.maxContextTokens,\n tokenCounter: this.tokenCounter,\n startIndex: this.startIndex,\n });\n }\n if (this.pruneMessages) {\n const { context, indexTokenCountMap } = this.pruneMessages({\n messages,\n usageMetadata: this.currentUsage,\n startOnMessageType: 'human',\n });\n this.indexTokenCountMap = indexTokenCountMap;\n messagesToUse = context;\n }\n\n const finalMessages = messagesToUse;\n const lastMessageX = finalMessages.length >= 2 ? finalMessages[finalMessages.length - 2] : null;\n const lastMessageY = finalMessages.length >= 1 ? finalMessages[finalMessages.length - 1] : null;\n\n if (\n provider === Providers.BEDROCK\n && lastMessageX instanceof AIMessageChunk\n && lastMessageY instanceof ToolMessage\n && typeof lastMessageX.content === 'string'\n ) {\n finalMessages[finalMessages.length - 2].content = '';\n }\n\n const isLatestToolMessage = lastMessageY instanceof ToolMessage;\n\n if (isLatestToolMessage && provider === Providers.ANTHROPIC) {\n formatAnthropicArtifactContent(finalMessages);\n } else if (\n isLatestToolMessage &&\n (isOpenAILike(provider) || isGoogleLike(provider))\n ) {\n formatArtifactPayload(finalMessages);\n }\n\n if (this.lastStreamCall != null && this.streamBuffer != null) {\n const timeSinceLastCall = Date.now() - this.lastStreamCall;\n if (timeSinceLastCall < this.streamBuffer) {\n const timeToWait = Math.ceil((this.streamBuffer - timeSinceLastCall) / 1000) * 1000;\n await sleep(timeToWait);\n }\n }\n\n this.lastStreamCall = Date.now();\n\n let result: Partial<t.BaseGraphState>;\n if ((this.tools?.length ?? 0) > 0 && manualToolStreamProviders.has(provider)) {\n const stream = await this.boundModel.stream(finalMessages, config);\n let finalChunk: AIMessageChunk | undefined;\n for await (const chunk of stream) {\n dispatchCustomEvent(GraphEvents.CHAT_MODEL_STREAM, { chunk }, config);\n if (!finalChunk) {\n finalChunk = chunk;\n } else {\n finalChunk = concat(finalChunk, chunk);\n }\n }\n\n finalChunk = modifyDeltaProperties(this.provider, finalChunk);\n result = { messages: [finalChunk as AIMessageChunk] };\n } else {\n const finalMessage = (await this.boundModel.invoke(finalMessages, config)) as AIMessageChunk;\n if ((finalMessage.tool_calls?.length ?? 
0) > 0) {\n finalMessage.tool_calls = finalMessage.tool_calls?.filter((tool_call) => {\n if (!tool_call.name) {\n return false;\n }\n return true;\n });\n }\n result = { messages: [finalMessage] };\n }\n \n this.storeUsageMetadata(result?.messages?.[0]);\n return result;\n };\n }\n\n createWorkflow(): t.CompiledWorkflow<t.BaseGraphState> {\n const routeMessage = (state: t.BaseGraphState, config?: RunnableConfig): string => {\n this.config = config;\n // const lastMessage = state.messages[state.messages.length - 1] as AIMessage;\n // if (!lastMessage?.tool_calls?.length) {\n // return END;\n // }\n // return TOOLS;\n return toolsCondition(state);\n };\n\n const workflow = new StateGraph<t.BaseGraphState>({\n channels: this.graphState,\n })\n .addNode(AGENT, this.createCallModel())\n .addNode(TOOLS, this.initializeTools())\n .addEdge(START, AGENT)\n .addConditionalEdges(AGENT, routeMessage)\n .addEdge(TOOLS, this.toolEnd ? END : AGENT);\n\n return workflow.compile();\n }\n\n /* Dispatchers */\n\n /**\n * Dispatches a run step to the client, returns the step ID\n */\n dispatchRunStep(stepKey: string, stepDetails: t.StepDetails): string {\n if (!this.config) {\n throw new Error('No config provided');\n }\n\n const [stepId, stepIndex] = this.generateStepId(stepKey);\n if (stepDetails.type === StepTypes.TOOL_CALLS && stepDetails.tool_calls) {\n for (const tool_call of stepDetails.tool_calls) {\n const toolCallId = tool_call.id ?? '';\n if (!toolCallId || this.toolCallStepIds.has(toolCallId)) {\n continue;\n }\n this.toolCallStepIds.set(toolCallId, stepId);\n }\n }\n\n const runStep: t.RunStep = {\n stepIndex,\n id: stepId,\n type: stepDetails.type,\n index: this.contentData.length,\n stepDetails,\n usage: null,\n };\n\n const runId = this.runId ?? '';\n if (runId) {\n runStep.runId = runId;\n }\n\n this.contentData.push(runStep);\n this.contentIndexMap.set(stepId, runStep.index);\n dispatchCustomEvent(GraphEvents.ON_RUN_STEP, runStep, this.config);\n return stepId;\n }\n\n handleToolCallCompleted(data: t.ToolEndData, metadata?: Record<string, unknown>): void {\n if (!this.config) {\n throw new Error('No config provided');\n }\n\n if (!data.output) {\n return;\n }\n\n const { input, output } = data;\n const { tool_call_id } = output;\n const stepId = this.toolCallStepIds.get(tool_call_id) ?? '';\n if (!stepId) {\n throw new Error(`No stepId found for tool_call_id ${tool_call_id}`);\n }\n\n const runStep = this.getRunStep(stepId);\n if (!runStep) {\n throw new Error(`No run step found for stepId ${stepId}`);\n }\n\n const args = typeof input === 'string' ? input : input.input;\n const tool_call = {\n args: typeof args === 'string' ? args : JSON.stringify(args),\n name: output.name ?? '',\n id: output.tool_call_id,\n output: typeof output.content === 'string'\n ? output.content\n : JSON.stringify(output.content),\n progress: 1,\n };\n\n this.handlerRegistry?.getHandler(GraphEvents.ON_RUN_STEP_COMPLETED)?.handle(\n GraphEvents.ON_RUN_STEP_COMPLETED,\n { result: {\n id: stepId,\n index: runStep.index,\n type: 'tool_call',\n tool_call\n } as t.ToolCompleteEvent,\n },\n metadata,\n this,\n );\n }\n handleToolCallError(data: t.ToolErrorData, metadata?: Record<string, unknown>): void {\n if (!this.config) {\n throw new Error('No config provided');\n }\n\n if (!data.id) {\n console.warn('No Tool ID provided for Tool Error');\n return;\n }\n\n const stepId = this.toolCallStepIds.get(data.id) ?? 
'';\n if (!stepId) {\n throw new Error(`No stepId found for tool_call_id ${data.id}`);\n }\n\n const { name, input: args, error } = data;\n\n const runStep = this.getRunStep(stepId);\n if (!runStep) {\n throw new Error(`No run step found for stepId ${stepId}`);\n }\n\n const tool_call: t.ProcessedToolCall = {\n id: data.id,\n name: name ?? '',\n args: typeof args === 'string' ? args : JSON.stringify(args),\n output: `Error processing tool${error?.message ? `: ${error.message}` : ''}`,\n progress: 1,\n };\n\n this.handlerRegistry?.getHandler(GraphEvents.ON_RUN_STEP_COMPLETED)?.handle(\n GraphEvents.ON_RUN_STEP_COMPLETED,\n { result: {\n id: stepId,\n index: runStep.index,\n type: 'tool_call',\n tool_call\n } as t.ToolCompleteEvent,\n },\n metadata,\n this,\n );\n }\n\n dispatchRunStepDelta(id: string, delta: t.ToolCallDelta): void {\n if (!this.config) {\n throw new Error('No config provided');\n } else if (!id) {\n throw new Error('No step ID found');\n }\n const runStepDelta: t.RunStepDeltaEvent = {\n id,\n delta,\n };\n dispatchCustomEvent(GraphEvents.ON_RUN_STEP_DELTA, runStepDelta, this.config);\n }\n\n dispatchMessageDelta(id: string, delta: t.MessageDelta): void {\n if (!this.config) {\n throw new Error('No config provided');\n }\n const messageDelta: t.MessageDeltaEvent = {\n id,\n delta,\n };\n dispatchCustomEvent(GraphEvents.ON_MESSAGE_DELTA, messageDelta, this.config);\n }\n\n dispatchReasoningDelta = (stepId: string, delta: t.ReasoningDelta): void => {\n if (!this.config) {\n throw new Error('No config provided');\n }\n const reasoningDelta: t.ReasoningDeltaEvent = {\n id: stepId,\n delta,\n };\n dispatchCustomEvent(GraphEvents.ON_REASONING_DELTA, reasoningDelta, this.config);\n };\n}\n"],"names":["CustomToolNode"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AA2BA,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,aAAa;MAchB,KAAK,CAAA;AAyBzB,IAAA,SAAS;AACT,IAAA,eAAe;IACf,YAAY,GAAsC,mBAAmB;AACrE,IAAA,gBAAgB,GAA2C,YAAY,CAAC,IAAI;AAC5E,IAAA,uBAAuB,GAAyB,IAAI,GAAG,EAAE;AACzD,IAAA,mBAAmB,GAAwB,IAAI,GAAG,EAAE;AACpD,IAAA,yBAAyB,GAAwB,IAAI,GAAG,EAAE;AAC1D,IAAA,MAAM;IACN,WAAW,GAAgB,EAAE;AAC7B,IAAA,UAAU,GAA0B,IAAI,GAAG,EAAoB;AAC/D,IAAA,eAAe,GAAwB,IAAI,GAAG,EAAE;AAChD,IAAA,eAAe,GAAwB,IAAI,GAAG,EAAE;AAChD,IAAA,YAAY;IACZ,kBAAkB,GAA2B,EAAE;AAC/C,IAAA,gBAAgB;AAChB,IAAA,aAAa;;AAEb,IAAA,YAAY;AACZ,IAAA,YAAY;AACZ,IAAA,MAAM;AACP;AAEK,MAAO,aAAc,SAAQ,KAGlC,CAAA;AACS,IAAA,UAAU;AAClB,IAAA,aAAa;AACb,IAAA,UAAU;;AAEV,IAAA,cAAc;AACd,IAAA,eAAe;AACf,IAAA,aAAa;IACb,QAAQ,GAAkB,EAAE;AAC5B,IAAA,KAAK;AACL,IAAA,KAAK;AACL,IAAA,OAAO;IACP,UAAU,GAAW,CAAC;AACtB,IAAA,QAAQ;AACR,IAAA,OAAO;AACP,IAAA,MAAM;IAEN,WAAY,CAAA,EACV,KAAK,EACL,KAAK,EACL,MAAM,EACN,OAAO,EACP,QAAQ,EACR,YAAY,EACZ,YAAY,EACZ,YAAY,EACZ,aAAa,EACb,OAAO,GAAG,KAAK,EACf,uBAAuB,GAAG,EAAE,GACN,EAAA;AACtB,QAAA,KAAK,EAAE;AACP,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK;AAClB,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK;AAClB,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM;AACpB,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO;AACtB,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO;AACtB,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ;AACxB,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY;AAChC,QAAA,IAAI,CAAC,aAAa,GAAG,aAAa;AAClC,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,gBAAgB,EAAE;AACzC,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,eAAe,EAAE;QACxC,IAAI,YAAY,EAAE;AAChB,YAAA,IAAI,CAAC,YAAY,GAAG,YAAY;;AAGlC,QAAA,IAAI,iBAAiB,GAA+B,YAAY,IAAI,EAAE;QACtE,IAAI,uBAAuB,EAAE;AAC3B,YAAA,iBAAiB,GAAG,iBAAiB,GAAG,CAAG,EAAA,iBAAiB,CAAO,IAAA,EAAA,uBAAuB,CAAE,CAAA,GAAG,uBAAuB;;QAGxH,IAAI,iBAAiB,IAAI,QAAQ,KAAK,SAAS,CAAC,SAAS,IAAK,aAA0C,EAAE,aAAa,EAAE,cAAc,GAAG,gBAAgB,CAAC,EAAE,QAAQ,CAAC,gBAAgB,CAAC,EAAE;AACvL,YAAA,iBAAiB,GAAG;AAClB,gBAAA,OAAO,EAA
E;AACP,oBAAA;AACE,wBAAA,IAAI,EAAE,MAAM;AACZ,wBAAA,IAAI,EAAE,YAAY;AAClB,wBAAA,aAAa,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE;AACrC,qBAAA;AACF,iBAAA;aACF;;QAGH,IAAI,iBAAiB,EAAE;YACrB,IAAI,CAAC,aAAa,GAAG,IAAI,aAAa,CAAC,iBAAiB,CAAC;;;;AAM7D,IAAA,WAAW,CAAC,WAAqB,EAAA;AAC/B,QAAA,IAAI,CAAC,QAAQ,GAAG,EAAE;QAClB,IAAI,CAAC,MAAM,GAAG,eAAe,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC;AACrD,QAAA,IAAI,WAAW,KAAK,IAAI,EAAE;YACxB,IAAI,CAAC,WAAW,GAAG,eAAe,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,CAAC;AACxD,YAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,GAAG,EAAE,CAAC;;AAEzE,QAAA,IAAI,CAAC,UAAU,GAAG,eAAe,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,GAAG,EAAE,CAAC;AAC7D,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,GAAG,EAAE,CAAC;AACvE,QAAA,IAAI,CAAC,mBAAmB,GAAG,eAAe,CAAC,IAAI,CAAC,mBAAmB,EAAE,IAAI,GAAG,EAAE,CAAC;AAC/E,QAAA,IAAI,CAAC,uBAAuB,GAAG,eAAe,CAAC,IAAI,CAAC,yBAAyB,EAAE,IAAI,GAAG,EAAE,CAAC;AACzF,QAAA,IAAI,CAAC,yBAAyB,GAAG,eAAe,CAAC,IAAI,CAAC,yBAAyB,EAAE,IAAI,GAAG,EAAE,CAAC;AAC3F,QAAA,IAAI,CAAC,gBAAgB,GAAG,eAAe,CAAC,IAAI,CAAC,gBAAgB,EAAE,YAAY,CAAC,IAAI,CAAC;QACjF,IAAI,CAAC,SAAS,GAAG,eAAe,CAAC,IAAI,CAAC,SAAS,EAAE,SAAS,CAAC;QAC3D,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC,IAAI,CAAC,eAAe,EAAE,SAAS,CAAC;QACvE,IAAI,CAAC,kBAAkB,GAAG,eAAe,CAAC,IAAI,CAAC,kBAAkB,EAAE,EAAE,CAAC;QACtE,IAAI,CAAC,YAAY,GAAG,eAAe,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,CAAC;QACjE,IAAI,CAAC,YAAY,GAAG,eAAe,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,CAAC;QACjE,IAAI,CAAC,gBAAgB,GAAG,eAAe,CAAC,IAAI,CAAC,gBAAgB,EAAE,SAAS,CAAC;;;AAK3E,IAAA,UAAU,CAAC,MAAc,EAAA;QACvB,MAAM,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,MAAM,CAAC;AAC9C,QAAA,IAAI,KAAK,KAAK,SAAS,EAAE;AACvB,YAAA,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC;;AAEhC,QAAA,OAAO,SAAS;;AAGlB,IAAA,UAAU,CAAC,QAA6C,EAAA;AACtD,QAAA,IAAI,CAAC,QAAQ;AAAE,YAAA,OAAO,EAAE;QAExB,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC;AACzC,QAAA,IAAI,IAAI,CAAC,YAAY,CAAC,OAAO,CAAC,EAAE;AAC9B,YAAA,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC;;AAGrC,QAAA,OAAO,QAAQ,CAAC,OAAO,CAAC;;IAG1B,cAAc,CAAC,OAAe,EAAE,KAAc,EAAA;QAC5C,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,CAAC;QAC5C,IAAI,CAAC,OAAO,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,OAAO,CAAA,CAAE,CAAC;;AAG7D,QAAA,IAAI,KAAK,KAAK,SAAS,EAAE;YACvB,OAAO,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC;;AAGpC,QAAA,OAAO,OAAO,CAAC,KAAK,CAAC;;AAGvB,IAAA,cAAc,CAAC,OAAe,EAAA;QAC5B,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,CAAC;AAC5C,QAAA,IAAI,SAA6B;QACjC,IAAI,SAAS,GAAG,CAAC;QACjB,IAAI,OAAO,EAAE;AACX,YAAA,SAAS,GAAG,OAAO,CAAC,MAAM;AAC1B,YAAA,SAAS,GAAG,CAAA,KAAA,EAAQ,MAAM,EAAE,EAAE;AAC9B,YAAA,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC;YACvB,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,EAAE,OAAO,CAAC;;aAChC;AACL,YAAA,SAAS,GAAG,CAAA,KAAA,EAAQ,MAAM,EAAE,EAAE;YAC9B,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,EAAE,CAAC,SAAS,CAAC,CAAC;;AAG3C,QAAA,OAAO,CAAC,SAAS,EAAE,SAAS,CAAC;;AAG/B,IAAA,UAAU,CAAC,QAA6C,EAAA;AACtD,QAAA,IAAI,CAAC,QAAQ;AAAE,YAAA,OAAO,EAAE;AAExB,QAAA,MAAM,OAAO,GAAG;AACd,YAAA,QAAQ,CAAC,MAAgB;AACzB,YAAA,QAAQ,CAAC,SAAmB;AAC5B,YAAA,QAAQ,CAAC,cAAwB;AACjC,YAAA,QAAQ,CAAC,cAAwB;AACjC,YAAA,QAAQ,CAAC,aAAuB;SACjC;QACD,IAAI,IAAI,CAAC,gBAAgB,KAAK,YAAY,CAAC,KAAK,EAAE;AAChD,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC;;AAG3B,QAAA,OAAO,OAAO;;AAGhB,IAAA,YAAY,CAAC,OAAwC,EAAA;AACnD,QAAA,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,GAAG,KAAK,GAAG,KAAK,SAAS,CAAC;;;IAKjD,cAAc,GAAA;QACZ,OAAO,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC;;IAG7C,eAAe,GAAA;AACb,QAAA,OAAO,wBAAwB,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;;;IAKvE,gBAAgB,GAAA;QACd,OAAO;AACL,YAAA,QAAQ,EAAE;AACR,gBAAA,KAAK,EAAE,CAAC,CAAgB,EAAE,CAAgB,KAAmB;AAC3D,oBAAA,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;AACb,wBAAA,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,4BAAA,CAAC,CA
AC,IAAI,CAAC,IAAI,CAAC,aAAa,CAAC;;wBAG5B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM;;oBAEvC,MAAM,OAAO,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAC3B,oBAAA,IAAI,CAAC,QAAQ,GAAG,OAAO;AACvB,oBAAA,OAAO,OAAO;iBACf;AACD,gBAAA,OAAO,EAAE,MAAM,EAAE;AAClB,aAAA;SACF;;IAGH,eAAe,GAAA;;QAEb,OAAO,IAAIA,QAAc,CAAmB;AAC1C,YAAA,KAAK,EAAE,IAAI,CAAC,KAAK,IAAI,EAAE;YACvB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,eAAe,EAAE,IAAI,CAAC,eAAe;YACrC,YAAY,EAAE,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,IAAI,CAAC;AAClD,SAAA,CAAC;;IAGJ,eAAe,GAAA;QACb,MAAM,cAAc,GAAG,iBAAiB,CAAC,IAAI,CAAC,QAAQ,CAAC;QACvD,MAAM,KAAK,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,aAAa,CAAC;AAEpD,QAAA,IAAI,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,YAAY,UAAU,IAAI,KAAK,YAAY,eAAe,CAAC,EAAE;YACpG,KAAK,CAAC,WAAW,GAAI,IAAI,CAAC,aAAuC,CAAC,WAAqB;YACvF,KAAK,CAAC,IAAI,GAAI,IAAI,CAAC,aAAuC,CAAC,IAAc;YACzE,KAAK,CAAC,gBAAgB,GAAI,IAAI,CAAC,aAAuC,CAAC,gBAA0B;YACjG,KAAK,CAAC,eAAe,GAAI,IAAI,CAAC,aAAuC,CAAC,eAAyB;YAC/F,KAAK,CAAC,CAAC,GAAI,IAAI,CAAC,aAAuC,CAAC,CAAW;;AAC9D,aAAA,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,QAAQ,IAAI,KAAK,YAAY,YAAY,EAAE;YAChF,KAAK,CAAC,WAAW,GAAI,IAAI,CAAC,aAAyC,CAAC,WAAqB;YACzF,KAAK,CAAC,IAAI,GAAI,IAAI,CAAC,aAAyC,CAAC,IAAc;YAC3E,KAAK,CAAC,IAAI,GAAI,IAAI,CAAC,aAAyC,CAAC,IAAc;YAC3E,KAAK,CAAC,WAAW,GAAI,IAAI,CAAC,aAAyC,CAAC,WAAqB;YACzF,KAAK,CAAC,gBAAgB,GAAI,IAAI,CAAC,aAAyC,CAAC,gBAA0B;YACnG,KAAK,CAAC,eAAe,GAAI,IAAI,CAAC,aAAyC,CAAC,eAAyB;YACjG,KAAK,CAAC,eAAe,GAAI,IAAI,CAAC,aAAyC,CAAC,eAAyB;;AAGnG,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;AAC1C,YAAA,OAAO,KAA4B;;QAGrC,OAAQ,KAA0B,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC;;AAE1D,IAAA,iBAAiB,CAAC,SAAmB,EAAE,KAAc,EAAE,SAAsB,EAAA;AAC3E,QAAA,IAAI,CAAC,UAAU,GAAG,sBAAsB,CAAC;YACvC,SAAS;YACT,KAAK;YACL,SAAS;AACV,SAAA,CAAC;;AAGJ,IAAA,WAAW,CAAC,EACV,aAAa,GAAG,EAAE,EAClB,mBAAmB,GAIpB,EAAA;QACC,MAAM,cAAc,GAAG,iBAAiB,CAAC,IAAI,CAAC,QAAQ,CAAC;AACvD,QAAA,MAAM,QAAQ,GAAG,mBAAmB,GAAG,MAAM,CAAC,WAAW,CACvD,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,mBAAmB,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CACzF,GAAG,IAAI,CAAC,aAAa;QACtB,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE,aAAa,CAAC;AACtD,QAAA,OAAO,IAAI,cAAc,CAAC,OAAO,CAAC;;AAGpC,IAAA,kBAAkB,CAAC,YAA0B,EAAA;QAC3C,IAAI,YAAY,IAAI,gBAAgB,IAAI,YAAY,IAAI,YAAY,CAAC,cAAc,EAAE;AACnF,YAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC,cAAwC;;;IAI7E,eAAe,GAAA;AACb,QAAA,OAAO,OAAO,KAAuB,EAAE,MAAuB,KAAwC;YACpG,MAAM,EAAE,QAAQ,GAAG,EAAE,EAAE,GAAI,MAAM,EAAE,YAA0C,IAAI,EAAE;AACnF,YAAA,IAAI,CAAC,MAAM,IAAI,CAAC,QAAQ,EAAE;AACxB,gBAAA,MAAM,IAAI,KAAK,CAAC,CAAA,GAAA,EAAM,MAAM,GAAG,UAAU,GAAG,QAAQ,CAAA,SAAA,CAAW,CAAC;;AAElE,YAAA,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;AAClB,gBAAA,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM;;AAE7B,YAAA,IAAI,CAAC,MAAM,GAAG,MAAM;AACpB,YAAA,MAAM,EAAE,QAAQ,EAAE,GAAG,KAAK;YAE1B,IAAI,aAAa,GAAG,QAAQ;YAC5B,IAAI,CAAC,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,IAAI,IAAI,EAAE;AAC3G,gBAAA,IAAI,CAAC,aAAa,GAAG,mBAAmB,CAAC;oBACvC,kBAAkB,EAAE,IAAI,CAAC,kBAAkB;oBAC3C,SAAS,EAAE,IAAI,CAAC,gBAAgB;oBAChC,YAAY,EAAE,IAAI,CAAC,YAAY;oBAC/B,UAAU,EAAE,IAAI,CAAC,UAAU;AAC5B,iBAAA,CAAC;;AAEJ,YAAA,IAAI,IAAI,CAAC,aAAa,EAAE;gBACtB,MAAM,EAAE,OAAO,EAAE,kBAAkB,EAAE,GAAG,IAAI,CAAC,aAAa,CAAC;oBACzD,QAAQ;oBACR,aAAa,EAAE,IAAI,CAAC,YAAY;AAChC,oBAAA,kBAAkB,EAAE,OAAO;AAC5B,iBAAA,CAAC;AACF,gBAAA,IAAI,CAAC,kBAAkB,GAAG,kBAAkB;gBAC5C,aAAa,GAAG,OAAO;;YAGzB,MAAM,aAAa,GAAG,aAAa;YACnC,MAAM,YAAY,GAAG,aAAa,CAAC,MAAM,IAAI,CAAC,GAAG,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,IAAI;YAC/F,MAAM,YAAY,GAAG,aAAa,CAAC,MAAM,IAAI,CAAC,GAAG,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,IAAI;A
AE/F,YAAA,IACE,QAAQ,KAAK,SAAS,CAAC;AACpB,mBAAA,YAAY,YAAY;AACxB,mBAAA,YAAY,YAAY;AACxB,mBAAA,OAAO,YAAY,CAAC,OAAO,KAAK,QAAQ,EAC3C;gBACA,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,OAAO,GAAG,EAAE;;AAGtD,YAAA,MAAM,mBAAmB,GAAG,YAAY,YAAY,WAAW;YAE/D,IAAI,mBAAmB,IAAI,QAAQ,KAAK,SAAS,CAAC,SAAS,EAAE;gBAC3D,8BAA8B,CAAC,aAAa,CAAC;;AACxC,iBAAA,IACL,mBAAmB;iBAClB,YAAY,CAAC,QAAQ,CAAC,IAAI,YAAY,CAAC,QAAQ,CAAC,CAAC,EAClD;gBACA,qBAAqB,CAAC,aAAa,CAAC;;AAGtC,YAAA,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,EAAE;gBAC5D,MAAM,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,cAAc;AAC1D,gBAAA,IAAI,iBAAiB,GAAG,IAAI,CAAC,YAAY,EAAE;AACzC,oBAAA,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,YAAY,GAAG,iBAAiB,IAAI,IAAI,CAAC,GAAG,IAAI;AACnF,oBAAA,MAAM,KAAK,CAAC,UAAU,CAAC;;;AAI3B,YAAA,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,GAAG,EAAE;AAEhC,YAAA,IAAI,MAAiC;AACrC,YAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,IAAI,CAAC,IAAI,CAAC,IAAI,yBAAyB,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE;AAC5E,gBAAA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,aAAa,EAAE,MAAM,CAAC;AAClE,gBAAA,IAAI,UAAsC;AAC1C,gBAAA,WAAW,MAAM,KAAK,IAAI,MAAM,EAAE;oBAChC,mBAAmB,CAAC,WAAW,CAAC,iBAAiB,EAAE,EAAE,KAAK,EAAE,EAAE,MAAM,CAAC;oBACrE,IAAI,CAAC,UAAU,EAAE;wBACf,UAAU,GAAG,KAAK;;yBACb;AACL,wBAAA,UAAU,GAAG,MAAM,CAAC,UAAU,EAAE,KAAK,CAAC;;;gBAI1C,UAAU,GAAG,qBAAqB,CAAC,IAAI,CAAC,QAAQ,EAAE,UAAU,CAAC;gBAC7D,MAAM,GAAG,EAAE,QAAQ,EAAE,CAAC,UAA4B,CAAC,EAAE;;iBAChD;AACL,gBAAA,MAAM,YAAY,IAAI,MAAM,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,aAAa,EAAE,MAAM,CAAC,CAAmB;AAC5F,gBAAA,IAAI,CAAC,YAAY,CAAC,UAAU,EAAE,MAAM,IAAI,CAAC,IAAI,CAAC,EAAE;AAC9C,oBAAA,YAAY,CAAC,UAAU,GAAG,YAAY,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC,SAAS,KAAI;AACtE,wBAAA,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE;AACnB,4BAAA,OAAO,KAAK;;AAEd,wBAAA,OAAO,IAAI;AACb,qBAAC,CAAC;;gBAEJ,MAAM,GAAG,EAAE,QAAQ,EAAE,CAAC,YAAY,CAAC,EAAE;;YAGvC,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,QAAQ,GAAG,CAAC,CAAC,CAAC;AAC9C,YAAA,OAAO,MAAM;AACf,SAAC;;IAGH,cAAc,GAAA;AACZ,QAAA,MAAM,YAAY,GAAG,CAAC,KAAuB,EAAE,MAAuB,KAAY;AAChF,YAAA,IAAI,CAAC,MAAM,GAAG,MAAM;;;;;;AAMpB,YAAA,OAAO,cAAc,CAAC,KAAK,CAAC;AAC9B,SAAC;AAED,QAAA,MAAM,QAAQ,GAAG,IAAI,UAAU,CAAmB;YAChD,QAAQ,EAAE,IAAI,CAAC,UAAU;SAC1B;AACE,aAAA,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,eAAe,EAAE;AACrC,aAAA,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,eAAe,EAAE;AACrC,aAAA,OAAO,CAAC,KAAK,EAAE,KAAK;AACpB,aAAA,mBAAmB,CAAC,KAAK,EAAE,YAAY;AACvC,aAAA,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,GAAG,GAAG,GAAG,KAAK,CAAC;AAE7C,QAAA,OAAO,QAAQ,CAAC,OAAO,EAAE;;;AAK3B;;AAEG;IACH,eAAe,CAAC,OAAe,EAAE,WAA0B,EAAA;AACzD,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAGvC,QAAA,MAAM,CAAC,MAAM,EAAE,SAAS,CAAC,GAAG,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC;AACxD,QAAA,IAAI,WAAW,CAAC,IAAI,KAAK,SAAS,CAAC,UAAU,IAAI,WAAW,CAAC,UAAU,EAAE;AACvE,YAAA,KAAK,MAAM,SAAS,IAAI,WAAW,CAAC,UAAU,EAAE;AAC9C,gBAAA,MAAM,UAAU,GAAG,SAAS,CAAC,EAAE,IAAI,EAAE;AACrC,gBAAA,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE;oBACvD;;gBAEF,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,UAAU,EAAE,MAAM,CAAC;;;AAIhD,QAAA,MAAM,OAAO,GAAc;YACzB,SAAS;AACT,YAAA,EAAE,EAAE,MAAM;YACV,IAAI,EAAE,WAAW,CAAC,IAAI;AACtB,YAAA,KAAK,EAAE,IAAI,CAAC,WAAW,CAAC,MAAM;YAC9B,WAAW;AACX,YAAA,KAAK,EAAE,IAAI;SACZ;AAED,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,IAAI,EAAE;QAC9B,IAAI,KAAK,EAAE;AACT,YAAA,OAAO,CAAC,KAAK,GAAG,KAAK;;AAGvB,QAAA,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC;QAC9B,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,CAAC;QAC/C,mBAAmB,CAAC,WAAW,CAAC,WAAW,EAAE,OAAO,EAAE,IAAI,CAAC,MAAM,CAAC;AAClE,QAAA,OAAO,MAAM;;IAGf,uBAAuB,CAAC,IAAmB,EAAE,QAAkC,EAAA;AAC7E,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAGvC,QAA
A,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB;;AAGF,QAAA,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,IAAI;AAC9B,QAAA,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM;AAC/B,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE;QAC3D,IAAI,CAAC,MAAM,EAAE;AACX,YAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,YAAY,CAAA,CAAE,CAAC;;QAGrE,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;QACvC,IAAI,CAAC,OAAO,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,gCAAgC,MAAM,CAAA,CAAE,CAAC;;AAG3D,QAAA,MAAM,IAAI,GAAG,OAAO,KAAK,KAAK,QAAQ,GAAG,KAAK,GAAG,KAAK,CAAC,KAAK;AAC5D,QAAA,MAAM,SAAS,GAAG;AAChB,YAAA,IAAI,EAAE,OAAO,IAAI,KAAK,QAAQ,GAAG,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;AAC5D,YAAA,IAAI,EAAE,MAAM,CAAC,IAAI,IAAI,EAAE;YACvB,EAAE,EAAE,MAAM,CAAC,YAAY;AACvB,YAAA,MAAM,EAAE,OAAO,MAAM,CAAC,OAAO,KAAK;kBAC9B,MAAM,CAAC;kBACP,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,CAAC;AAClC,YAAA,QAAQ,EAAE,CAAC;SACZ;AAED,QAAA,IAAI,CAAC,eAAe,EAAE,UAAU,CAAC,WAAW,CAAC,qBAAqB,CAAC,EAAE,MAAM,CACzE,WAAW,CAAC,qBAAqB,EACjC,EAAE,MAAM,EAAE;AACR,gBAAA,EAAE,EAAE,MAAM;gBACV,KAAK,EAAE,OAAO,CAAC,KAAK;AACpB,gBAAA,IAAI,EAAE,WAAW;gBACjB;AACsB,aAAA;AACvB,SAAA,EACD,QAAQ,EACR,IAAI,CACL;;IAEH,mBAAmB,CAAC,IAAqB,EAAE,QAAkC,EAAA;AAC3E,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAGvC,QAAA,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE;AACZ,YAAA,OAAO,CAAC,IAAI,CAAC,oCAAoC,CAAC;YAClD;;AAGF,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE;QACtD,IAAI,CAAC,MAAM,EAAE;YACX,MAAM,IAAI,KAAK,CAAC,CAAA,iCAAA,EAAoC,IAAI,CAAC,EAAE,CAAE,CAAA,CAAC;;QAGhE,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,IAAI;QAEzC,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;QACvC,IAAI,CAAC,OAAO,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,gCAAgC,MAAM,CAAA,CAAE,CAAC;;AAG3D,QAAA,MAAM,SAAS,GAAwB;YACrC,EAAE,EAAE,IAAI,CAAC,EAAE;YACX,IAAI,EAAE,IAAI,IAAI,EAAE;AAChB,YAAA,IAAI,EAAE,OAAO,IAAI,KAAK,QAAQ,GAAG,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;AAC5D,YAAA,MAAM,EAAE,CAAwB,qBAAA,EAAA,KAAK,EAAE,OAAO,GAAG,CAAK,EAAA,EAAA,KAAK,CAAC,OAAO,CAAA,CAAE,GAAG,EAAE,CAAE,CAAA;AAC5E,YAAA,QAAQ,EAAE,CAAC;SACZ;AAED,QAAA,IAAI,CAAC,eAAe,EAAE,UAAU,CAAC,WAAW,CAAC,qBAAqB,CAAC,EAAE,MAAM,CACzE,WAAW,CAAC,qBAAqB,EACjC,EAAE,MAAM,EAAE;AACR,gBAAA,EAAE,EAAE,MAAM;gBACV,KAAK,EAAE,OAAO,CAAC,KAAK;AACpB,gBAAA,IAAI,EAAE,WAAW;gBACjB;AACsB,aAAA;AACvB,SAAA,EACD,QAAQ,EACR,IAAI,CACL;;IAGH,oBAAoB,CAAC,EAAU,EAAE,KAAsB,EAAA;AACrD,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;aAChC,IAAI,CAAC,EAAE,EAAE;AACd,YAAA,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC;;AAErC,QAAA,MAAM,YAAY,GAAwB;YACxC,EAAE;YACF,KAAK;SACN;QACD,mBAAmB,CAAC,WAAW,CAAC,iBAAiB,EAAE,YAAY,EAAE,IAAI,CAAC,MAAM,CAAC;;IAG/E,oBAAoB,CAAC,EAAU,EAAE,KAAqB,EAAA;AACpD,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAEvC,QAAA,MAAM,YAAY,GAAwB;YACxC,EAAE;YACF,KAAK;SACN;QACD,mBAAmB,CAAC,WAAW,CAAC,gBAAgB,EAAE,YAAY,EAAE,IAAI,CAAC,MAAM,CAAC;;AAG9E,IAAA,sBAAsB,GAAG,CAAC,MAAc,EAAE,KAAuB,KAAU;AACzE,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAEvC,QAAA,MAAM,cAAc,GAA0B;AAC5C,YAAA,EAAE,EAAE,MAAM;YACV,KAAK;SACN;QACD,mBAAmB,CAAC,WAAW,CAAC,kBAAkB,EAAE,cAAc,EAAE,IAAI,CAAC,MAAM,CAAC;AAClF,KAAC;AACF;;;;"}
+ {"version":3,"file":"Graph.mjs","sources":["../../../src/graphs/Graph.ts"],"sourcesContent":["// src/graphs/Graph.ts\nimport { nanoid } from 'nanoid';\nimport { concat } from '@langchain/core/utils/stream';\nimport { ToolNode } from '@langchain/langgraph/prebuilt';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { START, END, StateGraph } from '@langchain/langgraph';\nimport { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';\nimport { Runnable, RunnableConfig } from '@langchain/core/runnables';\nimport { dispatchCustomEvent } from '@langchain/core/callbacks/dispatch';\nimport { AIMessageChunk, ToolMessage, SystemMessage } from '@langchain/core/messages';\nimport type { BaseMessage, BaseMessageFields, UsageMetadata } from '@langchain/core/messages';\nimport type * as t from '@/types';\nimport { Providers, GraphEvents, GraphNodeKeys, StepTypes, Callback, ContentTypes } from '@/common';\nimport type { ToolCall } from '@langchain/core/messages/tool';\nimport { getChatModelClass, manualToolStreamProviders } from '@/llm/providers';\nimport { ToolNode as CustomToolNode, toolsCondition } from '@/tools/ToolNode';\nimport {\n createPruneMessages,\n modifyDeltaProperties,\n formatArtifactPayload,\n convertMessagesToContent,\n formatAnthropicArtifactContent,\n} from '@/messages';\nimport { resetIfNotEmpty, isOpenAILike, isGoogleLike, joinKeys, sleep } from '@/utils';\nimport { createFakeStreamingLLM } from '@/llm/fake';\nimport { HandlerRegistry } from '@/events';\n\nconst { AGENT, TOOLS } = GraphNodeKeys;\nexport type GraphNode = GraphNodeKeys | typeof START;\nexport type ClientCallback<T extends unknown[]> = (graph: StandardGraph, ...args: T) => void;\nexport type ClientCallbacks = {\n [Callback.TOOL_ERROR]?: ClientCallback<[Error, string]>;\n [Callback.TOOL_START]?: ClientCallback<unknown[]>;\n [Callback.TOOL_END]?: ClientCallback<unknown[]>;\n}\nexport type SystemCallbacks = {\n [K in keyof ClientCallbacks]: ClientCallbacks[K] extends ClientCallback<infer Args>\n ? 
(...args: Args) => void\n : never;\n};\n\nexport abstract class Graph<\n T extends t.BaseGraphState = t.BaseGraphState,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n TNodeName extends string = string,\n> {\n abstract resetValues(): void;\n abstract createGraphState(): t.GraphStateChannels<T>;\n abstract initializeTools(): CustomToolNode<T> | ToolNode<T>;\n abstract initializeModel(): Runnable;\n abstract getRunMessages(): BaseMessage[] | undefined;\n abstract getContentParts(): t.MessageContentComplex[] | undefined;\n abstract generateStepId(stepKey: string): [string, number];\n abstract getKeyList(metadata: Record<string, unknown> | undefined): (string | number | undefined)[];\n abstract getStepKey(metadata: Record<string, unknown> | undefined): string;\n abstract checkKeyList(keyList: (string | number | undefined)[]): boolean;\n abstract getStepIdByKey(stepKey: string, index?: number): string\n abstract getRunStep(stepId: string): t.RunStep | undefined;\n abstract dispatchRunStep(stepKey: string, stepDetails: t.StepDetails): string;\n abstract dispatchRunStepDelta(id: string, delta: t.ToolCallDelta): void;\n abstract dispatchMessageDelta(id: string, delta: t.MessageDelta): void;\n abstract dispatchReasoningDelta(stepId: string, delta: t.ReasoningDelta): void;\n abstract handleToolCallCompleted(data: t.ToolEndData, metadata?: Record<string, unknown>): void;\n\n abstract createCallModel(): (state: T, config?: RunnableConfig) => Promise<Partial<T>>;\n abstract createWorkflow(): t.CompiledWorkflow<T>;\n lastToken?: string;\n tokenTypeSwitch?: 'reasoning' | 'content';\n reasoningKey: 'reasoning_content' | 'reasoning' = 'reasoning_content';\n currentTokenType: ContentTypes.TEXT | ContentTypes.THINK = ContentTypes.TEXT;\n messageStepHasToolCalls: Map<string, boolean> = new Map();\n messageIdsByStepKey: Map<string, string> = new Map();\n prelimMessageIdsByStepKey: Map<string, string> = new Map();\n config: RunnableConfig | undefined;\n contentData: t.RunStep[] = [];\n stepKeyIds: Map<string, string[]> = new Map<string, string[]>();\n contentIndexMap: Map<string, number> = new Map();\n toolCallStepIds: Map<string, string> = new Map();\n currentUsage: Partial<UsageMetadata> | undefined;\n indexTokenCountMap: Record<string, number> = {};\n maxContextTokens: number | undefined;\n pruneMessages?: ReturnType<typeof createPruneMessages>;\n /** The amount of time that should pass before another consecutive API call */\n streamBuffer: number | undefined;\n tokenCounter?: t.TokenCounter;\n signal?: AbortSignal;\n}\n\nexport class StandardGraph extends Graph<\n t.BaseGraphState,\n GraphNode\n> {\n private graphState: t.GraphStateChannels<t.BaseGraphState>;\n clientOptions: t.ClientOptions;\n boundModel: Runnable;\n /** The last recorded timestamp that a stream API call was invoked */\n lastStreamCall: number | undefined;\n handlerRegistry: HandlerRegistry | undefined;\n systemMessage: SystemMessage | undefined;\n messages: BaseMessage[] = [];\n runId: string | undefined;\n tools?: t.GenericTool[];\n toolMap?: t.ToolMap;\n startIndex: number = 0;\n provider: Providers;\n toolEnd: boolean;\n signal: AbortSignal | undefined;\n\n constructor({\n runId,\n tools,\n signal,\n toolMap,\n provider,\n streamBuffer,\n instructions,\n reasoningKey,\n clientOptions,\n toolEnd = false,\n additional_instructions = '',\n } : t.StandardGraphInput) {\n super();\n this.runId = runId;\n this.tools = tools;\n this.signal = signal;\n this.toolEnd = toolEnd;\n this.toolMap = toolMap;\n this.provider = 
provider;\n this.streamBuffer = streamBuffer;\n this.clientOptions = clientOptions;\n this.graphState = this.createGraphState();\n this.boundModel = this.initializeModel();\n if (reasoningKey) {\n this.reasoningKey = reasoningKey;\n }\n\n let finalInstructions: string | BaseMessageFields = instructions ?? '';\n if (additional_instructions) {\n finalInstructions = finalInstructions ? `${finalInstructions}\\n\\n${additional_instructions}` : additional_instructions;\n }\n\n if (finalInstructions && provider === Providers.ANTHROPIC && (clientOptions as t.AnthropicClientOptions)?.clientOptions?.defaultHeaders?.['anthropic-beta']?.includes('prompt-caching')) {\n finalInstructions = {\n content: [\n {\n type: \"text\",\n text: instructions,\n cache_control: { type: \"ephemeral\" },\n },\n ],\n };\n }\n\n if (finalInstructions) {\n this.systemMessage = new SystemMessage(finalInstructions);\n }\n }\n\n /* Init */\n\n resetValues(keepContent?: boolean): void {\n this.messages = [];\n this.config = resetIfNotEmpty(this.config, undefined);\n if (keepContent !== true) {\n this.contentData = resetIfNotEmpty(this.contentData, []);\n this.contentIndexMap = resetIfNotEmpty(this.contentIndexMap, new Map());\n }\n this.stepKeyIds = resetIfNotEmpty(this.stepKeyIds, new Map());\n this.toolCallStepIds = resetIfNotEmpty(this.toolCallStepIds, new Map());\n this.messageIdsByStepKey = resetIfNotEmpty(this.messageIdsByStepKey, new Map());\n this.messageStepHasToolCalls = resetIfNotEmpty(this.prelimMessageIdsByStepKey, new Map());\n this.prelimMessageIdsByStepKey = resetIfNotEmpty(this.prelimMessageIdsByStepKey, new Map());\n this.currentTokenType = resetIfNotEmpty(this.currentTokenType, ContentTypes.TEXT);\n this.lastToken = resetIfNotEmpty(this.lastToken, undefined);\n this.tokenTypeSwitch = resetIfNotEmpty(this.tokenTypeSwitch, undefined);\n this.indexTokenCountMap = resetIfNotEmpty(this.indexTokenCountMap, {});\n this.currentUsage = resetIfNotEmpty(this.currentUsage, undefined);\n this.tokenCounter = resetIfNotEmpty(this.tokenCounter, undefined);\n this.maxContextTokens = resetIfNotEmpty(this.maxContextTokens, undefined);\n }\n\n /* Run Step Processing */\n\n getRunStep(stepId: string): t.RunStep | undefined {\n const index = this.contentIndexMap.get(stepId);\n if (index !== undefined) {\n return this.contentData[index];\n }\n return undefined;\n }\n\n getStepKey(metadata: Record<string, unknown> | undefined): string {\n if (!metadata) return '';\n\n const keyList = this.getKeyList(metadata);\n if (this.checkKeyList(keyList)) {\n throw new Error('Missing metadata');\n }\n\n return joinKeys(keyList);\n }\n\n getStepIdByKey(stepKey: string, index?: number): string {\n const stepIds = this.stepKeyIds.get(stepKey);\n if (!stepIds) {\n throw new Error(`No step IDs found for stepKey ${stepKey}`);\n }\n\n if (index === undefined) {\n return stepIds[stepIds.length - 1];\n }\n\n return stepIds[index];\n }\n\n generateStepId(stepKey: string): [string, number] {\n const stepIds = this.stepKeyIds.get(stepKey);\n let newStepId: string | undefined;\n let stepIndex = 0;\n if (stepIds) {\n stepIndex = stepIds.length;\n newStepId = `step_${nanoid()}`;\n stepIds.push(newStepId);\n this.stepKeyIds.set(stepKey, stepIds);\n } else {\n newStepId = `step_${nanoid()}`;\n this.stepKeyIds.set(stepKey, [newStepId]);\n }\n\n return [newStepId, stepIndex];\n }\n\n getKeyList(metadata: Record<string, unknown> | undefined): (string | number | undefined)[] {\n if (!metadata) return [];\n\n const keyList = [\n metadata.run_id as string,\n 
metadata.thread_id as string,\n metadata.langgraph_node as string,\n metadata.langgraph_step as number,\n metadata.checkpoint_ns as string,\n ];\n if (this.currentTokenType === ContentTypes.THINK) {\n keyList.push('reasoning');\n }\n\n return keyList;\n }\n\n checkKeyList(keyList: (string | number | undefined)[]): boolean {\n return keyList.some((key) => key === undefined);\n }\n\n /* Misc.*/\n\n getRunMessages(): BaseMessage[] | undefined {\n return this.messages.slice(this.startIndex);\n }\n\n getContentParts(): t.MessageContentComplex[] | undefined {\n return convertMessagesToContent(this.messages.slice(this.startIndex));\n }\n\n /* Graph */\n\n createGraphState(): t.GraphStateChannels<t.BaseGraphState> {\n return {\n messages: {\n value: (x: BaseMessage[], y: BaseMessage[]): BaseMessage[] => {\n if (!x.length) {\n if (this.systemMessage) {\n x.push(this.systemMessage);\n }\n\n this.startIndex = x.length + y.length;\n }\n const current = x.concat(y);\n this.messages = current;\n return current;\n },\n default: () => [],\n },\n };\n }\n\n initializeTools(): CustomToolNode<t.BaseGraphState> | ToolNode<t.BaseGraphState> {\n // return new ToolNode<t.BaseGraphState>(this.tools);\n return new CustomToolNode<t.BaseGraphState>({\n tools: this.tools || [],\n toolMap: this.toolMap,\n toolCallStepIds: this.toolCallStepIds,\n errorHandler: this.handleToolCallError.bind(this),\n });\n }\n\n initializeModel(): Runnable {\n const ChatModelClass = getChatModelClass(this.provider);\n const model = new ChatModelClass(this.clientOptions);\n\n if (isOpenAILike(this.provider) && (model instanceof ChatOpenAI || model instanceof AzureChatOpenAI)) {\n model.temperature = (this.clientOptions as t.OpenAIClientOptions).temperature as number;\n model.topP = (this.clientOptions as t.OpenAIClientOptions).topP as number;\n model.frequencyPenalty = (this.clientOptions as t.OpenAIClientOptions).frequencyPenalty as number;\n model.presencePenalty = (this.clientOptions as t.OpenAIClientOptions).presencePenalty as number;\n model.n = (this.clientOptions as t.OpenAIClientOptions).n as number;\n } else if (this.provider === Providers.VERTEXAI && model instanceof ChatVertexAI) {\n model.temperature = (this.clientOptions as t.VertexAIClientOptions).temperature as number;\n model.topP = (this.clientOptions as t.VertexAIClientOptions).topP as number;\n model.topK = (this.clientOptions as t.VertexAIClientOptions).topK as number;\n model.topLogprobs = (this.clientOptions as t.VertexAIClientOptions).topLogprobs as number;\n model.frequencyPenalty = (this.clientOptions as t.VertexAIClientOptions).frequencyPenalty as number;\n model.presencePenalty = (this.clientOptions as t.VertexAIClientOptions).presencePenalty as number;\n model.maxOutputTokens = (this.clientOptions as t.VertexAIClientOptions).maxOutputTokens as number;\n }\n\n if (!this.tools || this.tools.length === 0) {\n return model as unknown as Runnable;\n }\n\n return (model as t.ModelWithTools).bindTools(this.tools);\n }\n overrideTestModel(responses: string[], sleep?: number, toolCalls?: ToolCall[]): void {\n this.boundModel = createFakeStreamingLLM({\n responses,\n sleep,\n toolCalls,\n });\n }\n\n getNewModel({\n clientOptions = {},\n omitOriginalOptions,\n } : {\n clientOptions?: t.ClientOptions;\n omitOriginalOptions?: string[]\n }): t.ChatModelInstance {\n const ChatModelClass = getChatModelClass(this.provider);\n const _options = omitOriginalOptions ? 
Object.fromEntries(\n Object.entries(this.clientOptions).filter(([key]) => !omitOriginalOptions.includes(key)),\n ) : this.clientOptions;\n const options = Object.assign(_options, clientOptions);\n return new ChatModelClass(options);\n }\n\n storeUsageMetadata(finalMessage?: BaseMessage): void {\n if (finalMessage && 'usage_metadata' in finalMessage && finalMessage.usage_metadata) {\n this.currentUsage = finalMessage.usage_metadata as Partial<UsageMetadata>;\n }\n }\n\n createCallModel() {\n return async (state: t.BaseGraphState, config?: RunnableConfig): Promise<Partial<t.BaseGraphState>> => {\n const { provider = '' } = (config?.configurable as t.GraphConfig | undefined) ?? {} ;\n if (!config || !provider) {\n throw new Error(`No ${config ? 'provider' : 'config'} provided`);\n }\n if (!config.signal) {\n config.signal = this.signal;\n }\n this.config = config;\n const { messages } = state;\n\n let messagesToUse = messages;\n if (!this.pruneMessages && this.tokenCounter && this.maxContextTokens && this.indexTokenCountMap[0] != null) {\n const isAnthropicWithThinking = (\n (this.provider === Providers.ANTHROPIC\n || (this.provider === Providers.BEDROCK && (this.clientOptions as t.BedrockClientOptions)?.model?.includes('anthropic')))\n && (this.clientOptions as t.AnthropicClientOptions)?.thinking != null);\n \n this.pruneMessages = createPruneMessages({\n indexTokenCountMap: this.indexTokenCountMap,\n maxTokens: this.maxContextTokens,\n tokenCounter: this.tokenCounter,\n startIndex: this.startIndex,\n thinkingEnabled: isAnthropicWithThinking,\n });\n }\n if (this.pruneMessages) {\n const { context, indexTokenCountMap } = this.pruneMessages({\n messages,\n usageMetadata: this.currentUsage,\n // startOnMessageType: 'human',\n });\n this.indexTokenCountMap = indexTokenCountMap;\n messagesToUse = context;\n }\n\n const finalMessages = messagesToUse;\n const lastMessageX = finalMessages.length >= 2 ? finalMessages[finalMessages.length - 2] : null;\n const lastMessageY = finalMessages.length >= 1 ? finalMessages[finalMessages.length - 1] : null;\n\n if (\n provider === Providers.BEDROCK\n && lastMessageX instanceof AIMessageChunk\n && lastMessageY instanceof ToolMessage\n && typeof lastMessageX.content === 'string'\n ) {\n finalMessages[finalMessages.length - 2].content = '';\n }\n\n const isLatestToolMessage = lastMessageY instanceof ToolMessage;\n\n if (isLatestToolMessage && provider === Providers.ANTHROPIC) {\n formatAnthropicArtifactContent(finalMessages);\n } else if (\n isLatestToolMessage &&\n (isOpenAILike(provider) || isGoogleLike(provider))\n ) {\n formatArtifactPayload(finalMessages);\n }\n\n if (this.lastStreamCall != null && this.streamBuffer != null) {\n const timeSinceLastCall = Date.now() - this.lastStreamCall;\n if (timeSinceLastCall < this.streamBuffer) {\n const timeToWait = Math.ceil((this.streamBuffer - timeSinceLastCall) / 1000) * 1000;\n await sleep(timeToWait);\n }\n }\n\n this.lastStreamCall = Date.now();\n\n let result: Partial<t.BaseGraphState>;\n if ((this.tools?.length ?? 
0) > 0 && manualToolStreamProviders.has(provider)) {\n const stream = await this.boundModel.stream(finalMessages, config);\n let finalChunk: AIMessageChunk | undefined;\n for await (const chunk of stream) {\n dispatchCustomEvent(GraphEvents.CHAT_MODEL_STREAM, { chunk }, config);\n if (!finalChunk) {\n finalChunk = chunk;\n } else {\n finalChunk = concat(finalChunk, chunk);\n }\n }\n\n finalChunk = modifyDeltaProperties(this.provider, finalChunk);\n result = { messages: [finalChunk as AIMessageChunk] };\n } else {\n const finalMessage = (await this.boundModel.invoke(finalMessages, config)) as AIMessageChunk;\n if ((finalMessage.tool_calls?.length ?? 0) > 0) {\n finalMessage.tool_calls = finalMessage.tool_calls?.filter((tool_call) => {\n if (!tool_call.name) {\n return false;\n }\n return true;\n });\n }\n result = { messages: [finalMessage] };\n }\n \n this.storeUsageMetadata(result?.messages?.[0]);\n return result;\n };\n }\n\n createWorkflow(): t.CompiledWorkflow<t.BaseGraphState> {\n const routeMessage = (state: t.BaseGraphState, config?: RunnableConfig): string => {\n this.config = config;\n // const lastMessage = state.messages[state.messages.length - 1] as AIMessage;\n // if (!lastMessage?.tool_calls?.length) {\n // return END;\n // }\n // return TOOLS;\n return toolsCondition(state);\n };\n\n const workflow = new StateGraph<t.BaseGraphState>({\n channels: this.graphState,\n })\n .addNode(AGENT, this.createCallModel())\n .addNode(TOOLS, this.initializeTools())\n .addEdge(START, AGENT)\n .addConditionalEdges(AGENT, routeMessage)\n .addEdge(TOOLS, this.toolEnd ? END : AGENT);\n\n return workflow.compile();\n }\n\n /* Dispatchers */\n\n /**\n * Dispatches a run step to the client, returns the step ID\n */\n dispatchRunStep(stepKey: string, stepDetails: t.StepDetails): string {\n if (!this.config) {\n throw new Error('No config provided');\n }\n\n const [stepId, stepIndex] = this.generateStepId(stepKey);\n if (stepDetails.type === StepTypes.TOOL_CALLS && stepDetails.tool_calls) {\n for (const tool_call of stepDetails.tool_calls) {\n const toolCallId = tool_call.id ?? '';\n if (!toolCallId || this.toolCallStepIds.has(toolCallId)) {\n continue;\n }\n this.toolCallStepIds.set(toolCallId, stepId);\n }\n }\n\n const runStep: t.RunStep = {\n stepIndex,\n id: stepId,\n type: stepDetails.type,\n index: this.contentData.length,\n stepDetails,\n usage: null,\n };\n\n const runId = this.runId ?? '';\n if (runId) {\n runStep.runId = runId;\n }\n\n this.contentData.push(runStep);\n this.contentIndexMap.set(stepId, runStep.index);\n dispatchCustomEvent(GraphEvents.ON_RUN_STEP, runStep, this.config);\n return stepId;\n }\n\n handleToolCallCompleted(data: t.ToolEndData, metadata?: Record<string, unknown>): void {\n if (!this.config) {\n throw new Error('No config provided');\n }\n\n if (!data.output) {\n return;\n }\n\n const { input, output } = data;\n const { tool_call_id } = output;\n const stepId = this.toolCallStepIds.get(tool_call_id) ?? '';\n if (!stepId) {\n throw new Error(`No stepId found for tool_call_id ${tool_call_id}`);\n }\n\n const runStep = this.getRunStep(stepId);\n if (!runStep) {\n throw new Error(`No run step found for stepId ${stepId}`);\n }\n\n const args = typeof input === 'string' ? input : input.input;\n const tool_call = {\n args: typeof args === 'string' ? args : JSON.stringify(args),\n name: output.name ?? '',\n id: output.tool_call_id,\n output: typeof output.content === 'string'\n ? 
output.content\n : JSON.stringify(output.content),\n progress: 1,\n };\n\n this.handlerRegistry?.getHandler(GraphEvents.ON_RUN_STEP_COMPLETED)?.handle(\n GraphEvents.ON_RUN_STEP_COMPLETED,\n { result: {\n id: stepId,\n index: runStep.index,\n type: 'tool_call',\n tool_call\n } as t.ToolCompleteEvent,\n },\n metadata,\n this,\n );\n }\n handleToolCallError(data: t.ToolErrorData, metadata?: Record<string, unknown>): void {\n if (!this.config) {\n throw new Error('No config provided');\n }\n\n if (!data.id) {\n console.warn('No Tool ID provided for Tool Error');\n return;\n }\n\n const stepId = this.toolCallStepIds.get(data.id) ?? '';\n if (!stepId) {\n throw new Error(`No stepId found for tool_call_id ${data.id}`);\n }\n\n const { name, input: args, error } = data;\n\n const runStep = this.getRunStep(stepId);\n if (!runStep) {\n throw new Error(`No run step found for stepId ${stepId}`);\n }\n\n const tool_call: t.ProcessedToolCall = {\n id: data.id,\n name: name ?? '',\n args: typeof args === 'string' ? args : JSON.stringify(args),\n output: `Error processing tool${error?.message ? `: ${error.message}` : ''}`,\n progress: 1,\n };\n\n this.handlerRegistry?.getHandler(GraphEvents.ON_RUN_STEP_COMPLETED)?.handle(\n GraphEvents.ON_RUN_STEP_COMPLETED,\n { result: {\n id: stepId,\n index: runStep.index,\n type: 'tool_call',\n tool_call\n } as t.ToolCompleteEvent,\n },\n metadata,\n this,\n );\n }\n\n dispatchRunStepDelta(id: string, delta: t.ToolCallDelta): void {\n if (!this.config) {\n throw new Error('No config provided');\n } else if (!id) {\n throw new Error('No step ID found');\n }\n const runStepDelta: t.RunStepDeltaEvent = {\n id,\n delta,\n };\n dispatchCustomEvent(GraphEvents.ON_RUN_STEP_DELTA, runStepDelta, this.config);\n }\n\n dispatchMessageDelta(id: string, delta: t.MessageDelta): void {\n if (!this.config) {\n throw new Error('No config provided');\n }\n const messageDelta: t.MessageDeltaEvent = {\n id,\n delta,\n };\n dispatchCustomEvent(GraphEvents.ON_MESSAGE_DELTA, messageDelta, this.config);\n }\n\n dispatchReasoningDelta = (stepId: string, delta: t.ReasoningDelta): void => {\n if (!this.config) {\n throw new Error('No config provided');\n }\n const reasoningDelta: t.ReasoningDeltaEvent = {\n id: stepId,\n delta,\n };\n dispatchCustomEvent(GraphEvents.ON_REASONING_DELTA, reasoningDelta, this.config);\n 
};\n}\n"],"names":["CustomToolNode"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AA2BA,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,aAAa;MAchB,KAAK,CAAA;AAyBzB,IAAA,SAAS;AACT,IAAA,eAAe;IACf,YAAY,GAAsC,mBAAmB;AACrE,IAAA,gBAAgB,GAA2C,YAAY,CAAC,IAAI;AAC5E,IAAA,uBAAuB,GAAyB,IAAI,GAAG,EAAE;AACzD,IAAA,mBAAmB,GAAwB,IAAI,GAAG,EAAE;AACpD,IAAA,yBAAyB,GAAwB,IAAI,GAAG,EAAE;AAC1D,IAAA,MAAM;IACN,WAAW,GAAgB,EAAE;AAC7B,IAAA,UAAU,GAA0B,IAAI,GAAG,EAAoB;AAC/D,IAAA,eAAe,GAAwB,IAAI,GAAG,EAAE;AAChD,IAAA,eAAe,GAAwB,IAAI,GAAG,EAAE;AAChD,IAAA,YAAY;IACZ,kBAAkB,GAA2B,EAAE;AAC/C,IAAA,gBAAgB;AAChB,IAAA,aAAa;;AAEb,IAAA,YAAY;AACZ,IAAA,YAAY;AACZ,IAAA,MAAM;AACP;AAEK,MAAO,aAAc,SAAQ,KAGlC,CAAA;AACS,IAAA,UAAU;AAClB,IAAA,aAAa;AACb,IAAA,UAAU;;AAEV,IAAA,cAAc;AACd,IAAA,eAAe;AACf,IAAA,aAAa;IACb,QAAQ,GAAkB,EAAE;AAC5B,IAAA,KAAK;AACL,IAAA,KAAK;AACL,IAAA,OAAO;IACP,UAAU,GAAW,CAAC;AACtB,IAAA,QAAQ;AACR,IAAA,OAAO;AACP,IAAA,MAAM;IAEN,WAAY,CAAA,EACV,KAAK,EACL,KAAK,EACL,MAAM,EACN,OAAO,EACP,QAAQ,EACR,YAAY,EACZ,YAAY,EACZ,YAAY,EACZ,aAAa,EACb,OAAO,GAAG,KAAK,EACf,uBAAuB,GAAG,EAAE,GACN,EAAA;AACtB,QAAA,KAAK,EAAE;AACP,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK;AAClB,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK;AAClB,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM;AACpB,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO;AACtB,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO;AACtB,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ;AACxB,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY;AAChC,QAAA,IAAI,CAAC,aAAa,GAAG,aAAa;AAClC,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,gBAAgB,EAAE;AACzC,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,eAAe,EAAE;QACxC,IAAI,YAAY,EAAE;AAChB,YAAA,IAAI,CAAC,YAAY,GAAG,YAAY;;AAGlC,QAAA,IAAI,iBAAiB,GAA+B,YAAY,IAAI,EAAE;QACtE,IAAI,uBAAuB,EAAE;AAC3B,YAAA,iBAAiB,GAAG,iBAAiB,GAAG,CAAG,EAAA,iBAAiB,CAAO,IAAA,EAAA,uBAAuB,CAAE,CAAA,GAAG,uBAAuB;;QAGxH,IAAI,iBAAiB,IAAI,QAAQ,KAAK,SAAS,CAAC,SAAS,IAAK,aAA0C,EAAE,aAAa,EAAE,cAAc,GAAG,gBAAgB,CAAC,EAAE,QAAQ,CAAC,gBAAgB,CAAC,EAAE;AACvL,YAAA,iBAAiB,GAAG;AAClB,gBAAA,OAAO,EAAE;AACP,oBAAA;AACE,wBAAA,IAAI,EAAE,MAAM;AACZ,wBAAA,IAAI,EAAE,YAAY;AAClB,wBAAA,aAAa,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE;AACrC,qBAAA;AACF,iBAAA;aACF;;QAGH,IAAI,iBAAiB,EAAE;YACrB,IAAI,CAAC,aAAa,GAAG,IAAI,aAAa,CAAC,iBAAiB,CAAC;;;;AAM7D,IAAA,WAAW,CAAC,WAAqB,EAAA;AAC/B,QAAA,IAAI,CAAC,QAAQ,GAAG,EAAE;QAClB,IAAI,CAAC,MAAM,GAAG,eAAe,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC;AACrD,QAAA,IAAI,WAAW,KAAK,IAAI,EAAE;YACxB,IAAI,CAAC,WAAW,GAAG,eAAe,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,CAAC;AACxD,YAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,GAAG,EAAE,CAAC;;AAEzE,QAAA,IAAI,CAAC,UAAU,GAAG,eAAe,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,GAAG,EAAE,CAAC;AAC7D,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,GAAG,EAAE,CAAC;AACvE,QAAA,IAAI,CAAC,mBAAmB,GAAG,eAAe,CAAC,IAAI,CAAC,mBAAmB,EAAE,IAAI,GAAG,EAAE,CAAC;AAC/E,QAAA,IAAI,CAAC,uBAAuB,GAAG,eAAe,CAAC,IAAI,CAAC,yBAAyB,EAAE,IAAI,GAAG,EAAE,CAAC;AACzF,QAAA,IAAI,CAAC,yBAAyB,GAAG,eAAe,CAAC,IAAI,CAAC,yBAAyB,EAAE,IAAI,GAAG,EAAE,CAAC;AAC3F,QAAA,IAAI,CAAC,gBAAgB,GAAG,eAAe,CAAC,IAAI,CAAC,gBAAgB,EAAE,YAAY,CAAC,IAAI,CAAC;QACjF,IAAI,CAAC,SAAS,GAAG,eAAe,CAAC,IAAI,CAAC,SAAS,EAAE,SAAS,CAAC;QAC3D,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC,IAAI,CAAC,eAAe,EAAE,SAAS,CAAC;QACvE,IAAI,CAAC,kBAAkB,GAAG,eAAe,CAAC,IAAI,CAAC,kBAAkB,EAAE,EAAE,CAAC;QACtE,IAAI,CAAC,YAAY,GAAG,eAAe,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,CAAC;QACjE,IAAI,CAAC,YAAY,GAAG,eAAe,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,CAAC;QACjE,IAAI,CAAC,gBAAgB,GAAG,eAAe,CAAC,IAAI,CAAC,gBAAgB,EAAE,SAAS,CAAC;;;AAK3E,IAAA,UAAU,CAAC,MAAc,EAAA;QACvB,MAAM,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,MAAM,CAAC;AAC9C,QAAA,IAAI,KAAK,KAAK,SAAS,EAAE;AACvB,YAAA,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC;;AAEhC,QAAA,OAAO,SAAS;;AAGlB,IAAA,UAAU,CAAC,QAA6C,EAAA;AACtD,QAAA,IAAI,CAAC,QAAQ;AAAE,YAAA,OAAO,EAAE
;QAExB,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC;AACzC,QAAA,IAAI,IAAI,CAAC,YAAY,CAAC,OAAO,CAAC,EAAE;AAC9B,YAAA,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC;;AAGrC,QAAA,OAAO,QAAQ,CAAC,OAAO,CAAC;;IAG1B,cAAc,CAAC,OAAe,EAAE,KAAc,EAAA;QAC5C,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,CAAC;QAC5C,IAAI,CAAC,OAAO,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,OAAO,CAAA,CAAE,CAAC;;AAG7D,QAAA,IAAI,KAAK,KAAK,SAAS,EAAE;YACvB,OAAO,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC;;AAGpC,QAAA,OAAO,OAAO,CAAC,KAAK,CAAC;;AAGvB,IAAA,cAAc,CAAC,OAAe,EAAA;QAC5B,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,CAAC;AAC5C,QAAA,IAAI,SAA6B;QACjC,IAAI,SAAS,GAAG,CAAC;QACjB,IAAI,OAAO,EAAE;AACX,YAAA,SAAS,GAAG,OAAO,CAAC,MAAM;AAC1B,YAAA,SAAS,GAAG,CAAA,KAAA,EAAQ,MAAM,EAAE,EAAE;AAC9B,YAAA,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC;YACvB,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,EAAE,OAAO,CAAC;;aAChC;AACL,YAAA,SAAS,GAAG,CAAA,KAAA,EAAQ,MAAM,EAAE,EAAE;YAC9B,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,EAAE,CAAC,SAAS,CAAC,CAAC;;AAG3C,QAAA,OAAO,CAAC,SAAS,EAAE,SAAS,CAAC;;AAG/B,IAAA,UAAU,CAAC,QAA6C,EAAA;AACtD,QAAA,IAAI,CAAC,QAAQ;AAAE,YAAA,OAAO,EAAE;AAExB,QAAA,MAAM,OAAO,GAAG;AACd,YAAA,QAAQ,CAAC,MAAgB;AACzB,YAAA,QAAQ,CAAC,SAAmB;AAC5B,YAAA,QAAQ,CAAC,cAAwB;AACjC,YAAA,QAAQ,CAAC,cAAwB;AACjC,YAAA,QAAQ,CAAC,aAAuB;SACjC;QACD,IAAI,IAAI,CAAC,gBAAgB,KAAK,YAAY,CAAC,KAAK,EAAE;AAChD,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC;;AAG3B,QAAA,OAAO,OAAO;;AAGhB,IAAA,YAAY,CAAC,OAAwC,EAAA;AACnD,QAAA,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,GAAG,KAAK,GAAG,KAAK,SAAS,CAAC;;;IAKjD,cAAc,GAAA;QACZ,OAAO,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC;;IAG7C,eAAe,GAAA;AACb,QAAA,OAAO,wBAAwB,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;;;IAKvE,gBAAgB,GAAA;QACd,OAAO;AACL,YAAA,QAAQ,EAAE;AACR,gBAAA,KAAK,EAAE,CAAC,CAAgB,EAAE,CAAgB,KAAmB;AAC3D,oBAAA,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;AACb,wBAAA,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,4BAAA,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,CAAC;;wBAG5B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM;;oBAEvC,MAAM,OAAO,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAC3B,oBAAA,IAAI,CAAC,QAAQ,GAAG,OAAO;AACvB,oBAAA,OAAO,OAAO;iBACf;AACD,gBAAA,OAAO,EAAE,MAAM,EAAE;AAClB,aAAA;SACF;;IAGH,eAAe,GAAA;;QAEb,OAAO,IAAIA,QAAc,CAAmB;AAC1C,YAAA,KAAK,EAAE,IAAI,CAAC,KAAK,IAAI,EAAE;YACvB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,eAAe,EAAE,IAAI,CAAC,eAAe;YACrC,YAAY,EAAE,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,IAAI,CAAC;AAClD,SAAA,CAAC;;IAGJ,eAAe,GAAA;QACb,MAAM,cAAc,GAAG,iBAAiB,CAAC,IAAI,CAAC,QAAQ,CAAC;QACvD,MAAM,KAAK,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,aAAa,CAAC;AAEpD,QAAA,IAAI,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,YAAY,UAAU,IAAI,KAAK,YAAY,eAAe,CAAC,EAAE;YACpG,KAAK,CAAC,WAAW,GAAI,IAAI,CAAC,aAAuC,CAAC,WAAqB;YACvF,KAAK,CAAC,IAAI,GAAI,IAAI,CAAC,aAAuC,CAAC,IAAc;YACzE,KAAK,CAAC,gBAAgB,GAAI,IAAI,CAAC,aAAuC,CAAC,gBAA0B;YACjG,KAAK,CAAC,eAAe,GAAI,IAAI,CAAC,aAAuC,CAAC,eAAyB;YAC/F,KAAK,CAAC,CAAC,GAAI,IAAI,CAAC,aAAuC,CAAC,CAAW;;AAC9D,aAAA,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,QAAQ,IAAI,KAAK,YAAY,YAAY,EAAE;YAChF,KAAK,CAAC,WAAW,GAAI,IAAI,CAAC,aAAyC,CAAC,WAAqB;YACzF,KAAK,CAAC,IAAI,GAAI,IAAI,CAAC,aAAyC,CAAC,IAAc;YAC3E,KAAK,CAAC,IAAI,GAAI,IAAI,CAAC,aAAyC,CAAC,IAAc;YAC3E,KAAK,CAAC,WAAW,GAAI,IAAI,CAAC,aAAyC,CAAC,WAAqB;YACzF,KAAK,CAAC,gBAAgB,GAAI,IAAI,CAAC,aAAyC,CAAC,gBAA0B;YACnG,KAAK,CAAC,eAAe,GAAI,IAAI,CAAC,aAAyC,CAAC,eAAyB;YACjG,KAAK,CAAC,eAAe,GAAI,IAAI,CAAC,aAAyC,CAAC,eAAyB;;AAGnG,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;AAC1C,YAAA,OAAO,KAA4B;;QAGrC,OAAQ,KAA0B,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC;;AAE1D,IAAA,iBAAiB,CAAC,SAAmB,EAAE,KAAc,EAAE,SAAsB,EAAA;AAC3E,QAAA,IAAI,CAAC,UAAU,GAAG,sBAAsB,CAAC;YACvC,SAAS;YACT,KAAK;YACL,SAAS;AACV,SAAA,
CAAC;;AAGJ,IAAA,WAAW,CAAC,EACV,aAAa,GAAG,EAAE,EAClB,mBAAmB,GAIpB,EAAA;QACC,MAAM,cAAc,GAAG,iBAAiB,CAAC,IAAI,CAAC,QAAQ,CAAC;AACvD,QAAA,MAAM,QAAQ,GAAG,mBAAmB,GAAG,MAAM,CAAC,WAAW,CACvD,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,mBAAmB,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CACzF,GAAG,IAAI,CAAC,aAAa;QACtB,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE,aAAa,CAAC;AACtD,QAAA,OAAO,IAAI,cAAc,CAAC,OAAO,CAAC;;AAGpC,IAAA,kBAAkB,CAAC,YAA0B,EAAA;QAC3C,IAAI,YAAY,IAAI,gBAAgB,IAAI,YAAY,IAAI,YAAY,CAAC,cAAc,EAAE;AACnF,YAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC,cAAwC;;;IAI7E,eAAe,GAAA;AACb,QAAA,OAAO,OAAO,KAAuB,EAAE,MAAuB,KAAwC;YACpG,MAAM,EAAE,QAAQ,GAAG,EAAE,EAAE,GAAI,MAAM,EAAE,YAA0C,IAAI,EAAE;AACnF,YAAA,IAAI,CAAC,MAAM,IAAI,CAAC,QAAQ,EAAE;AACxB,gBAAA,MAAM,IAAI,KAAK,CAAC,CAAA,GAAA,EAAM,MAAM,GAAG,UAAU,GAAG,QAAQ,CAAA,SAAA,CAAW,CAAC;;AAElE,YAAA,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;AAClB,gBAAA,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM;;AAE7B,YAAA,IAAI,CAAC,MAAM,GAAG,MAAM;AACpB,YAAA,MAAM,EAAE,QAAQ,EAAE,GAAG,KAAK;YAE1B,IAAI,aAAa,GAAG,QAAQ;YAC5B,IAAI,CAAC,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,IAAI,IAAI,EAAE;gBAC3G,MAAM,uBAAuB,IAC3B,CAAC,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC;AAC1B,wBAAC,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,OAAO,IAAK,IAAI,CAAC,aAAwC,EAAE,KAAK,EAAE,QAAQ,CAAC,WAAW,CAAC,CAAC;AACpH,uBAAA,IAAI,CAAC,aAA0C,EAAE,QAAQ,IAAI,IAAI,CAAC;AAExE,gBAAA,IAAI,CAAC,aAAa,GAAG,mBAAmB,CAAC;oBACvC,kBAAkB,EAAE,IAAI,CAAC,kBAAkB;oBAC3C,SAAS,EAAE,IAAI,CAAC,gBAAgB;oBAChC,YAAY,EAAE,IAAI,CAAC,YAAY;oBAC/B,UAAU,EAAE,IAAI,CAAC,UAAU;AAC3B,oBAAA,eAAe,EAAE,uBAAuB;AACzC,iBAAA,CAAC;;AAEJ,YAAA,IAAI,IAAI,CAAC,aAAa,EAAE;gBACtB,MAAM,EAAE,OAAO,EAAE,kBAAkB,EAAE,GAAG,IAAI,CAAC,aAAa,CAAC;oBACzD,QAAQ;oBACR,aAAa,EAAE,IAAI,CAAC,YAAY;;AAEjC,iBAAA,CAAC;AACF,gBAAA,IAAI,CAAC,kBAAkB,GAAG,kBAAkB;gBAC5C,aAAa,GAAG,OAAO;;YAGzB,MAAM,aAAa,GAAG,aAAa;YACnC,MAAM,YAAY,GAAG,aAAa,CAAC,MAAM,IAAI,CAAC,GAAG,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,IAAI;YAC/F,MAAM,YAAY,GAAG,aAAa,CAAC,MAAM,IAAI,CAAC,GAAG,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,IAAI;AAE/F,YAAA,IACE,QAAQ,KAAK,SAAS,CAAC;AACpB,mBAAA,YAAY,YAAY;AACxB,mBAAA,YAAY,YAAY;AACxB,mBAAA,OAAO,YAAY,CAAC,OAAO,KAAK,QAAQ,EAC3C;gBACA,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,OAAO,GAAG,EAAE;;AAGtD,YAAA,MAAM,mBAAmB,GAAG,YAAY,YAAY,WAAW;YAE/D,IAAI,mBAAmB,IAAI,QAAQ,KAAK,SAAS,CAAC,SAAS,EAAE;gBAC3D,8BAA8B,CAAC,aAAa,CAAC;;AACxC,iBAAA,IACL,mBAAmB;iBAClB,YAAY,CAAC,QAAQ,CAAC,IAAI,YAAY,CAAC,QAAQ,CAAC,CAAC,EAClD;gBACA,qBAAqB,CAAC,aAAa,CAAC;;AAGtC,YAAA,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,EAAE;gBAC5D,MAAM,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,cAAc;AAC1D,gBAAA,IAAI,iBAAiB,GAAG,IAAI,CAAC,YAAY,EAAE;AACzC,oBAAA,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,YAAY,GAAG,iBAAiB,IAAI,IAAI,CAAC,GAAG,IAAI;AACnF,oBAAA,MAAM,KAAK,CAAC,UAAU,CAAC;;;AAI3B,YAAA,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,GAAG,EAAE;AAEhC,YAAA,IAAI,MAAiC;AACrC,YAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,IAAI,CAAC,IAAI,CAAC,IAAI,yBAAyB,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE;AAC5E,gBAAA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,aAAa,EAAE,MAAM,CAAC;AAClE,gBAAA,IAAI,UAAsC;AAC1C,gBAAA,WAAW,MAAM,KAAK,IAAI,MAAM,EAAE;oBAChC,mBAAmB,CAAC,WAAW,CAAC,iBAAiB,EAAE,EAAE,KAAK,EAAE,EAAE,MAAM,CAAC;oBACrE,IAAI,CAAC,UAAU,EAAE;wBACf,UAAU,GAAG,KAAK;;yBACb;AACL,wBAAA,UAAU,GAAG,MAAM,CAAC,UAAU,EAAE,KAAK,CAAC;;;gBAI1C,UAAU,GAAG,qBAAqB,CAAC,IAAI,CAAC,QAAQ,EAAE,UAAU,CAAC;gBAC7D,MAAM,GAAG,EAAE,QAAQ,EAAE,CAAC,UAA4B,CAAC,EAAE;;iBAChD;AACL,gBAAA,MAAM,YAAY,IAAI,MAAM,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,aAAa,EAAE,MAAM,CAAC,CAAmB;AAC5F,gBAAA,IAA
I,CAAC,YAAY,CAAC,UAAU,EAAE,MAAM,IAAI,CAAC,IAAI,CAAC,EAAE;AAC9C,oBAAA,YAAY,CAAC,UAAU,GAAG,YAAY,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC,SAAS,KAAI;AACtE,wBAAA,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE;AACnB,4BAAA,OAAO,KAAK;;AAEd,wBAAA,OAAO,IAAI;AACb,qBAAC,CAAC;;gBAEJ,MAAM,GAAG,EAAE,QAAQ,EAAE,CAAC,YAAY,CAAC,EAAE;;YAGvC,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,QAAQ,GAAG,CAAC,CAAC,CAAC;AAC9C,YAAA,OAAO,MAAM;AACf,SAAC;;IAGH,cAAc,GAAA;AACZ,QAAA,MAAM,YAAY,GAAG,CAAC,KAAuB,EAAE,MAAuB,KAAY;AAChF,YAAA,IAAI,CAAC,MAAM,GAAG,MAAM;;;;;;AAMpB,YAAA,OAAO,cAAc,CAAC,KAAK,CAAC;AAC9B,SAAC;AAED,QAAA,MAAM,QAAQ,GAAG,IAAI,UAAU,CAAmB;YAChD,QAAQ,EAAE,IAAI,CAAC,UAAU;SAC1B;AACE,aAAA,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,eAAe,EAAE;AACrC,aAAA,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,eAAe,EAAE;AACrC,aAAA,OAAO,CAAC,KAAK,EAAE,KAAK;AACpB,aAAA,mBAAmB,CAAC,KAAK,EAAE,YAAY;AACvC,aAAA,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,GAAG,GAAG,GAAG,KAAK,CAAC;AAE7C,QAAA,OAAO,QAAQ,CAAC,OAAO,EAAE;;;AAK3B;;AAEG;IACH,eAAe,CAAC,OAAe,EAAE,WAA0B,EAAA;AACzD,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAGvC,QAAA,MAAM,CAAC,MAAM,EAAE,SAAS,CAAC,GAAG,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC;AACxD,QAAA,IAAI,WAAW,CAAC,IAAI,KAAK,SAAS,CAAC,UAAU,IAAI,WAAW,CAAC,UAAU,EAAE;AACvE,YAAA,KAAK,MAAM,SAAS,IAAI,WAAW,CAAC,UAAU,EAAE;AAC9C,gBAAA,MAAM,UAAU,GAAG,SAAS,CAAC,EAAE,IAAI,EAAE;AACrC,gBAAA,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE;oBACvD;;gBAEF,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,UAAU,EAAE,MAAM,CAAC;;;AAIhD,QAAA,MAAM,OAAO,GAAc;YACzB,SAAS;AACT,YAAA,EAAE,EAAE,MAAM;YACV,IAAI,EAAE,WAAW,CAAC,IAAI;AACtB,YAAA,KAAK,EAAE,IAAI,CAAC,WAAW,CAAC,MAAM;YAC9B,WAAW;AACX,YAAA,KAAK,EAAE,IAAI;SACZ;AAED,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,IAAI,EAAE;QAC9B,IAAI,KAAK,EAAE;AACT,YAAA,OAAO,CAAC,KAAK,GAAG,KAAK;;AAGvB,QAAA,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC;QAC9B,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,CAAC;QAC/C,mBAAmB,CAAC,WAAW,CAAC,WAAW,EAAE,OAAO,EAAE,IAAI,CAAC,MAAM,CAAC;AAClE,QAAA,OAAO,MAAM;;IAGf,uBAAuB,CAAC,IAAmB,EAAE,QAAkC,EAAA;AAC7E,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAGvC,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB;;AAGF,QAAA,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,IAAI;AAC9B,QAAA,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM;AAC/B,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE;QAC3D,IAAI,CAAC,MAAM,EAAE;AACX,YAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,YAAY,CAAA,CAAE,CAAC;;QAGrE,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;QACvC,IAAI,CAAC,OAAO,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,gCAAgC,MAAM,CAAA,CAAE,CAAC;;AAG3D,QAAA,MAAM,IAAI,GAAG,OAAO,KAAK,KAAK,QAAQ,GAAG,KAAK,GAAG,KAAK,CAAC,KAAK;AAC5D,QAAA,MAAM,SAAS,GAAG;AAChB,YAAA,IAAI,EAAE,OAAO,IAAI,KAAK,QAAQ,GAAG,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;AAC5D,YAAA,IAAI,EAAE,MAAM,CAAC,IAAI,IAAI,EAAE;YACvB,EAAE,EAAE,MAAM,CAAC,YAAY;AACvB,YAAA,MAAM,EAAE,OAAO,MAAM,CAAC,OAAO,KAAK;kBAC9B,MAAM,CAAC;kBACP,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,CAAC;AAClC,YAAA,QAAQ,EAAE,CAAC;SACZ;AAED,QAAA,IAAI,CAAC,eAAe,EAAE,UAAU,CAAC,WAAW,CAAC,qBAAqB,CAAC,EAAE,MAAM,CACzE,WAAW,CAAC,qBAAqB,EACjC,EAAE,MAAM,EAAE;AACR,gBAAA,EAAE,EAAE,MAAM;gBACV,KAAK,EAAE,OAAO,CAAC,KAAK;AACpB,gBAAA,IAAI,EAAE,WAAW;gBACjB;AACsB,aAAA;AACvB,SAAA,EACD,QAAQ,EACR,IAAI,CACL;;IAEH,mBAAmB,CAAC,IAAqB,EAAE,QAAkC,EAAA;AAC3E,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAGvC,QAAA,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE;AACZ,YAAA,OAAO,CAAC,IAAI,CAAC,oCAAoC,CAAC;YAClD;;AAGF,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE;QACtD,IAAI,CAAC,MAAM,EAAE;YACX,MAAM,IAAI,KAAK,CAAC,CAAA,iCAAA,EAAoC,IAAI,CAAC,EAAE,CAAE,CAAA,CAAC;;QAGhE,MAAM,EAA
E,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,IAAI;QAEzC,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;QACvC,IAAI,CAAC,OAAO,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,gCAAgC,MAAM,CAAA,CAAE,CAAC;;AAG3D,QAAA,MAAM,SAAS,GAAwB;YACrC,EAAE,EAAE,IAAI,CAAC,EAAE;YACX,IAAI,EAAE,IAAI,IAAI,EAAE;AAChB,YAAA,IAAI,EAAE,OAAO,IAAI,KAAK,QAAQ,GAAG,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;AAC5D,YAAA,MAAM,EAAE,CAAwB,qBAAA,EAAA,KAAK,EAAE,OAAO,GAAG,CAAK,EAAA,EAAA,KAAK,CAAC,OAAO,CAAA,CAAE,GAAG,EAAE,CAAE,CAAA;AAC5E,YAAA,QAAQ,EAAE,CAAC;SACZ;AAED,QAAA,IAAI,CAAC,eAAe,EAAE,UAAU,CAAC,WAAW,CAAC,qBAAqB,CAAC,EAAE,MAAM,CACzE,WAAW,CAAC,qBAAqB,EACjC,EAAE,MAAM,EAAE;AACR,gBAAA,EAAE,EAAE,MAAM;gBACV,KAAK,EAAE,OAAO,CAAC,KAAK;AACpB,gBAAA,IAAI,EAAE,WAAW;gBACjB;AACsB,aAAA;AACvB,SAAA,EACD,QAAQ,EACR,IAAI,CACL;;IAGH,oBAAoB,CAAC,EAAU,EAAE,KAAsB,EAAA;AACrD,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;aAChC,IAAI,CAAC,EAAE,EAAE;AACd,YAAA,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC;;AAErC,QAAA,MAAM,YAAY,GAAwB;YACxC,EAAE;YACF,KAAK;SACN;QACD,mBAAmB,CAAC,WAAW,CAAC,iBAAiB,EAAE,YAAY,EAAE,IAAI,CAAC,MAAM,CAAC;;IAG/E,oBAAoB,CAAC,EAAU,EAAE,KAAqB,EAAA;AACpD,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAEvC,QAAA,MAAM,YAAY,GAAwB;YACxC,EAAE;YACF,KAAK;SACN;QACD,mBAAmB,CAAC,WAAW,CAAC,gBAAgB,EAAE,YAAY,EAAE,IAAI,CAAC,MAAM,CAAC;;AAG9E,IAAA,sBAAsB,GAAG,CAAC,MAAc,EAAE,KAAuB,KAAU;AACzE,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAEvC,QAAA,MAAM,cAAc,GAA0B;AAC5C,YAAA,EAAE,EAAE,MAAM;YACV,KAAK;SACN;QACD,mBAAmB,CAAC,WAAW,CAAC,kBAAkB,EAAE,cAAc,EAAE,IAAI,CAAC,MAAM,CAAC;AAClF,KAAC;AACF;;;;"}
@@ -1,3 +1,5 @@
+ import { AIMessage } from '@langchain/core/messages';
+
  /**
  * Calculates the total tokens from a single usage object
  *
@@ -23,7 +25,7 @@ function calculateTotalTokens(usage) {
  * @param options Configuration options for processing messages
  * @returns Object containing the message context, remaining tokens, messages not included, and summary index
  */
- function getMessagesWithinTokenLimit({ messages: _messages, maxContextTokens, indexTokenCountMap, startOnMessageType, }) {
+ function getMessagesWithinTokenLimit({ messages: _messages, maxContextTokens, indexTokenCountMap, startOnMessageType, thinkingEnabled, tokenCounter, }) {
  // Every reply is primed with <|start|>assistant<|message|>, so we
  // start with 3 tokens for the label after all messages have been counted.
  let summaryIndex = -1;
@@ -53,12 +55,91 @@ function getMessagesWithinTokenLimit({ messages: _messages, maxContextTokens, in
  break;
  }
  }
+ // Handle startOnMessageType requirement
  if (startOnMessageType && context.length > 0) {
  const requiredTypeIndex = context.findIndex(msg => msg.getType() === startOnMessageType);
  if (requiredTypeIndex > 0) {
  context = context.slice(requiredTypeIndex);
  }
  }
+ // Handle thinking mode requirement for Anthropic
+ if (thinkingEnabled && context.length > 0 && tokenCounter) {
+ // Process only if we have an assistant message in the context
+ const firstAssistantIndex = context.findIndex(msg => msg.getType() === 'ai');
+ if (firstAssistantIndex >= 0) {
+ const firstAssistantMsg = context[firstAssistantIndex];
+ // Check if the first assistant message already has a thinking block
+ const hasThinkingBlock = Array.isArray(firstAssistantMsg.content) &&
+ firstAssistantMsg.content.some(item => item && typeof item === 'object' && item.type === 'thinking');
+ // Only proceed if we need to add thinking blocks
+ if (!hasThinkingBlock) {
+ // Collect thinking blocks from pruned assistant messages
+ const thinkingBlocks = [];
+ // Look through pruned messages for thinking blocks
+ for (const msg of messages) {
+ if (msg.getType() === 'ai' && Array.isArray(msg.content)) {
+ for (const item of msg.content) {
+ if (item && typeof item === 'object' && item.type === 'thinking') {
+ thinkingBlocks.push(item);
+ // We only need one thinking block
+ break;
+ }
+ }
+ if (thinkingBlocks.length > 0)
+ break; // Stop after finding one thinking block
+ }
+ }
+ // If we found thinking blocks, add them to the first assistant message
+ if (thinkingBlocks.length > 0) {
+ // Calculate token count of original message
+ const originalTokenCount = tokenCounter(firstAssistantMsg);
+ // Create a new content array with thinking blocks at the beginning
+ let newContent;
+ if (Array.isArray(firstAssistantMsg.content)) {
+ // Keep the original content (excluding any existing thinking blocks)
+ const originalContent = firstAssistantMsg.content.filter(item => !(item && typeof item === 'object' && item.type === 'thinking'));
+ newContent = [...thinkingBlocks, ...originalContent];
+ }
+ else if (typeof firstAssistantMsg.content === 'string') {
+ newContent = [
+ ...thinkingBlocks,
+ { type: 'text', text: firstAssistantMsg.content }
+ ];
+ }
+ else {
+ newContent = thinkingBlocks;
+ }
+ // Create a new message with the updated content
+ const newMessage = new AIMessage({
+ content: newContent,
+ additional_kwargs: firstAssistantMsg.additional_kwargs,
+ response_metadata: firstAssistantMsg.response_metadata,
+ });
+ // Calculate token count of new message
+ const newTokenCount = tokenCounter(newMessage);
+ // Adjust current token count
+ currentTokenCount += (newTokenCount - originalTokenCount);
+ // Replace the first assistant message
+ context[firstAssistantIndex] = newMessage;
+ // If we've exceeded the token limit, we need to prune more messages
+ if (currentTokenCount > remainingContextTokens) {
+ // Remove messages from the end of the context until we're under the token limit
+ // But make sure to keep the first assistant message with thinking block
+ let i = context.length - 1;
+ while (i > firstAssistantIndex && currentTokenCount > remainingContextTokens) {
+ const msgToRemove = context[i];
+ const msgTokenCount = tokenCounter(msgToRemove);
+ context.splice(i, 1);
+ currentTokenCount -= msgTokenCount;
+ i--;
+ }
+ // Update remainingContextTokens to reflect the new token count
+ remainingContextTokens = maxContextTokens - currentTokenCount;
+ }
+ }
+ }
+ }
+ }
  }
  if (instructions && _messages.length > 0) {
  context.push(_messages[0]);
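The block added above changes the pruned context only when thinking is enabled, a token counter is available, and the first surviving assistant message has no 'thinking' content block: one block is copied forward from the pruned history, the message is rebuilt, and the token budget is re-checked. A minimal sketch of the message shapes involved, with invented content (real Anthropic thinking blocks also carry provider metadata such as a signature):

import { AIMessage } from '@langchain/core/messages';

// An older assistant turn that was pruned out of the context but still holds a thinking block.
const prunedTurn = new AIMessage({
  content: [
    { type: 'thinking', thinking: 'plan the reply step by step...' },
    { type: 'text', text: 'First reply' },
  ],
});

// The first assistant message that survived pruning has plain string content...
const survivor = new AIMessage({ content: 'Second reply' });

// ...so the pruner rebuilds it with the recovered thinking block prepended,
// keeping the survivor's additional_kwargs and response_metadata.
const rebuilt = new AIMessage({
  content: [
    { type: 'thinking', thinking: 'plan the reply step by step...' },
    { type: 'text', text: 'Second reply' },
  ],
  additional_kwargs: survivor.additional_kwargs,
  response_metadata: survivor.response_metadata,
});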
@@ -115,11 +196,14 @@ function createPruneMessages(factoryParams) {
  if (totalTokens <= factoryParams.maxTokens) {
  return { context: params.messages, indexTokenCountMap };
  }
+ // Pass the tokenCounter to getMessagesWithinTokenLimit for token recalculation
  const { context } = getMessagesWithinTokenLimit({
  maxContextTokens: factoryParams.maxTokens,
  messages: params.messages,
  indexTokenCountMap,
  startOnMessageType: params.startOnMessageType,
+ thinkingEnabled: factoryParams.thinkingEnabled,
+ tokenCounter: factoryParams.tokenCounter,
  });
  return { context, indexTokenCountMap };
  };
@@ -1 +1 @@
- {"version":3,"file":"prune.mjs","sources":["../../../src/messages/prune.ts"],"sourcesContent":["import type { BaseMessage, UsageMetadata } from '@langchain/core/messages';\nimport type { TokenCounter } from '@/types/run';\nexport type PruneMessagesFactoryParams = {\n maxTokens: number;\n startIndex: number;\n tokenCounter: TokenCounter;\n indexTokenCountMap: Record<string, number>;\n};\nexport type PruneMessagesParams = {\n messages: BaseMessage[];\n usageMetadata?: Partial<UsageMetadata>;\n startOnMessageType?: ReturnType<BaseMessage['getType']>;\n}\n\n/**\n * Calculates the total tokens from a single usage object\n * \n * @param usage The usage metadata object containing token information\n * @returns An object containing the total input and output tokens\n */\nfunction calculateTotalTokens(usage: Partial<UsageMetadata>): UsageMetadata {\n const baseInputTokens = Number(usage.input_tokens) || 0;\n const cacheCreation = Number(usage.input_token_details?.cache_creation) || 0;\n const cacheRead = Number(usage.input_token_details?.cache_read) || 0;\n \n const totalInputTokens = baseInputTokens + cacheCreation + cacheRead;\n const totalOutputTokens = Number(usage.output_tokens) || 0;\n\n return {\n input_tokens: totalInputTokens,\n output_tokens: totalOutputTokens,\n total_tokens: totalInputTokens + totalOutputTokens\n };\n}\n\n/**\n * Processes an array of messages and returns a context of messages that fit within a specified token limit.\n * It iterates over the messages from newest to oldest, adding them to the context until the token limit is reached.\n * \n * @param options Configuration options for processing messages\n * @returns Object containing the message context, remaining tokens, messages not included, and summary index\n */\nfunction getMessagesWithinTokenLimit({\n messages: _messages,\n maxContextTokens,\n indexTokenCountMap,\n startOnMessageType,\n}: {\n messages: BaseMessage[];\n maxContextTokens: number;\n indexTokenCountMap: Record<string, number>;\n startOnMessageType?: string;\n}): {\n context: BaseMessage[];\n remainingContextTokens: number;\n messagesToRefine: BaseMessage[];\n summaryIndex: number;\n} {\n // Every reply is primed with <|start|>assistant<|message|>, so we\n // start with 3 tokens for the label after all messages have been counted.\n let summaryIndex = -1;\n let currentTokenCount = 3;\n const instructions = _messages?.[0]?.getType() === 'system' ? _messages[0] : undefined;\n const instructionsTokenCount = instructions != null ? 
indexTokenCountMap[0] : 0;\n let remainingContextTokens = maxContextTokens - instructionsTokenCount;\n const messages = [..._messages];\n let context: BaseMessage[] = [];\n\n if (currentTokenCount < remainingContextTokens) {\n let currentIndex = messages.length;\n while (messages.length > 0 && currentTokenCount < remainingContextTokens && currentIndex > 1) {\n currentIndex--;\n if (messages.length === 1 && instructions) {\n break;\n }\n const poppedMessage = messages.pop();\n if (!poppedMessage) continue;\n \n const tokenCount = indexTokenCountMap[currentIndex] || 0;\n\n if ((currentTokenCount + tokenCount) <= remainingContextTokens) {\n context.push(poppedMessage);\n currentTokenCount += tokenCount;\n } else {\n messages.push(poppedMessage);\n break;\n }\n }\n \n if (startOnMessageType && context.length > 0) {\n const requiredTypeIndex = context.findIndex(msg => msg.getType() === startOnMessageType);\n \n if (requiredTypeIndex > 0) {\n context = context.slice(requiredTypeIndex);\n }\n }\n }\n\n if (instructions && _messages.length > 0) {\n context.push(_messages[0] as BaseMessage);\n messages.shift();\n }\n\n const prunedMemory = messages;\n summaryIndex = prunedMemory.length - 1;\n remainingContextTokens -= currentTokenCount;\n\n return {\n summaryIndex,\n remainingContextTokens,\n context: context.reverse(),\n messagesToRefine: prunedMemory,\n };\n}\n\nfunction checkValidNumber(value: unknown): value is number {\n return typeof value === 'number' && !isNaN(value) && value > 0;\n}\n\nexport function createPruneMessages(factoryParams: PruneMessagesFactoryParams) {\n const indexTokenCountMap = { ...factoryParams.indexTokenCountMap };\n let lastTurnStartIndex = factoryParams.startIndex;\n let totalTokens = (Object.values(indexTokenCountMap)).reduce((a, b) => a + b, 0);\n return function pruneMessages(params: PruneMessagesParams): {\n context: BaseMessage[];\n indexTokenCountMap: Record<string, number>;\n } {\n let currentUsage: UsageMetadata | undefined;\n if (params.usageMetadata && (\n checkValidNumber(params.usageMetadata.input_tokens)\n || (\n checkValidNumber(params.usageMetadata.input_token_details)\n && (\n checkValidNumber(params.usageMetadata.input_token_details.cache_creation)\n || checkValidNumber(params.usageMetadata.input_token_details.cache_read)\n )\n )\n ) && checkValidNumber(params.usageMetadata.output_tokens)) {\n currentUsage = calculateTotalTokens(params.usageMetadata);\n totalTokens = currentUsage.total_tokens;\n }\n\n for (let i = lastTurnStartIndex; i < params.messages.length; i++) {\n const message = params.messages[i];\n if (i === lastTurnStartIndex && indexTokenCountMap[i] === undefined && currentUsage) {\n indexTokenCountMap[i] = currentUsage.output_tokens;\n } else if (indexTokenCountMap[i] === undefined) {\n indexTokenCountMap[i] = factoryParams.tokenCounter(message);\n totalTokens += indexTokenCountMap[i];\n }\n }\n\n // If `currentUsage` is defined, we need to distribute the current total tokensto our `indexTokenCountMap`,\n // for all message index keys before `lastTurnStartIndex`, as it has the most accurate count for those messages.\n // We must distribute it in a weighted manner, so that the total token count is equal to `currentUsage.total_tokens`,\n // relative the manually counted tokens in `indexTokenCountMap`.\n if (currentUsage) {\n const totalIndexTokens = Object.values(indexTokenCountMap).reduce((a, b) => a + b, 0);\n const ratio = currentUsage.total_tokens / totalIndexTokens;\n for (const key in indexTokenCountMap) {\n indexTokenCountMap[key] = 
Math.round(indexTokenCountMap[key] * ratio);\n }\n }\n\n lastTurnStartIndex = params.messages.length;\n if (totalTokens <= factoryParams.maxTokens) {\n return { context: params.messages, indexTokenCountMap };\n }\n\n const { context } = getMessagesWithinTokenLimit({\n maxContextTokens: factoryParams.maxTokens,\n messages: params.messages,\n indexTokenCountMap,\n startOnMessageType: params.startOnMessageType,\n });\n\n return { context, indexTokenCountMap };\n }\n}\n"],"names":[],"mappings":"AAcA;;;;;AAKG;AACH,SAAS,oBAAoB,CAAC,KAA6B,EAAA;IACzD,MAAM,eAAe,GAAG,MAAM,CAAC,KAAK,CAAC,YAAY,CAAC,IAAI,CAAC;AACvD,IAAA,MAAM,aAAa,GAAG,MAAM,CAAC,KAAK,CAAC,mBAAmB,EAAE,cAAc,CAAC,IAAI,CAAC;AAC5E,IAAA,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC,mBAAmB,EAAE,UAAU,CAAC,IAAI,CAAC;AAEpE,IAAA,MAAM,gBAAgB,GAAG,eAAe,GAAG,aAAa,GAAG,SAAS;IACpE,MAAM,iBAAiB,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC;IAE1D,OAAO;AACL,QAAA,YAAY,EAAE,gBAAgB;AAC9B,QAAA,aAAa,EAAE,iBAAiB;QAChC,YAAY,EAAE,gBAAgB,GAAG;KAClC;AACH;AAEA;;;;;;AAMG;AACH,SAAS,2BAA2B,CAAC,EACnC,QAAQ,EAAE,SAAS,EACnB,gBAAgB,EAChB,kBAAkB,EAClB,kBAAkB,GAMnB,EAAA;;;AAQC,IAAA,IAAI,YAAY,GAAG,EAAE;IACrB,IAAI,iBAAiB,GAAG,CAAC;IACzB,MAAM,YAAY,GAAG,SAAS,GAAG,CAAC,CAAC,EAAE,OAAO,EAAE,KAAK,QAAQ,GAAG,SAAS,CAAC,CAAC,CAAC,GAAG,SAAS;AACtF,IAAA,MAAM,sBAAsB,GAAG,YAAY,IAAI,IAAI,GAAG,kBAAkB,CAAC,CAAC,CAAC,GAAG,CAAC;AAC/E,IAAA,IAAI,sBAAsB,GAAG,gBAAgB,GAAG,sBAAsB;AACtE,IAAA,MAAM,QAAQ,GAAG,CAAC,GAAG,SAAS,CAAC;IAC/B,IAAI,OAAO,GAAkB,EAAE;AAE/B,IAAA,IAAI,iBAAiB,GAAG,sBAAsB,EAAE;AAC9C,QAAA,IAAI,YAAY,GAAG,QAAQ,CAAC,MAAM;AAClC,QAAA,OAAO,QAAQ,CAAC,MAAM,GAAG,CAAC,IAAI,iBAAiB,GAAG,sBAAsB,IAAI,YAAY,GAAG,CAAC,EAAE;AAC5F,YAAA,YAAY,EAAE;YACd,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,IAAI,YAAY,EAAE;gBACzC;;AAEF,YAAA,MAAM,aAAa,GAAG,QAAQ,CAAC,GAAG,EAAE;AACpC,YAAA,IAAI,CAAC,aAAa;gBAAE;YAEpB,MAAM,UAAU,GAAG,kBAAkB,CAAC,YAAY,CAAC,IAAI,CAAC;YAExD,IAAI,CAAC,iBAAiB,GAAG,UAAU,KAAK,sBAAsB,EAAE;AAC9D,gBAAA,OAAO,CAAC,IAAI,CAAC,aAAa,CAAC;gBAC3B,iBAAiB,IAAI,UAAU;;iBAC1B;AACL,gBAAA,QAAQ,CAAC,IAAI,CAAC,aAAa,CAAC;gBAC5B;;;QAIJ,IAAI,kBAAkB,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE;AAC5C,YAAA,MAAM,iBAAiB,GAAG,OAAO,CAAC,SAAS,CAAC,GAAG,IAAI,GAAG,CAAC,OAAO,EAAE,KAAK,kBAAkB,CAAC;AAExF,YAAA,IAAI,iBAAiB,GAAG,CAAC,EAAE;AACzB,gBAAA,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC,iBAAiB,CAAC;;;;IAKhD,IAAI,YAAY,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;QACxC,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAgB,CAAC;QACzC,QAAQ,CAAC,KAAK,EAAE;;IAGlB,MAAM,YAAY,GAAG,QAAQ;AAC7B,IAAA,YAAY,GAAG,YAAY,CAAC,MAAM,GAAG,CAAC;IACtC,sBAAsB,IAAI,iBAAiB;IAE3C,OAAO;QACL,YAAY;QACZ,sBAAsB;AACtB,QAAA,OAAO,EAAE,OAAO,CAAC,OAAO,EAAE;AAC1B,QAAA,gBAAgB,EAAE,YAAY;KAC/B;AACH;AAEA,SAAS,gBAAgB,CAAC,KAAc,EAAA;AACtC,IAAA,OAAO,OAAO,KAAK,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,KAAK,GAAG,CAAC;AAChE;AAEM,SAAU,mBAAmB,CAAC,aAAyC,EAAA;IAC3E,MAAM,kBAAkB,GAAG,EAAE,GAAG,aAAa,CAAC,kBAAkB,EAAE;AAClE,IAAA,IAAI,kBAAkB,GAAG,aAAa,CAAC,UAAU;IACjD,IAAI,WAAW,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,kBAAkB,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;IAChF,OAAO,SAAS,aAAa,CAAC,MAA2B,EAAA;AAIvD,QAAA,IAAI,YAAuC;AAC3C,QAAA,IAAI,MAAM,CAAC,aAAa,KACtB,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,YAAY;AAC/C,gBACD,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,mBAAmB;oBAEvD,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,mBAAmB,CAAC,cAAc;uBACrE,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,mBAAmB,CAAC,UAAU,CAAC,CACzE,CACF,CACF,IAAI,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,EAAE;AACzD,YAAA,YAAY,GAAG,oBAAoB,CAAC,MAAM,CAAC,aAAa,CAAC;AACzD,YAAA,WAAW,GAAG,YAAY,CAAC,YAAY;;AAGzC,QAAA,KAAK,IAAI,CAAC,GAAG,kBAAkB,EAAE,CAAC,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAChE,MAAM,OAAO,GAAG,MAAM,CAA
C,QAAQ,CAAC,CAAC,CAAC;AAClC,YAAA,IAAI,CAAC,KAAK,kBAAkB,IAAI,kBAAkB,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,YAAY,EAAE;AACnF,gBAAA,kBAAkB,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,aAAa;;AAC7C,iBAAA,IAAI,kBAAkB,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE;gBAC9C,kBAAkB,CAAC,CAAC,CAAC,GAAG,aAAa,CAAC,YAAY,CAAC,OAAO,CAAC;AAC3D,gBAAA,WAAW,IAAI,kBAAkB,CAAC,CAAC,CAAC;;;;;;;QAQxC,IAAI,YAAY,EAAE;YAChB,MAAM,gBAAgB,GAAG,MAAM,CAAC,MAAM,CAAC,kBAAkB,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;AACrF,YAAA,MAAM,KAAK,GAAG,YAAY,CAAC,YAAY,GAAG,gBAAgB;AAC1D,YAAA,KAAK,MAAM,GAAG,IAAI,kBAAkB,EAAE;AACpC,gBAAA,kBAAkB,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,kBAAkB,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;;;AAIzE,QAAA,kBAAkB,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM;AAC3C,QAAA,IAAI,WAAW,IAAI,aAAa,CAAC,SAAS,EAAE;YAC1C,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,QAAQ,EAAE,kBAAkB,EAAE;;AAGzD,QAAA,MAAM,EAAE,OAAO,EAAE,GAAG,2BAA2B,CAAC;YAC9C,gBAAgB,EAAE,aAAa,CAAC,SAAS;YACzC,QAAQ,EAAE,MAAM,CAAC,QAAQ;YACzB,kBAAkB;YAClB,kBAAkB,EAAE,MAAM,CAAC,kBAAkB;AAC9C,SAAA,CAAC;AAEF,QAAA,OAAO,EAAE,OAAO,EAAE,kBAAkB,EAAE;AACxC,KAAC;AACH;;;;"}
+ {"version":3,"file":"prune.mjs","sources":["../../../src/messages/prune.ts"],"sourcesContent":["import { AIMessage } from '@langchain/core/messages';\nimport type { BaseMessage, UsageMetadata } from '@langchain/core/messages';\nimport type { TokenCounter } from '@/types/run';\nexport type PruneMessagesFactoryParams = {\n maxTokens: number;\n startIndex: number;\n tokenCounter: TokenCounter;\n indexTokenCountMap: Record<string, number>;\n thinkingEnabled?: boolean;\n};\nexport type PruneMessagesParams = {\n messages: BaseMessage[];\n usageMetadata?: Partial<UsageMetadata>;\n startOnMessageType?: ReturnType<BaseMessage['getType']>;\n}\n\n/**\n * Calculates the total tokens from a single usage object\n * \n * @param usage The usage metadata object containing token information\n * @returns An object containing the total input and output tokens\n */\nfunction calculateTotalTokens(usage: Partial<UsageMetadata>): UsageMetadata {\n const baseInputTokens = Number(usage.input_tokens) || 0;\n const cacheCreation = Number(usage.input_token_details?.cache_creation) || 0;\n const cacheRead = Number(usage.input_token_details?.cache_read) || 0;\n \n const totalInputTokens = baseInputTokens + cacheCreation + cacheRead;\n const totalOutputTokens = Number(usage.output_tokens) || 0;\n\n return {\n input_tokens: totalInputTokens,\n output_tokens: totalOutputTokens,\n total_tokens: totalInputTokens + totalOutputTokens\n };\n}\n\n/**\n * Processes an array of messages and returns a context of messages that fit within a specified token limit.\n * It iterates over the messages from newest to oldest, adding them to the context until the token limit is reached.\n * \n * @param options Configuration options for processing messages\n * @returns Object containing the message context, remaining tokens, messages not included, and summary index\n */\nfunction getMessagesWithinTokenLimit({\n messages: _messages,\n maxContextTokens,\n indexTokenCountMap,\n startOnMessageType,\n thinkingEnabled,\n tokenCounter,\n}: {\n messages: BaseMessage[];\n maxContextTokens: number;\n indexTokenCountMap: Record<string, number>;\n startOnMessageType?: string;\n thinkingEnabled?: boolean;\n tokenCounter?: TokenCounter;\n}): {\n context: BaseMessage[];\n remainingContextTokens: number;\n messagesToRefine: BaseMessage[];\n summaryIndex: number;\n} {\n // Every reply is primed with <|start|>assistant<|message|>, so we\n // start with 3 tokens for the label after all messages have been counted.\n let summaryIndex = -1;\n let currentTokenCount = 3;\n const instructions = _messages?.[0]?.getType() === 'system' ? _messages[0] : undefined;\n const instructionsTokenCount = instructions != null ? 
indexTokenCountMap[0] : 0;\n let remainingContextTokens = maxContextTokens - instructionsTokenCount;\n const messages = [..._messages];\n let context: BaseMessage[] = [];\n\n if (currentTokenCount < remainingContextTokens) {\n let currentIndex = messages.length;\n while (messages.length > 0 && currentTokenCount < remainingContextTokens && currentIndex > 1) {\n currentIndex--;\n if (messages.length === 1 && instructions) {\n break;\n }\n const poppedMessage = messages.pop();\n if (!poppedMessage) continue;\n \n const tokenCount = indexTokenCountMap[currentIndex] || 0;\n\n if ((currentTokenCount + tokenCount) <= remainingContextTokens) {\n context.push(poppedMessage);\n currentTokenCount += tokenCount;\n } else {\n messages.push(poppedMessage);\n break;\n }\n }\n \n // Handle startOnMessageType requirement\n if (startOnMessageType && context.length > 0) {\n const requiredTypeIndex = context.findIndex(msg => msg.getType() === startOnMessageType);\n \n if (requiredTypeIndex > 0) {\n context = context.slice(requiredTypeIndex);\n }\n }\n \n // Handle thinking mode requirement for Anthropic\n if (thinkingEnabled && context.length > 0 && tokenCounter) {\n // Process only if we have an assistant message in the context\n const firstAssistantIndex = context.findIndex(msg => msg.getType() === 'ai');\n if (firstAssistantIndex >= 0) {\n const firstAssistantMsg = context[firstAssistantIndex];\n \n // Check if the first assistant message already has a thinking block\n const hasThinkingBlock = Array.isArray(firstAssistantMsg.content) && \n firstAssistantMsg.content.some(item => \n item && typeof item === 'object' && item.type === 'thinking');\n \n // Only proceed if we need to add thinking blocks\n if (!hasThinkingBlock) {\n // Collect thinking blocks from pruned assistant messages\n const thinkingBlocks: any[] = [];\n \n // Look through pruned messages for thinking blocks\n for (const msg of messages) {\n if (msg.getType() === 'ai' && Array.isArray(msg.content)) {\n for (const item of msg.content) {\n if (item && typeof item === 'object' && item.type === 'thinking') {\n thinkingBlocks.push(item);\n // We only need one thinking block\n break;\n }\n }\n if (thinkingBlocks.length > 0) break; // Stop after finding one thinking block\n }\n }\n \n // If we found thinking blocks, add them to the first assistant message\n if (thinkingBlocks.length > 0) {\n // Calculate token count of original message\n const originalTokenCount = tokenCounter(firstAssistantMsg);\n \n // Create a new content array with thinking blocks at the beginning\n let newContent: any[];\n \n if (Array.isArray(firstAssistantMsg.content)) {\n // Keep the original content (excluding any existing thinking blocks)\n const originalContent = firstAssistantMsg.content.filter(item => \n !(item && typeof item === 'object' && item.type === 'thinking'));\n \n newContent = [...thinkingBlocks, ...originalContent];\n } else if (typeof firstAssistantMsg.content === 'string') {\n newContent = [\n ...thinkingBlocks,\n { type: 'text', text: firstAssistantMsg.content }\n ];\n } else {\n newContent = thinkingBlocks;\n }\n \n // Create a new message with the updated content\n const newMessage = new AIMessage({\n content: newContent,\n additional_kwargs: firstAssistantMsg.additional_kwargs,\n response_metadata: firstAssistantMsg.response_metadata,\n });\n \n // Calculate token count of new message\n const newTokenCount = tokenCounter(newMessage);\n \n // Adjust current token count\n currentTokenCount += (newTokenCount - originalTokenCount);\n \n // Replace the 
first assistant message\n context[firstAssistantIndex] = newMessage;\n \n // If we've exceeded the token limit, we need to prune more messages\n if (currentTokenCount > remainingContextTokens) {\n // Remove messages from the end of the context until we're under the token limit\n // But make sure to keep the first assistant message with thinking block\n let i = context.length - 1;\n while (i > firstAssistantIndex && currentTokenCount > remainingContextTokens) {\n const msgToRemove = context[i];\n const msgTokenCount = tokenCounter(msgToRemove);\n context.splice(i, 1);\n currentTokenCount -= msgTokenCount;\n i--;\n }\n \n // Update remainingContextTokens to reflect the new token count\n remainingContextTokens = maxContextTokens - currentTokenCount;\n }\n }\n }\n }\n }\n }\n\n if (instructions && _messages.length > 0) {\n context.push(_messages[0] as BaseMessage);\n messages.shift();\n }\n\n const prunedMemory = messages;\n summaryIndex = prunedMemory.length - 1;\n remainingContextTokens -= currentTokenCount;\n\n return {\n summaryIndex,\n remainingContextTokens,\n context: context.reverse(),\n messagesToRefine: prunedMemory,\n };\n}\n\nfunction checkValidNumber(value: unknown): value is number {\n return typeof value === 'number' && !isNaN(value) && value > 0;\n}\n\nexport function createPruneMessages(factoryParams: PruneMessagesFactoryParams) {\n const indexTokenCountMap = { ...factoryParams.indexTokenCountMap };\n let lastTurnStartIndex = factoryParams.startIndex;\n let totalTokens = (Object.values(indexTokenCountMap)).reduce((a, b) => a + b, 0);\n \n return function pruneMessages(params: PruneMessagesParams): {\n context: BaseMessage[];\n indexTokenCountMap: Record<string, number>;\n } {\n let currentUsage: UsageMetadata | undefined;\n if (params.usageMetadata && (\n checkValidNumber(params.usageMetadata.input_tokens)\n || (\n checkValidNumber(params.usageMetadata.input_token_details)\n && (\n checkValidNumber(params.usageMetadata.input_token_details.cache_creation)\n || checkValidNumber(params.usageMetadata.input_token_details.cache_read)\n )\n )\n ) && checkValidNumber(params.usageMetadata.output_tokens)) {\n currentUsage = calculateTotalTokens(params.usageMetadata);\n totalTokens = currentUsage.total_tokens;\n }\n\n for (let i = lastTurnStartIndex; i < params.messages.length; i++) {\n const message = params.messages[i];\n if (i === lastTurnStartIndex && indexTokenCountMap[i] === undefined && currentUsage) {\n indexTokenCountMap[i] = currentUsage.output_tokens;\n } else if (indexTokenCountMap[i] === undefined) {\n indexTokenCountMap[i] = factoryParams.tokenCounter(message);\n totalTokens += indexTokenCountMap[i];\n }\n }\n\n // If `currentUsage` is defined, we need to distribute the current total tokensto our `indexTokenCountMap`,\n // for all message index keys before `lastTurnStartIndex`, as it has the most accurate count for those messages.\n // We must distribute it in a weighted manner, so that the total token count is equal to `currentUsage.total_tokens`,\n // relative the manually counted tokens in `indexTokenCountMap`.\n if (currentUsage) {\n const totalIndexTokens = Object.values(indexTokenCountMap).reduce((a, b) => a + b, 0);\n const ratio = currentUsage.total_tokens / totalIndexTokens;\n for (const key in indexTokenCountMap) {\n indexTokenCountMap[key] = Math.round(indexTokenCountMap[key] * ratio);\n }\n }\n\n lastTurnStartIndex = params.messages.length;\n if (totalTokens <= factoryParams.maxTokens) {\n return { context: params.messages, indexTokenCountMap };\n }\n\n // Pass 
the tokenCounter to getMessagesWithinTokenLimit for token recalculation\n const { context } = getMessagesWithinTokenLimit({\n maxContextTokens: factoryParams.maxTokens,\n messages: params.messages,\n indexTokenCountMap,\n startOnMessageType: params.startOnMessageType,\n thinkingEnabled: factoryParams.thinkingEnabled,\n tokenCounter: factoryParams.tokenCounter,\n });\n\n return { context, indexTokenCountMap };\n }\n}\n"],"names":[],"mappings":";;AAgBA;;;;;AAKG;AACH,SAAS,oBAAoB,CAAC,KAA6B,EAAA;IACzD,MAAM,eAAe,GAAG,MAAM,CAAC,KAAK,CAAC,YAAY,CAAC,IAAI,CAAC;AACvD,IAAA,MAAM,aAAa,GAAG,MAAM,CAAC,KAAK,CAAC,mBAAmB,EAAE,cAAc,CAAC,IAAI,CAAC;AAC5E,IAAA,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC,mBAAmB,EAAE,UAAU,CAAC,IAAI,CAAC;AAEpE,IAAA,MAAM,gBAAgB,GAAG,eAAe,GAAG,aAAa,GAAG,SAAS;IACpE,MAAM,iBAAiB,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC;IAE1D,OAAO;AACL,QAAA,YAAY,EAAE,gBAAgB;AAC9B,QAAA,aAAa,EAAE,iBAAiB;QAChC,YAAY,EAAE,gBAAgB,GAAG;KAClC;AACH;AAEA;;;;;;AAMG;AACH,SAAS,2BAA2B,CAAC,EACnC,QAAQ,EAAE,SAAS,EACnB,gBAAgB,EAChB,kBAAkB,EAClB,kBAAkB,EAClB,eAAe,EACf,YAAY,GAQb,EAAA;;;AAQC,IAAA,IAAI,YAAY,GAAG,EAAE;IACrB,IAAI,iBAAiB,GAAG,CAAC;IACzB,MAAM,YAAY,GAAG,SAAS,GAAG,CAAC,CAAC,EAAE,OAAO,EAAE,KAAK,QAAQ,GAAG,SAAS,CAAC,CAAC,CAAC,GAAG,SAAS;AACtF,IAAA,MAAM,sBAAsB,GAAG,YAAY,IAAI,IAAI,GAAG,kBAAkB,CAAC,CAAC,CAAC,GAAG,CAAC;AAC/E,IAAA,IAAI,sBAAsB,GAAG,gBAAgB,GAAG,sBAAsB;AACtE,IAAA,MAAM,QAAQ,GAAG,CAAC,GAAG,SAAS,CAAC;IAC/B,IAAI,OAAO,GAAkB,EAAE;AAE/B,IAAA,IAAI,iBAAiB,GAAG,sBAAsB,EAAE;AAC9C,QAAA,IAAI,YAAY,GAAG,QAAQ,CAAC,MAAM;AAClC,QAAA,OAAO,QAAQ,CAAC,MAAM,GAAG,CAAC,IAAI,iBAAiB,GAAG,sBAAsB,IAAI,YAAY,GAAG,CAAC,EAAE;AAC5F,YAAA,YAAY,EAAE;YACd,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,IAAI,YAAY,EAAE;gBACzC;;AAEF,YAAA,MAAM,aAAa,GAAG,QAAQ,CAAC,GAAG,EAAE;AACpC,YAAA,IAAI,CAAC,aAAa;gBAAE;YAEpB,MAAM,UAAU,GAAG,kBAAkB,CAAC,YAAY,CAAC,IAAI,CAAC;YAExD,IAAI,CAAC,iBAAiB,GAAG,UAAU,KAAK,sBAAsB,EAAE;AAC9D,gBAAA,OAAO,CAAC,IAAI,CAAC,aAAa,CAAC;gBAC3B,iBAAiB,IAAI,UAAU;;iBAC1B;AACL,gBAAA,QAAQ,CAAC,IAAI,CAAC,aAAa,CAAC;gBAC5B;;;;QAKN,IAAI,kBAAkB,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE;AAC5C,YAAA,MAAM,iBAAiB,GAAG,OAAO,CAAC,SAAS,CAAC,GAAG,IAAI,GAAG,CAAC,OAAO,EAAE,KAAK,kBAAkB,CAAC;AAExF,YAAA,IAAI,iBAAiB,GAAG,CAAC,EAAE;AACzB,gBAAA,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC,iBAAiB,CAAC;;;;QAK9C,IAAI,eAAe,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,YAAY,EAAE;;AAEzD,YAAA,MAAM,mBAAmB,GAAG,OAAO,CAAC,SAAS,CAAC,GAAG,IAAI,GAAG,CAAC,OAAO,EAAE,KAAK,IAAI,CAAC;AAC5E,YAAA,IAAI,mBAAmB,IAAI,CAAC,EAAE;AAC5B,gBAAA,MAAM,iBAAiB,GAAG,OAAO,CAAC,mBAAmB,CAAC;;gBAGtD,MAAM,gBAAgB,GAAG,KAAK,CAAC,OAAO,CAAC,iBAAiB,CAAC,OAAO,CAAC;oBAC/D,iBAAiB,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,IACjC,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC;;gBAGjE,IAAI,CAAC,gBAAgB,EAAE;;oBAErB,MAAM,cAAc,GAAU,EAAE;;AAGhC,oBAAA,KAAK,MAAM,GAAG,IAAI,QAAQ,EAAE;AAC1B,wBAAA,IAAI,GAAG,CAAC,OAAO,EAAE,KAAK,IAAI,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE;AACxD,4BAAA,KAAK,MAAM,IAAI,IAAI,GAAG,CAAC,OAAO,EAAE;AAC9B,gCAAA,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,IAAI,KAAK,UAAU,EAAE;AAChE,oCAAA,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC;;oCAEzB;;;AAGJ,4BAAA,IAAI,cAAc,CAAC,MAAM,GAAG,CAAC;AAAE,gCAAA,MAAM;;;;AAKzC,oBAAA,IAAI,cAAc,CAAC,MAAM,GAAG,CAAC,EAAE;;AAE7B,wBAAA,MAAM,kBAAkB,GAAG,YAAY,CAAC,iBAAiB,CAAC;;AAG1D,wBAAA,IAAI,UAAiB;wBAErB,IAAI,KAAK,CAAC,OAAO,CAAC,iBAAiB,CAAC,OAAO,CAAC,EAAE;;AAE5C,4BAAA,MAAM,eAAe,GAAG,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI,IAC3D,EAAE,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,CAAC;4BAElE,UAAU,GAAG,CAAC,GAAG,cAAc,EAAE,GAAG,eAAe,CAAC;;AAC/C,6BAAA,IAAI,OAAO,iBAAiB,CAAC,OAAO,KAAK,QAAQ,EAAE;AACxD,4BAAA,UAAU,GAAG;AACX,gC
AAA,GAAG,cAAc;gCACjB,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,iBAAiB,CAAC,OAAO;6BAChD;;6BACI;4BACL,UAAU,GAAG,cAAc;;;AAI7B,wBAAA,MAAM,UAAU,GAAG,IAAI,SAAS,CAAC;AAC/B,4BAAA,OAAO,EAAE,UAAU;4BACnB,iBAAiB,EAAE,iBAAiB,CAAC,iBAAiB;4BACtD,iBAAiB,EAAE,iBAAiB,CAAC,iBAAiB;AACvD,yBAAA,CAAC;;AAGF,wBAAA,MAAM,aAAa,GAAG,YAAY,CAAC,UAAU,CAAC;;AAG9C,wBAAA,iBAAiB,KAAK,aAAa,GAAG,kBAAkB,CAAC;;AAGzD,wBAAA,OAAO,CAAC,mBAAmB,CAAC,GAAG,UAAU;;AAGzC,wBAAA,IAAI,iBAAiB,GAAG,sBAAsB,EAAE;;;AAG9C,4BAAA,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,CAAC;4BAC1B,OAAO,CAAC,GAAG,mBAAmB,IAAI,iBAAiB,GAAG,sBAAsB,EAAE;AAC5E,gCAAA,MAAM,WAAW,GAAG,OAAO,CAAC,CAAC,CAAC;AAC9B,gCAAA,MAAM,aAAa,GAAG,YAAY,CAAC,WAAW,CAAC;AAC/C,gCAAA,OAAO,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC;gCACpB,iBAAiB,IAAI,aAAa;AAClC,gCAAA,CAAC,EAAE;;;AAIL,4BAAA,sBAAsB,GAAG,gBAAgB,GAAG,iBAAiB;;;;;;;IAQvE,IAAI,YAAY,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;QACxC,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAgB,CAAC;QACzC,QAAQ,CAAC,KAAK,EAAE;;IAGlB,MAAM,YAAY,GAAG,QAAQ;AAC7B,IAAA,YAAY,GAAG,YAAY,CAAC,MAAM,GAAG,CAAC;IACtC,sBAAsB,IAAI,iBAAiB;IAE3C,OAAO;QACL,YAAY;QACZ,sBAAsB;AACtB,QAAA,OAAO,EAAE,OAAO,CAAC,OAAO,EAAE;AAC1B,QAAA,gBAAgB,EAAE,YAAY;KAC/B;AACH;AAEA,SAAS,gBAAgB,CAAC,KAAc,EAAA;AACtC,IAAA,OAAO,OAAO,KAAK,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,KAAK,GAAG,CAAC;AAChE;AAEM,SAAU,mBAAmB,CAAC,aAAyC,EAAA;IAC3E,MAAM,kBAAkB,GAAG,EAAE,GAAG,aAAa,CAAC,kBAAkB,EAAE;AAClE,IAAA,IAAI,kBAAkB,GAAG,aAAa,CAAC,UAAU;IACjD,IAAI,WAAW,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,kBAAkB,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;IAEhF,OAAO,SAAS,aAAa,CAAC,MAA2B,EAAA;AAIvD,QAAA,IAAI,YAAuC;AAC3C,QAAA,IAAI,MAAM,CAAC,aAAa,KACtB,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,YAAY;AAC/C,gBACD,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,mBAAmB;oBAEvD,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,mBAAmB,CAAC,cAAc;uBACrE,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,mBAAmB,CAAC,UAAU,CAAC,CACzE,CACF,CACF,IAAI,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,EAAE;AACzD,YAAA,YAAY,GAAG,oBAAoB,CAAC,MAAM,CAAC,aAAa,CAAC;AACzD,YAAA,WAAW,GAAG,YAAY,CAAC,YAAY;;AAGzC,QAAA,KAAK,IAAI,CAAC,GAAG,kBAAkB,EAAE,CAAC,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAChE,MAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC;AAClC,YAAA,IAAI,CAAC,KAAK,kBAAkB,IAAI,kBAAkB,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,YAAY,EAAE;AACnF,gBAAA,kBAAkB,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,aAAa;;AAC7C,iBAAA,IAAI,kBAAkB,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE;gBAC9C,kBAAkB,CAAC,CAAC,CAAC,GAAG,aAAa,CAAC,YAAY,CAAC,OAAO,CAAC;AAC3D,gBAAA,WAAW,IAAI,kBAAkB,CAAC,CAAC,CAAC;;;;;;;QAQxC,IAAI,YAAY,EAAE;YAChB,MAAM,gBAAgB,GAAG,MAAM,CAAC,MAAM,CAAC,kBAAkB,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;AACrF,YAAA,MAAM,KAAK,GAAG,YAAY,CAAC,YAAY,GAAG,gBAAgB;AAC1D,YAAA,KAAK,MAAM,GAAG,IAAI,kBAAkB,EAAE;AACpC,gBAAA,kBAAkB,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,kBAAkB,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;;;AAIzE,QAAA,kBAAkB,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM;AAC3C,QAAA,IAAI,WAAW,IAAI,aAAa,CAAC,SAAS,EAAE;YAC1C,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,QAAQ,EAAE,kBAAkB,EAAE;;;AAIzD,QAAA,MAAM,EAAE,OAAO,EAAE,GAAG,2BAA2B,CAAC;YAC9C,gBAAgB,EAAE,aAAa,CAAC,SAAS;YACzC,QAAQ,EAAE,MAAM,CAAC,QAAQ;YACzB,kBAAkB;YAClB,kBAAkB,EAAE,MAAM,CAAC,kBAAkB;YAC7C,eAAe,EAAE,aAAa,CAAC,eAAe;YAC9C,YAAY,EAAE,aAAa,CAAC,YAAY;AACzC,SAAA,CAAC;AAEF,QAAA,OAAO,EAAE,OAAO,EAAE,kBAAkB,EAAE;AACxC,KAAC;AACH;;;;"}
@@ -5,6 +5,7 @@ export type PruneMessagesFactoryParams = {
  startIndex: number;
  tokenCounter: TokenCounter;
  indexTokenCountMap: Record<string, number>;
+ thinkingEnabled?: boolean;
  };
  export type PruneMessagesParams = {
  messages: BaseMessage[];
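Because the new `thinkingEnabled` field is optional, existing callers of `createPruneMessages` type-check unchanged. A factory-params object under the updated declaration could look like this (values are invented, and the counter is a rough stand-in rather than the library's real tokenizer):

import type { BaseMessage } from '@langchain/core/messages';
// assuming PruneMessagesFactoryParams is imported from the package's type declarations
const factoryParams: PruneMessagesFactoryParams = {
  maxTokens: 8192,                 // hypothetical context budget
  startIndex: 0,
  tokenCounter: (msg: BaseMessage) => Math.ceil(JSON.stringify(msg.content ?? '').length / 4),
  indexTokenCountMap: {},
  thinkingEnabled: true,           // new in 2.2.9; omit to keep the old behavior
};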
@@ -0,0 +1 @@
+ export {};
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@librechat/agents",
- "version": "2.2.8",
+ "version": "2.2.9",
  "main": "./dist/cjs/main.cjs",
  "module": "./dist/esm/main.mjs",
  "types": "./dist/types/index.d.ts",
@@ -44,6 +44,7 @@
  "stream": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/stream.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
  "code_exec": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec.ts --provider 'google' --name 'Jo' --location 'New York, NY'",
  "image": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/image.ts --provider 'google' --name 'Jo' --location 'New York, NY'",
+ "code_exec_files": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec_files.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
  "code_exec_simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec_simple.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
  "simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/simple.ts --provider 'openrouter' --name 'Jo' --location 'New York, NY'",
  "caching": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/caching.ts --name 'Jo' --location 'New York, NY'",
@@ -362,18 +362,24 @@ export class StandardGraph extends Graph<

  let messagesToUse = messages;
  if (!this.pruneMessages && this.tokenCounter && this.maxContextTokens && this.indexTokenCountMap[0] != null) {
+ const isAnthropicWithThinking = (
+ (this.provider === Providers.ANTHROPIC
+ || (this.provider === Providers.BEDROCK && (this.clientOptions as t.BedrockClientOptions)?.model?.includes('anthropic')))
+ && (this.clientOptions as t.AnthropicClientOptions)?.thinking != null);
+
  this.pruneMessages = createPruneMessages({
  indexTokenCountMap: this.indexTokenCountMap,
  maxTokens: this.maxContextTokens,
  tokenCounter: this.tokenCounter,
  startIndex: this.startIndex,
+ thinkingEnabled: isAnthropicWithThinking,
  });
  }
  if (this.pruneMessages) {
  const { context, indexTokenCountMap } = this.pruneMessages({
  messages,
  usageMetadata: this.currentUsage,
- startOnMessageType: 'human',
+ // startOnMessageType: 'human',
  });
  this.indexTokenCountMap = indexTokenCountMap;
  messagesToUse = context;
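The detection above keys off the client options rather than the model output: any non-null `thinking` option on an Anthropic (or Bedrock-hosted Anthropic) client turns on the new pruning behavior. A sketch of options that would set the flag, assuming the field follows the Anthropic extended-thinking request shape; `t.AnthropicClientOptions` is the package's own type and the model id here is only illustrative:

import type * as t from '@/types';

const clientOptions: t.AnthropicClientOptions = {
  model: 'claude-3-7-sonnet-latest',                   // illustrative model id
  thinking: { type: 'enabled', budget_tokens: 2048 },  // any non-null value flips thinkingEnabled
};

On Bedrock, the same flag is derived from the configured model id containing 'anthropic'.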
@@ -1,3 +1,4 @@
+ import { AIMessage } from '@langchain/core/messages';
  import type { BaseMessage, UsageMetadata } from '@langchain/core/messages';
  import type { TokenCounter } from '@/types/run';
  export type PruneMessagesFactoryParams = {
@@ -5,6 +6,7 @@ export type PruneMessagesFactoryParams = {
  startIndex: number;
  tokenCounter: TokenCounter;
  indexTokenCountMap: Record<string, number>;
+ thinkingEnabled?: boolean;
  };
  export type PruneMessagesParams = {
  messages: BaseMessage[];
@@ -45,11 +47,15 @@ function getMessagesWithinTokenLimit({
  maxContextTokens,
  indexTokenCountMap,
  startOnMessageType,
+ thinkingEnabled,
+ tokenCounter,
  }: {
  messages: BaseMessage[];
  maxContextTokens: number;
  indexTokenCountMap: Record<string, number>;
  startOnMessageType?: string;
+ thinkingEnabled?: boolean;
+ tokenCounter?: TokenCounter;
  }): {
  context: BaseMessage[];
  remainingContextTokens: number;
@@ -87,14 +93,106 @@ function getMessagesWithinTokenLimit({
  }
  }

- if (startOnMessageType && context.length > 0) {
- const requiredTypeIndex = context.findIndex(msg => msg.getType() === startOnMessageType);
+ // Handle startOnMessageType requirement
+ if (startOnMessageType && context.length > 0) {
+ const requiredTypeIndex = context.findIndex(msg => msg.getType() === startOnMessageType);
+
+ if (requiredTypeIndex > 0) {
+ context = context.slice(requiredTypeIndex);
+ }
+ }
+
+ // Handle thinking mode requirement for Anthropic
+ if (thinkingEnabled && context.length > 0 && tokenCounter) {
+ // Process only if we have an assistant message in the context
+ const firstAssistantIndex = context.findIndex(msg => msg.getType() === 'ai');
+ if (firstAssistantIndex >= 0) {
+ const firstAssistantMsg = context[firstAssistantIndex];

- if (requiredTypeIndex > 0) {
- context = context.slice(requiredTypeIndex);
+ // Check if the first assistant message already has a thinking block
+ const hasThinkingBlock = Array.isArray(firstAssistantMsg.content) &&
+ firstAssistantMsg.content.some(item =>
+ item && typeof item === 'object' && item.type === 'thinking');
+
+ // Only proceed if we need to add thinking blocks
+ if (!hasThinkingBlock) {
+ // Collect thinking blocks from pruned assistant messages
+ const thinkingBlocks: any[] = [];
+
+ // Look through pruned messages for thinking blocks
+ for (const msg of messages) {
+ if (msg.getType() === 'ai' && Array.isArray(msg.content)) {
+ for (const item of msg.content) {
+ if (item && typeof item === 'object' && item.type === 'thinking') {
+ thinkingBlocks.push(item);
+ // We only need one thinking block
+ break;
+ }
+ }
+ if (thinkingBlocks.length > 0) break; // Stop after finding one thinking block
+ }
+ }
+
+ // If we found thinking blocks, add them to the first assistant message
+ if (thinkingBlocks.length > 0) {
+ // Calculate token count of original message
+ const originalTokenCount = tokenCounter(firstAssistantMsg);
+
+ // Create a new content array with thinking blocks at the beginning
+ let newContent: any[];
+
+ if (Array.isArray(firstAssistantMsg.content)) {
+ // Keep the original content (excluding any existing thinking blocks)
+ const originalContent = firstAssistantMsg.content.filter(item =>
+ !(item && typeof item === 'object' && item.type === 'thinking'));
+
+ newContent = [...thinkingBlocks, ...originalContent];
+ } else if (typeof firstAssistantMsg.content === 'string') {
+ newContent = [
+ ...thinkingBlocks,
+ { type: 'text', text: firstAssistantMsg.content }
+ ];
+ } else {
+ newContent = thinkingBlocks;
+ }
+
+ // Create a new message with the updated content
+ const newMessage = new AIMessage({
+ content: newContent,
+ additional_kwargs: firstAssistantMsg.additional_kwargs,
+ response_metadata: firstAssistantMsg.response_metadata,
+ });
+
+ // Calculate token count of new message
+ const newTokenCount = tokenCounter(newMessage);
+
+ // Adjust current token count
+ currentTokenCount += (newTokenCount - originalTokenCount);
+
+ // Replace the first assistant message
+ context[firstAssistantIndex] = newMessage;
+
+ // If we've exceeded the token limit, we need to prune more messages
+ if (currentTokenCount > remainingContextTokens) {
+ // Remove messages from the end of the context until we're under the token limit
+ // But make sure to keep the first assistant message with thinking block
+ let i = context.length - 1;
+ while (i > firstAssistantIndex && currentTokenCount > remainingContextTokens) {
+ const msgToRemove = context[i];
+ const msgTokenCount = tokenCounter(msgToRemove);
+ context.splice(i, 1);
+ currentTokenCount -= msgTokenCount;
+ i--;
+ }
+
+ // Update remainingContextTokens to reflect the new token count
+ remainingContextTokens = maxContextTokens - currentTokenCount;
+ }
+ }
  }
  }
  }
+ }

  if (instructions && _messages.length > 0) {
  context.push(_messages[0] as BaseMessage);
@@ -121,6 +219,7 @@ export function createPruneMessages(factoryParams: PruneMessagesFactoryParams) {
  const indexTokenCountMap = { ...factoryParams.indexTokenCountMap };
  let lastTurnStartIndex = factoryParams.startIndex;
  let totalTokens = (Object.values(indexTokenCountMap)).reduce((a, b) => a + b, 0);
+
  return function pruneMessages(params: PruneMessagesParams): {
  context: BaseMessage[];
  indexTokenCountMap: Record<string, number>;
@@ -167,11 +266,14 @@ export function createPruneMessages(factoryParams: PruneMessagesFactoryParams) {
  return { context: params.messages, indexTokenCountMap };
  }

+ // Pass the tokenCounter to getMessagesWithinTokenLimit for token recalculation
  const { context } = getMessagesWithinTokenLimit({
  maxContextTokens: factoryParams.maxTokens,
  messages: params.messages,
  indexTokenCountMap,
  startOnMessageType: params.startOnMessageType,
+ thinkingEnabled: factoryParams.thinkingEnabled,
+ tokenCounter: factoryParams.tokenCounter,
  });

  return { context, indexTokenCountMap };
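Taken together, a minimal end-to-end sketch of the updated factory follows. The import path is an assumption (the function is defined in src/messages/prune.ts), and the counter is a crude character-based stand-in rather than the library's real tokenizer:

import { AIMessage, HumanMessage } from '@langchain/core/messages';
import type { BaseMessage } from '@langchain/core/messages';
// import path is an assumption; createPruneMessages lives in src/messages/prune.ts
import { createPruneMessages } from '@/messages';

const roughCounter = (msg: BaseMessage): number =>
  Math.ceil(JSON.stringify(msg.content ?? '').length / 4);

const pruneMessages = createPruneMessages({
  maxTokens: 4096,          // hypothetical context budget
  startIndex: 0,
  tokenCounter: roughCounter,
  indexTokenCountMap: {},
  thinkingEnabled: true,    // carry a thinking block into the pruned context when needed
});

const { context, indexTokenCountMap } = pruneMessages({
  messages: [
    new HumanMessage('first question'),
    new AIMessage('first answer'),
    new HumanMessage('follow-up question'),
  ],
});
// `context` is the token-bounded message window; `indexTokenCountMap` records per-index token counts.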