@librechat/agents 2.3.7 → 2.3.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/graphs/Graph.cjs +5 -4
- package/dist/cjs/graphs/Graph.cjs.map +1 -1
- package/dist/cjs/messages/prune.cjs +36 -13
- package/dist/cjs/messages/prune.cjs.map +1 -1
- package/dist/esm/graphs/Graph.mjs +5 -4
- package/dist/esm/graphs/Graph.mjs.map +1 -1
- package/dist/esm/messages/prune.mjs +37 -14
- package/dist/esm/messages/prune.mjs.map +1 -1
- package/dist/types/messages/prune.d.ts +14 -10
- package/dist/types/types/llm.d.ts +8 -0
- package/package.json +1 -1
- package/src/graphs/Graph.ts +13 -4
- package/src/messages/prune.ts +59 -23
- package/src/specs/prune.test.ts +182 -1
- package/src/specs/token-distribution-edge-case.test.ts +4 -5
- package/src/types/llm.ts +9 -2
package/dist/esm/graphs/Graph.mjs.map
@@ -1 +1 @@
-
{"version":3,"file":"Graph.mjs","sources":["../../../src/graphs/Graph.ts"],"sourcesContent":["// src/graphs/Graph.ts\nimport { nanoid } from 'nanoid';\nimport { concat } from '@langchain/core/utils/stream';\nimport { ToolNode } from '@langchain/langgraph/prebuilt';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { START, END, StateGraph } from '@langchain/langgraph';\nimport { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';\nimport { Runnable, RunnableConfig } from '@langchain/core/runnables';\nimport { dispatchCustomEvent } from '@langchain/core/callbacks/dispatch';\nimport { AIMessageChunk, ToolMessage, SystemMessage } from '@langchain/core/messages';\nimport type { BaseMessage, BaseMessageFields, UsageMetadata } from '@langchain/core/messages';\nimport type * as t from '@/types';\nimport { Providers, GraphEvents, GraphNodeKeys, StepTypes, Callback, ContentTypes } from '@/common';\nimport type { ToolCall } from '@langchain/core/messages/tool';\nimport { getChatModelClass, manualToolStreamProviders } from '@/llm/providers';\nimport { ToolNode as CustomToolNode, toolsCondition } from '@/tools/ToolNode';\nimport {\n createPruneMessages,\n modifyDeltaProperties,\n formatArtifactPayload,\n convertMessagesToContent,\n formatAnthropicArtifactContent,\n} from '@/messages';\nimport { resetIfNotEmpty, isOpenAILike, isGoogleLike, joinKeys, sleep } from '@/utils';\nimport { createFakeStreamingLLM } from '@/llm/fake';\nimport { HandlerRegistry } from '@/events';\n\nconst { AGENT, TOOLS } = GraphNodeKeys;\nexport type GraphNode = GraphNodeKeys | typeof START;\nexport type ClientCallback<T extends unknown[]> = (graph: StandardGraph, ...args: T) => void;\nexport type ClientCallbacks = {\n [Callback.TOOL_ERROR]?: ClientCallback<[Error, string]>;\n [Callback.TOOL_START]?: ClientCallback<unknown[]>;\n [Callback.TOOL_END]?: ClientCallback<unknown[]>;\n}\nexport type SystemCallbacks = {\n [K in keyof ClientCallbacks]: ClientCallbacks[K] extends ClientCallback<infer Args>\n ? 
(...args: Args) => void\n : never;\n};\n\nexport abstract class Graph<\n T extends t.BaseGraphState = t.BaseGraphState,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n TNodeName extends string = string,\n> {\n abstract resetValues(): void;\n abstract createGraphState(): t.GraphStateChannels<T>;\n abstract initializeTools(): CustomToolNode<T> | ToolNode<T>;\n abstract initializeModel(): Runnable;\n abstract getRunMessages(): BaseMessage[] | undefined;\n abstract getContentParts(): t.MessageContentComplex[] | undefined;\n abstract generateStepId(stepKey: string): [string, number];\n abstract getKeyList(metadata: Record<string, unknown> | undefined): (string | number | undefined)[];\n abstract getStepKey(metadata: Record<string, unknown> | undefined): string;\n abstract checkKeyList(keyList: (string | number | undefined)[]): boolean;\n abstract getStepIdByKey(stepKey: string, index?: number): string\n abstract getRunStep(stepId: string): t.RunStep | undefined;\n abstract dispatchRunStep(stepKey: string, stepDetails: t.StepDetails): string;\n abstract dispatchRunStepDelta(id: string, delta: t.ToolCallDelta): void;\n abstract dispatchMessageDelta(id: string, delta: t.MessageDelta): void;\n abstract dispatchReasoningDelta(stepId: string, delta: t.ReasoningDelta): void;\n abstract handleToolCallCompleted(data: t.ToolEndData, metadata?: Record<string, unknown>): void;\n\n abstract createCallModel(): (state: T, config?: RunnableConfig) => Promise<Partial<T>>;\n abstract createWorkflow(): t.CompiledWorkflow<T>;\n lastToken?: string;\n tokenTypeSwitch?: 'reasoning' | 'content';\n reasoningKey: 'reasoning_content' | 'reasoning' = 'reasoning_content';\n currentTokenType: ContentTypes.TEXT | ContentTypes.THINK = ContentTypes.TEXT;\n messageStepHasToolCalls: Map<string, boolean> = new Map();\n messageIdsByStepKey: Map<string, string> = new Map();\n prelimMessageIdsByStepKey: Map<string, string> = new Map();\n config: RunnableConfig | undefined;\n contentData: t.RunStep[] = [];\n stepKeyIds: Map<string, string[]> = new Map<string, string[]>();\n contentIndexMap: Map<string, number> = new Map();\n toolCallStepIds: Map<string, string> = new Map();\n currentUsage: Partial<UsageMetadata> | undefined;\n indexTokenCountMap: Record<string, number> = {};\n maxContextTokens: number | undefined;\n pruneMessages?: ReturnType<typeof createPruneMessages>;\n /** The amount of time that should pass before another consecutive API call */\n streamBuffer: number | undefined;\n tokenCounter?: t.TokenCounter;\n signal?: AbortSignal;\n}\n\nexport class StandardGraph extends Graph<\n t.BaseGraphState,\n GraphNode\n> {\n private graphState: t.GraphStateChannels<t.BaseGraphState>;\n clientOptions: t.ClientOptions;\n boundModel: Runnable;\n /** The last recorded timestamp that a stream API call was invoked */\n lastStreamCall: number | undefined;\n handlerRegistry: HandlerRegistry | undefined;\n systemMessage: SystemMessage | undefined;\n messages: BaseMessage[] = [];\n runId: string | undefined;\n tools?: t.GenericTool[];\n toolMap?: t.ToolMap;\n startIndex: number = 0;\n provider: Providers;\n toolEnd: boolean;\n signal: AbortSignal | undefined;\n\n constructor({\n runId,\n tools,\n signal,\n toolMap,\n provider,\n streamBuffer,\n instructions,\n reasoningKey,\n clientOptions,\n toolEnd = false,\n additional_instructions = '',\n } : t.StandardGraphInput) {\n super();\n this.runId = runId;\n this.tools = tools;\n this.signal = signal;\n this.toolEnd = toolEnd;\n this.toolMap = toolMap;\n this.provider = 
provider;\n this.streamBuffer = streamBuffer;\n this.clientOptions = clientOptions;\n this.graphState = this.createGraphState();\n this.boundModel = this.initializeModel();\n if (reasoningKey) {\n this.reasoningKey = reasoningKey;\n }\n\n let finalInstructions: string | BaseMessageFields = instructions ?? '';\n if (additional_instructions) {\n finalInstructions = finalInstructions ? `${finalInstructions}\\n\\n${additional_instructions}` : additional_instructions;\n }\n\n if (finalInstructions && provider === Providers.ANTHROPIC && (clientOptions as t.AnthropicClientOptions).clientOptions?.defaultHeaders?.['anthropic-beta']?.includes('prompt-caching')) {\n finalInstructions = {\n content: [\n {\n type: 'text',\n text: instructions,\n cache_control: { type: 'ephemeral' },\n },\n ],\n };\n }\n\n if (finalInstructions) {\n this.systemMessage = new SystemMessage(finalInstructions);\n }\n }\n\n /* Init */\n\n resetValues(keepContent?: boolean): void {\n this.messages = [];\n this.config = resetIfNotEmpty(this.config, undefined);\n if (keepContent !== true) {\n this.contentData = resetIfNotEmpty(this.contentData, []);\n this.contentIndexMap = resetIfNotEmpty(this.contentIndexMap, new Map());\n }\n this.stepKeyIds = resetIfNotEmpty(this.stepKeyIds, new Map());\n this.toolCallStepIds = resetIfNotEmpty(this.toolCallStepIds, new Map());\n this.messageIdsByStepKey = resetIfNotEmpty(this.messageIdsByStepKey, new Map());\n this.messageStepHasToolCalls = resetIfNotEmpty(this.prelimMessageIdsByStepKey, new Map());\n this.prelimMessageIdsByStepKey = resetIfNotEmpty(this.prelimMessageIdsByStepKey, new Map());\n this.currentTokenType = resetIfNotEmpty(this.currentTokenType, ContentTypes.TEXT);\n this.lastToken = resetIfNotEmpty(this.lastToken, undefined);\n this.tokenTypeSwitch = resetIfNotEmpty(this.tokenTypeSwitch, undefined);\n this.indexTokenCountMap = resetIfNotEmpty(this.indexTokenCountMap, {});\n this.currentUsage = resetIfNotEmpty(this.currentUsage, undefined);\n this.tokenCounter = resetIfNotEmpty(this.tokenCounter, undefined);\n this.maxContextTokens = resetIfNotEmpty(this.maxContextTokens, undefined);\n }\n\n /* Run Step Processing */\n\n getRunStep(stepId: string): t.RunStep | undefined {\n const index = this.contentIndexMap.get(stepId);\n if (index !== undefined) {\n return this.contentData[index];\n }\n return undefined;\n }\n\n getStepKey(metadata: Record<string, unknown> | undefined): string {\n if (!metadata) return '';\n\n const keyList = this.getKeyList(metadata);\n if (this.checkKeyList(keyList)) {\n throw new Error('Missing metadata');\n }\n\n return joinKeys(keyList);\n }\n\n getStepIdByKey(stepKey: string, index?: number): string {\n const stepIds = this.stepKeyIds.get(stepKey);\n if (!stepIds) {\n throw new Error(`No step IDs found for stepKey ${stepKey}`);\n }\n\n if (index === undefined) {\n return stepIds[stepIds.length - 1];\n }\n\n return stepIds[index];\n }\n\n generateStepId(stepKey: string): [string, number] {\n const stepIds = this.stepKeyIds.get(stepKey);\n let newStepId: string | undefined;\n let stepIndex = 0;\n if (stepIds) {\n stepIndex = stepIds.length;\n newStepId = `step_${nanoid()}`;\n stepIds.push(newStepId);\n this.stepKeyIds.set(stepKey, stepIds);\n } else {\n newStepId = `step_${nanoid()}`;\n this.stepKeyIds.set(stepKey, [newStepId]);\n }\n\n return [newStepId, stepIndex];\n }\n\n getKeyList(metadata: Record<string, unknown> | undefined): (string | number | undefined)[] {\n if (!metadata) return [];\n\n const keyList = [\n metadata.run_id as string,\n 
metadata.thread_id as string,\n metadata.langgraph_node as string,\n metadata.langgraph_step as number,\n metadata.checkpoint_ns as string,\n ];\n if (this.currentTokenType === ContentTypes.THINK) {\n keyList.push('reasoning');\n }\n\n return keyList;\n }\n\n checkKeyList(keyList: (string | number | undefined)[]): boolean {\n return keyList.some((key) => key === undefined);\n }\n\n /* Misc.*/\n\n getRunMessages(): BaseMessage[] | undefined {\n return this.messages.slice(this.startIndex);\n }\n\n getContentParts(): t.MessageContentComplex[] | undefined {\n return convertMessagesToContent(this.messages.slice(this.startIndex));\n }\n\n /* Graph */\n\n createGraphState(): t.GraphStateChannels<t.BaseGraphState> {\n return {\n messages: {\n value: (x: BaseMessage[], y: BaseMessage[]): BaseMessage[] => {\n if (!x.length) {\n if (this.systemMessage) {\n x.push(this.systemMessage);\n }\n\n this.startIndex = x.length + y.length;\n }\n const current = x.concat(y);\n this.messages = current;\n return current;\n },\n default: () => [],\n },\n };\n }\n\n initializeTools(): CustomToolNode<t.BaseGraphState> | ToolNode<t.BaseGraphState> {\n // return new ToolNode<t.BaseGraphState>(this.tools);\n return new CustomToolNode<t.BaseGraphState>({\n tools: this.tools || [],\n toolMap: this.toolMap,\n toolCallStepIds: this.toolCallStepIds,\n errorHandler: this.handleToolCallError.bind(this),\n });\n }\n\n initializeModel(): Runnable {\n const ChatModelClass = getChatModelClass(this.provider);\n const model = new ChatModelClass(this.clientOptions);\n\n if (isOpenAILike(this.provider) && (model instanceof ChatOpenAI || model instanceof AzureChatOpenAI)) {\n model.temperature = (this.clientOptions as t.OpenAIClientOptions).temperature as number;\n model.topP = (this.clientOptions as t.OpenAIClientOptions).topP as number;\n model.frequencyPenalty = (this.clientOptions as t.OpenAIClientOptions).frequencyPenalty as number;\n model.presencePenalty = (this.clientOptions as t.OpenAIClientOptions).presencePenalty as number;\n model.n = (this.clientOptions as t.OpenAIClientOptions).n as number;\n } else if (this.provider === Providers.VERTEXAI && model instanceof ChatVertexAI) {\n model.temperature = (this.clientOptions as t.VertexAIClientOptions).temperature as number;\n model.topP = (this.clientOptions as t.VertexAIClientOptions).topP as number;\n model.topK = (this.clientOptions as t.VertexAIClientOptions).topK as number;\n model.topLogprobs = (this.clientOptions as t.VertexAIClientOptions).topLogprobs as number;\n model.frequencyPenalty = (this.clientOptions as t.VertexAIClientOptions).frequencyPenalty as number;\n model.presencePenalty = (this.clientOptions as t.VertexAIClientOptions).presencePenalty as number;\n model.maxOutputTokens = (this.clientOptions as t.VertexAIClientOptions).maxOutputTokens as number;\n }\n\n if (!this.tools || this.tools.length === 0) {\n return model as unknown as Runnable;\n }\n\n return (model as t.ModelWithTools).bindTools(this.tools);\n }\n overrideTestModel(responses: string[], sleep?: number, toolCalls?: ToolCall[]): void {\n this.boundModel = createFakeStreamingLLM({\n responses,\n sleep,\n toolCalls,\n });\n }\n\n getNewModel({\n clientOptions = {},\n omitOriginalOptions,\n } : {\n clientOptions?: t.ClientOptions;\n omitOriginalOptions?: string[]\n }): t.ChatModelInstance {\n const ChatModelClass = getChatModelClass(this.provider);\n const _options = omitOriginalOptions ? 
Object.fromEntries(\n Object.entries(this.clientOptions).filter(([key]) => !omitOriginalOptions.includes(key)),\n ) : this.clientOptions;\n const options = Object.assign(_options, clientOptions);\n return new ChatModelClass(options);\n }\n\n storeUsageMetadata(finalMessage?: BaseMessage): void {\n if (finalMessage && 'usage_metadata' in finalMessage && finalMessage.usage_metadata) {\n this.currentUsage = finalMessage.usage_metadata as Partial<UsageMetadata>;\n }\n }\n\n createCallModel() {\n return async (state: t.BaseGraphState, config?: RunnableConfig): Promise<Partial<t.BaseGraphState>> => {\n const { provider = '' } = (config?.configurable as t.GraphConfig | undefined) ?? {} ;\n if (!config || !provider) {\n throw new Error(`No ${config ? 'provider' : 'config'} provided`);\n }\n if (!config.signal) {\n config.signal = this.signal;\n }\n this.config = config;\n const { messages } = state;\n\n let messagesToUse = messages;\n if (!this.pruneMessages && this.tokenCounter && this.maxContextTokens && this.indexTokenCountMap[0] != null) {\n const isAnthropicWithThinking = (\n (this.provider === Providers.ANTHROPIC\n || (this.provider === Providers.BEDROCK && (this.clientOptions as t.BedrockClientOptions).model?.includes('anthropic')))\n && (this.clientOptions as t.AnthropicClientOptions).thinking != null);\n\n this.pruneMessages = createPruneMessages({\n indexTokenCountMap: this.indexTokenCountMap,\n maxTokens: this.maxContextTokens,\n tokenCounter: this.tokenCounter,\n startIndex: this.startIndex,\n thinkingEnabled: isAnthropicWithThinking,\n });\n }\n if (this.pruneMessages) {\n const { context, indexTokenCountMap } = this.pruneMessages({\n messages,\n usageMetadata: this.currentUsage,\n // startOnMessageType: 'human',\n });\n this.indexTokenCountMap = indexTokenCountMap;\n messagesToUse = context;\n }\n\n const finalMessages = messagesToUse;\n const lastMessageX = finalMessages.length >= 2 ? finalMessages[finalMessages.length - 2] : null;\n const lastMessageY = finalMessages.length >= 1 ? finalMessages[finalMessages.length - 1] : null;\n\n if (\n provider === Providers.BEDROCK\n && lastMessageX instanceof AIMessageChunk\n && lastMessageY instanceof ToolMessage\n && typeof lastMessageX.content === 'string'\n ) {\n finalMessages[finalMessages.length - 2].content = '';\n }\n\n const isLatestToolMessage = lastMessageY instanceof ToolMessage;\n\n if (isLatestToolMessage && provider === Providers.ANTHROPIC) {\n formatAnthropicArtifactContent(finalMessages);\n } else if (\n isLatestToolMessage &&\n (isOpenAILike(provider) || isGoogleLike(provider))\n ) {\n formatArtifactPayload(finalMessages);\n }\n\n if (this.lastStreamCall != null && this.streamBuffer != null) {\n const timeSinceLastCall = Date.now() - this.lastStreamCall;\n if (timeSinceLastCall < this.streamBuffer) {\n const timeToWait = Math.ceil((this.streamBuffer - timeSinceLastCall) / 1000) * 1000;\n await sleep(timeToWait);\n }\n }\n\n this.lastStreamCall = Date.now();\n\n let result: Partial<t.BaseGraphState>;\n if ((this.tools?.length ?? 
0) > 0 && manualToolStreamProviders.has(provider)) {\n const stream = await this.boundModel.stream(finalMessages, config);\n let finalChunk: AIMessageChunk | undefined;\n for await (const chunk of stream) {\n dispatchCustomEvent(GraphEvents.CHAT_MODEL_STREAM, { chunk }, config);\n if (!finalChunk) {\n finalChunk = chunk;\n } else {\n finalChunk = concat(finalChunk, chunk);\n }\n }\n\n finalChunk = modifyDeltaProperties(this.provider, finalChunk);\n result = { messages: [finalChunk as AIMessageChunk] };\n } else {\n const finalMessage = (await this.boundModel.invoke(finalMessages, config)) as AIMessageChunk;\n if ((finalMessage.tool_calls?.length ?? 0) > 0) {\n finalMessage.tool_calls = finalMessage.tool_calls?.filter((tool_call) => {\n if (!tool_call.name) {\n return false;\n }\n return true;\n });\n }\n result = { messages: [finalMessage] };\n }\n\n this.storeUsageMetadata(result.messages?.[0]);\n return result;\n };\n }\n\n createWorkflow(): t.CompiledWorkflow<t.BaseGraphState> {\n const routeMessage = (state: t.BaseGraphState, config?: RunnableConfig): string => {\n this.config = config;\n // const lastMessage = state.messages[state.messages.length - 1] as AIMessage;\n // if (!lastMessage?.tool_calls?.length) {\n // return END;\n // }\n // return TOOLS;\n return toolsCondition(state);\n };\n\n const workflow = new StateGraph<t.BaseGraphState>({\n channels: this.graphState,\n })\n .addNode(AGENT, this.createCallModel())\n .addNode(TOOLS, this.initializeTools())\n .addEdge(START, AGENT)\n .addConditionalEdges(AGENT, routeMessage)\n .addEdge(TOOLS, this.toolEnd ? END : AGENT);\n\n return workflow.compile();\n }\n\n /* Dispatchers */\n\n /**\n * Dispatches a run step to the client, returns the step ID\n */\n dispatchRunStep(stepKey: string, stepDetails: t.StepDetails): string {\n if (!this.config) {\n throw new Error('No config provided');\n }\n\n const [stepId, stepIndex] = this.generateStepId(stepKey);\n if (stepDetails.type === StepTypes.TOOL_CALLS && stepDetails.tool_calls) {\n for (const tool_call of stepDetails.tool_calls) {\n const toolCallId = tool_call.id ?? '';\n if (!toolCallId || this.toolCallStepIds.has(toolCallId)) {\n continue;\n }\n this.toolCallStepIds.set(toolCallId, stepId);\n }\n }\n\n const runStep: t.RunStep = {\n stepIndex,\n id: stepId,\n type: stepDetails.type,\n index: this.contentData.length,\n stepDetails,\n usage: null,\n };\n\n const runId = this.runId ?? '';\n if (runId) {\n runStep.runId = runId;\n }\n\n this.contentData.push(runStep);\n this.contentIndexMap.set(stepId, runStep.index);\n dispatchCustomEvent(GraphEvents.ON_RUN_STEP, runStep, this.config);\n return stepId;\n }\n\n handleToolCallCompleted(data: t.ToolEndData, metadata?: Record<string, unknown>): void {\n if (!this.config) {\n throw new Error('No config provided');\n }\n\n if (!data.output) {\n return;\n }\n\n const { input, output } = data;\n const { tool_call_id } = output;\n const stepId = this.toolCallStepIds.get(tool_call_id) ?? '';\n if (!stepId) {\n throw new Error(`No stepId found for tool_call_id ${tool_call_id}`);\n }\n\n const runStep = this.getRunStep(stepId);\n if (!runStep) {\n throw new Error(`No run step found for stepId ${stepId}`);\n }\n\n const args = typeof input === 'string' ? input : input.input;\n const tool_call = {\n args: typeof args === 'string' ? args : JSON.stringify(args),\n name: output.name ?? '',\n id: output.tool_call_id,\n output: typeof output.content === 'string'\n ? 
output.content\n : JSON.stringify(output.content),\n progress: 1,\n };\n\n this.handlerRegistry?.getHandler(GraphEvents.ON_RUN_STEP_COMPLETED)?.handle(\n GraphEvents.ON_RUN_STEP_COMPLETED,\n { result: {\n id: stepId,\n index: runStep.index,\n type: 'tool_call',\n tool_call\n } as t.ToolCompleteEvent,\n },\n metadata,\n this,\n );\n }\n handleToolCallError(data: t.ToolErrorData, metadata?: Record<string, unknown>): void {\n if (!this.config) {\n throw new Error('No config provided');\n }\n\n if (!data.id) {\n console.warn('No Tool ID provided for Tool Error');\n return;\n }\n\n const stepId = this.toolCallStepIds.get(data.id) ?? '';\n if (!stepId) {\n throw new Error(`No stepId found for tool_call_id ${data.id}`);\n }\n\n const { name, input: args, error } = data;\n\n const runStep = this.getRunStep(stepId);\n if (!runStep) {\n throw new Error(`No run step found for stepId ${stepId}`);\n }\n\n const tool_call: t.ProcessedToolCall = {\n id: data.id,\n name: name ?? '',\n args: typeof args === 'string' ? args : JSON.stringify(args),\n output: `Error processing tool${error?.message ? `: ${error.message}` : ''}`,\n progress: 1,\n };\n\n this.handlerRegistry?.getHandler(GraphEvents.ON_RUN_STEP_COMPLETED)?.handle(\n GraphEvents.ON_RUN_STEP_COMPLETED,\n { result: {\n id: stepId,\n index: runStep.index,\n type: 'tool_call',\n tool_call\n } as t.ToolCompleteEvent,\n },\n metadata,\n this,\n );\n }\n\n dispatchRunStepDelta(id: string, delta: t.ToolCallDelta): void {\n if (!this.config) {\n throw new Error('No config provided');\n } else if (!id) {\n throw new Error('No step ID found');\n }\n const runStepDelta: t.RunStepDeltaEvent = {\n id,\n delta,\n };\n dispatchCustomEvent(GraphEvents.ON_RUN_STEP_DELTA, runStepDelta, this.config);\n }\n\n dispatchMessageDelta(id: string, delta: t.MessageDelta): void {\n if (!this.config) {\n throw new Error('No config provided');\n }\n const messageDelta: t.MessageDeltaEvent = {\n id,\n delta,\n };\n dispatchCustomEvent(GraphEvents.ON_MESSAGE_DELTA, messageDelta, this.config);\n }\n\n dispatchReasoningDelta = (stepId: string, delta: t.ReasoningDelta): void => {\n if (!this.config) {\n throw new Error('No config provided');\n }\n const reasoningDelta: t.ReasoningDeltaEvent = {\n id: stepId,\n delta,\n };\n dispatchCustomEvent(GraphEvents.ON_REASONING_DELTA, reasoningDelta, this.config);\n 
};\n}\n"],"names":["CustomToolNode"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AA2BA,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,aAAa;MAchB,KAAK,CAAA;AAyBzB,IAAA,SAAS;AACT,IAAA,eAAe;IACf,YAAY,GAAsC,mBAAmB;AACrE,IAAA,gBAAgB,GAA2C,YAAY,CAAC,IAAI;AAC5E,IAAA,uBAAuB,GAAyB,IAAI,GAAG,EAAE;AACzD,IAAA,mBAAmB,GAAwB,IAAI,GAAG,EAAE;AACpD,IAAA,yBAAyB,GAAwB,IAAI,GAAG,EAAE;AAC1D,IAAA,MAAM;IACN,WAAW,GAAgB,EAAE;AAC7B,IAAA,UAAU,GAA0B,IAAI,GAAG,EAAoB;AAC/D,IAAA,eAAe,GAAwB,IAAI,GAAG,EAAE;AAChD,IAAA,eAAe,GAAwB,IAAI,GAAG,EAAE;AAChD,IAAA,YAAY;IACZ,kBAAkB,GAA2B,EAAE;AAC/C,IAAA,gBAAgB;AAChB,IAAA,aAAa;;AAEb,IAAA,YAAY;AACZ,IAAA,YAAY;AACZ,IAAA,MAAM;AACP;AAEK,MAAO,aAAc,SAAQ,KAGlC,CAAA;AACS,IAAA,UAAU;AAClB,IAAA,aAAa;AACb,IAAA,UAAU;;AAEV,IAAA,cAAc;AACd,IAAA,eAAe;AACf,IAAA,aAAa;IACb,QAAQ,GAAkB,EAAE;AAC5B,IAAA,KAAK;AACL,IAAA,KAAK;AACL,IAAA,OAAO;IACP,UAAU,GAAW,CAAC;AACtB,IAAA,QAAQ;AACR,IAAA,OAAO;AACP,IAAA,MAAM;IAEN,WAAY,CAAA,EACV,KAAK,EACL,KAAK,EACL,MAAM,EACN,OAAO,EACP,QAAQ,EACR,YAAY,EACZ,YAAY,EACZ,YAAY,EACZ,aAAa,EACb,OAAO,GAAG,KAAK,EACf,uBAAuB,GAAG,EAAE,GACN,EAAA;AACtB,QAAA,KAAK,EAAE;AACP,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK;AAClB,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK;AAClB,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM;AACpB,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO;AACtB,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO;AACtB,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ;AACxB,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY;AAChC,QAAA,IAAI,CAAC,aAAa,GAAG,aAAa;AAClC,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,gBAAgB,EAAE;AACzC,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,eAAe,EAAE;QACxC,IAAI,YAAY,EAAE;AAChB,YAAA,IAAI,CAAC,YAAY,GAAG,YAAY;;AAGlC,QAAA,IAAI,iBAAiB,GAA+B,YAAY,IAAI,EAAE;QACtE,IAAI,uBAAuB,EAAE;AAC3B,YAAA,iBAAiB,GAAG,iBAAiB,GAAG,CAAG,EAAA,iBAAiB,CAAO,IAAA,EAAA,uBAAuB,CAAE,CAAA,GAAG,uBAAuB;;QAGxH,IAAI,iBAAiB,IAAI,QAAQ,KAAK,SAAS,CAAC,SAAS,IAAK,aAA0C,CAAC,aAAa,EAAE,cAAc,GAAG,gBAAgB,CAAC,EAAE,QAAQ,CAAC,gBAAgB,CAAC,EAAE;AACtL,YAAA,iBAAiB,GAAG;AAClB,gBAAA,OAAO,EAAE;AACP,oBAAA;AACE,wBAAA,IAAI,EAAE,MAAM;AACZ,wBAAA,IAAI,EAAE,YAAY;AAClB,wBAAA,aAAa,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE;AACrC,qBAAA;AACF,iBAAA;aACF;;QAGH,IAAI,iBAAiB,EAAE;YACrB,IAAI,CAAC,aAAa,GAAG,IAAI,aAAa,CAAC,iBAAiB,CAAC;;;;AAM7D,IAAA,WAAW,CAAC,WAAqB,EAAA;AAC/B,QAAA,IAAI,CAAC,QAAQ,GAAG,EAAE;QAClB,IAAI,CAAC,MAAM,GAAG,eAAe,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC;AACrD,QAAA,IAAI,WAAW,KAAK,IAAI,EAAE;YACxB,IAAI,CAAC,WAAW,GAAG,eAAe,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,CAAC;AACxD,YAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,GAAG,EAAE,CAAC;;AAEzE,QAAA,IAAI,CAAC,UAAU,GAAG,eAAe,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,GAAG,EAAE,CAAC;AAC7D,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,GAAG,EAAE,CAAC;AACvE,QAAA,IAAI,CAAC,mBAAmB,GAAG,eAAe,CAAC,IAAI,CAAC,mBAAmB,EAAE,IAAI,GAAG,EAAE,CAAC;AAC/E,QAAA,IAAI,CAAC,uBAAuB,GAAG,eAAe,CAAC,IAAI,CAAC,yBAAyB,EAAE,IAAI,GAAG,EAAE,CAAC;AACzF,QAAA,IAAI,CAAC,yBAAyB,GAAG,eAAe,CAAC,IAAI,CAAC,yBAAyB,EAAE,IAAI,GAAG,EAAE,CAAC;AAC3F,QAAA,IAAI,CAAC,gBAAgB,GAAG,eAAe,CAAC,IAAI,CAAC,gBAAgB,EAAE,YAAY,CAAC,IAAI,CAAC;QACjF,IAAI,CAAC,SAAS,GAAG,eAAe,CAAC,IAAI,CAAC,SAAS,EAAE,SAAS,CAAC;QAC3D,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC,IAAI,CAAC,eAAe,EAAE,SAAS,CAAC;QACvE,IAAI,CAAC,kBAAkB,GAAG,eAAe,CAAC,IAAI,CAAC,kBAAkB,EAAE,EAAE,CAAC;QACtE,IAAI,CAAC,YAAY,GAAG,eAAe,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,CAAC;QACjE,IAAI,CAAC,YAAY,GAAG,eAAe,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,CAAC;QACjE,IAAI,CAAC,gBAAgB,GAAG,eAAe,CAAC,IAAI,CAAC,gBAAgB,EAAE,SAAS,CAAC;;;AAK3E,IAAA,UAAU,CAAC,MAAc,EAAA;QACvB,MAAM,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,MAAM,CAAC;AAC9C,QAAA,IAAI,KAAK,KAAK,SAAS,EAAE;AACvB,YAAA,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC;;AAEhC,QAAA,OAAO,SAAS;;AAGlB,IAAA,UAAU,CAAC,QAA6C,EAAA;AACtD,QAAA,IAAI,CAAC,QAAQ;AAAE,YAAA,OAAO,EAAE
;QAExB,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC;AACzC,QAAA,IAAI,IAAI,CAAC,YAAY,CAAC,OAAO,CAAC,EAAE;AAC9B,YAAA,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC;;AAGrC,QAAA,OAAO,QAAQ,CAAC,OAAO,CAAC;;IAG1B,cAAc,CAAC,OAAe,EAAE,KAAc,EAAA;QAC5C,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,CAAC;QAC5C,IAAI,CAAC,OAAO,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,OAAO,CAAA,CAAE,CAAC;;AAG7D,QAAA,IAAI,KAAK,KAAK,SAAS,EAAE;YACvB,OAAO,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC;;AAGpC,QAAA,OAAO,OAAO,CAAC,KAAK,CAAC;;AAGvB,IAAA,cAAc,CAAC,OAAe,EAAA;QAC5B,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,CAAC;AAC5C,QAAA,IAAI,SAA6B;QACjC,IAAI,SAAS,GAAG,CAAC;QACjB,IAAI,OAAO,EAAE;AACX,YAAA,SAAS,GAAG,OAAO,CAAC,MAAM;AAC1B,YAAA,SAAS,GAAG,CAAA,KAAA,EAAQ,MAAM,EAAE,EAAE;AAC9B,YAAA,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC;YACvB,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,EAAE,OAAO,CAAC;;aAChC;AACL,YAAA,SAAS,GAAG,CAAA,KAAA,EAAQ,MAAM,EAAE,EAAE;YAC9B,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,EAAE,CAAC,SAAS,CAAC,CAAC;;AAG3C,QAAA,OAAO,CAAC,SAAS,EAAE,SAAS,CAAC;;AAG/B,IAAA,UAAU,CAAC,QAA6C,EAAA;AACtD,QAAA,IAAI,CAAC,QAAQ;AAAE,YAAA,OAAO,EAAE;AAExB,QAAA,MAAM,OAAO,GAAG;AACd,YAAA,QAAQ,CAAC,MAAgB;AACzB,YAAA,QAAQ,CAAC,SAAmB;AAC5B,YAAA,QAAQ,CAAC,cAAwB;AACjC,YAAA,QAAQ,CAAC,cAAwB;AACjC,YAAA,QAAQ,CAAC,aAAuB;SACjC;QACD,IAAI,IAAI,CAAC,gBAAgB,KAAK,YAAY,CAAC,KAAK,EAAE;AAChD,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC;;AAG3B,QAAA,OAAO,OAAO;;AAGhB,IAAA,YAAY,CAAC,OAAwC,EAAA;AACnD,QAAA,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,GAAG,KAAK,GAAG,KAAK,SAAS,CAAC;;;IAKjD,cAAc,GAAA;QACZ,OAAO,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC;;IAG7C,eAAe,GAAA;AACb,QAAA,OAAO,wBAAwB,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;;;IAKvE,gBAAgB,GAAA;QACd,OAAO;AACL,YAAA,QAAQ,EAAE;AACR,gBAAA,KAAK,EAAE,CAAC,CAAgB,EAAE,CAAgB,KAAmB;AAC3D,oBAAA,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;AACb,wBAAA,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,4BAAA,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,CAAC;;wBAG5B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM;;oBAEvC,MAAM,OAAO,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAC3B,oBAAA,IAAI,CAAC,QAAQ,GAAG,OAAO;AACvB,oBAAA,OAAO,OAAO;iBACf;AACD,gBAAA,OAAO,EAAE,MAAM,EAAE;AAClB,aAAA;SACF;;IAGH,eAAe,GAAA;;QAEb,OAAO,IAAIA,QAAc,CAAmB;AAC1C,YAAA,KAAK,EAAE,IAAI,CAAC,KAAK,IAAI,EAAE;YACvB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,eAAe,EAAE,IAAI,CAAC,eAAe;YACrC,YAAY,EAAE,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,IAAI,CAAC;AAClD,SAAA,CAAC;;IAGJ,eAAe,GAAA;QACb,MAAM,cAAc,GAAG,iBAAiB,CAAC,IAAI,CAAC,QAAQ,CAAC;QACvD,MAAM,KAAK,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,aAAa,CAAC;AAEpD,QAAA,IAAI,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,YAAY,UAAU,IAAI,KAAK,YAAY,eAAe,CAAC,EAAE;YACpG,KAAK,CAAC,WAAW,GAAI,IAAI,CAAC,aAAuC,CAAC,WAAqB;YACvF,KAAK,CAAC,IAAI,GAAI,IAAI,CAAC,aAAuC,CAAC,IAAc;YACzE,KAAK,CAAC,gBAAgB,GAAI,IAAI,CAAC,aAAuC,CAAC,gBAA0B;YACjG,KAAK,CAAC,eAAe,GAAI,IAAI,CAAC,aAAuC,CAAC,eAAyB;YAC/F,KAAK,CAAC,CAAC,GAAI,IAAI,CAAC,aAAuC,CAAC,CAAW;;AAC9D,aAAA,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,QAAQ,IAAI,KAAK,YAAY,YAAY,EAAE;YAChF,KAAK,CAAC,WAAW,GAAI,IAAI,CAAC,aAAyC,CAAC,WAAqB;YACzF,KAAK,CAAC,IAAI,GAAI,IAAI,CAAC,aAAyC,CAAC,IAAc;YAC3E,KAAK,CAAC,IAAI,GAAI,IAAI,CAAC,aAAyC,CAAC,IAAc;YAC3E,KAAK,CAAC,WAAW,GAAI,IAAI,CAAC,aAAyC,CAAC,WAAqB;YACzF,KAAK,CAAC,gBAAgB,GAAI,IAAI,CAAC,aAAyC,CAAC,gBAA0B;YACnG,KAAK,CAAC,eAAe,GAAI,IAAI,CAAC,aAAyC,CAAC,eAAyB;YACjG,KAAK,CAAC,eAAe,GAAI,IAAI,CAAC,aAAyC,CAAC,eAAyB;;AAGnG,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;AAC1C,YAAA,OAAO,KAA4B;;QAGrC,OAAQ,KAA0B,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC;;AAE1D,IAAA,iBAAiB,CAAC,SAAmB,EAAE,KAAc,EAAE,SAAsB,EAAA;AAC3E,QAAA,IAAI,CAAC,UAAU,GAAG,sBAAsB,CAAC;YACvC,SAAS;YACT,KAAK;YACL,SAAS;AACV,SAAA,
CAAC;;AAGJ,IAAA,WAAW,CAAC,EACV,aAAa,GAAG,EAAE,EAClB,mBAAmB,GAIpB,EAAA;QACC,MAAM,cAAc,GAAG,iBAAiB,CAAC,IAAI,CAAC,QAAQ,CAAC;AACvD,QAAA,MAAM,QAAQ,GAAG,mBAAmB,GAAG,MAAM,CAAC,WAAW,CACvD,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,mBAAmB,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CACzF,GAAG,IAAI,CAAC,aAAa;QACtB,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE,aAAa,CAAC;AACtD,QAAA,OAAO,IAAI,cAAc,CAAC,OAAO,CAAC;;AAGpC,IAAA,kBAAkB,CAAC,YAA0B,EAAA;QAC3C,IAAI,YAAY,IAAI,gBAAgB,IAAI,YAAY,IAAI,YAAY,CAAC,cAAc,EAAE;AACnF,YAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC,cAAwC;;;IAI7E,eAAe,GAAA;AACb,QAAA,OAAO,OAAO,KAAuB,EAAE,MAAuB,KAAwC;YACpG,MAAM,EAAE,QAAQ,GAAG,EAAE,EAAE,GAAI,MAAM,EAAE,YAA0C,IAAI,EAAE;AACnF,YAAA,IAAI,CAAC,MAAM,IAAI,CAAC,QAAQ,EAAE;AACxB,gBAAA,MAAM,IAAI,KAAK,CAAC,CAAA,GAAA,EAAM,MAAM,GAAG,UAAU,GAAG,QAAQ,CAAA,SAAA,CAAW,CAAC;;AAElE,YAAA,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;AAClB,gBAAA,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM;;AAE7B,YAAA,IAAI,CAAC,MAAM,GAAG,MAAM;AACpB,YAAA,MAAM,EAAE,QAAQ,EAAE,GAAG,KAAK;YAE1B,IAAI,aAAa,GAAG,QAAQ;YAC5B,IAAI,CAAC,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,IAAI,IAAI,EAAE;gBAC3G,MAAM,uBAAuB,IAC3B,CAAC,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC;AAC1B,wBAAC,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,OAAO,IAAK,IAAI,CAAC,aAAwC,CAAC,KAAK,EAAE,QAAQ,CAAC,WAAW,CAAC,CAAC;AACnH,uBAAA,IAAI,CAAC,aAA0C,CAAC,QAAQ,IAAI,IAAI,CAAC;AAEvE,gBAAA,IAAI,CAAC,aAAa,GAAG,mBAAmB,CAAC;oBACvC,kBAAkB,EAAE,IAAI,CAAC,kBAAkB;oBAC3C,SAAS,EAAE,IAAI,CAAC,gBAAgB;oBAChC,YAAY,EAAE,IAAI,CAAC,YAAY;oBAC/B,UAAU,EAAE,IAAI,CAAC,UAAU;AAC3B,oBAAA,eAAe,EAAE,uBAAuB;AACzC,iBAAA,CAAC;;AAEJ,YAAA,IAAI,IAAI,CAAC,aAAa,EAAE;gBACtB,MAAM,EAAE,OAAO,EAAE,kBAAkB,EAAE,GAAG,IAAI,CAAC,aAAa,CAAC;oBACzD,QAAQ;oBACR,aAAa,EAAE,IAAI,CAAC,YAAY;;AAEjC,iBAAA,CAAC;AACF,gBAAA,IAAI,CAAC,kBAAkB,GAAG,kBAAkB;gBAC5C,aAAa,GAAG,OAAO;;YAGzB,MAAM,aAAa,GAAG,aAAa;YACnC,MAAM,YAAY,GAAG,aAAa,CAAC,MAAM,IAAI,CAAC,GAAG,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,IAAI;YAC/F,MAAM,YAAY,GAAG,aAAa,CAAC,MAAM,IAAI,CAAC,GAAG,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,IAAI;AAE/F,YAAA,IACE,QAAQ,KAAK,SAAS,CAAC;AACpB,mBAAA,YAAY,YAAY;AACxB,mBAAA,YAAY,YAAY;AACxB,mBAAA,OAAO,YAAY,CAAC,OAAO,KAAK,QAAQ,EAC3C;gBACA,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,OAAO,GAAG,EAAE;;AAGtD,YAAA,MAAM,mBAAmB,GAAG,YAAY,YAAY,WAAW;YAE/D,IAAI,mBAAmB,IAAI,QAAQ,KAAK,SAAS,CAAC,SAAS,EAAE;gBAC3D,8BAA8B,CAAC,aAAa,CAAC;;AACxC,iBAAA,IACL,mBAAmB;iBAClB,YAAY,CAAC,QAAQ,CAAC,IAAI,YAAY,CAAC,QAAQ,CAAC,CAAC,EAClD;gBACA,qBAAqB,CAAC,aAAa,CAAC;;AAGtC,YAAA,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,EAAE;gBAC5D,MAAM,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,cAAc;AAC1D,gBAAA,IAAI,iBAAiB,GAAG,IAAI,CAAC,YAAY,EAAE;AACzC,oBAAA,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,YAAY,GAAG,iBAAiB,IAAI,IAAI,CAAC,GAAG,IAAI;AACnF,oBAAA,MAAM,KAAK,CAAC,UAAU,CAAC;;;AAI3B,YAAA,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,GAAG,EAAE;AAEhC,YAAA,IAAI,MAAiC;AACrC,YAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,IAAI,CAAC,IAAI,CAAC,IAAI,yBAAyB,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE;AAC5E,gBAAA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,aAAa,EAAE,MAAM,CAAC;AAClE,gBAAA,IAAI,UAAsC;AAC1C,gBAAA,WAAW,MAAM,KAAK,IAAI,MAAM,EAAE;oBAChC,mBAAmB,CAAC,WAAW,CAAC,iBAAiB,EAAE,EAAE,KAAK,EAAE,EAAE,MAAM,CAAC;oBACrE,IAAI,CAAC,UAAU,EAAE;wBACf,UAAU,GAAG,KAAK;;yBACb;AACL,wBAAA,UAAU,GAAG,MAAM,CAAC,UAAU,EAAE,KAAK,CAAC;;;gBAI1C,UAAU,GAAG,qBAAqB,CAAC,IAAI,CAAC,QAAQ,EAAE,UAAU,CAAC;gBAC7D,MAAM,GAAG,EAAE,QAAQ,EAAE,CAAC,UAA4B,CAAC,EAAE;;iBAChD;AACL,gBAAA,MAAM,YAAY,IAAI,MAAM,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,aAAa,EAAE,MAAM,CAAC,CAAmB;AAC5F,gBAAA,IAA
I,CAAC,YAAY,CAAC,UAAU,EAAE,MAAM,IAAI,CAAC,IAAI,CAAC,EAAE;AAC9C,oBAAA,YAAY,CAAC,UAAU,GAAG,YAAY,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC,SAAS,KAAI;AACtE,wBAAA,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE;AACnB,4BAAA,OAAO,KAAK;;AAEd,wBAAA,OAAO,IAAI;AACb,qBAAC,CAAC;;gBAEJ,MAAM,GAAG,EAAE,QAAQ,EAAE,CAAC,YAAY,CAAC,EAAE;;YAGvC,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,QAAQ,GAAG,CAAC,CAAC,CAAC;AAC7C,YAAA,OAAO,MAAM;AACf,SAAC;;IAGH,cAAc,GAAA;AACZ,QAAA,MAAM,YAAY,GAAG,CAAC,KAAuB,EAAE,MAAuB,KAAY;AAChF,YAAA,IAAI,CAAC,MAAM,GAAG,MAAM;;;;;;AAMpB,YAAA,OAAO,cAAc,CAAC,KAAK,CAAC;AAC9B,SAAC;AAED,QAAA,MAAM,QAAQ,GAAG,IAAI,UAAU,CAAmB;YAChD,QAAQ,EAAE,IAAI,CAAC,UAAU;SAC1B;AACE,aAAA,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,eAAe,EAAE;AACrC,aAAA,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,eAAe,EAAE;AACrC,aAAA,OAAO,CAAC,KAAK,EAAE,KAAK;AACpB,aAAA,mBAAmB,CAAC,KAAK,EAAE,YAAY;AACvC,aAAA,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,GAAG,GAAG,GAAG,KAAK,CAAC;AAE7C,QAAA,OAAO,QAAQ,CAAC,OAAO,EAAE;;;AAK3B;;AAEG;IACH,eAAe,CAAC,OAAe,EAAE,WAA0B,EAAA;AACzD,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAGvC,QAAA,MAAM,CAAC,MAAM,EAAE,SAAS,CAAC,GAAG,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC;AACxD,QAAA,IAAI,WAAW,CAAC,IAAI,KAAK,SAAS,CAAC,UAAU,IAAI,WAAW,CAAC,UAAU,EAAE;AACvE,YAAA,KAAK,MAAM,SAAS,IAAI,WAAW,CAAC,UAAU,EAAE;AAC9C,gBAAA,MAAM,UAAU,GAAG,SAAS,CAAC,EAAE,IAAI,EAAE;AACrC,gBAAA,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE;oBACvD;;gBAEF,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,UAAU,EAAE,MAAM,CAAC;;;AAIhD,QAAA,MAAM,OAAO,GAAc;YACzB,SAAS;AACT,YAAA,EAAE,EAAE,MAAM;YACV,IAAI,EAAE,WAAW,CAAC,IAAI;AACtB,YAAA,KAAK,EAAE,IAAI,CAAC,WAAW,CAAC,MAAM;YAC9B,WAAW;AACX,YAAA,KAAK,EAAE,IAAI;SACZ;AAED,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,IAAI,EAAE;QAC9B,IAAI,KAAK,EAAE;AACT,YAAA,OAAO,CAAC,KAAK,GAAG,KAAK;;AAGvB,QAAA,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC;QAC9B,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,CAAC;QAC/C,mBAAmB,CAAC,WAAW,CAAC,WAAW,EAAE,OAAO,EAAE,IAAI,CAAC,MAAM,CAAC;AAClE,QAAA,OAAO,MAAM;;IAGf,uBAAuB,CAAC,IAAmB,EAAE,QAAkC,EAAA;AAC7E,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAGvC,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB;;AAGF,QAAA,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,IAAI;AAC9B,QAAA,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM;AAC/B,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE;QAC3D,IAAI,CAAC,MAAM,EAAE;AACX,YAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,YAAY,CAAA,CAAE,CAAC;;QAGrE,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;QACvC,IAAI,CAAC,OAAO,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,gCAAgC,MAAM,CAAA,CAAE,CAAC;;AAG3D,QAAA,MAAM,IAAI,GAAG,OAAO,KAAK,KAAK,QAAQ,GAAG,KAAK,GAAG,KAAK,CAAC,KAAK;AAC5D,QAAA,MAAM,SAAS,GAAG;AAChB,YAAA,IAAI,EAAE,OAAO,IAAI,KAAK,QAAQ,GAAG,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;AAC5D,YAAA,IAAI,EAAE,MAAM,CAAC,IAAI,IAAI,EAAE;YACvB,EAAE,EAAE,MAAM,CAAC,YAAY;AACvB,YAAA,MAAM,EAAE,OAAO,MAAM,CAAC,OAAO,KAAK;kBAC9B,MAAM,CAAC;kBACP,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,CAAC;AAClC,YAAA,QAAQ,EAAE,CAAC;SACZ;AAED,QAAA,IAAI,CAAC,eAAe,EAAE,UAAU,CAAC,WAAW,CAAC,qBAAqB,CAAC,EAAE,MAAM,CACzE,WAAW,CAAC,qBAAqB,EACjC,EAAE,MAAM,EAAE;AACR,gBAAA,EAAE,EAAE,MAAM;gBACV,KAAK,EAAE,OAAO,CAAC,KAAK;AACpB,gBAAA,IAAI,EAAE,WAAW;gBACjB;AACsB,aAAA;AACvB,SAAA,EACD,QAAQ,EACR,IAAI,CACL;;IAEH,mBAAmB,CAAC,IAAqB,EAAE,QAAkC,EAAA;AAC3E,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAGvC,QAAA,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE;AACZ,YAAA,OAAO,CAAC,IAAI,CAAC,oCAAoC,CAAC;YAClD;;AAGF,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE;QACtD,IAAI,CAAC,MAAM,EAAE;YACX,MAAM,IAAI,KAAK,CAAC,CAAA,iCAAA,EAAoC,IAAI,CAAC,EAAE,CAAE,CAAA,CAAC;;QAGhE,MAAM,EAA
E,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,IAAI;QAEzC,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;QACvC,IAAI,CAAC,OAAO,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,gCAAgC,MAAM,CAAA,CAAE,CAAC;;AAG3D,QAAA,MAAM,SAAS,GAAwB;YACrC,EAAE,EAAE,IAAI,CAAC,EAAE;YACX,IAAI,EAAE,IAAI,IAAI,EAAE;AAChB,YAAA,IAAI,EAAE,OAAO,IAAI,KAAK,QAAQ,GAAG,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;AAC5D,YAAA,MAAM,EAAE,CAAwB,qBAAA,EAAA,KAAK,EAAE,OAAO,GAAG,CAAK,EAAA,EAAA,KAAK,CAAC,OAAO,CAAA,CAAE,GAAG,EAAE,CAAE,CAAA;AAC5E,YAAA,QAAQ,EAAE,CAAC;SACZ;AAED,QAAA,IAAI,CAAC,eAAe,EAAE,UAAU,CAAC,WAAW,CAAC,qBAAqB,CAAC,EAAE,MAAM,CACzE,WAAW,CAAC,qBAAqB,EACjC,EAAE,MAAM,EAAE;AACR,gBAAA,EAAE,EAAE,MAAM;gBACV,KAAK,EAAE,OAAO,CAAC,KAAK;AACpB,gBAAA,IAAI,EAAE,WAAW;gBACjB;AACsB,aAAA;AACvB,SAAA,EACD,QAAQ,EACR,IAAI,CACL;;IAGH,oBAAoB,CAAC,EAAU,EAAE,KAAsB,EAAA;AACrD,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;aAChC,IAAI,CAAC,EAAE,EAAE;AACd,YAAA,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC;;AAErC,QAAA,MAAM,YAAY,GAAwB;YACxC,EAAE;YACF,KAAK;SACN;QACD,mBAAmB,CAAC,WAAW,CAAC,iBAAiB,EAAE,YAAY,EAAE,IAAI,CAAC,MAAM,CAAC;;IAG/E,oBAAoB,CAAC,EAAU,EAAE,KAAqB,EAAA;AACpD,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAEvC,QAAA,MAAM,YAAY,GAAwB;YACxC,EAAE;YACF,KAAK;SACN;QACD,mBAAmB,CAAC,WAAW,CAAC,gBAAgB,EAAE,YAAY,EAAE,IAAI,CAAC,MAAM,CAAC;;AAG9E,IAAA,sBAAsB,GAAG,CAAC,MAAc,EAAE,KAAuB,KAAU;AACzE,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAEvC,QAAA,MAAM,cAAc,GAA0B;AAC5C,YAAA,EAAE,EAAE,MAAM;YACV,KAAK;SACN;QACD,mBAAmB,CAAC,WAAW,CAAC,kBAAkB,EAAE,cAAc,EAAE,IAAI,CAAC,MAAM,CAAC;AAClF,KAAC;AACF;;;;"}
+
{"version":3,"file":"Graph.mjs","sources":["../../../src/graphs/Graph.ts"],"sourcesContent":["// src/graphs/Graph.ts\nimport { nanoid } from 'nanoid';\nimport { concat } from '@langchain/core/utils/stream';\nimport { ToolNode } from '@langchain/langgraph/prebuilt';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { START, END, StateGraph } from '@langchain/langgraph';\nimport { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';\nimport { Runnable, RunnableConfig } from '@langchain/core/runnables';\nimport { dispatchCustomEvent } from '@langchain/core/callbacks/dispatch';\nimport { AIMessageChunk, ToolMessage, SystemMessage } from '@langchain/core/messages';\nimport type { BaseMessage, BaseMessageFields, UsageMetadata } from '@langchain/core/messages';\nimport type * as t from '@/types';\nimport { Providers, GraphEvents, GraphNodeKeys, StepTypes, Callback, ContentTypes } from '@/common';\nimport type { ToolCall } from '@langchain/core/messages/tool';\nimport { getChatModelClass, manualToolStreamProviders } from '@/llm/providers';\nimport { ToolNode as CustomToolNode, toolsCondition } from '@/tools/ToolNode';\nimport {\n createPruneMessages,\n modifyDeltaProperties,\n formatArtifactPayload,\n convertMessagesToContent,\n formatAnthropicArtifactContent,\n} from '@/messages';\nimport { resetIfNotEmpty, isOpenAILike, isGoogleLike, joinKeys, sleep } from '@/utils';\nimport { createFakeStreamingLLM } from '@/llm/fake';\nimport { HandlerRegistry } from '@/events';\n\nconst { AGENT, TOOLS } = GraphNodeKeys;\nexport type GraphNode = GraphNodeKeys | typeof START;\nexport type ClientCallback<T extends unknown[]> = (graph: StandardGraph, ...args: T) => void;\nexport type ClientCallbacks = {\n [Callback.TOOL_ERROR]?: ClientCallback<[Error, string]>;\n [Callback.TOOL_START]?: ClientCallback<unknown[]>;\n [Callback.TOOL_END]?: ClientCallback<unknown[]>;\n}\nexport type SystemCallbacks = {\n [K in keyof ClientCallbacks]: ClientCallbacks[K] extends ClientCallback<infer Args>\n ? 
(...args: Args) => void\n : never;\n};\n\nexport abstract class Graph<\n T extends t.BaseGraphState = t.BaseGraphState,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n TNodeName extends string = string,\n> {\n abstract resetValues(): void;\n abstract createGraphState(): t.GraphStateChannels<T>;\n abstract initializeTools(): CustomToolNode<T> | ToolNode<T>;\n abstract initializeModel(): Runnable;\n abstract getRunMessages(): BaseMessage[] | undefined;\n abstract getContentParts(): t.MessageContentComplex[] | undefined;\n abstract generateStepId(stepKey: string): [string, number];\n abstract getKeyList(metadata: Record<string, unknown> | undefined): (string | number | undefined)[];\n abstract getStepKey(metadata: Record<string, unknown> | undefined): string;\n abstract checkKeyList(keyList: (string | number | undefined)[]): boolean;\n abstract getStepIdByKey(stepKey: string, index?: number): string\n abstract getRunStep(stepId: string): t.RunStep | undefined;\n abstract dispatchRunStep(stepKey: string, stepDetails: t.StepDetails): string;\n abstract dispatchRunStepDelta(id: string, delta: t.ToolCallDelta): void;\n abstract dispatchMessageDelta(id: string, delta: t.MessageDelta): void;\n abstract dispatchReasoningDelta(stepId: string, delta: t.ReasoningDelta): void;\n abstract handleToolCallCompleted(data: t.ToolEndData, metadata?: Record<string, unknown>): void;\n\n abstract createCallModel(): (state: T, config?: RunnableConfig) => Promise<Partial<T>>;\n abstract createWorkflow(): t.CompiledWorkflow<T>;\n lastToken?: string;\n tokenTypeSwitch?: 'reasoning' | 'content';\n reasoningKey: 'reasoning_content' | 'reasoning' = 'reasoning_content';\n currentTokenType: ContentTypes.TEXT | ContentTypes.THINK = ContentTypes.TEXT;\n messageStepHasToolCalls: Map<string, boolean> = new Map();\n messageIdsByStepKey: Map<string, string> = new Map();\n prelimMessageIdsByStepKey: Map<string, string> = new Map();\n config: RunnableConfig | undefined;\n contentData: t.RunStep[] = [];\n stepKeyIds: Map<string, string[]> = new Map<string, string[]>();\n contentIndexMap: Map<string, number> = new Map();\n toolCallStepIds: Map<string, string> = new Map();\n currentUsage: Partial<UsageMetadata> | undefined;\n indexTokenCountMap: Record<string, number> = {};\n maxContextTokens: number | undefined;\n pruneMessages?: ReturnType<typeof createPruneMessages>;\n /** The amount of time that should pass before another consecutive API call */\n streamBuffer: number | undefined;\n tokenCounter?: t.TokenCounter;\n signal?: AbortSignal;\n}\n\nexport class StandardGraph extends Graph<\n t.BaseGraphState,\n GraphNode\n> {\n private graphState: t.GraphStateChannels<t.BaseGraphState>;\n clientOptions: t.ClientOptions;\n boundModel: Runnable;\n /** The last recorded timestamp that a stream API call was invoked */\n lastStreamCall: number | undefined;\n handlerRegistry: HandlerRegistry | undefined;\n systemMessage: SystemMessage | undefined;\n messages: BaseMessage[] = [];\n runId: string | undefined;\n tools?: t.GenericTool[];\n toolMap?: t.ToolMap;\n startIndex: number = 0;\n provider: Providers;\n toolEnd: boolean;\n signal: AbortSignal | undefined;\n\n constructor({\n runId,\n tools,\n signal,\n toolMap,\n provider,\n streamBuffer,\n instructions,\n reasoningKey,\n clientOptions,\n toolEnd = false,\n additional_instructions = '',\n } : t.StandardGraphInput) {\n super();\n this.runId = runId;\n this.tools = tools;\n this.signal = signal;\n this.toolEnd = toolEnd;\n this.toolMap = toolMap;\n this.provider = 
provider;\n this.streamBuffer = streamBuffer;\n this.clientOptions = clientOptions;\n this.graphState = this.createGraphState();\n this.boundModel = this.initializeModel();\n if (reasoningKey) {\n this.reasoningKey = reasoningKey;\n }\n\n let finalInstructions: string | BaseMessageFields = instructions ?? '';\n if (additional_instructions) {\n finalInstructions = finalInstructions ? `${finalInstructions}\\n\\n${additional_instructions}` : additional_instructions;\n }\n\n if (finalInstructions && provider === Providers.ANTHROPIC && (clientOptions as t.AnthropicClientOptions).clientOptions?.defaultHeaders?.['anthropic-beta']?.includes('prompt-caching')) {\n finalInstructions = {\n content: [\n {\n type: 'text',\n text: instructions,\n cache_control: { type: 'ephemeral' },\n },\n ],\n };\n }\n\n if (finalInstructions) {\n this.systemMessage = new SystemMessage(finalInstructions);\n }\n }\n\n /* Init */\n\n resetValues(keepContent?: boolean): void {\n this.messages = [];\n this.config = resetIfNotEmpty(this.config, undefined);\n if (keepContent !== true) {\n this.contentData = resetIfNotEmpty(this.contentData, []);\n this.contentIndexMap = resetIfNotEmpty(this.contentIndexMap, new Map());\n }\n this.stepKeyIds = resetIfNotEmpty(this.stepKeyIds, new Map());\n this.toolCallStepIds = resetIfNotEmpty(this.toolCallStepIds, new Map());\n this.messageIdsByStepKey = resetIfNotEmpty(this.messageIdsByStepKey, new Map());\n this.messageStepHasToolCalls = resetIfNotEmpty(this.prelimMessageIdsByStepKey, new Map());\n this.prelimMessageIdsByStepKey = resetIfNotEmpty(this.prelimMessageIdsByStepKey, new Map());\n this.currentTokenType = resetIfNotEmpty(this.currentTokenType, ContentTypes.TEXT);\n this.lastToken = resetIfNotEmpty(this.lastToken, undefined);\n this.tokenTypeSwitch = resetIfNotEmpty(this.tokenTypeSwitch, undefined);\n this.indexTokenCountMap = resetIfNotEmpty(this.indexTokenCountMap, {});\n this.currentUsage = resetIfNotEmpty(this.currentUsage, undefined);\n this.tokenCounter = resetIfNotEmpty(this.tokenCounter, undefined);\n this.maxContextTokens = resetIfNotEmpty(this.maxContextTokens, undefined);\n }\n\n /* Run Step Processing */\n\n getRunStep(stepId: string): t.RunStep | undefined {\n const index = this.contentIndexMap.get(stepId);\n if (index !== undefined) {\n return this.contentData[index];\n }\n return undefined;\n }\n\n getStepKey(metadata: Record<string, unknown> | undefined): string {\n if (!metadata) return '';\n\n const keyList = this.getKeyList(metadata);\n if (this.checkKeyList(keyList)) {\n throw new Error('Missing metadata');\n }\n\n return joinKeys(keyList);\n }\n\n getStepIdByKey(stepKey: string, index?: number): string {\n const stepIds = this.stepKeyIds.get(stepKey);\n if (!stepIds) {\n throw new Error(`No step IDs found for stepKey ${stepKey}`);\n }\n\n if (index === undefined) {\n return stepIds[stepIds.length - 1];\n }\n\n return stepIds[index];\n }\n\n generateStepId(stepKey: string): [string, number] {\n const stepIds = this.stepKeyIds.get(stepKey);\n let newStepId: string | undefined;\n let stepIndex = 0;\n if (stepIds) {\n stepIndex = stepIds.length;\n newStepId = `step_${nanoid()}`;\n stepIds.push(newStepId);\n this.stepKeyIds.set(stepKey, stepIds);\n } else {\n newStepId = `step_${nanoid()}`;\n this.stepKeyIds.set(stepKey, [newStepId]);\n }\n\n return [newStepId, stepIndex];\n }\n\n getKeyList(metadata: Record<string, unknown> | undefined): (string | number | undefined)[] {\n if (!metadata) return [];\n\n const keyList = [\n metadata.run_id as string,\n 
metadata.thread_id as string,\n metadata.langgraph_node as string,\n metadata.langgraph_step as number,\n metadata.checkpoint_ns as string,\n ];\n if (this.currentTokenType === ContentTypes.THINK) {\n keyList.push('reasoning');\n }\n\n return keyList;\n }\n\n checkKeyList(keyList: (string | number | undefined)[]): boolean {\n return keyList.some((key) => key === undefined);\n }\n\n /* Misc.*/\n\n getRunMessages(): BaseMessage[] | undefined {\n return this.messages.slice(this.startIndex);\n }\n\n getContentParts(): t.MessageContentComplex[] | undefined {\n return convertMessagesToContent(this.messages.slice(this.startIndex));\n }\n\n /* Graph */\n\n createGraphState(): t.GraphStateChannels<t.BaseGraphState> {\n return {\n messages: {\n value: (x: BaseMessage[], y: BaseMessage[]): BaseMessage[] => {\n if (!x.length) {\n if (this.systemMessage) {\n x.push(this.systemMessage);\n }\n\n this.startIndex = x.length + y.length;\n }\n const current = x.concat(y);\n this.messages = current;\n return current;\n },\n default: () => [],\n },\n };\n }\n\n initializeTools(): CustomToolNode<t.BaseGraphState> | ToolNode<t.BaseGraphState> {\n // return new ToolNode<t.BaseGraphState>(this.tools);\n return new CustomToolNode<t.BaseGraphState>({\n tools: this.tools || [],\n toolMap: this.toolMap,\n toolCallStepIds: this.toolCallStepIds,\n errorHandler: this.handleToolCallError.bind(this),\n });\n }\n\n initializeModel(): Runnable {\n const ChatModelClass = getChatModelClass(this.provider);\n const model = new ChatModelClass(this.clientOptions);\n\n if (isOpenAILike(this.provider) && (model instanceof ChatOpenAI || model instanceof AzureChatOpenAI)) {\n model.temperature = (this.clientOptions as t.OpenAIClientOptions).temperature as number;\n model.topP = (this.clientOptions as t.OpenAIClientOptions).topP as number;\n model.frequencyPenalty = (this.clientOptions as t.OpenAIClientOptions).frequencyPenalty as number;\n model.presencePenalty = (this.clientOptions as t.OpenAIClientOptions).presencePenalty as number;\n model.n = (this.clientOptions as t.OpenAIClientOptions).n as number;\n } else if (this.provider === Providers.VERTEXAI && model instanceof ChatVertexAI) {\n model.temperature = (this.clientOptions as t.VertexAIClientOptions).temperature as number;\n model.topP = (this.clientOptions as t.VertexAIClientOptions).topP as number;\n model.topK = (this.clientOptions as t.VertexAIClientOptions).topK as number;\n model.topLogprobs = (this.clientOptions as t.VertexAIClientOptions).topLogprobs as number;\n model.frequencyPenalty = (this.clientOptions as t.VertexAIClientOptions).frequencyPenalty as number;\n model.presencePenalty = (this.clientOptions as t.VertexAIClientOptions).presencePenalty as number;\n model.maxOutputTokens = (this.clientOptions as t.VertexAIClientOptions).maxOutputTokens as number;\n }\n\n if (!this.tools || this.tools.length === 0) {\n return model as unknown as Runnable;\n }\n\n return (model as t.ModelWithTools).bindTools(this.tools);\n }\n overrideTestModel(responses: string[], sleep?: number, toolCalls?: ToolCall[]): void {\n this.boundModel = createFakeStreamingLLM({\n responses,\n sleep,\n toolCalls,\n });\n }\n\n getNewModel({\n clientOptions = {},\n omitOriginalOptions,\n } : {\n clientOptions?: t.ClientOptions;\n omitOriginalOptions?: string[]\n }): t.ChatModelInstance {\n const ChatModelClass = getChatModelClass(this.provider);\n const _options = omitOriginalOptions ? 
Object.fromEntries(\n Object.entries(this.clientOptions).filter(([key]) => !omitOriginalOptions.includes(key)),\n ) : this.clientOptions;\n const options = Object.assign(_options, clientOptions);\n return new ChatModelClass(options);\n }\n\n storeUsageMetadata(finalMessage?: BaseMessage): void {\n if (finalMessage && 'usage_metadata' in finalMessage && finalMessage.usage_metadata) {\n this.currentUsage = finalMessage.usage_metadata as Partial<UsageMetadata>;\n }\n }\n\n createCallModel() {\n return async (state: t.BaseGraphState, config?: RunnableConfig): Promise<Partial<t.BaseGraphState>> => {\n const { provider = '' } = (config?.configurable as t.GraphConfig | undefined) ?? {} ;\n if (!config || !provider) {\n throw new Error(`No ${config ? 'provider' : 'config'} provided`);\n }\n if (!config.signal) {\n config.signal = this.signal;\n }\n this.config = config;\n const { messages } = state;\n\n let messagesToUse = messages;\n if (!this.pruneMessages && this.tokenCounter && this.maxContextTokens != null && this.indexTokenCountMap[0] != null) {\n const isAnthropicWithThinking = (\n (\n (\n this.provider === Providers.ANTHROPIC && (this.clientOptions as t.AnthropicClientOptions).thinking != null\n )\n ||\n (this.provider === Providers.BEDROCK && (\n (this.clientOptions as t.BedrockAnthropicInput).additionalModelRequestFields?.['thinking'] != null\n )\n )\n )\n );\n\n this.pruneMessages = createPruneMessages({\n provider: this.provider,\n indexTokenCountMap: this.indexTokenCountMap,\n maxTokens: this.maxContextTokens,\n tokenCounter: this.tokenCounter,\n startIndex: this.startIndex,\n thinkingEnabled: isAnthropicWithThinking,\n });\n }\n if (this.pruneMessages) {\n const { context, indexTokenCountMap } = this.pruneMessages({\n messages,\n usageMetadata: this.currentUsage,\n // startOnMessageType: 'human',\n });\n this.indexTokenCountMap = indexTokenCountMap;\n messagesToUse = context;\n }\n\n const finalMessages = messagesToUse;\n const lastMessageX = finalMessages.length >= 2 ? finalMessages[finalMessages.length - 2] : null;\n const lastMessageY = finalMessages.length >= 1 ? finalMessages[finalMessages.length - 1] : null;\n\n if (\n provider === Providers.BEDROCK\n && lastMessageX instanceof AIMessageChunk\n && lastMessageY instanceof ToolMessage\n && typeof lastMessageX.content === 'string'\n ) {\n finalMessages[finalMessages.length - 2].content = '';\n }\n\n const isLatestToolMessage = lastMessageY instanceof ToolMessage;\n\n if (isLatestToolMessage && provider === Providers.ANTHROPIC) {\n formatAnthropicArtifactContent(finalMessages);\n } else if (\n isLatestToolMessage &&\n (isOpenAILike(provider) || isGoogleLike(provider))\n ) {\n formatArtifactPayload(finalMessages);\n }\n\n if (this.lastStreamCall != null && this.streamBuffer != null) {\n const timeSinceLastCall = Date.now() - this.lastStreamCall;\n if (timeSinceLastCall < this.streamBuffer) {\n const timeToWait = Math.ceil((this.streamBuffer - timeSinceLastCall) / 1000) * 1000;\n await sleep(timeToWait);\n }\n }\n\n this.lastStreamCall = Date.now();\n\n let result: Partial<t.BaseGraphState>;\n if ((this.tools?.length ?? 
0) > 0 && manualToolStreamProviders.has(provider)) {\n const stream = await this.boundModel.stream(finalMessages, config);\n let finalChunk: AIMessageChunk | undefined;\n for await (const chunk of stream) {\n dispatchCustomEvent(GraphEvents.CHAT_MODEL_STREAM, { chunk }, config);\n if (!finalChunk) {\n finalChunk = chunk;\n } else {\n finalChunk = concat(finalChunk, chunk);\n }\n }\n\n finalChunk = modifyDeltaProperties(this.provider, finalChunk);\n result = { messages: [finalChunk as AIMessageChunk] };\n } else {\n const finalMessage = (await this.boundModel.invoke(finalMessages, config)) as AIMessageChunk;\n if ((finalMessage.tool_calls?.length ?? 0) > 0) {\n finalMessage.tool_calls = finalMessage.tool_calls?.filter((tool_call) => {\n if (!tool_call.name) {\n return false;\n }\n return true;\n });\n }\n result = { messages: [finalMessage] };\n }\n\n this.storeUsageMetadata(result.messages?.[0]);\n return result;\n };\n }\n\n createWorkflow(): t.CompiledWorkflow<t.BaseGraphState> {\n const routeMessage = (state: t.BaseGraphState, config?: RunnableConfig): string => {\n this.config = config;\n // const lastMessage = state.messages[state.messages.length - 1] as AIMessage;\n // if (!lastMessage?.tool_calls?.length) {\n // return END;\n // }\n // return TOOLS;\n return toolsCondition(state);\n };\n\n const workflow = new StateGraph<t.BaseGraphState>({\n channels: this.graphState,\n })\n .addNode(AGENT, this.createCallModel())\n .addNode(TOOLS, this.initializeTools())\n .addEdge(START, AGENT)\n .addConditionalEdges(AGENT, routeMessage)\n .addEdge(TOOLS, this.toolEnd ? END : AGENT);\n\n return workflow.compile();\n }\n\n /* Dispatchers */\n\n /**\n * Dispatches a run step to the client, returns the step ID\n */\n dispatchRunStep(stepKey: string, stepDetails: t.StepDetails): string {\n if (!this.config) {\n throw new Error('No config provided');\n }\n\n const [stepId, stepIndex] = this.generateStepId(stepKey);\n if (stepDetails.type === StepTypes.TOOL_CALLS && stepDetails.tool_calls) {\n for (const tool_call of stepDetails.tool_calls) {\n const toolCallId = tool_call.id ?? '';\n if (!toolCallId || this.toolCallStepIds.has(toolCallId)) {\n continue;\n }\n this.toolCallStepIds.set(toolCallId, stepId);\n }\n }\n\n const runStep: t.RunStep = {\n stepIndex,\n id: stepId,\n type: stepDetails.type,\n index: this.contentData.length,\n stepDetails,\n usage: null,\n };\n\n const runId = this.runId ?? '';\n if (runId) {\n runStep.runId = runId;\n }\n\n this.contentData.push(runStep);\n this.contentIndexMap.set(stepId, runStep.index);\n dispatchCustomEvent(GraphEvents.ON_RUN_STEP, runStep, this.config);\n return stepId;\n }\n\n handleToolCallCompleted(data: t.ToolEndData, metadata?: Record<string, unknown>): void {\n if (!this.config) {\n throw new Error('No config provided');\n }\n\n if (!data.output) {\n return;\n }\n\n const { input, output } = data;\n const { tool_call_id } = output;\n const stepId = this.toolCallStepIds.get(tool_call_id) ?? '';\n if (!stepId) {\n throw new Error(`No stepId found for tool_call_id ${tool_call_id}`);\n }\n\n const runStep = this.getRunStep(stepId);\n if (!runStep) {\n throw new Error(`No run step found for stepId ${stepId}`);\n }\n\n const args = typeof input === 'string' ? input : input.input;\n const tool_call = {\n args: typeof args === 'string' ? args : JSON.stringify(args),\n name: output.name ?? '',\n id: output.tool_call_id,\n output: typeof output.content === 'string'\n ? 
output.content\n : JSON.stringify(output.content),\n progress: 1,\n };\n\n this.handlerRegistry?.getHandler(GraphEvents.ON_RUN_STEP_COMPLETED)?.handle(\n GraphEvents.ON_RUN_STEP_COMPLETED,\n { result: {\n id: stepId,\n index: runStep.index,\n type: 'tool_call',\n tool_call\n } as t.ToolCompleteEvent,\n },\n metadata,\n this,\n );\n }\n handleToolCallError(data: t.ToolErrorData, metadata?: Record<string, unknown>): void {\n if (!this.config) {\n throw new Error('No config provided');\n }\n\n if (!data.id) {\n console.warn('No Tool ID provided for Tool Error');\n return;\n }\n\n const stepId = this.toolCallStepIds.get(data.id) ?? '';\n if (!stepId) {\n throw new Error(`No stepId found for tool_call_id ${data.id}`);\n }\n\n const { name, input: args, error } = data;\n\n const runStep = this.getRunStep(stepId);\n if (!runStep) {\n throw new Error(`No run step found for stepId ${stepId}`);\n }\n\n const tool_call: t.ProcessedToolCall = {\n id: data.id,\n name: name ?? '',\n args: typeof args === 'string' ? args : JSON.stringify(args),\n output: `Error processing tool${error?.message ? `: ${error.message}` : ''}`,\n progress: 1,\n };\n\n this.handlerRegistry?.getHandler(GraphEvents.ON_RUN_STEP_COMPLETED)?.handle(\n GraphEvents.ON_RUN_STEP_COMPLETED,\n { result: {\n id: stepId,\n index: runStep.index,\n type: 'tool_call',\n tool_call\n } as t.ToolCompleteEvent,\n },\n metadata,\n this,\n );\n }\n\n dispatchRunStepDelta(id: string, delta: t.ToolCallDelta): void {\n if (!this.config) {\n throw new Error('No config provided');\n } else if (!id) {\n throw new Error('No step ID found');\n }\n const runStepDelta: t.RunStepDeltaEvent = {\n id,\n delta,\n };\n dispatchCustomEvent(GraphEvents.ON_RUN_STEP_DELTA, runStepDelta, this.config);\n }\n\n dispatchMessageDelta(id: string, delta: t.MessageDelta): void {\n if (!this.config) {\n throw new Error('No config provided');\n }\n const messageDelta: t.MessageDeltaEvent = {\n id,\n delta,\n };\n dispatchCustomEvent(GraphEvents.ON_MESSAGE_DELTA, messageDelta, this.config);\n }\n\n dispatchReasoningDelta = (stepId: string, delta: t.ReasoningDelta): void => {\n if (!this.config) {\n throw new Error('No config provided');\n }\n const reasoningDelta: t.ReasoningDeltaEvent = {\n id: stepId,\n delta,\n };\n dispatchCustomEvent(GraphEvents.ON_REASONING_DELTA, reasoningDelta, this.config);\n 
};\n}\n"],"names":["CustomToolNode"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AA2BA,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,aAAa;MAchB,KAAK,CAAA;AAyBzB,IAAA,SAAS;AACT,IAAA,eAAe;IACf,YAAY,GAAsC,mBAAmB;AACrE,IAAA,gBAAgB,GAA2C,YAAY,CAAC,IAAI;AAC5E,IAAA,uBAAuB,GAAyB,IAAI,GAAG,EAAE;AACzD,IAAA,mBAAmB,GAAwB,IAAI,GAAG,EAAE;AACpD,IAAA,yBAAyB,GAAwB,IAAI,GAAG,EAAE;AAC1D,IAAA,MAAM;IACN,WAAW,GAAgB,EAAE;AAC7B,IAAA,UAAU,GAA0B,IAAI,GAAG,EAAoB;AAC/D,IAAA,eAAe,GAAwB,IAAI,GAAG,EAAE;AAChD,IAAA,eAAe,GAAwB,IAAI,GAAG,EAAE;AAChD,IAAA,YAAY;IACZ,kBAAkB,GAA2B,EAAE;AAC/C,IAAA,gBAAgB;AAChB,IAAA,aAAa;;AAEb,IAAA,YAAY;AACZ,IAAA,YAAY;AACZ,IAAA,MAAM;AACP;AAEK,MAAO,aAAc,SAAQ,KAGlC,CAAA;AACS,IAAA,UAAU;AAClB,IAAA,aAAa;AACb,IAAA,UAAU;;AAEV,IAAA,cAAc;AACd,IAAA,eAAe;AACf,IAAA,aAAa;IACb,QAAQ,GAAkB,EAAE;AAC5B,IAAA,KAAK;AACL,IAAA,KAAK;AACL,IAAA,OAAO;IACP,UAAU,GAAW,CAAC;AACtB,IAAA,QAAQ;AACR,IAAA,OAAO;AACP,IAAA,MAAM;IAEN,WAAY,CAAA,EACV,KAAK,EACL,KAAK,EACL,MAAM,EACN,OAAO,EACP,QAAQ,EACR,YAAY,EACZ,YAAY,EACZ,YAAY,EACZ,aAAa,EACb,OAAO,GAAG,KAAK,EACf,uBAAuB,GAAG,EAAE,GACN,EAAA;AACtB,QAAA,KAAK,EAAE;AACP,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK;AAClB,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK;AAClB,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM;AACpB,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO;AACtB,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO;AACtB,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ;AACxB,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY;AAChC,QAAA,IAAI,CAAC,aAAa,GAAG,aAAa;AAClC,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,gBAAgB,EAAE;AACzC,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,eAAe,EAAE;QACxC,IAAI,YAAY,EAAE;AAChB,YAAA,IAAI,CAAC,YAAY,GAAG,YAAY;;AAGlC,QAAA,IAAI,iBAAiB,GAA+B,YAAY,IAAI,EAAE;QACtE,IAAI,uBAAuB,EAAE;AAC3B,YAAA,iBAAiB,GAAG,iBAAiB,GAAG,CAAG,EAAA,iBAAiB,CAAO,IAAA,EAAA,uBAAuB,CAAE,CAAA,GAAG,uBAAuB;;QAGxH,IAAI,iBAAiB,IAAI,QAAQ,KAAK,SAAS,CAAC,SAAS,IAAK,aAA0C,CAAC,aAAa,EAAE,cAAc,GAAG,gBAAgB,CAAC,EAAE,QAAQ,CAAC,gBAAgB,CAAC,EAAE;AACtL,YAAA,iBAAiB,GAAG;AAClB,gBAAA,OAAO,EAAE;AACP,oBAAA;AACE,wBAAA,IAAI,EAAE,MAAM;AACZ,wBAAA,IAAI,EAAE,YAAY;AAClB,wBAAA,aAAa,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE;AACrC,qBAAA;AACF,iBAAA;aACF;;QAGH,IAAI,iBAAiB,EAAE;YACrB,IAAI,CAAC,aAAa,GAAG,IAAI,aAAa,CAAC,iBAAiB,CAAC;;;;AAM7D,IAAA,WAAW,CAAC,WAAqB,EAAA;AAC/B,QAAA,IAAI,CAAC,QAAQ,GAAG,EAAE;QAClB,IAAI,CAAC,MAAM,GAAG,eAAe,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC;AACrD,QAAA,IAAI,WAAW,KAAK,IAAI,EAAE;YACxB,IAAI,CAAC,WAAW,GAAG,eAAe,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,CAAC;AACxD,YAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,GAAG,EAAE,CAAC;;AAEzE,QAAA,IAAI,CAAC,UAAU,GAAG,eAAe,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,GAAG,EAAE,CAAC;AAC7D,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC,IAAI,CAAC,eAAe,EAAE,IAAI,GAAG,EAAE,CAAC;AACvE,QAAA,IAAI,CAAC,mBAAmB,GAAG,eAAe,CAAC,IAAI,CAAC,mBAAmB,EAAE,IAAI,GAAG,EAAE,CAAC;AAC/E,QAAA,IAAI,CAAC,uBAAuB,GAAG,eAAe,CAAC,IAAI,CAAC,yBAAyB,EAAE,IAAI,GAAG,EAAE,CAAC;AACzF,QAAA,IAAI,CAAC,yBAAyB,GAAG,eAAe,CAAC,IAAI,CAAC,yBAAyB,EAAE,IAAI,GAAG,EAAE,CAAC;AAC3F,QAAA,IAAI,CAAC,gBAAgB,GAAG,eAAe,CAAC,IAAI,CAAC,gBAAgB,EAAE,YAAY,CAAC,IAAI,CAAC;QACjF,IAAI,CAAC,SAAS,GAAG,eAAe,CAAC,IAAI,CAAC,SAAS,EAAE,SAAS,CAAC;QAC3D,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC,IAAI,CAAC,eAAe,EAAE,SAAS,CAAC;QACvE,IAAI,CAAC,kBAAkB,GAAG,eAAe,CAAC,IAAI,CAAC,kBAAkB,EAAE,EAAE,CAAC;QACtE,IAAI,CAAC,YAAY,GAAG,eAAe,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,CAAC;QACjE,IAAI,CAAC,YAAY,GAAG,eAAe,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,CAAC;QACjE,IAAI,CAAC,gBAAgB,GAAG,eAAe,CAAC,IAAI,CAAC,gBAAgB,EAAE,SAAS,CAAC;;;AAK3E,IAAA,UAAU,CAAC,MAAc,EAAA;QACvB,MAAM,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,MAAM,CAAC;AAC9C,QAAA,IAAI,KAAK,KAAK,SAAS,EAAE;AACvB,YAAA,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC;;AAEhC,QAAA,OAAO,SAAS;;AAGlB,IAAA,UAAU,CAAC,QAA6C,EAAA;AACtD,QAAA,IAAI,CAAC,QAAQ;AAAE,YAAA,OAAO,EAAE
;QAExB,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC;AACzC,QAAA,IAAI,IAAI,CAAC,YAAY,CAAC,OAAO,CAAC,EAAE;AAC9B,YAAA,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC;;AAGrC,QAAA,OAAO,QAAQ,CAAC,OAAO,CAAC;;IAG1B,cAAc,CAAC,OAAe,EAAE,KAAc,EAAA;QAC5C,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,CAAC;QAC5C,IAAI,CAAC,OAAO,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,OAAO,CAAA,CAAE,CAAC;;AAG7D,QAAA,IAAI,KAAK,KAAK,SAAS,EAAE;YACvB,OAAO,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC;;AAGpC,QAAA,OAAO,OAAO,CAAC,KAAK,CAAC;;AAGvB,IAAA,cAAc,CAAC,OAAe,EAAA;QAC5B,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,CAAC;AAC5C,QAAA,IAAI,SAA6B;QACjC,IAAI,SAAS,GAAG,CAAC;QACjB,IAAI,OAAO,EAAE;AACX,YAAA,SAAS,GAAG,OAAO,CAAC,MAAM;AAC1B,YAAA,SAAS,GAAG,CAAA,KAAA,EAAQ,MAAM,EAAE,EAAE;AAC9B,YAAA,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC;YACvB,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,EAAE,OAAO,CAAC;;aAChC;AACL,YAAA,SAAS,GAAG,CAAA,KAAA,EAAQ,MAAM,EAAE,EAAE;YAC9B,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,OAAO,EAAE,CAAC,SAAS,CAAC,CAAC;;AAG3C,QAAA,OAAO,CAAC,SAAS,EAAE,SAAS,CAAC;;AAG/B,IAAA,UAAU,CAAC,QAA6C,EAAA;AACtD,QAAA,IAAI,CAAC,QAAQ;AAAE,YAAA,OAAO,EAAE;AAExB,QAAA,MAAM,OAAO,GAAG;AACd,YAAA,QAAQ,CAAC,MAAgB;AACzB,YAAA,QAAQ,CAAC,SAAmB;AAC5B,YAAA,QAAQ,CAAC,cAAwB;AACjC,YAAA,QAAQ,CAAC,cAAwB;AACjC,YAAA,QAAQ,CAAC,aAAuB;SACjC;QACD,IAAI,IAAI,CAAC,gBAAgB,KAAK,YAAY,CAAC,KAAK,EAAE;AAChD,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC;;AAG3B,QAAA,OAAO,OAAO;;AAGhB,IAAA,YAAY,CAAC,OAAwC,EAAA;AACnD,QAAA,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,GAAG,KAAK,GAAG,KAAK,SAAS,CAAC;;;IAKjD,cAAc,GAAA;QACZ,OAAO,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC;;IAG7C,eAAe,GAAA;AACb,QAAA,OAAO,wBAAwB,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;;;IAKvE,gBAAgB,GAAA;QACd,OAAO;AACL,YAAA,QAAQ,EAAE;AACR,gBAAA,KAAK,EAAE,CAAC,CAAgB,EAAE,CAAgB,KAAmB;AAC3D,oBAAA,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;AACb,wBAAA,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,4BAAA,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,aAAa,CAAC;;wBAG5B,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM;;oBAEvC,MAAM,OAAO,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AAC3B,oBAAA,IAAI,CAAC,QAAQ,GAAG,OAAO;AACvB,oBAAA,OAAO,OAAO;iBACf;AACD,gBAAA,OAAO,EAAE,MAAM,EAAE;AAClB,aAAA;SACF;;IAGH,eAAe,GAAA;;QAEb,OAAO,IAAIA,QAAc,CAAmB;AAC1C,YAAA,KAAK,EAAE,IAAI,CAAC,KAAK,IAAI,EAAE;YACvB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,eAAe,EAAE,IAAI,CAAC,eAAe;YACrC,YAAY,EAAE,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,IAAI,CAAC;AAClD,SAAA,CAAC;;IAGJ,eAAe,GAAA;QACb,MAAM,cAAc,GAAG,iBAAiB,CAAC,IAAI,CAAC,QAAQ,CAAC;QACvD,MAAM,KAAK,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,aAAa,CAAC;AAEpD,QAAA,IAAI,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,YAAY,UAAU,IAAI,KAAK,YAAY,eAAe,CAAC,EAAE;YACpG,KAAK,CAAC,WAAW,GAAI,IAAI,CAAC,aAAuC,CAAC,WAAqB;YACvF,KAAK,CAAC,IAAI,GAAI,IAAI,CAAC,aAAuC,CAAC,IAAc;YACzE,KAAK,CAAC,gBAAgB,GAAI,IAAI,CAAC,aAAuC,CAAC,gBAA0B;YACjG,KAAK,CAAC,eAAe,GAAI,IAAI,CAAC,aAAuC,CAAC,eAAyB;YAC/F,KAAK,CAAC,CAAC,GAAI,IAAI,CAAC,aAAuC,CAAC,CAAW;;AAC9D,aAAA,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,QAAQ,IAAI,KAAK,YAAY,YAAY,EAAE;YAChF,KAAK,CAAC,WAAW,GAAI,IAAI,CAAC,aAAyC,CAAC,WAAqB;YACzF,KAAK,CAAC,IAAI,GAAI,IAAI,CAAC,aAAyC,CAAC,IAAc;YAC3E,KAAK,CAAC,IAAI,GAAI,IAAI,CAAC,aAAyC,CAAC,IAAc;YAC3E,KAAK,CAAC,WAAW,GAAI,IAAI,CAAC,aAAyC,CAAC,WAAqB;YACzF,KAAK,CAAC,gBAAgB,GAAI,IAAI,CAAC,aAAyC,CAAC,gBAA0B;YACnG,KAAK,CAAC,eAAe,GAAI,IAAI,CAAC,aAAyC,CAAC,eAAyB;YACjG,KAAK,CAAC,eAAe,GAAI,IAAI,CAAC,aAAyC,CAAC,eAAyB;;AAGnG,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;AAC1C,YAAA,OAAO,KAA4B;;QAGrC,OAAQ,KAA0B,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC;;AAE1D,IAAA,iBAAiB,CAAC,SAAmB,EAAE,KAAc,EAAE,SAAsB,EAAA;AAC3E,QAAA,IAAI,CAAC,UAAU,GAAG,sBAAsB,CAAC;YACvC,SAAS;YACT,KAAK;YACL,SAAS;AACV,SAAA,
CAAC;;AAGJ,IAAA,WAAW,CAAC,EACV,aAAa,GAAG,EAAE,EAClB,mBAAmB,GAIpB,EAAA;QACC,MAAM,cAAc,GAAG,iBAAiB,CAAC,IAAI,CAAC,QAAQ,CAAC;AACvD,QAAA,MAAM,QAAQ,GAAG,mBAAmB,GAAG,MAAM,CAAC,WAAW,CACvD,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,mBAAmB,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CACzF,GAAG,IAAI,CAAC,aAAa;QACtB,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE,aAAa,CAAC;AACtD,QAAA,OAAO,IAAI,cAAc,CAAC,OAAO,CAAC;;AAGpC,IAAA,kBAAkB,CAAC,YAA0B,EAAA;QAC3C,IAAI,YAAY,IAAI,gBAAgB,IAAI,YAAY,IAAI,YAAY,CAAC,cAAc,EAAE;AACnF,YAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC,cAAwC;;;IAI7E,eAAe,GAAA;AACb,QAAA,OAAO,OAAO,KAAuB,EAAE,MAAuB,KAAwC;YACpG,MAAM,EAAE,QAAQ,GAAG,EAAE,EAAE,GAAI,MAAM,EAAE,YAA0C,IAAI,EAAE;AACnF,YAAA,IAAI,CAAC,MAAM,IAAI,CAAC,QAAQ,EAAE;AACxB,gBAAA,MAAM,IAAI,KAAK,CAAC,CAAA,GAAA,EAAM,MAAM,GAAG,UAAU,GAAG,QAAQ,CAAA,SAAA,CAAW,CAAC;;AAElE,YAAA,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;AAClB,gBAAA,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM;;AAE7B,YAAA,IAAI,CAAC,MAAM,GAAG,MAAM;AACpB,YAAA,MAAM,EAAE,QAAQ,EAAE,GAAG,KAAK;YAE1B,IAAI,aAAa,GAAG,QAAQ;YAC5B,IAAI,CAAC,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,IAAI,IAAI,EAAE;gBACnH,MAAM,uBAAuB,KAEzB,CACE,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,SAAS,IAAK,IAAI,CAAC,aAA0C,CAAC,QAAQ,IAAI,IAAI;;yBAG3G,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,OAAO,KACjC,IAAI,CAAC,aAAyC,CAAC,4BAA4B,GAAG,UAAU,CAAC,IAAI,IAAI,CACnG,CACA,EAEJ;AAED,gBAAA,IAAI,CAAC,aAAa,GAAG,mBAAmB,CAAC;oBACvC,QAAQ,EAAE,IAAI,CAAC,QAAQ;oBACvB,kBAAkB,EAAE,IAAI,CAAC,kBAAkB;oBAC3C,SAAS,EAAE,IAAI,CAAC,gBAAgB;oBAChC,YAAY,EAAE,IAAI,CAAC,YAAY;oBAC/B,UAAU,EAAE,IAAI,CAAC,UAAU;AAC3B,oBAAA,eAAe,EAAE,uBAAuB;AACzC,iBAAA,CAAC;;AAEJ,YAAA,IAAI,IAAI,CAAC,aAAa,EAAE;gBACtB,MAAM,EAAE,OAAO,EAAE,kBAAkB,EAAE,GAAG,IAAI,CAAC,aAAa,CAAC;oBACzD,QAAQ;oBACR,aAAa,EAAE,IAAI,CAAC,YAAY;;AAEjC,iBAAA,CAAC;AACF,gBAAA,IAAI,CAAC,kBAAkB,GAAG,kBAAkB;gBAC5C,aAAa,GAAG,OAAO;;YAGzB,MAAM,aAAa,GAAG,aAAa;YACnC,MAAM,YAAY,GAAG,aAAa,CAAC,MAAM,IAAI,CAAC,GAAG,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,IAAI;YAC/F,MAAM,YAAY,GAAG,aAAa,CAAC,MAAM,IAAI,CAAC,GAAG,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,IAAI;AAE/F,YAAA,IACE,QAAQ,KAAK,SAAS,CAAC;AACpB,mBAAA,YAAY,YAAY;AACxB,mBAAA,YAAY,YAAY;AACxB,mBAAA,OAAO,YAAY,CAAC,OAAO,KAAK,QAAQ,EAC3C;gBACA,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,OAAO,GAAG,EAAE;;AAGtD,YAAA,MAAM,mBAAmB,GAAG,YAAY,YAAY,WAAW;YAE/D,IAAI,mBAAmB,IAAI,QAAQ,KAAK,SAAS,CAAC,SAAS,EAAE;gBAC3D,8BAA8B,CAAC,aAAa,CAAC;;AACxC,iBAAA,IACL,mBAAmB;iBAClB,YAAY,CAAC,QAAQ,CAAC,IAAI,YAAY,CAAC,QAAQ,CAAC,CAAC,EAClD;gBACA,qBAAqB,CAAC,aAAa,CAAC;;AAGtC,YAAA,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,EAAE;gBAC5D,MAAM,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,cAAc;AAC1D,gBAAA,IAAI,iBAAiB,GAAG,IAAI,CAAC,YAAY,EAAE;AACzC,oBAAA,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,YAAY,GAAG,iBAAiB,IAAI,IAAI,CAAC,GAAG,IAAI;AACnF,oBAAA,MAAM,KAAK,CAAC,UAAU,CAAC;;;AAI3B,YAAA,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,GAAG,EAAE;AAEhC,YAAA,IAAI,MAAiC;AACrC,YAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,IAAI,CAAC,IAAI,CAAC,IAAI,yBAAyB,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE;AAC5E,gBAAA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,aAAa,EAAE,MAAM,CAAC;AAClE,gBAAA,IAAI,UAAsC;AAC1C,gBAAA,WAAW,MAAM,KAAK,IAAI,MAAM,EAAE;oBAChC,mBAAmB,CAAC,WAAW,CAAC,iBAAiB,EAAE,EAAE,KAAK,EAAE,EAAE,MAAM,CAAC;oBACrE,IAAI,CAAC,UAAU,EAAE;wBACf,UAAU,GAAG,KAAK;;yBACb;AACL,wBAAA,UAAU,GAAG,MAAM,CAAC,UAAU,EAAE,KAAK,CAAC;;;gBAI1C,UAAU,GAAG,qBAAqB,CAAC,IAAI,CAAC,QAAQ,EAAE,UAAU,CAAC;gBAC7D,MAAM,GAAG,EAAE,QAAQ,EAAE,CAAC,UAA4B,CAAC,EAAE;;iBAChD;AACL,gBAAA,MAAM,YAAY,IAAI,MAAM,IAAI,CAAC,UAAU,CAAC,MAAM,CA
AC,aAAa,EAAE,MAAM,CAAC,CAAmB;AAC5F,gBAAA,IAAI,CAAC,YAAY,CAAC,UAAU,EAAE,MAAM,IAAI,CAAC,IAAI,CAAC,EAAE;AAC9C,oBAAA,YAAY,CAAC,UAAU,GAAG,YAAY,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC,SAAS,KAAI;AACtE,wBAAA,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE;AACnB,4BAAA,OAAO,KAAK;;AAEd,wBAAA,OAAO,IAAI;AACb,qBAAC,CAAC;;gBAEJ,MAAM,GAAG,EAAE,QAAQ,EAAE,CAAC,YAAY,CAAC,EAAE;;YAGvC,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,QAAQ,GAAG,CAAC,CAAC,CAAC;AAC7C,YAAA,OAAO,MAAM;AACf,SAAC;;IAGH,cAAc,GAAA;AACZ,QAAA,MAAM,YAAY,GAAG,CAAC,KAAuB,EAAE,MAAuB,KAAY;AAChF,YAAA,IAAI,CAAC,MAAM,GAAG,MAAM;;;;;;AAMpB,YAAA,OAAO,cAAc,CAAC,KAAK,CAAC;AAC9B,SAAC;AAED,QAAA,MAAM,QAAQ,GAAG,IAAI,UAAU,CAAmB;YAChD,QAAQ,EAAE,IAAI,CAAC,UAAU;SAC1B;AACE,aAAA,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,eAAe,EAAE;AACrC,aAAA,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,eAAe,EAAE;AACrC,aAAA,OAAO,CAAC,KAAK,EAAE,KAAK;AACpB,aAAA,mBAAmB,CAAC,KAAK,EAAE,YAAY;AACvC,aAAA,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,GAAG,GAAG,GAAG,KAAK,CAAC;AAE7C,QAAA,OAAO,QAAQ,CAAC,OAAO,EAAE;;;AAK3B;;AAEG;IACH,eAAe,CAAC,OAAe,EAAE,WAA0B,EAAA;AACzD,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAGvC,QAAA,MAAM,CAAC,MAAM,EAAE,SAAS,CAAC,GAAG,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC;AACxD,QAAA,IAAI,WAAW,CAAC,IAAI,KAAK,SAAS,CAAC,UAAU,IAAI,WAAW,CAAC,UAAU,EAAE;AACvE,YAAA,KAAK,MAAM,SAAS,IAAI,WAAW,CAAC,UAAU,EAAE;AAC9C,gBAAA,MAAM,UAAU,GAAG,SAAS,CAAC,EAAE,IAAI,EAAE;AACrC,gBAAA,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE;oBACvD;;gBAEF,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,UAAU,EAAE,MAAM,CAAC;;;AAIhD,QAAA,MAAM,OAAO,GAAc;YACzB,SAAS;AACT,YAAA,EAAE,EAAE,MAAM;YACV,IAAI,EAAE,WAAW,CAAC,IAAI;AACtB,YAAA,KAAK,EAAE,IAAI,CAAC,WAAW,CAAC,MAAM;YAC9B,WAAW;AACX,YAAA,KAAK,EAAE,IAAI;SACZ;AAED,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,IAAI,EAAE;QAC9B,IAAI,KAAK,EAAE;AACT,YAAA,OAAO,CAAC,KAAK,GAAG,KAAK;;AAGvB,QAAA,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC;QAC9B,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,CAAC;QAC/C,mBAAmB,CAAC,WAAW,CAAC,WAAW,EAAE,OAAO,EAAE,IAAI,CAAC,MAAM,CAAC;AAClE,QAAA,OAAO,MAAM;;IAGf,uBAAuB,CAAC,IAAmB,EAAE,QAAkC,EAAA;AAC7E,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAGvC,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB;;AAGF,QAAA,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,IAAI;AAC9B,QAAA,MAAM,EAAE,YAAY,EAAE,GAAG,MAAM;AAC/B,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE;QAC3D,IAAI,CAAC,MAAM,EAAE;AACX,YAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,YAAY,CAAA,CAAE,CAAC;;QAGrE,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;QACvC,IAAI,CAAC,OAAO,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,gCAAgC,MAAM,CAAA,CAAE,CAAC;;AAG3D,QAAA,MAAM,IAAI,GAAG,OAAO,KAAK,KAAK,QAAQ,GAAG,KAAK,GAAG,KAAK,CAAC,KAAK;AAC5D,QAAA,MAAM,SAAS,GAAG;AAChB,YAAA,IAAI,EAAE,OAAO,IAAI,KAAK,QAAQ,GAAG,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;AAC5D,YAAA,IAAI,EAAE,MAAM,CAAC,IAAI,IAAI,EAAE;YACvB,EAAE,EAAE,MAAM,CAAC,YAAY;AACvB,YAAA,MAAM,EAAE,OAAO,MAAM,CAAC,OAAO,KAAK;kBAC9B,MAAM,CAAC;kBACP,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,CAAC;AAClC,YAAA,QAAQ,EAAE,CAAC;SACZ;AAED,QAAA,IAAI,CAAC,eAAe,EAAE,UAAU,CAAC,WAAW,CAAC,qBAAqB,CAAC,EAAE,MAAM,CACzE,WAAW,CAAC,qBAAqB,EACjC,EAAE,MAAM,EAAE;AACR,gBAAA,EAAE,EAAE,MAAM;gBACV,KAAK,EAAE,OAAO,CAAC,KAAK;AACpB,gBAAA,IAAI,EAAE,WAAW;gBACjB;AACsB,aAAA;AACvB,SAAA,EACD,QAAQ,EACR,IAAI,CACL;;IAEH,mBAAmB,CAAC,IAAqB,EAAE,QAAkC,EAAA;AAC3E,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAGvC,QAAA,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE;AACZ,YAAA,OAAO,CAAC,IAAI,CAAC,oCAAoC,CAAC;YAClD;;AAGF,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE;QACtD,IAAI,CAAC,MAAM,EAAE;YACX,MAAM,IAAI,KAAK,CAAC,CAAA,iCAAA,EAAoC,I
AAI,CAAC,EAAE,CAAE,CAAA,CAAC;;QAGhE,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,IAAI;QAEzC,MAAM,OAAO,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;QACvC,IAAI,CAAC,OAAO,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,gCAAgC,MAAM,CAAA,CAAE,CAAC;;AAG3D,QAAA,MAAM,SAAS,GAAwB;YACrC,EAAE,EAAE,IAAI,CAAC,EAAE;YACX,IAAI,EAAE,IAAI,IAAI,EAAE;AAChB,YAAA,IAAI,EAAE,OAAO,IAAI,KAAK,QAAQ,GAAG,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;AAC5D,YAAA,MAAM,EAAE,CAAwB,qBAAA,EAAA,KAAK,EAAE,OAAO,GAAG,CAAK,EAAA,EAAA,KAAK,CAAC,OAAO,CAAA,CAAE,GAAG,EAAE,CAAE,CAAA;AAC5E,YAAA,QAAQ,EAAE,CAAC;SACZ;AAED,QAAA,IAAI,CAAC,eAAe,EAAE,UAAU,CAAC,WAAW,CAAC,qBAAqB,CAAC,EAAE,MAAM,CACzE,WAAW,CAAC,qBAAqB,EACjC,EAAE,MAAM,EAAE;AACR,gBAAA,EAAE,EAAE,MAAM;gBACV,KAAK,EAAE,OAAO,CAAC,KAAK;AACpB,gBAAA,IAAI,EAAE,WAAW;gBACjB;AACsB,aAAA;AACvB,SAAA,EACD,QAAQ,EACR,IAAI,CACL;;IAGH,oBAAoB,CAAC,EAAU,EAAE,KAAsB,EAAA;AACrD,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;aAChC,IAAI,CAAC,EAAE,EAAE;AACd,YAAA,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC;;AAErC,QAAA,MAAM,YAAY,GAAwB;YACxC,EAAE;YACF,KAAK;SACN;QACD,mBAAmB,CAAC,WAAW,CAAC,iBAAiB,EAAE,YAAY,EAAE,IAAI,CAAC,MAAM,CAAC;;IAG/E,oBAAoB,CAAC,EAAU,EAAE,KAAqB,EAAA;AACpD,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAEvC,QAAA,MAAM,YAAY,GAAwB;YACxC,EAAE;YACF,KAAK;SACN;QACD,mBAAmB,CAAC,WAAW,CAAC,gBAAgB,EAAE,YAAY,EAAE,IAAI,CAAC,MAAM,CAAC;;AAG9E,IAAA,sBAAsB,GAAG,CAAC,MAAc,EAAE,KAAuB,KAAU;AACzE,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC;;AAEvC,QAAA,MAAM,cAAc,GAA0B;AAC5C,YAAA,EAAE,EAAE,MAAM;YACV,KAAK;SACN;QACD,mBAAmB,CAAC,WAAW,CAAC,kBAAkB,EAAE,cAAc,EAAE,IAAI,CAAC,MAAM,CAAC;AAClF,KAAC;AACF;;;;"}
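For reference, the change to src/graphs/Graph.ts captured in the sourcesContent above boils down to createCallModel() now telling the pruning factory which provider is in use and whether Anthropic/Bedrock "thinking" is enabled. A condensed sketch of that excerpt (type casts omitted; not the full method):

    // Inside StandardGraph.createCallModel(), before the first prune:
    const isAnthropicWithThinking =
        (this.provider === Providers.ANTHROPIC && this.clientOptions.thinking != null) ||
        (this.provider === Providers.BEDROCK &&
            this.clientOptions.additionalModelRequestFields?.['thinking'] != null);

    this.pruneMessages = createPruneMessages({
        provider: this.provider, // new: lets prune.ts pick the matching reasoning block type
        indexTokenCountMap: this.indexTokenCountMap,
        maxTokens: this.maxContextTokens,
        tokenCounter: this.tokenCounter,
        startIndex: this.startIndex,
        thinkingEnabled: isAnthropicWithThinking,
    });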
package/dist/esm/messages/prune.mjs
@@ -1,5 +1,5 @@
 import { AIMessage } from '@langchain/core/messages';
-import { ContentTypes } from '../common/enum.mjs';
+import { ContentTypes, Providers } from '../common/enum.mjs';
 
 function isIndexInContext(arrayA, arrayB, targetIndex) {
     const startingIndexInA = arrayA.length - arrayB.length;
@@ -40,9 +40,7 @@ function calculateTotalTokens(usage) {
  * @param options Configuration options for processing messages
  * @returns Object containing the message context, remaining tokens, messages not included, and summary index
  */
-function getMessagesWithinTokenLimit({ messages: _messages, maxContextTokens, indexTokenCountMap, startType: _startType, thinkingEnabled,
-/** We may need to use this when recalculating */
-tokenCounter, }) {
+function getMessagesWithinTokenLimit({ messages: _messages, maxContextTokens, indexTokenCountMap, startType: _startType, thinkingEnabled, tokenCounter, thinkingStartIndex: _thinkingStartIndex = -1, reasoningType = ContentTypes.THINKING, }) {
     // Every reply is primed with <|start|>assistant<|message|>, so we
     // start with 3 tokens for the label after all messages have been counted.
     let currentTokenCount = 3;
@@ -59,11 +57,17 @@ tokenCounter, }) {
      * This may be confusing to read, but it is done to ensure the context is in the correct order for the model.
      * */
     let context = [];
-    let thinkingStartIndex = -1;
+    let thinkingStartIndex = _thinkingStartIndex;
     let thinkingEndIndex = -1;
     let thinkingBlock;
     const endIndex = instructions != null ? 1 : 0;
     const prunedMemory = [];
+    if (_thinkingStartIndex > -1) {
+        const thinkingMessageContent = _messages[_thinkingStartIndex]?.content;
+        if (Array.isArray(thinkingMessageContent)) {
+            thinkingBlock = thinkingMessageContent.find((content) => content.type === reasoningType);
+        }
+    }
     if (currentTokenCount < remainingContextTokens) {
         let currentIndex = messages.length;
         while (messages.length > 0 && currentTokenCount < remainingContextTokens && currentIndex > endIndex) {
@@ -79,7 +83,7 @@ tokenCounter, }) {
                 thinkingEndIndex = currentIndex;
             }
             if (thinkingEndIndex > -1 && !thinkingBlock && thinkingStartIndex < 0 && messageType === 'ai' && Array.isArray(poppedMessage.content)) {
-                thinkingBlock = (poppedMessage.content.find((content) => content.type === ContentTypes.THINKING));
+                thinkingBlock = (poppedMessage.content.find((content) => content.type === reasoningType));
                 thinkingStartIndex = thinkingBlock != null ? currentIndex : -1;
             }
             /** False start, the latest message was not part of a multi-assistant/tool sequence of messages */
@@ -95,19 +99,30 @@ tokenCounter, }) {
             }
             else {
                 prunedMemory.push(poppedMessage);
-                if (thinkingEndIndex > -1) {
+                if (thinkingEndIndex > -1 && thinkingStartIndex < 0) {
                     continue;
                 }
                 break;
             }
         }
-        if (thinkingEndIndex > -1 && context[context.length - 1]?.getType() === 'tool') {
-            startType = 'ai';
+        if (context[context.length - 1]?.getType() === 'tool') {
+            startType = ['ai', 'human'];
         }
-        if (startType != null && startType && context.length > 0) {
-            const requiredTypeIndex = context.findIndex(msg => msg?.getType() === startType);
+        if (startType != null && startType.length > 0 && context.length > 0) {
+            let requiredTypeIndex = -1;
+            let totalTokens = 0;
+            for (let i = context.length - 1; i >= 0; i--) {
+                const currentType = context[i]?.getType() ?? '';
+                if (Array.isArray(startType) ? startType.includes(currentType) : currentType === startType) {
+                    requiredTypeIndex = i + 1;
+                    break;
+                }
+                const originalIndex = originalLength - 1 - i;
+                totalTokens += indexTokenCountMap[originalIndex] ?? 0;
+            }
             if (requiredTypeIndex > 0) {
-                context = context.slice(requiredTypeIndex);
+                currentTokenCount -= totalTokens;
+                context = context.slice(0, requiredTypeIndex);
             }
         }
     }
@@ -121,6 +136,9 @@ tokenCounter, }) {
         context: [],
         messagesToRefine: prunedMemory,
     };
+    if (thinkingStartIndex > -1) {
+        result.thinkingStartIndex = thinkingStartIndex;
+    }
     if (prunedMemory.length === 0 || thinkingEndIndex < 0 || (thinkingStartIndex > -1 && isIndexInContext(_messages, context, thinkingStartIndex))) {
         // we reverse at this step to ensure the context is in the correct order for the model, and we need to work backwards
         result.context = context.reverse();
@@ -214,6 +232,7 @@ function createPruneMessages(factoryParams) {
     let lastTurnStartIndex = factoryParams.startIndex;
     let lastCutOffIndex = 0;
     let totalTokens = (Object.values(indexTokenCountMap)).reduce((a, b) => a + b, 0);
+    let runThinkingStartIndex = -1;
     return function pruneMessages(params) {
         let currentUsage;
         if (params.usageMetadata && (checkValidNumber(params.usageMetadata.input_tokens)
@@ -270,15 +289,19 @@ function createPruneMessages(factoryParams) {
         if (totalTokens <= factoryParams.maxTokens) {
             return { context: params.messages, indexTokenCountMap };
         }
-        const { context } = getMessagesWithinTokenLimit({
+        const { context, thinkingStartIndex } = getMessagesWithinTokenLimit({
             maxContextTokens: factoryParams.maxTokens,
             messages: params.messages,
             indexTokenCountMap,
             startType: params.startType,
             thinkingEnabled: factoryParams.thinkingEnabled,
             tokenCounter: factoryParams.tokenCounter,
+            reasoningType: factoryParams.provider === Providers.BEDROCK ? ContentTypes.REASONING_CONTENT : ContentTypes.THINKING,
+            thinkingStartIndex: factoryParams.thinkingEnabled === true ? runThinkingStartIndex : undefined,
         });
-        lastCutOffIndex = Math.max(params.messages.length - context.length, 0);
+        runThinkingStartIndex = thinkingStartIndex ?? -1;
+        /** The index is the first value of `context`, index relative to `params.messages` */
+        lastCutOffIndex = Math.max(params.messages.length - (context.length - (context[0]?.getType() === 'system' ? 1 : 0)), 0);
         return { context, indexTokenCountMap };
     };
 }
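Taken together, the prune.mjs changes make the returned pruneMessages closure remember where the preserved thinking block lives between turns (runThinkingStartIndex), choose the reasoning block type per provider, and exclude a leading system message when computing the cut-off index. A minimal usage sketch in TypeScript, assuming createPruneMessages and Providers are re-exported from the package entry point (the diff only shows the dist module) and using an illustrative character-based token counter:

    import { AIMessage, HumanMessage } from '@langchain/core/messages';
    import type { BaseMessage } from '@langchain/core/messages';
    // Assumed entry-point exports; the implementation lives in dist/esm/messages/prune.mjs.
    import { createPruneMessages, Providers } from '@librechat/agents';

    // Illustrative counter only: roughly 4 characters per token.
    const tokenCounter = (msg: BaseMessage): number =>
        Math.ceil(JSON.stringify(msg.content).length / 4);

    const messages: BaseMessage[] = [
        new HumanMessage('What does the error log say?'),
        new AIMessage('The log shows a timeout in the upload step.'),
    ];

    const pruneMessages = createPruneMessages({
        provider: Providers.BEDROCK, // with Bedrock, thinking blocks are matched by the 'reasoning_content' type (new in 2.3.9)
        maxTokens: 4096,
        startIndex: 0,
        tokenCounter,
        indexTokenCountMap: { 0: 10, 1: 12 }, // pre-counted tokens per message index
        thinkingEnabled: true, // keep the latest thinking block attached after pruning
    });

    // The closure tracks lastCutOffIndex and runThinkingStartIndex internally across calls.
    const { context, indexTokenCountMap } = pruneMessages({ messages });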
package/dist/esm/messages/prune.mjs.map
@@ -1 +1 @@
-
{"version":3,"file":"prune.mjs","sources":["../../../src/messages/prune.ts"],"sourcesContent":["import { AIMessage, BaseMessage, UsageMetadata } from '@langchain/core/messages';\nimport type { ThinkingContentText, MessageContentComplex } from '@/types/stream';\nimport type { TokenCounter } from '@/types/run';\nimport { ContentTypes } from '@/common';\nexport type PruneMessagesFactoryParams = {\n maxTokens: number;\n startIndex: number;\n tokenCounter: TokenCounter;\n indexTokenCountMap: Record<string, number>;\n thinkingEnabled?: boolean;\n};\nexport type PruneMessagesParams = {\n messages: BaseMessage[];\n usageMetadata?: Partial<UsageMetadata>;\n startType?: ReturnType<BaseMessage['getType']>;\n}\n\nfunction isIndexInContext(arrayA: unknown[], arrayB: unknown[], targetIndex: number): boolean {\n const startingIndexInA = arrayA.length - arrayB.length;\n return targetIndex >= startingIndexInA;\n}\n\nfunction addThinkingBlock(message: AIMessage, thinkingBlock: ThinkingContentText): MessageContentComplex[] {\n const content: MessageContentComplex[] = Array.isArray(message.content)\n ? message.content as MessageContentComplex[]\n : [{\n type: ContentTypes.TEXT,\n text: message.content,\n }];\n content.unshift(thinkingBlock);\n return content;\n}\n\n/**\n * Calculates the total tokens from a single usage object\n *\n * @param usage The usage metadata object containing token information\n * @returns An object containing the total input and output tokens\n */\nexport function calculateTotalTokens(usage: Partial<UsageMetadata>): UsageMetadata {\n const baseInputTokens = Number(usage.input_tokens) || 0;\n const cacheCreation = Number(usage.input_token_details?.cache_creation) || 0;\n const cacheRead = Number(usage.input_token_details?.cache_read) || 0;\n\n const totalInputTokens = baseInputTokens + cacheCreation + cacheRead;\n const totalOutputTokens = Number(usage.output_tokens) || 0;\n\n return {\n input_tokens: totalInputTokens,\n output_tokens: totalOutputTokens,\n total_tokens: totalInputTokens + totalOutputTokens\n };\n}\n\n/**\n * Processes an array of messages and returns a context of messages that fit within a specified token limit.\n * It iterates over the messages from newest to oldest, adding them to the context until the token limit is reached.\n *\n * @param options Configuration options for processing messages\n * @returns Object containing the message context, remaining tokens, messages not included, and summary index\n */\nexport function getMessagesWithinTokenLimit({\n messages: _messages,\n maxContextTokens,\n indexTokenCountMap,\n startType: _startType,\n thinkingEnabled,\n /** We may need to use this when recalculating */\n tokenCounter,\n}: {\n messages: BaseMessage[];\n maxContextTokens: number;\n indexTokenCountMap: Record<string, number | undefined>;\n tokenCounter: TokenCounter;\n startType?: string;\n thinkingEnabled?: boolean;\n}): {\n context: BaseMessage[];\n remainingContextTokens: number;\n messagesToRefine: BaseMessage[];\n} {\n // Every reply is primed with <|start|>assistant<|message|>, so we\n // start with 3 tokens for the label after all messages have been counted.\n let currentTokenCount = 3;\n const instructions = _messages[0]?.getType() === 'system' ? _messages[0] : undefined;\n const instructionsTokenCount = instructions != null ? indexTokenCountMap[0] ?? 
0 : 0;\n const initialContextTokens = maxContextTokens - instructionsTokenCount;\n let remainingContextTokens = initialContextTokens;\n let startType = _startType;\n const originalLength = _messages.length;\n const messages = [..._messages];\n /**\n * IMPORTANT: this context array gets reversed at the end, since the latest messages get pushed first.\n *\n * This may be confusing to read, but it is done to ensure the context is in the correct order for the model.\n * */\n let context: Array<BaseMessage | undefined> = [];\n\n let thinkingStartIndex = -1;\n let thinkingEndIndex = -1;\n let thinkingBlock: ThinkingContentText | undefined;\n const endIndex = instructions != null ? 1 : 0;\n const prunedMemory: BaseMessage[] = [];\n\n if (currentTokenCount < remainingContextTokens) {\n let currentIndex = messages.length;\n while (messages.length > 0 && currentTokenCount < remainingContextTokens && currentIndex > endIndex) {\n currentIndex--;\n if (messages.length === 1 && instructions) {\n break;\n }\n const poppedMessage = messages.pop();\n if (!poppedMessage) continue;\n const messageType = poppedMessage.getType();\n if (thinkingEnabled === true && thinkingEndIndex === -1 && (currentIndex === (originalLength - 1)) && (messageType === 'ai' || messageType === 'tool')) {\n thinkingEndIndex = currentIndex;\n }\n if (thinkingEndIndex > -1 && !thinkingBlock && thinkingStartIndex < 0 && messageType === 'ai' && Array.isArray(poppedMessage.content)) {\n thinkingBlock = (poppedMessage.content.find((content) => content.type === ContentTypes.THINKING)) as ThinkingContentText | undefined;\n thinkingStartIndex = thinkingBlock != null ? currentIndex : -1;\n }\n /** False start, the latest message was not part of a multi-assistant/tool sequence of messages */\n if (\n thinkingEndIndex > -1\n && currentIndex === (thinkingEndIndex - 1)\n && (messageType !== 'ai' && messageType !== 'tool')\n ) {\n thinkingEndIndex = -1;\n }\n\n const tokenCount = indexTokenCountMap[currentIndex] ?? 0;\n\n if (prunedMemory.length === 0 && ((currentTokenCount + tokenCount) <= remainingContextTokens)) {\n context.push(poppedMessage);\n currentTokenCount += tokenCount;\n } else {\n prunedMemory.push(poppedMessage);\n if (thinkingEndIndex > -1) {\n continue;\n }\n break;\n }\n }\n\n if (thinkingEndIndex > -1 && context[context.length - 1]?.getType() === 'tool') {\n startType = 'ai';\n }\n\n if (startType != null && startType && context.length > 0) {\n const requiredTypeIndex = context.findIndex(msg => msg?.getType() === startType);\n\n if (requiredTypeIndex > 0) {\n context = context.slice(requiredTypeIndex);\n }\n }\n }\n\n if (instructions && originalLength > 0) {\n context.push(_messages[0] as BaseMessage);\n messages.shift();\n }\n\n remainingContextTokens -= currentTokenCount;\n const result = {\n remainingContextTokens,\n context: [] as BaseMessage[],\n messagesToRefine: prunedMemory,\n };\n\n if (prunedMemory.length === 0 || thinkingEndIndex < 0 || (thinkingStartIndex > -1 && isIndexInContext(_messages, context, thinkingStartIndex))) {\n // we reverse at this step to ensure the context is in the correct order for the model, and we need to work backwards\n result.context = context.reverse() as BaseMessage[];\n return result;\n }\n\n if (thinkingEndIndex > -1 && thinkingStartIndex < 0) {\n throw new Error('The payload is malformed. There is a thinking sequence but no \"AI\" messages with thinking blocks.');\n }\n\n if (!thinkingBlock) {\n throw new Error('The payload is malformed. 
There is a thinking sequence but no thinking block found.');\n }\n\n // Since we have a thinking sequence, we need to find the last assistant message\n // in the latest AI/tool sequence to add the thinking block that falls outside of the current context\n // Latest messages are ordered first.\n let assistantIndex = -1;\n for (let i = 0; i < context.length; i++) {\n const currentMessage = context[i];\n const type = currentMessage?.getType();\n if (type === 'ai') {\n assistantIndex = i;\n }\n if (assistantIndex > -1 && (type === 'human' || type === 'system')) {\n break;\n }\n }\n\n if (assistantIndex === -1) {\n throw new Error('The payload is malformed. There is a thinking sequence but no \"AI\" messages to append thinking blocks to.');\n }\n\n thinkingStartIndex = originalLength - 1 - assistantIndex;\n const thinkingTokenCount = tokenCounter(new AIMessage({ content: [thinkingBlock] }));\n const newRemainingCount = remainingContextTokens - thinkingTokenCount;\n const content: MessageContentComplex[] = addThinkingBlock(context[assistantIndex] as AIMessage, thinkingBlock);\n (context[assistantIndex] as AIMessage).content = content;\n if (newRemainingCount > 0) {\n result.context = context.reverse() as BaseMessage[];\n return result;\n }\n\n const thinkingMessage: AIMessage = context[assistantIndex] as AIMessage;\n // now we need to an additional round of pruning but making the thinking block fit\n const newThinkingMessageTokenCount = (indexTokenCountMap[thinkingStartIndex] ?? 0) + thinkingTokenCount;\n remainingContextTokens = initialContextTokens - newThinkingMessageTokenCount;\n currentTokenCount = 3;\n let newContext: BaseMessage[] = [];\n const secondRoundMessages = [..._messages];\n let currentIndex = secondRoundMessages.length;\n while (secondRoundMessages.length > 0 && currentTokenCount < remainingContextTokens && currentIndex > thinkingStartIndex) {\n currentIndex--;\n const poppedMessage = secondRoundMessages.pop();\n if (!poppedMessage) continue;\n const tokenCount = indexTokenCountMap[currentIndex] ?? 
0;\n if ((currentTokenCount + tokenCount) <= remainingContextTokens) {\n newContext.push(poppedMessage);\n currentTokenCount += tokenCount;\n } else {\n messages.push(poppedMessage);\n break;\n }\n }\n\n const firstMessage: AIMessage = newContext[newContext.length - 1];\n const firstMessageType = newContext[newContext.length - 1].getType();\n if (firstMessageType === 'tool') {\n startType = 'ai';\n }\n\n if (startType != null && startType && newContext.length > 0) {\n const requiredTypeIndex = newContext.findIndex(msg => msg.getType() === startType);\n if (requiredTypeIndex > 0) {\n newContext = newContext.slice(requiredTypeIndex);\n }\n }\n\n if (firstMessageType === 'ai') {\n const content = addThinkingBlock(firstMessage, thinkingBlock);\n newContext[newContext.length - 1].content = content;\n } else {\n newContext.push(thinkingMessage);\n }\n\n if (instructions && originalLength > 0) {\n newContext.push(_messages[0] as BaseMessage);\n secondRoundMessages.shift();\n }\n\n result.context = newContext.reverse();\n return result;\n}\n\nexport function checkValidNumber(value: unknown): value is number {\n return typeof value === 'number' && !isNaN(value) && value > 0;\n}\n\nexport function createPruneMessages(factoryParams: PruneMessagesFactoryParams) {\n const indexTokenCountMap = { ...factoryParams.indexTokenCountMap };\n let lastTurnStartIndex = factoryParams.startIndex;\n let lastCutOffIndex = 0;\n let totalTokens = (Object.values(indexTokenCountMap)).reduce((a, b) => a + b, 0);\n return function pruneMessages(params: PruneMessagesParams): {\n context: BaseMessage[];\n indexTokenCountMap: Record<string, number>;\n } {\n let currentUsage: UsageMetadata | undefined;\n if (params.usageMetadata && (\n checkValidNumber(params.usageMetadata.input_tokens)\n || (\n checkValidNumber(params.usageMetadata.input_token_details)\n && (\n checkValidNumber(params.usageMetadata.input_token_details.cache_creation)\n || checkValidNumber(params.usageMetadata.input_token_details.cache_read)\n )\n )\n ) && checkValidNumber(params.usageMetadata.output_tokens)) {\n currentUsage = calculateTotalTokens(params.usageMetadata);\n totalTokens = currentUsage.total_tokens;\n }\n\n for (let i = lastTurnStartIndex; i < params.messages.length; i++) {\n const message = params.messages[i];\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (i === lastTurnStartIndex && indexTokenCountMap[i] === undefined && currentUsage) {\n indexTokenCountMap[i] = currentUsage.output_tokens;\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n } else if (indexTokenCountMap[i] === undefined) {\n indexTokenCountMap[i] = factoryParams.tokenCounter(message);\n totalTokens += indexTokenCountMap[i];\n }\n }\n\n // If `currentUsage` is defined, we need to distribute the current total tokens to our `indexTokenCountMap`,\n // We must distribute it in a weighted manner, so that the total token count is equal to `currentUsage.total_tokens`,\n // relative the manually counted tokens in `indexTokenCountMap`.\n // EDGE CASE: when the resulting context gets pruned, we should not distribute the usage for messages that are not in the context.\n if (currentUsage) {\n // Calculate the sum of tokens only for indices at or after lastCutOffIndex\n const totalIndexTokens = Object.entries(indexTokenCountMap).reduce((sum, [key, value]) => {\n // Convert string key to number and check if it's >= lastCutOffIndex\n const numericKey = Number(key);\n if (numericKey === 0 && params.messages[0].getType() === 'system') {\n 
return sum + value;\n }\n return numericKey >= lastCutOffIndex ? sum + value : sum;\n }, 0);\n\n // Calculate ratio based only on messages that remain in the context\n const ratio = currentUsage.total_tokens / totalIndexTokens;\n const isRatioSafe = ratio >= 1/3 && ratio <= 2.5;\n\n // Apply the ratio adjustment only to messages at or after lastCutOffIndex, and only if the ratio is safe\n if (isRatioSafe) {\n for (const key in indexTokenCountMap) {\n const numericKey = Number(key);\n if (numericKey === 0 && params.messages[0].getType() === 'system') {\n indexTokenCountMap[key] = Math.round(indexTokenCountMap[key] * ratio);\n } else if (numericKey >= lastCutOffIndex) {\n // Only adjust token counts for messages still in the context\n indexTokenCountMap[key] = Math.round(indexTokenCountMap[key] * ratio);\n }\n }\n }\n }\n\n lastTurnStartIndex = params.messages.length;\n if (totalTokens <= factoryParams.maxTokens) {\n return { context: params.messages, indexTokenCountMap };\n }\n\n const { context } = getMessagesWithinTokenLimit({\n maxContextTokens: factoryParams.maxTokens,\n messages: params.messages,\n indexTokenCountMap,\n startType: params.startType,\n thinkingEnabled: factoryParams.thinkingEnabled,\n tokenCounter: factoryParams.tokenCounter,\n });\n lastCutOffIndex = Math.max(params.messages.length - context.length, 0);\n\n return { context, indexTokenCountMap };\n };\n}\n"],"names":[],"mappings":";;;AAiBA,SAAS,gBAAgB,CAAC,MAAiB,EAAE,MAAiB,EAAE,WAAmB,EAAA;IACjF,MAAM,gBAAgB,GAAG,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM;IACtD,OAAO,WAAW,IAAI,gBAAgB;AACxC;AAEA,SAAS,gBAAgB,CAAC,OAAkB,EAAE,aAAkC,EAAA;IAC9E,MAAM,OAAO,GAA4B,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO;UAClE,OAAO,CAAC;AACV,UAAE,CAAC;gBACD,IAAI,EAAE,YAAY,CAAC,IAAI;gBACvB,IAAI,EAAE,OAAO,CAAC,OAAO;AACtB,aAAA,CAAC;AACJ,IAAA,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC;AAC9B,IAAA,OAAO,OAAO;AAChB;AAEA;;;;;AAKG;AACG,SAAU,oBAAoB,CAAC,KAA6B,EAAA;IAChE,MAAM,eAAe,GAAG,MAAM,CAAC,KAAK,CAAC,YAAY,CAAC,IAAI,CAAC;AACvD,IAAA,MAAM,aAAa,GAAG,MAAM,CAAC,KAAK,CAAC,mBAAmB,EAAE,cAAc,CAAC,IAAI,CAAC;AAC5E,IAAA,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC,mBAAmB,EAAE,UAAU,CAAC,IAAI,CAAC;AAEpE,IAAA,MAAM,gBAAgB,GAAG,eAAe,GAAG,aAAa,GAAG,SAAS;IACpE,MAAM,iBAAiB,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC;IAE1D,OAAO;AACL,QAAA,YAAY,EAAE,gBAAgB;AAC9B,QAAA,aAAa,EAAE,iBAAiB;QAChC,YAAY,EAAE,gBAAgB,GAAG;KAClC;AACH;AAEA;;;;;;AAMG;SACa,2BAA2B,CAAC,EAC1C,QAAQ,EAAE,SAAS,EACnB,gBAAgB,EAChB,kBAAkB,EAClB,SAAS,EAAE,UAAU,EACrB,eAAe;AACf;AACA,YAAY,GAQb,EAAA;;;IAOC,IAAI,iBAAiB,GAAG,CAAC;IACzB,MAAM,YAAY,GAAG,SAAS,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,KAAK,QAAQ,GAAG,SAAS,CAAC,CAAC,CAAC,GAAG,SAAS;AACpF,IAAA,MAAM,sBAAsB,GAAG,YAAY,IAAI,IAAI,GAAG,kBAAkB,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AACpF,IAAA,MAAM,oBAAoB,GAAG,gBAAgB,GAAG,sBAAsB;IACtE,IAAI,sBAAsB,GAAG,oBAAoB;IACjD,IAAI,SAAS,GAAG,UAAU;AAC1B,IAAA,MAAM,cAAc,GAAG,SAAS,CAAC,MAAM;AACvC,IAAA,MAAM,QAAQ,GAAG,CAAC,GAAG,SAAS,CAAC;AAC/B;;;;AAIK;IACL,IAAI,OAAO,GAAmC,EAAE;AAEhD,IAAA,IAAI,kBAAkB,GAAG,EAAE;AAC3B,IAAA,IAAI,gBAAgB,GAAG,EAAE;AACzB,IAAA,IAAI,aAA8C;AAClD,IAAA,MAAM,QAAQ,GAAG,YAAY,IAAI,IAAI,GAAG,CAAC,GAAG,CAAC;IAC7C,MAAM,YAAY,GAAkB,EAAE;AAEtC,IAAA,IAAI,iBAAiB,GAAG,sBAAsB,EAAE;AAC9C,QAAA,IAAI,YAAY,GAAG,QAAQ,CAAC,MAAM;AAClC,QAAA,OAAO,QAAQ,CAAC,MAAM,GAAG,CAAC,IAAI,iBAAiB,GAAG,sBAAsB,IAAI,YAAY,GAAG,QAAQ,EAAE;AACnG,YAAA,YAAY,EAAE;YACd,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,IAAI,YAAY,EAAE;gBACzC;;AAEF,YAAA,MAAM,aAAa,GAAG,QAAQ,CAAC,GAAG,EAAE;AACpC,YAAA,IAAI,CAAC,aAAa;gBAAE;AACpB,YAAA,MAAM,WAAW,GAAG,aAAa,CAAC,OAAO,EAAE;AAC3C,YAAA,IAAI,eAAe,KAAK,IAAI,IAAI,gBAAgB,KAAK,EAAE,KAAK,YAAY,MAAM,cAAc,GAAG,CAAC
,CAAC,CAAC,KAAK,WAAW,KAAK,IAAI,IAAI,WAAW,KAAK,MAAM,CAAC,EAAE;gBACtJ,gBAAgB,GAAG,YAAY;;YAEjC,IAAI,gBAAgB,GAAG,EAAE,IAAI,CAAC,aAAa,IAAK,kBAAkB,GAAG,CAAC,IAAI,WAAW,KAAK,IAAI,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,OAAO,CAAC,EAAE;gBACtI,aAAa,IAAI,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,KAAK,OAAO,CAAC,IAAI,KAAK,YAAY,CAAC,QAAQ,CAAC,CAAoC;AACpI,gBAAA,kBAAkB,GAAG,aAAa,IAAI,IAAI,GAAG,YAAY,GAAG,EAAE;;;YAGhE,IACE,gBAAgB,GAAG;AAChB,mBAAA,YAAY,MAAM,gBAAgB,GAAG,CAAC;oBACrC,WAAW,KAAK,IAAI,IAAI,WAAW,KAAK,MAAM,CAAC,EACnD;gBACA,gBAAgB,GAAG,EAAE;;YAGvB,MAAM,UAAU,GAAG,kBAAkB,CAAC,YAAY,CAAC,IAAI,CAAC;AAExD,YAAA,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,KAAK,CAAC,iBAAiB,GAAG,UAAU,KAAK,sBAAsB,CAAC,EAAE;AAC7F,gBAAA,OAAO,CAAC,IAAI,CAAC,aAAa,CAAC;gBAC3B,iBAAiB,IAAI,UAAU;;iBAC1B;AACL,gBAAA,YAAY,CAAC,IAAI,CAAC,aAAa,CAAC;AAChC,gBAAA,IAAI,gBAAgB,GAAG,EAAE,EAAE;oBACzB;;gBAEF;;;AAIJ,QAAA,IAAI,gBAAgB,GAAG,EAAE,IAAI,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,EAAE,OAAO,EAAE,KAAK,MAAM,EAAE;YAC9E,SAAS,GAAG,IAAI;;AAGlB,QAAA,IAAI,SAAS,IAAI,IAAI,IAAI,SAAS,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE;AACxD,YAAA,MAAM,iBAAiB,GAAG,OAAO,CAAC,SAAS,CAAC,GAAG,IAAI,GAAG,EAAE,OAAO,EAAE,KAAK,SAAS,CAAC;AAEhF,YAAA,IAAI,iBAAiB,GAAG,CAAC,EAAE;AACzB,gBAAA,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC,iBAAiB,CAAC;;;;AAKhD,IAAA,IAAI,YAAY,IAAI,cAAc,GAAG,CAAC,EAAE;QACtC,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAgB,CAAC;QACzC,QAAQ,CAAC,KAAK,EAAE;;IAGlB,sBAAsB,IAAI,iBAAiB;AAC3C,IAAA,MAAM,MAAM,GAAG;QACb,sBAAsB;AACtB,QAAA,OAAO,EAAE,EAAmB;AAC5B,QAAA,gBAAgB,EAAE,YAAY;KAC/B;IAED,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,IAAI,gBAAgB,GAAG,CAAC,KAAK,kBAAkB,GAAG,EAAE,IAAI,gBAAgB,CAAC,SAAS,EAAE,OAAO,EAAE,kBAAkB,CAAC,CAAC,EAAE;;AAE9I,QAAA,MAAM,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,EAAmB;AACnD,QAAA,OAAO,MAAM;;IAGf,IAAI,gBAAgB,GAAG,EAAE,IAAI,kBAAkB,GAAG,CAAC,EAAE;AACnD,QAAA,MAAM,IAAI,KAAK,CAAC,mGAAmG,CAAC;;IAGtH,IAAI,CAAC,aAAa,EAAE;AAClB,QAAA,MAAM,IAAI,KAAK,CAAC,qFAAqF,CAAC;;;;;AAMxG,IAAA,IAAI,cAAc,GAAG,EAAE;AACvB,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACvC,QAAA,MAAM,cAAc,GAAG,OAAO,CAAC,CAAC,CAAC;AACjC,QAAA,MAAM,IAAI,GAAG,cAAc,EAAE,OAAO,EAAE;AACtC,QAAA,IAAI,IAAI,KAAK,IAAI,EAAE;YACjB,cAAc,GAAG,CAAC;;AAEpB,QAAA,IAAI,cAAc,GAAG,EAAE,KAAK,IAAI,KAAK,OAAO,IAAI,IAAI,KAAK,QAAQ,CAAC,EAAE;YAClE;;;AAIJ,IAAA,IAAI,cAAc,KAAK,EAAE,EAAE;AACzB,QAAA,MAAM,IAAI,KAAK,CAAC,2GAA2G,CAAC;;AAG9H,IAAA,kBAAkB,GAAG,cAAc,GAAG,CAAC,GAAG,cAAc;AACxD,IAAA,MAAM,kBAAkB,GAAG,YAAY,CAAC,IAAI,SAAS,CAAC,EAAE,OAAO,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,CAAC;AACpF,IAAA,MAAM,iBAAiB,GAAG,sBAAsB,GAAG,kBAAkB;IACrE,MAAM,OAAO,GAA4B,gBAAgB,CAAC,OAAO,CAAC,cAAc,CAAc,EAAE,aAAa,CAAC;AAC7G,IAAA,OAAO,CAAC,cAAc,CAAe,CAAC,OAAO,GAAG,OAAO;AACxD,IAAA,IAAI,iBAAiB,GAAG,CAAC,EAAE;AACzB,QAAA,MAAM,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,EAAmB;AACnD,QAAA,OAAO,MAAM;;AAGf,IAAA,MAAM,eAAe,GAAc,OAAO,CAAC,cAAc,CAAc;;AAEvE,IAAA,MAAM,4BAA4B,GAAG,CAAC,kBAAkB,CAAC,kBAAkB,CAAC,IAAI,CAAC,IAAI,kBAAkB;AACvG,IAAA,sBAAsB,GAAG,oBAAoB,GAAG,4BAA4B;IAC5E,iBAAiB,GAAG,CAAC;IACrB,IAAI,UAAU,GAAkB,EAAE;AAClC,IAAA,MAAM,mBAAmB,GAAG,CAAC,GAAG,SAAS,CAAC;AAC1C,IAAA,IAAI,YAAY,GAAG,mBAAmB,CAAC,MAAM;AAC7C,IAAA,OAAO,mBAAmB,CAAC,MAAM,GAAG,CAAC,IAAI,iBAAiB,GAAG,sBAAsB,IAAI,YAAY,GAAG,kBAAkB,EAAE;AACxH,QAAA,YAAY,EAAE;AACd,QAAA,MAAM,aAAa,GAAG,mBAAmB,CAAC,GAAG,EAAE;AAC/C,QAAA,IAAI,CAAC,aAAa;YAAE;QACpB,MAAM,UAAU,GAAG,kBAAkB,CAAC,YAAY,CAAC,IAAI,CAAC;QACxD,IAAI,CAAC,iBAAiB,GAAG,UAAU,KAAK,sBAAsB,EAAE;AAC9D,YAAA,UAAU,CAAC,IAAI,CAAC,aAAa,CAAC;YAC9B,iBAAiB,IAAI,UAAU;;aAC1B;AACL,YAAA,QAAQ,CAAC,IAAI,CAAC,aAAa,CAAC;YAC5B;;;IAIJ,MAAM,YAAY,GAAc,UAAU,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;AACjE,IAAA,MAAM,gBAAgB,GAAG,UAAU,CAAC,UAAU,C
AAC,MAAM,GAAG,CAAC,CAAC,CAAC,OAAO,EAAE;AACpE,IAAA,IAAI,gBAAgB,KAAK,MAAM,EAAE;QAC/B,SAAS,GAAG,IAAI;;AAGlB,IAAA,IAAI,SAAS,IAAI,IAAI,IAAI,SAAS,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;AAC3D,QAAA,MAAM,iBAAiB,GAAG,UAAU,CAAC,SAAS,CAAC,GAAG,IAAI,GAAG,CAAC,OAAO,EAAE,KAAK,SAAS,CAAC;AAClF,QAAA,IAAI,iBAAiB,GAAG,CAAC,EAAE;AACzB,YAAA,UAAU,GAAG,UAAU,CAAC,KAAK,CAAC,iBAAiB,CAAC;;;AAIpD,IAAA,IAAI,gBAAgB,KAAK,IAAI,EAAE;QAC7B,MAAM,OAAO,GAAG,gBAAgB,CAAC,YAAY,EAAE,aAAa,CAAC;QAC7D,UAAU,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,OAAO,GAAG,OAAO;;SAC9C;AACL,QAAA,UAAU,CAAC,IAAI,CAAC,eAAe,CAAC;;AAGlC,IAAA,IAAI,YAAY,IAAI,cAAc,GAAG,CAAC,EAAE;QACtC,UAAU,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAgB,CAAC;QAC5C,mBAAmB,CAAC,KAAK,EAAE;;AAG7B,IAAA,MAAM,CAAC,OAAO,GAAG,UAAU,CAAC,OAAO,EAAE;AACrC,IAAA,OAAO,MAAM;AACf;AAEM,SAAU,gBAAgB,CAAC,KAAc,EAAA;AAC7C,IAAA,OAAO,OAAO,KAAK,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,KAAK,GAAG,CAAC;AAChE;AAEM,SAAU,mBAAmB,CAAC,aAAyC,EAAA;IAC3E,MAAM,kBAAkB,GAAG,EAAE,GAAG,aAAa,CAAC,kBAAkB,EAAE;AAClE,IAAA,IAAI,kBAAkB,GAAG,aAAa,CAAC,UAAU;IACjD,IAAI,eAAe,GAAG,CAAC;IACvB,IAAI,WAAW,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,kBAAkB,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;IAChF,OAAO,SAAS,aAAa,CAAC,MAA2B,EAAA;AAIvD,QAAA,IAAI,YAAuC;AAC3C,QAAA,IAAI,MAAM,CAAC,aAAa,KACtB,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,YAAY;AAC/C,gBACD,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,mBAAmB;oBAEvD,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,mBAAmB,CAAC,cAAc;uBACrE,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,mBAAmB,CAAC,UAAU,CAAC,CACzE,CACF,CACF,IAAI,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,EAAE;AACzD,YAAA,YAAY,GAAG,oBAAoB,CAAC,MAAM,CAAC,aAAa,CAAC;AACzD,YAAA,WAAW,GAAG,YAAY,CAAC,YAAY;;AAGzC,QAAA,KAAK,IAAI,CAAC,GAAG,kBAAkB,EAAE,CAAC,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAChE,MAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC;;AAElC,YAAA,IAAI,CAAC,KAAK,kBAAkB,IAAI,kBAAkB,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,YAAY,EAAE;AACnF,gBAAA,kBAAkB,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,aAAa;;;AAE7C,iBAAA,IAAI,kBAAkB,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE;gBAC9C,kBAAkB,CAAC,CAAC,CAAC,GAAG,aAAa,CAAC,YAAY,CAAC,OAAO,CAAC;AAC3D,gBAAA,WAAW,IAAI,kBAAkB,CAAC,CAAC,CAAC;;;;;;;QAQxC,IAAI,YAAY,EAAE;;YAEhB,MAAM,gBAAgB,GAAG,MAAM,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,KAAK,CAAC,KAAI;;AAEvF,gBAAA,MAAM,UAAU,GAAG,MAAM,CAAC,GAAG,CAAC;AAC9B,gBAAA,IAAI,UAAU,KAAK,CAAC,IAAI,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;oBACjE,OAAO,GAAG,GAAG,KAAK;;AAEpB,gBAAA,OAAO,UAAU,IAAI,eAAe,GAAG,GAAG,GAAG,KAAK,GAAG,GAAG;aACzD,EAAE,CAAC,CAAC;;AAGL,YAAA,MAAM,KAAK,GAAG,YAAY,CAAC,YAAY,GAAG,gBAAgB;YAC1D,MAAM,WAAW,GAAG,KAAK,IAAI,CAAC,GAAC,CAAC,IAAI,KAAK,IAAI,GAAG;;YAGhD,IAAI,WAAW,EAAE;AACf,gBAAA,KAAK,MAAM,GAAG,IAAI,kBAAkB,EAAE;AACpC,oBAAA,MAAM,UAAU,GAAG,MAAM,CAAC,GAAG,CAAC;AAC9B,oBAAA,IAAI,UAAU,KAAK,CAAC,IAAI,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AACjE,wBAAA,kBAAkB,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,kBAAkB,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;;AAChE,yBAAA,IAAI,UAAU,IAAI,eAAe,EAAE;;AAExC,wBAAA,kBAAkB,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,kBAAkB,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;;;;;AAM7E,QAAA,kBAAkB,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM;AAC3C,QAAA,IAAI,WAAW,IAAI,aAAa,CAAC,SAAS,EAAE;YAC1C,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,QAAQ,EAAE,kBAAkB,EAAE;;AAGzD,QAAA,MAAM,EAAE,OAAO,EAAE,GAAG,2BAA2B,CAAC;YAC9C,gBAAgB,EAAE,aAAa,CAAC,SAAS;YACzC,QAAQ,EAAE,MAAM,CAAC,QAAQ;YACzB,kBAAkB;YAClB,SAAS,EAAE,MAAM,CAAC,SAAS;YAC3B,eAAe,EAAE,aAAa,CAAC,eAAe;YAC9C,YAAY,EAAE,aAAa,CAAC,YAAY;AACzC,SAAA,CAAC;AACF,QAAA,eAAe,GAAG,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC;AAEtE,QAAA,OAAO
,EAAE,OAAO,EAAE,kBAAkB,EAAE;AACxC,KAAC;AACH;;;;"}
+
{"version":3,"file":"prune.mjs","sources":["../../../src/messages/prune.ts"],"sourcesContent":["import { AIMessage, BaseMessage, UsageMetadata } from '@langchain/core/messages';\nimport type { ThinkingContentText, MessageContentComplex, ReasoningContentText } from '@/types/stream';\nimport type { TokenCounter } from '@/types/run';\nimport { ContentTypes, Providers } from '@/common';\n\nexport type PruneMessagesFactoryParams = {\n provider?: Providers;\n maxTokens: number;\n startIndex: number;\n tokenCounter: TokenCounter;\n indexTokenCountMap: Record<string, number>;\n thinkingEnabled?: boolean;\n};\nexport type PruneMessagesParams = {\n messages: BaseMessage[];\n usageMetadata?: Partial<UsageMetadata>;\n startType?: ReturnType<BaseMessage['getType']>;\n}\n\nfunction isIndexInContext(arrayA: unknown[], arrayB: unknown[], targetIndex: number): boolean {\n const startingIndexInA = arrayA.length - arrayB.length;\n return targetIndex >= startingIndexInA;\n}\n\nfunction addThinkingBlock(message: AIMessage, thinkingBlock: ThinkingContentText | ReasoningContentText): MessageContentComplex[] {\n const content: MessageContentComplex[] = Array.isArray(message.content)\n ? message.content as MessageContentComplex[]\n : [{\n type: ContentTypes.TEXT,\n text: message.content,\n }];\n content.unshift(thinkingBlock);\n return content;\n}\n\n/**\n * Calculates the total tokens from a single usage object\n *\n * @param usage The usage metadata object containing token information\n * @returns An object containing the total input and output tokens\n */\nexport function calculateTotalTokens(usage: Partial<UsageMetadata>): UsageMetadata {\n const baseInputTokens = Number(usage.input_tokens) || 0;\n const cacheCreation = Number(usage.input_token_details?.cache_creation) || 0;\n const cacheRead = Number(usage.input_token_details?.cache_read) || 0;\n\n const totalInputTokens = baseInputTokens + cacheCreation + cacheRead;\n const totalOutputTokens = Number(usage.output_tokens) || 0;\n\n return {\n input_tokens: totalInputTokens,\n output_tokens: totalOutputTokens,\n total_tokens: totalInputTokens + totalOutputTokens\n };\n}\n\nexport type PruningResult = {\n context: BaseMessage[];\n remainingContextTokens: number;\n messagesToRefine: BaseMessage[];\n thinkingStartIndex?: number;\n};\n\n/**\n * Processes an array of messages and returns a context of messages that fit within a specified token limit.\n * It iterates over the messages from newest to oldest, adding them to the context until the token limit is reached.\n *\n * @param options Configuration options for processing messages\n * @returns Object containing the message context, remaining tokens, messages not included, and summary index\n */\nexport function getMessagesWithinTokenLimit({\n messages: _messages,\n maxContextTokens,\n indexTokenCountMap,\n startType: _startType,\n thinkingEnabled,\n tokenCounter,\n thinkingStartIndex: _thinkingStartIndex = -1,\n reasoningType = ContentTypes.THINKING,\n}: {\n messages: BaseMessage[];\n maxContextTokens: number;\n indexTokenCountMap: Record<string, number | undefined>;\n startType?: string | string[];\n thinkingEnabled?: boolean;\n tokenCounter: TokenCounter;\n thinkingStartIndex?: number;\n reasoningType?: ContentTypes.THINKING | ContentTypes.REASONING_CONTENT;\n}): PruningResult {\n // Every reply is primed with <|start|>assistant<|message|>, so we\n // start with 3 tokens for the label after all messages have been counted.\n let currentTokenCount = 3;\n const instructions = _messages[0]?.getType() === 'system' ? 
_messages[0] : undefined;\n const instructionsTokenCount = instructions != null ? indexTokenCountMap[0] ?? 0 : 0;\n const initialContextTokens = maxContextTokens - instructionsTokenCount;\n let remainingContextTokens = initialContextTokens;\n let startType = _startType;\n const originalLength = _messages.length;\n const messages = [..._messages];\n /**\n * IMPORTANT: this context array gets reversed at the end, since the latest messages get pushed first.\n *\n * This may be confusing to read, but it is done to ensure the context is in the correct order for the model.\n * */\n let context: Array<BaseMessage | undefined> = [];\n\n let thinkingStartIndex = _thinkingStartIndex;\n let thinkingEndIndex = -1;\n let thinkingBlock: ThinkingContentText | ReasoningContentText | undefined;\n const endIndex = instructions != null ? 1 : 0;\n const prunedMemory: BaseMessage[] = [];\n\n if (_thinkingStartIndex > -1) {\n const thinkingMessageContent = _messages[_thinkingStartIndex]?.content;\n if (Array.isArray(thinkingMessageContent)) {\n thinkingBlock = thinkingMessageContent.find((content) => content.type === reasoningType) as ThinkingContentText | undefined;\n }\n }\n\n if (currentTokenCount < remainingContextTokens) {\n let currentIndex = messages.length;\n while (messages.length > 0 && currentTokenCount < remainingContextTokens && currentIndex > endIndex) {\n currentIndex--;\n if (messages.length === 1 && instructions) {\n break;\n }\n const poppedMessage = messages.pop();\n if (!poppedMessage) continue;\n const messageType = poppedMessage.getType();\n if (thinkingEnabled === true && thinkingEndIndex === -1 && (currentIndex === (originalLength - 1)) && (messageType === 'ai' || messageType === 'tool')) {\n thinkingEndIndex = currentIndex;\n }\n if (thinkingEndIndex > -1 && !thinkingBlock && thinkingStartIndex < 0 && messageType === 'ai' && Array.isArray(poppedMessage.content)) {\n thinkingBlock = (poppedMessage.content.find((content) => content.type === reasoningType)) as ThinkingContentText | undefined;\n thinkingStartIndex = thinkingBlock != null ? currentIndex : -1;\n }\n /** False start, the latest message was not part of a multi-assistant/tool sequence of messages */\n if (\n thinkingEndIndex > -1\n && currentIndex === (thinkingEndIndex - 1)\n && (messageType !== 'ai' && messageType !== 'tool')\n ) {\n thinkingEndIndex = -1;\n }\n\n const tokenCount = indexTokenCountMap[currentIndex] ?? 0;\n\n if (prunedMemory.length === 0 && ((currentTokenCount + tokenCount) <= remainingContextTokens)) {\n context.push(poppedMessage);\n currentTokenCount += tokenCount;\n } else {\n prunedMemory.push(poppedMessage);\n if (thinkingEndIndex > -1 && thinkingStartIndex < 0) {\n continue;\n }\n break;\n }\n }\n\n if (context[context.length - 1]?.getType() === 'tool') {\n startType = ['ai', 'human'];\n }\n\n if (startType != null && startType.length > 0 && context.length > 0) {\n let requiredTypeIndex = -1;\n\n let totalTokens = 0;\n for (let i = context.length - 1; i >= 0; i--) {\n const currentType = context[i]?.getType() ?? '';\n if (Array.isArray(startType) ? startType.includes(currentType) : currentType === startType) {\n requiredTypeIndex = i + 1;\n break;\n }\n const originalIndex = originalLength - 1 - i;\n totalTokens += indexTokenCountMap[originalIndex] ?? 
0;\n }\n\n if (requiredTypeIndex > 0) {\n currentTokenCount -= totalTokens;\n context = context.slice(0, requiredTypeIndex);\n }\n }\n }\n\n if (instructions && originalLength > 0) {\n context.push(_messages[0] as BaseMessage);\n messages.shift();\n }\n\n remainingContextTokens -= currentTokenCount;\n const result: PruningResult = {\n remainingContextTokens,\n context: [] as BaseMessage[],\n messagesToRefine: prunedMemory,\n };\n\n if (thinkingStartIndex > -1) {\n result.thinkingStartIndex = thinkingStartIndex;\n }\n\n if (prunedMemory.length === 0 || thinkingEndIndex < 0 || (thinkingStartIndex > -1 && isIndexInContext(_messages, context, thinkingStartIndex))) {\n // we reverse at this step to ensure the context is in the correct order for the model, and we need to work backwards\n result.context = context.reverse() as BaseMessage[];\n return result;\n }\n\n if (thinkingEndIndex > -1 && thinkingStartIndex < 0) {\n throw new Error('The payload is malformed. There is a thinking sequence but no \"AI\" messages with thinking blocks.');\n }\n\n if (!thinkingBlock) {\n throw new Error('The payload is malformed. There is a thinking sequence but no thinking block found.');\n }\n\n // Since we have a thinking sequence, we need to find the last assistant message\n // in the latest AI/tool sequence to add the thinking block that falls outside of the current context\n // Latest messages are ordered first.\n let assistantIndex = -1;\n for (let i = 0; i < context.length; i++) {\n const currentMessage = context[i];\n const type = currentMessage?.getType();\n if (type === 'ai') {\n assistantIndex = i;\n }\n if (assistantIndex > -1 && (type === 'human' || type === 'system')) {\n break;\n }\n }\n\n if (assistantIndex === -1) {\n throw new Error('The payload is malformed. There is a thinking sequence but no \"AI\" messages to append thinking blocks to.');\n }\n\n thinkingStartIndex = originalLength - 1 - assistantIndex;\n const thinkingTokenCount = tokenCounter(new AIMessage({ content: [thinkingBlock] }));\n const newRemainingCount = remainingContextTokens - thinkingTokenCount;\n const content: MessageContentComplex[] = addThinkingBlock(context[assistantIndex] as AIMessage, thinkingBlock);\n (context[assistantIndex] as AIMessage).content = content;\n if (newRemainingCount > 0) {\n result.context = context.reverse() as BaseMessage[];\n return result;\n }\n\n const thinkingMessage: AIMessage = context[assistantIndex] as AIMessage;\n // now we need to an additional round of pruning but making the thinking block fit\n const newThinkingMessageTokenCount = (indexTokenCountMap[thinkingStartIndex] ?? 0) + thinkingTokenCount;\n remainingContextTokens = initialContextTokens - newThinkingMessageTokenCount;\n currentTokenCount = 3;\n let newContext: BaseMessage[] = [];\n const secondRoundMessages = [..._messages];\n let currentIndex = secondRoundMessages.length;\n while (secondRoundMessages.length > 0 && currentTokenCount < remainingContextTokens && currentIndex > thinkingStartIndex) {\n currentIndex--;\n const poppedMessage = secondRoundMessages.pop();\n if (!poppedMessage) continue;\n const tokenCount = indexTokenCountMap[currentIndex] ?? 
0;\n if ((currentTokenCount + tokenCount) <= remainingContextTokens) {\n newContext.push(poppedMessage);\n currentTokenCount += tokenCount;\n } else {\n messages.push(poppedMessage);\n break;\n }\n }\n\n const firstMessage: AIMessage = newContext[newContext.length - 1];\n const firstMessageType = newContext[newContext.length - 1].getType();\n if (firstMessageType === 'tool') {\n startType = 'ai';\n }\n\n if (startType != null && startType && newContext.length > 0) {\n const requiredTypeIndex = newContext.findIndex(msg => msg.getType() === startType);\n if (requiredTypeIndex > 0) {\n newContext = newContext.slice(requiredTypeIndex);\n }\n }\n\n if (firstMessageType === 'ai') {\n const content = addThinkingBlock(firstMessage, thinkingBlock);\n newContext[newContext.length - 1].content = content;\n } else {\n newContext.push(thinkingMessage);\n }\n\n if (instructions && originalLength > 0) {\n newContext.push(_messages[0] as BaseMessage);\n secondRoundMessages.shift();\n }\n\n result.context = newContext.reverse();\n return result;\n}\n\nexport function checkValidNumber(value: unknown): value is number {\n return typeof value === 'number' && !isNaN(value) && value > 0;\n}\n\nexport function createPruneMessages(factoryParams: PruneMessagesFactoryParams) {\n const indexTokenCountMap = { ...factoryParams.indexTokenCountMap };\n let lastTurnStartIndex = factoryParams.startIndex;\n let lastCutOffIndex = 0;\n let totalTokens = (Object.values(indexTokenCountMap)).reduce((a, b) => a + b, 0);\n let runThinkingStartIndex = -1;\n return function pruneMessages(params: PruneMessagesParams): {\n context: BaseMessage[];\n indexTokenCountMap: Record<string, number>;\n } {\n let currentUsage: UsageMetadata | undefined;\n if (params.usageMetadata && (\n checkValidNumber(params.usageMetadata.input_tokens)\n || (\n checkValidNumber(params.usageMetadata.input_token_details)\n && (\n checkValidNumber(params.usageMetadata.input_token_details.cache_creation)\n || checkValidNumber(params.usageMetadata.input_token_details.cache_read)\n )\n )\n ) && checkValidNumber(params.usageMetadata.output_tokens)) {\n currentUsage = calculateTotalTokens(params.usageMetadata);\n totalTokens = currentUsage.total_tokens;\n }\n\n for (let i = lastTurnStartIndex; i < params.messages.length; i++) {\n const message = params.messages[i];\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (i === lastTurnStartIndex && indexTokenCountMap[i] === undefined && currentUsage) {\n indexTokenCountMap[i] = currentUsage.output_tokens;\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n } else if (indexTokenCountMap[i] === undefined) {\n indexTokenCountMap[i] = factoryParams.tokenCounter(message);\n totalTokens += indexTokenCountMap[i];\n }\n }\n\n // If `currentUsage` is defined, we need to distribute the current total tokens to our `indexTokenCountMap`,\n // We must distribute it in a weighted manner, so that the total token count is equal to `currentUsage.total_tokens`,\n // relative the manually counted tokens in `indexTokenCountMap`.\n // EDGE CASE: when the resulting context gets pruned, we should not distribute the usage for messages that are not in the context.\n if (currentUsage) {\n // Calculate the sum of tokens only for indices at or after lastCutOffIndex\n const totalIndexTokens = Object.entries(indexTokenCountMap).reduce((sum, [key, value]) => {\n // Convert string key to number and check if it's >= lastCutOffIndex\n const numericKey = Number(key);\n if (numericKey === 0 && 
params.messages[0].getType() === 'system') {\n return sum + value;\n }\n return numericKey >= lastCutOffIndex ? sum + value : sum;\n }, 0);\n\n // Calculate ratio based only on messages that remain in the context\n const ratio = currentUsage.total_tokens / totalIndexTokens;\n const isRatioSafe = ratio >= 1/3 && ratio <= 2.5;\n\n // Apply the ratio adjustment only to messages at or after lastCutOffIndex, and only if the ratio is safe\n if (isRatioSafe) {\n for (const key in indexTokenCountMap) {\n const numericKey = Number(key);\n if (numericKey === 0 && params.messages[0].getType() === 'system') {\n indexTokenCountMap[key] = Math.round(indexTokenCountMap[key] * ratio);\n } else if (numericKey >= lastCutOffIndex) {\n // Only adjust token counts for messages still in the context\n indexTokenCountMap[key] = Math.round(indexTokenCountMap[key] * ratio);\n }\n }\n }\n }\n\n lastTurnStartIndex = params.messages.length;\n if (totalTokens <= factoryParams.maxTokens) {\n return { context: params.messages, indexTokenCountMap };\n }\n\n const { context, thinkingStartIndex } = getMessagesWithinTokenLimit({\n maxContextTokens: factoryParams.maxTokens,\n messages: params.messages,\n indexTokenCountMap,\n startType: params.startType,\n thinkingEnabled: factoryParams.thinkingEnabled,\n tokenCounter: factoryParams.tokenCounter,\n reasoningType: factoryParams.provider === Providers.BEDROCK ? ContentTypes.REASONING_CONTENT : ContentTypes.THINKING,\n thinkingStartIndex: factoryParams.thinkingEnabled === true ? runThinkingStartIndex : undefined,\n });\n runThinkingStartIndex = thinkingStartIndex ?? -1;\n /** The index is the first value of `context`, index relative to `params.messages` */\n lastCutOffIndex = Math.max(params.messages.length - (context.length - (context[0]?.getType() === 'system' ? 
1 : 0)), 0);\n\n return { context, indexTokenCountMap };\n };\n}\n"],"names":[],"mappings":";;;AAmBA,SAAS,gBAAgB,CAAC,MAAiB,EAAE,MAAiB,EAAE,WAAmB,EAAA;IACjF,MAAM,gBAAgB,GAAG,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM;IACtD,OAAO,WAAW,IAAI,gBAAgB;AACxC;AAEA,SAAS,gBAAgB,CAAC,OAAkB,EAAE,aAAyD,EAAA;IACrG,MAAM,OAAO,GAA4B,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO;UAClE,OAAO,CAAC;AACV,UAAE,CAAC;gBACD,IAAI,EAAE,YAAY,CAAC,IAAI;gBACvB,IAAI,EAAE,OAAO,CAAC,OAAO;AACtB,aAAA,CAAC;AACJ,IAAA,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC;AAC9B,IAAA,OAAO,OAAO;AAChB;AAEA;;;;;AAKG;AACG,SAAU,oBAAoB,CAAC,KAA6B,EAAA;IAChE,MAAM,eAAe,GAAG,MAAM,CAAC,KAAK,CAAC,YAAY,CAAC,IAAI,CAAC;AACvD,IAAA,MAAM,aAAa,GAAG,MAAM,CAAC,KAAK,CAAC,mBAAmB,EAAE,cAAc,CAAC,IAAI,CAAC;AAC5E,IAAA,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC,mBAAmB,EAAE,UAAU,CAAC,IAAI,CAAC;AAEpE,IAAA,MAAM,gBAAgB,GAAG,eAAe,GAAG,aAAa,GAAG,SAAS;IACpE,MAAM,iBAAiB,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC;IAE1D,OAAO;AACL,QAAA,YAAY,EAAE,gBAAgB;AAC9B,QAAA,aAAa,EAAE,iBAAiB;QAChC,YAAY,EAAE,gBAAgB,GAAG;KAClC;AACH;AASA;;;;;;AAMG;AACa,SAAA,2BAA2B,CAAC,EAC1C,QAAQ,EAAE,SAAS,EACnB,gBAAgB,EAChB,kBAAkB,EAClB,SAAS,EAAE,UAAU,EACrB,eAAe,EACf,YAAY,EACZ,kBAAkB,EAAE,mBAAmB,GAAG,EAAE,EAC5C,aAAa,GAAG,YAAY,CAAC,QAAQ,GAUtC,EAAA;;;IAGC,IAAI,iBAAiB,GAAG,CAAC;IACzB,MAAM,YAAY,GAAG,SAAS,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,KAAK,QAAQ,GAAG,SAAS,CAAC,CAAC,CAAC,GAAG,SAAS;AACpF,IAAA,MAAM,sBAAsB,GAAG,YAAY,IAAI,IAAI,GAAG,kBAAkB,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;AACpF,IAAA,MAAM,oBAAoB,GAAG,gBAAgB,GAAG,sBAAsB;IACtE,IAAI,sBAAsB,GAAG,oBAAoB;IACjD,IAAI,SAAS,GAAG,UAAU;AAC1B,IAAA,MAAM,cAAc,GAAG,SAAS,CAAC,MAAM;AACvC,IAAA,MAAM,QAAQ,GAAG,CAAC,GAAG,SAAS,CAAC;AAC/B;;;;AAIK;IACL,IAAI,OAAO,GAAmC,EAAE;IAEhD,IAAI,kBAAkB,GAAG,mBAAmB;AAC5C,IAAA,IAAI,gBAAgB,GAAG,EAAE;AACzB,IAAA,IAAI,aAAqE;AACzE,IAAA,MAAM,QAAQ,GAAG,YAAY,IAAI,IAAI,GAAG,CAAC,GAAG,CAAC;IAC7C,MAAM,YAAY,GAAkB,EAAE;AAEtC,IAAA,IAAI,mBAAmB,GAAG,EAAE,EAAE;QAC5B,MAAM,sBAAsB,GAAG,SAAS,CAAC,mBAAmB,CAAC,EAAE,OAAO;AACtE,QAAA,IAAI,KAAK,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;AACzC,YAAA,aAAa,GAAG,sBAAsB,CAAC,IAAI,CAAC,CAAC,OAAO,KAAK,OAAO,CAAC,IAAI,KAAK,aAAa,CAAoC;;;AAI/H,IAAA,IAAI,iBAAiB,GAAG,sBAAsB,EAAE;AAC9C,QAAA,IAAI,YAAY,GAAG,QAAQ,CAAC,MAAM;AAClC,QAAA,OAAO,QAAQ,CAAC,MAAM,GAAG,CAAC,IAAI,iBAAiB,GAAG,sBAAsB,IAAI,YAAY,GAAG,QAAQ,EAAE;AACnG,YAAA,YAAY,EAAE;YACd,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,IAAI,YAAY,EAAE;gBACzC;;AAEF,YAAA,MAAM,aAAa,GAAG,QAAQ,CAAC,GAAG,EAAE;AACpC,YAAA,IAAI,CAAC,aAAa;gBAAE;AACpB,YAAA,MAAM,WAAW,GAAG,aAAa,CAAC,OAAO,EAAE;AAC3C,YAAA,IAAI,eAAe,KAAK,IAAI,IAAI,gBAAgB,KAAK,EAAE,KAAK,YAAY,MAAM,cAAc,GAAG,CAAC,CAAC,CAAC,KAAK,WAAW,KAAK,IAAI,IAAI,WAAW,KAAK,MAAM,CAAC,EAAE;gBACtJ,gBAAgB,GAAG,YAAY;;YAEjC,IAAI,gBAAgB,GAAG,EAAE,IAAI,CAAC,aAAa,IAAK,kBAAkB,GAAG,CAAC,IAAI,WAAW,KAAK,IAAI,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,OAAO,CAAC,EAAE;gBACtI,aAAa,IAAI,aAAa,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,KAAK,OAAO,CAAC,IAAI,KAAK,aAAa,CAAC,CAAoC;AAC5H,gBAAA,kBAAkB,GAAG,aAAa,IAAI,IAAI,GAAG,YAAY,GAAG,EAAE;;;YAGhE,IACE,gBAAgB,GAAG;AAChB,mBAAA,YAAY,MAAM,gBAAgB,GAAG,CAAC;oBACrC,WAAW,KAAK,IAAI,IAAI,WAAW,KAAK,MAAM,CAAC,EACnD;gBACA,gBAAgB,GAAG,EAAE;;YAGvB,MAAM,UAAU,GAAG,kBAAkB,CAAC,YAAY,CAAC,IAAI,CAAC;AAExD,YAAA,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,KAAK,CAAC,iBAAiB,GAAG,UAAU,KAAK,sBAAsB,CAAC,EAAE;AAC7F,gBAAA,OAAO,CAAC,IAAI,CAAC,aAAa,CAAC;gBAC3B,iBAAiB,IAAI,UAAU;;iBAC1B;AACL,gBAAA,YAAY,CAAC,IAAI,CAAC,aAAa,CAAC;gBAChC,IAAI,gBAAgB,GAAG,EAAE,IAAI,kBAAkB,GAAG,CAAC,EAAE;oBACnD;;gBAEF;;;AAIJ,QAAA,IAAI,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,EAAE,OAAO,EAAE,KAAK,MAAM,EAAE;AACrD,YAAA,SAAS,GAAG,CAAC,IAAI,EAAE,OAAO,CAAC;;AAG7B,QAAA,IAAI,SAAS,IAAI,IAAI,IAAI,SAAS,CAAC,MAAM,
GAAG,CAAC,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE;AACnE,YAAA,IAAI,iBAAiB,GAAG,EAAE;YAE1B,IAAI,WAAW,GAAG,CAAC;AACnB,YAAA,KAAK,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE;gBAC5C,MAAM,WAAW,GAAG,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,IAAI,EAAE;gBAC/C,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,SAAS,CAAC,QAAQ,CAAC,WAAW,CAAC,GAAG,WAAW,KAAK,SAAS,EAAE;AAC1F,oBAAA,iBAAiB,GAAG,CAAC,GAAG,CAAC;oBACzB;;AAEF,gBAAA,MAAM,aAAa,GAAG,cAAc,GAAG,CAAC,GAAG,CAAC;AAC5C,gBAAA,WAAW,IAAI,kBAAkB,CAAC,aAAa,CAAC,IAAI,CAAC;;AAGvD,YAAA,IAAI,iBAAiB,GAAG,CAAC,EAAE;gBACzB,iBAAiB,IAAI,WAAW;gBAChC,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,iBAAiB,CAAC;;;;AAKnD,IAAA,IAAI,YAAY,IAAI,cAAc,GAAG,CAAC,EAAE;QACtC,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAgB,CAAC;QACzC,QAAQ,CAAC,KAAK,EAAE;;IAGlB,sBAAsB,IAAI,iBAAiB;AAC3C,IAAA,MAAM,MAAM,GAAkB;QAC5B,sBAAsB;AACtB,QAAA,OAAO,EAAE,EAAmB;AAC5B,QAAA,gBAAgB,EAAE,YAAY;KAC/B;AAED,IAAA,IAAI,kBAAkB,GAAG,EAAE,EAAE;AAC3B,QAAA,MAAM,CAAC,kBAAkB,GAAG,kBAAkB;;IAGhD,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,IAAI,gBAAgB,GAAG,CAAC,KAAK,kBAAkB,GAAG,EAAE,IAAI,gBAAgB,CAAC,SAAS,EAAE,OAAO,EAAE,kBAAkB,CAAC,CAAC,EAAE;;AAE9I,QAAA,MAAM,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,EAAmB;AACnD,QAAA,OAAO,MAAM;;IAGf,IAAI,gBAAgB,GAAG,EAAE,IAAI,kBAAkB,GAAG,CAAC,EAAE;AACnD,QAAA,MAAM,IAAI,KAAK,CAAC,mGAAmG,CAAC;;IAGtH,IAAI,CAAC,aAAa,EAAE;AAClB,QAAA,MAAM,IAAI,KAAK,CAAC,qFAAqF,CAAC;;;;;AAMxG,IAAA,IAAI,cAAc,GAAG,EAAE;AACvB,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACvC,QAAA,MAAM,cAAc,GAAG,OAAO,CAAC,CAAC,CAAC;AACjC,QAAA,MAAM,IAAI,GAAG,cAAc,EAAE,OAAO,EAAE;AACtC,QAAA,IAAI,IAAI,KAAK,IAAI,EAAE;YACjB,cAAc,GAAG,CAAC;;AAEpB,QAAA,IAAI,cAAc,GAAG,EAAE,KAAK,IAAI,KAAK,OAAO,IAAI,IAAI,KAAK,QAAQ,CAAC,EAAE;YAClE;;;AAIJ,IAAA,IAAI,cAAc,KAAK,EAAE,EAAE;AACzB,QAAA,MAAM,IAAI,KAAK,CAAC,2GAA2G,CAAC;;AAG9H,IAAA,kBAAkB,GAAG,cAAc,GAAG,CAAC,GAAG,cAAc;AACxD,IAAA,MAAM,kBAAkB,GAAG,YAAY,CAAC,IAAI,SAAS,CAAC,EAAE,OAAO,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,CAAC;AACpF,IAAA,MAAM,iBAAiB,GAAG,sBAAsB,GAAG,kBAAkB;IACrE,MAAM,OAAO,GAA4B,gBAAgB,CAAC,OAAO,CAAC,cAAc,CAAc,EAAE,aAAa,CAAC;AAC7G,IAAA,OAAO,CAAC,cAAc,CAAe,CAAC,OAAO,GAAG,OAAO;AACxD,IAAA,IAAI,iBAAiB,GAAG,CAAC,EAAE;AACzB,QAAA,MAAM,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,EAAmB;AACnD,QAAA,OAAO,MAAM;;AAGf,IAAA,MAAM,eAAe,GAAc,OAAO,CAAC,cAAc,CAAc;;AAEvE,IAAA,MAAM,4BAA4B,GAAG,CAAC,kBAAkB,CAAC,kBAAkB,CAAC,IAAI,CAAC,IAAI,kBAAkB;AACvG,IAAA,sBAAsB,GAAG,oBAAoB,GAAG,4BAA4B;IAC5E,iBAAiB,GAAG,CAAC;IACrB,IAAI,UAAU,GAAkB,EAAE;AAClC,IAAA,MAAM,mBAAmB,GAAG,CAAC,GAAG,SAAS,CAAC;AAC1C,IAAA,IAAI,YAAY,GAAG,mBAAmB,CAAC,MAAM;AAC7C,IAAA,OAAO,mBAAmB,CAAC,MAAM,GAAG,CAAC,IAAI,iBAAiB,GAAG,sBAAsB,IAAI,YAAY,GAAG,kBAAkB,EAAE;AACxH,QAAA,YAAY,EAAE;AACd,QAAA,MAAM,aAAa,GAAG,mBAAmB,CAAC,GAAG,EAAE;AAC/C,QAAA,IAAI,CAAC,aAAa;YAAE;QACpB,MAAM,UAAU,GAAG,kBAAkB,CAAC,YAAY,CAAC,IAAI,CAAC;QACxD,IAAI,CAAC,iBAAiB,GAAG,UAAU,KAAK,sBAAsB,EAAE;AAC9D,YAAA,UAAU,CAAC,IAAI,CAAC,aAAa,CAAC;YAC9B,iBAAiB,IAAI,UAAU;;aAC1B;AACL,YAAA,QAAQ,CAAC,IAAI,CAAC,aAAa,CAAC;YAC5B;;;IAIJ,MAAM,YAAY,GAAc,UAAU,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;AACjE,IAAA,MAAM,gBAAgB,GAAG,UAAU,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,OAAO,EAAE;AACpE,IAAA,IAAI,gBAAgB,KAAK,MAAM,EAAE;QAC/B,SAAS,GAAG,IAAI;;AAGlB,IAAA,IAAI,SAAS,IAAI,IAAI,IAAI,SAAS,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;AAC3D,QAAA,MAAM,iBAAiB,GAAG,UAAU,CAAC,SAAS,CAAC,GAAG,IAAI,GAAG,CAAC,OAAO,EAAE,KAAK,SAAS,CAAC;AAClF,QAAA,IAAI,iBAAiB,GAAG,CAAC,EAAE;AACzB,YAAA,UAAU,GAAG,UAAU,CAAC,KAAK,CAAC,iBAAiB,CAAC;;;AAIpD,IAAA,IAAI,gBAAgB,KAAK,IAAI,EAAE;QAC7B,MAAM,OAAO,GAAG,gBAAgB,CAAC,YAAY,EAAE,aAAa,CAAC;QAC7D,UAAU,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,OAAO,
GAAG,OAAO;;SAC9C;AACL,QAAA,UAAU,CAAC,IAAI,CAAC,eAAe,CAAC;;AAGlC,IAAA,IAAI,YAAY,IAAI,cAAc,GAAG,CAAC,EAAE;QACtC,UAAU,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAgB,CAAC;QAC5C,mBAAmB,CAAC,KAAK,EAAE;;AAG7B,IAAA,MAAM,CAAC,OAAO,GAAG,UAAU,CAAC,OAAO,EAAE;AACrC,IAAA,OAAO,MAAM;AACf;AAEM,SAAU,gBAAgB,CAAC,KAAc,EAAA;AAC7C,IAAA,OAAO,OAAO,KAAK,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,KAAK,GAAG,CAAC;AAChE;AAEM,SAAU,mBAAmB,CAAC,aAAyC,EAAA;IAC3E,MAAM,kBAAkB,GAAG,EAAE,GAAG,aAAa,CAAC,kBAAkB,EAAE;AAClE,IAAA,IAAI,kBAAkB,GAAG,aAAa,CAAC,UAAU;IACjD,IAAI,eAAe,GAAG,CAAC;IACvB,IAAI,WAAW,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,kBAAkB,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;AAChF,IAAA,IAAI,qBAAqB,GAAG,EAAE;IAC9B,OAAO,SAAS,aAAa,CAAC,MAA2B,EAAA;AAIvD,QAAA,IAAI,YAAuC;AAC3C,QAAA,IAAI,MAAM,CAAC,aAAa,KACtB,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,YAAY;AAC/C,gBACD,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,mBAAmB;oBAEvD,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,mBAAmB,CAAC,cAAc;uBACrE,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,mBAAmB,CAAC,UAAU,CAAC,CACzE,CACF,CACF,IAAI,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,EAAE;AACzD,YAAA,YAAY,GAAG,oBAAoB,CAAC,MAAM,CAAC,aAAa,CAAC;AACzD,YAAA,WAAW,GAAG,YAAY,CAAC,YAAY;;AAGzC,QAAA,KAAK,IAAI,CAAC,GAAG,kBAAkB,EAAE,CAAC,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAChE,MAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC;;AAElC,YAAA,IAAI,CAAC,KAAK,kBAAkB,IAAI,kBAAkB,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,YAAY,EAAE;AACnF,gBAAA,kBAAkB,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,aAAa;;;AAE7C,iBAAA,IAAI,kBAAkB,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE;gBAC9C,kBAAkB,CAAC,CAAC,CAAC,GAAG,aAAa,CAAC,YAAY,CAAC,OAAO,CAAC;AAC3D,gBAAA,WAAW,IAAI,kBAAkB,CAAC,CAAC,CAAC;;;;;;;QAQxC,IAAI,YAAY,EAAE;;YAEhB,MAAM,gBAAgB,GAAG,MAAM,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,KAAK,CAAC,KAAI;;AAEvF,gBAAA,MAAM,UAAU,GAAG,MAAM,CAAC,GAAG,CAAC;AAC9B,gBAAA,IAAI,UAAU,KAAK,CAAC,IAAI,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;oBACjE,OAAO,GAAG,GAAG,KAAK;;AAEpB,gBAAA,OAAO,UAAU,IAAI,eAAe,GAAG,GAAG,GAAG,KAAK,GAAG,GAAG;aACzD,EAAE,CAAC,CAAC;;AAGL,YAAA,MAAM,KAAK,GAAG,YAAY,CAAC,YAAY,GAAG,gBAAgB;YAC1D,MAAM,WAAW,GAAG,KAAK,IAAI,CAAC,GAAC,CAAC,IAAI,KAAK,IAAI,GAAG;;YAGhD,IAAI,WAAW,EAAE;AACf,gBAAA,KAAK,MAAM,GAAG,IAAI,kBAAkB,EAAE;AACpC,oBAAA,MAAM,UAAU,GAAG,MAAM,CAAC,GAAG,CAAC;AAC9B,oBAAA,IAAI,UAAU,KAAK,CAAC,IAAI,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AACjE,wBAAA,kBAAkB,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,kBAAkB,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;;AAChE,yBAAA,IAAI,UAAU,IAAI,eAAe,EAAE;;AAExC,wBAAA,kBAAkB,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,kBAAkB,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;;;;;AAM7E,QAAA,kBAAkB,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM;AAC3C,QAAA,IAAI,WAAW,IAAI,aAAa,CAAC,SAAS,EAAE;YAC1C,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,QAAQ,EAAE,kBAAkB,EAAE;;AAGzD,QAAA,MAAM,EAAE,OAAO,EAAE,kBAAkB,EAAE,GAAG,2BAA2B,CAAC;YAClE,gBAAgB,EAAE,aAAa,CAAC,SAAS;YACzC,QAAQ,EAAE,MAAM,CAAC,QAAQ;YACzB,kBAAkB;YAClB,SAAS,EAAE,MAAM,CAAC,SAAS;YAC3B,eAAe,EAAE,aAAa,CAAC,eAAe;YAC9C,YAAY,EAAE,aAAa,CAAC,YAAY;AACxC,YAAA,aAAa,EAAE,aAAa,CAAC,QAAQ,KAAK,SAAS,CAAC,OAAO,GAAG,YAAY,CAAC,iBAAiB,GAAG,YAAY,CAAC,QAAQ;AACpH,YAAA,kBAAkB,EAAE,aAAa,CAAC,eAAe,KAAK,IAAI,GAAG,qBAAqB,GAAG,SAAS;AAC/F,SAAA,CAAC;AACF,QAAA,qBAAqB,GAAG,kBAAkB,IAAI,EAAE;;AAEhD,QAAA,eAAe,GAAG,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,MAAM,IAAI,OAAO,CAAC,MAAM,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,KAAK,QAAQ,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;AAEvH,QAAA,OAAO,EAAE,OAAO,EAAE,kBAAkB,EAAE;AACxC,KAAC;AACH;;;;"}
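For orientation, the prune.ts source embedded in the source map above redistributes provider-reported usage tokens across the per-message token count map, and only applies the adjustment when the ratio of reported to manually counted tokens falls between 1/3 and 2.5. The following is a minimal standalone TypeScript sketch of that redistribution step; the helper name and the example numbers are illustrative and are not part of the package's exported API.

// Standalone sketch of the weighted redistribution guard described above.
// `indexTokenCountMap` holds manually counted tokens per message index;
// `reportedTotal` is the total reported by the provider's usage metadata.
function redistributeTokenCounts(
  indexTokenCountMap: Record<string, number>,
  reportedTotal: number,
): Record<string, number> {
  const counted = Object.values(indexTokenCountMap).reduce((a, b) => a + b, 0);
  const ratio = reportedTotal / counted;
  // Only trust the reported usage when it is roughly consistent with the
  // manual counts (between 1/3x and 2.5x), mirroring the guard in prune.ts.
  if (ratio < 1 / 3 || ratio > 2.5) {
    return indexTokenCountMap;
  }
  const adjusted: Record<string, number> = {};
  for (const key of Object.keys(indexTokenCountMap)) {
    adjusted[key] = Math.round(indexTokenCountMap[key] * ratio);
  }
  return adjusted;
}

// Example: counts { 0: 120, 1: 80 } with a reported total of 300 yield { 0: 180, 1: 120 }.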
package/dist/types/messages/prune.d.ts
CHANGED
@@ -1,6 +1,8 @@
 import { BaseMessage, UsageMetadata } from '@langchain/core/messages';
 import type { TokenCounter } from '@/types/run';
+import { ContentTypes, Providers } from '@/common';
 export type PruneMessagesFactoryParams = {
+    provider?: Providers;
     maxTokens: number;
     startIndex: number;
     tokenCounter: TokenCounter;
@@ -19,6 +21,12 @@ export type PruneMessagesParams = {
  * @returns An object containing the total input and output tokens
  */
 export declare function calculateTotalTokens(usage: Partial<UsageMetadata>): UsageMetadata;
+export type PruningResult = {
+    context: BaseMessage[];
+    remainingContextTokens: number;
+    messagesToRefine: BaseMessage[];
+    thinkingStartIndex?: number;
+};
 /**
  * Processes an array of messages and returns a context of messages that fit within a specified token limit.
  * It iterates over the messages from newest to oldest, adding them to the context until the token limit is reached.
@@ -26,20 +34,16 @@ export declare function calculateTotalTokens(usage: Partial<UsageMetadata>): Usa
  * @param options Configuration options for processing messages
  * @returns Object containing the message context, remaining tokens, messages not included, and summary index
  */
-export declare function getMessagesWithinTokenLimit({ messages: _messages, maxContextTokens, indexTokenCountMap, startType: _startType, thinkingEnabled,
-/** We may need to use this when recalculating */
-tokenCounter, }: {
+export declare function getMessagesWithinTokenLimit({ messages: _messages, maxContextTokens, indexTokenCountMap, startType: _startType, thinkingEnabled, tokenCounter, thinkingStartIndex: _thinkingStartIndex, reasoningType, }: {
     messages: BaseMessage[];
     maxContextTokens: number;
     indexTokenCountMap: Record<string, number | undefined>;
-
-    startType?: string;
+    startType?: string | string[];
     thinkingEnabled?: boolean;
-
-
-
-
-};
+    tokenCounter: TokenCounter;
+    thinkingStartIndex?: number;
+    reasoningType?: ContentTypes.THINKING | ContentTypes.REASONING_CONTENT;
+}): PruningResult;
 export declare function checkValidNumber(value: unknown): value is number;
 export declare function createPruneMessages(factoryParams: PruneMessagesFactoryParams): (params: PruneMessagesParams) => {
     context: BaseMessage[];
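The new provider option and PruningResult shape above can be exercised roughly as follows. This is a minimal sketch, assuming createPruneMessages and Providers are re-exported from the package root; the import path, the message fixtures, and the rough 4-characters-per-token counter are illustrative assumptions rather than part of the diff.

import { HumanMessage, AIMessage } from '@langchain/core/messages';
import type { BaseMessage } from '@langchain/core/messages';
// Assumed export path; the diff itself only references the internal '@/messages' module.
import { createPruneMessages, Providers } from '@librechat/agents';

// Illustrative token counter: roughly 4 characters per token.
const tokenCounter = (msg: BaseMessage) =>
  Math.ceil(JSON.stringify(msg.content).length / 4);

const messages = [
  new HumanMessage('Summarize the conversation so far.'),
  new AIMessage('Here is a short summary...'),
];

const pruneMessages = createPruneMessages({
  provider: Providers.BEDROCK, // new in 2.3.9: selects REASONING_CONTENT vs. THINKING blocks when pruning
  maxTokens: 4096,
  startIndex: 0,
  tokenCounter,
  thinkingEnabled: true,
  indexTokenCountMap: Object.fromEntries(messages.map((m, i) => [i, tokenCounter(m)])),
});

// Each call prunes to the token budget and returns the updated per-index counts.
const { context, indexTokenCountMap } = pruneMessages({ messages });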
package/dist/types/types/llm.d.ts
CHANGED
@@ -33,14 +33,22 @@ export type AzureClientOptions = (Partial<OpenAIChatInput> & Partial<AzureOpenAI
 } & BaseChatModelParams & {
     configuration?: OAIClientOptions;
 });
+export type ThinkingConfig = AnthropicInput['thinking'];
 export type ChatOpenAIToolType = BindToolsInput | OpenAIClient.ChatCompletionTool;
 export type CommonToolType = StructuredTool | ChatOpenAIToolType;
+export type AnthropicReasoning = {
+    thinking?: ThinkingConfig | boolean;
+    thinkingBudget?: number;
+};
 export type OpenAIClientOptions = ChatOpenAIFields;
 export type OllamaClientOptions = ChatOllamaInput;
 export type AnthropicClientOptions = AnthropicInput;
 export type MistralAIClientOptions = ChatMistralAIInput;
 export type VertexAIClientOptions = ChatVertexAIInput;
 export type BedrockClientOptions = BedrockChatFields;
+export type BedrockAnthropicInput = ChatBedrockConverseInput & {
+    additionalModelRequestFields?: ChatBedrockConverseInput['additionalModelRequestFields'] & AnthropicReasoning;
+};
 export type BedrockConverseClientOptions = ChatBedrockConverseInput;
 export type GoogleClientOptions = GoogleGenerativeAIChatInput;
 export type DeepSeekClientOptions = ChatDeepSeekCallOptions;
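The new AnthropicReasoning and BedrockAnthropicInput types compose LangChain's Bedrock Converse options with an Anthropic-style thinking block. The snippet below is a hedged illustration only: the export path, model id, region, and token budget are placeholder assumptions, and the thinking shape is assumed to follow AnthropicInput['thinking'] from @langchain/anthropic as declared above.

// Assumed export path for the type; placeholder model, region, and budget values.
import type { BedrockAnthropicInput } from '@librechat/agents';

const bedrockOptions: BedrockAnthropicInput = {
  model: 'anthropic.claude-3-7-sonnet-20250219-v1:0',
  region: 'us-east-1',
  additionalModelRequestFields: {
    // Shape allowed by the AnthropicReasoning intersection above.
    thinking: { type: 'enabled', budget_tokens: 2000 },
  },
};

The Graph.ts diff below checks this same additionalModelRequestFields.thinking field to decide whether thinking-aware pruning should be enabled for Bedrock.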
package/package.json
CHANGED
package/src/graphs/Graph.ts
CHANGED
@@ -361,13 +361,22 @@ export class StandardGraph extends Graph<
     const { messages } = state;
 
     let messagesToUse = messages;
-    if (!this.pruneMessages && this.tokenCounter && this.maxContextTokens && this.indexTokenCountMap[0] != null) {
+    if (!this.pruneMessages && this.tokenCounter && this.maxContextTokens != null && this.indexTokenCountMap[0] != null) {
       const isAnthropicWithThinking = (
-        (
-
-
+        (
+          (
+            this.provider === Providers.ANTHROPIC && (this.clientOptions as t.AnthropicClientOptions).thinking != null
+          )
+          ||
+          (this.provider === Providers.BEDROCK && (
+            (this.clientOptions as t.BedrockAnthropicInput).additionalModelRequestFields?.['thinking'] != null
+          )
+          )
+        )
+      );
 
       this.pruneMessages = createPruneMessages({
+        provider: this.provider,
         indexTokenCountMap: this.indexTokenCountMap,
         maxTokens: this.maxContextTokens,
         tokenCounter: this.tokenCounter,