@copilotkit/runtime 1.2.2-feat-runtime-remote-actions.1 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -13
- package/dist/{chunk-BPEPG56J.mjs → chunk-47TPNJX7.mjs} +2 -2
- package/dist/{chunk-Y5TWOZFD.mjs → chunk-4BZ6WXBB.mjs} +3 -3
- package/dist/{chunk-3SKYFYY2.mjs → chunk-IRRAL44O.mjs} +4 -5
- package/dist/chunk-IRRAL44O.mjs.map +1 -0
- package/dist/{chunk-BJ2LVHWA.mjs → chunk-OF6AN6HF.mjs} +521 -256
- package/dist/chunk-OF6AN6HF.mjs.map +1 -0
- package/dist/{chunk-UL2OKN2O.mjs → chunk-VWS65V7Y.mjs} +2 -2
- package/dist/{chunk-U2EKJP47.mjs → chunk-XCGRXAJU.mjs} +2 -2
- package/dist/{copilot-runtime-d427e991.d.ts → copilot-runtime-a1b5f1ce.d.ts} +1 -1
- package/dist/{index-079752b9.d.ts → groq-adapter-069ac812.d.ts} +82 -82
- package/dist/index.d.ts +7 -5
- package/dist/index.js +721 -454
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +10 -6
- package/dist/index.mjs.map +1 -1
- package/dist/{langserve-d6073a3b.d.ts → langserve-15a1286b.d.ts} +1 -1
- package/dist/lib/index.d.ts +4 -4
- package/dist/lib/index.js +445 -446
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/index.mjs +6 -6
- package/dist/lib/integrations/index.d.ts +3 -3
- package/dist/lib/integrations/index.js +3 -4
- package/dist/lib/integrations/index.js.map +1 -1
- package/dist/lib/integrations/index.mjs +4 -4
- package/dist/lib/integrations/nest/index.d.ts +2 -2
- package/dist/lib/integrations/nest/index.js +3 -4
- package/dist/lib/integrations/nest/index.js.map +1 -1
- package/dist/lib/integrations/nest/index.mjs +2 -2
- package/dist/lib/integrations/node-express/index.d.ts +2 -2
- package/dist/lib/integrations/node-express/index.js +3 -4
- package/dist/lib/integrations/node-express/index.js.map +1 -1
- package/dist/lib/integrations/node-express/index.mjs +2 -2
- package/dist/lib/integrations/node-http/index.d.ts +2 -2
- package/dist/lib/integrations/node-http/index.js +3 -4
- package/dist/lib/integrations/node-http/index.js.map +1 -1
- package/dist/lib/integrations/node-http/index.mjs +1 -1
- package/dist/service-adapters/index.d.ts +47 -3
- package/dist/service-adapters/index.js +708 -442
- package/dist/service-adapters/index.js.map +1 -1
- package/dist/service-adapters/index.mjs +3 -1
- package/package.json +6 -7
- package/src/index.ts +1 -0
- package/src/service-adapters/anthropic/anthropic-adapter.ts +197 -0
- package/src/service-adapters/anthropic/utils.ts +144 -0
- package/src/service-adapters/index.ts +9 -7
- package/dist/chunk-3SKYFYY2.mjs.map +0 -1
- package/dist/chunk-BJ2LVHWA.mjs.map +0 -1
- /package/dist/{chunk-BPEPG56J.mjs.map → chunk-47TPNJX7.mjs.map} +0 -0
- /package/dist/{chunk-Y5TWOZFD.mjs.map → chunk-4BZ6WXBB.mjs.map} +0 -0
- /package/dist/{chunk-UL2OKN2O.mjs.map → chunk-VWS65V7Y.mjs.map} +0 -0
- /package/dist/{chunk-U2EKJP47.mjs.map → chunk-XCGRXAJU.mjs.map} +0 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/service-adapters/langchain/langserve.ts","../src/service-adapters/openai/openai-adapter.ts","../src/service-adapters/openai/utils.ts","../src/service-adapters/langchain/langchain-adapter.ts","../src/service-adapters/google/google-genai-adapter.ts","../src/service-adapters/google/utils.ts","../src/service-adapters/openai/openai-assistant-adapter.ts","../src/service-adapters/unify/unify-adapter.ts","../src/service-adapters/groq/groq-adapter.ts","../src/service-adapters/anthropic/anthropic-adapter.ts","../src/service-adapters/anthropic/utils.ts"],"sourcesContent":["import { Parameter, Action } from \"@copilotkit/shared\";\nimport { RemoteRunnable } from \"langchain/runnables/remote\";\n\nexport interface RemoteChainParameters {\n name: string;\n description: string;\n chainUrl: string;\n parameters?: Parameter[];\n parameterType?: \"single\" | \"multi\";\n}\n\nexport class RemoteChain {\n name: string;\n description: string;\n chainUrl: string;\n parameters?: Parameter[];\n parameterType: \"single\" | \"multi\";\n\n constructor(options: RemoteChainParameters) {\n this.name = options.name;\n this.description = options.description;\n this.chainUrl = options.chainUrl;\n this.parameters = options.parameters;\n this.parameterType = options.parameterType || \"multi\";\n }\n\n async toAction(): Promise<Action<any>> {\n if (!this.parameters) {\n await this.inferLangServeParameters();\n }\n\n return {\n name: this.name,\n description: this.description,\n parameters: this.parameters!,\n handler: async (args: any) => {\n const runnable = new RemoteRunnable({ url: this.chainUrl });\n let input: any;\n if (this.parameterType === \"single\") {\n input = args[Object.keys(args)[0]];\n } else {\n input = args;\n }\n return await runnable.invoke(input);\n },\n };\n }\n\n async inferLangServeParameters() {\n const supportedTypes = [\"string\", \"number\", \"boolean\"];\n\n let schemaUrl = this.chainUrl.replace(/\\/+$/, \"\") + \"/input_schema\";\n let schema = await fetch(schemaUrl)\n .then((res) => res.json())\n .catch(() => {\n throw new Error(\"Failed to fetch langserve schema at \" + schemaUrl);\n });\n // for now, don't use json schema, just do a simple conversion\n\n if (supportedTypes.includes(schema.type)) {\n this.parameterType = \"single\";\n this.parameters = [\n {\n name: \"input\",\n type: schema.type,\n description: \"The input to the chain\",\n },\n ];\n } else if (schema.type === \"object\") {\n this.parameterType = \"multi\";\n this.parameters = Object.keys(schema.properties).map((key) => {\n let property = schema.properties[key];\n if (!supportedTypes.includes(property.type)) {\n throw new Error(\"Unsupported schema type\");\n }\n return {\n name: key,\n type: property.type,\n description: property.description || \"\",\n required: schema.required?.includes(key) || false,\n };\n });\n } else {\n throw new Error(\"Unsupported schema type\");\n }\n }\n}\n","/**\n * Copilot Runtime adapter for OpenAI.\n *\n * ## Example\n *\n * ```ts\n * import { CopilotRuntime, OpenAIAdapter } from \"@copilotkit/runtime\";\n * import OpenAI from \"openai\";\n *\n * const copilotKit = new CopilotRuntime();\n *\n * const openai = new OpenAI({\n * organization: \"<your-organization-id>\", // optional\n * apiKey: \"<your-api-key>\",\n * });\n *\n * const serviceAdapter = new OpenAIAdapter({ openai });\n *\n * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);\n * ```\n */\nimport OpenAI from \"openai\";\nimport {\n CopilotServiceAdapter,\n 
CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport {\n convertActionInputToOpenAITool,\n convertMessageToOpenAIMessage,\n limitMessagesToTokenCount,\n} from \"./utils\";\nimport { randomId } from \"@copilotkit/shared\";\n\nconst DEFAULT_MODEL = \"gpt-4o\";\n\nexport interface OpenAIAdapterParams {\n /**\n * An optional OpenAI instance to use. If not provided, a new instance will be\n * created.\n */\n openai?: OpenAI;\n\n /**\n * The model to use.\n */\n model?: string;\n\n /**\n * Whether to disable parallel tool calls.\n * You can disable parallel tool calls to force the model to execute tool calls sequentially.\n * This is useful if you want to execute tool calls in a specific order so that the state changes\n * introduced by one tool call are visible to the next tool call. (i.e. new actions or readables)\n *\n * @default false\n */\n disableParallelToolCalls?: boolean;\n}\n\nexport class OpenAIAdapter implements CopilotServiceAdapter {\n private model: string = DEFAULT_MODEL;\n\n private disableParallelToolCalls: boolean = false;\n private _openai: OpenAI;\n public get openai(): OpenAI {\n return this._openai;\n }\n\n constructor(params?: OpenAIAdapterParams) {\n this._openai = params?.openai || new OpenAI({});\n if (params?.model) {\n this.model = params.model;\n }\n this.disableParallelToolCalls = params?.disableParallelToolCalls || false;\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const {\n threadId,\n model = this.model,\n messages,\n actions,\n eventSource,\n forwardedParameters,\n } = request;\n const tools = actions.map(convertActionInputToOpenAITool);\n\n let openaiMessages = messages.map(convertMessageToOpenAIMessage);\n openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);\n\n let toolChoice: any = forwardedParameters?.toolChoice;\n if (forwardedParameters?.toolChoice === \"function\") {\n toolChoice = {\n type: \"function\",\n function: { name: forwardedParameters.toolChoiceFunctionName },\n };\n }\n\n const stream = this.openai.beta.chat.completions.stream({\n model: model,\n stream: true,\n messages: openaiMessages,\n ...(tools.length > 0 && { tools }),\n ...(forwardedParameters?.maxTokens && { max_tokens: forwardedParameters.maxTokens }),\n ...(forwardedParameters?.stop && { stop: forwardedParameters.stop }),\n ...(toolChoice && { tool_choice: toolChoice }),\n ...(this.disableParallelToolCalls && { parallel_tool_calls: false }),\n });\n\n eventSource.stream(async (eventStream$) => {\n let mode: \"function\" | \"message\" | null = null;\n for await (const chunk of stream) {\n const toolCall = chunk.choices[0].delta.tool_calls?.[0];\n const content = chunk.choices[0].delta.content;\n\n // When switching from message to function or vice versa,\n // send the respective end event.\n // If toolCall?.id is defined, it means a new tool call starts.\n if (mode === \"message\" && toolCall?.id) {\n mode = null;\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\" && (toolCall === undefined || toolCall?.id)) {\n mode = null;\n eventStream$.sendActionExecutionEnd();\n }\n\n // If we send a new message type, send the appropriate start event.\n if (mode === null) {\n if (toolCall?.id) {\n mode = \"function\";\n eventStream$.sendActionExecutionStart(toolCall!.id, toolCall!.function!.name);\n } else if (content) {\n mode = \"message\";\n eventStream$.sendTextMessageStart(chunk.id);\n }\n }\n\n // send 
the content events\n if (mode === \"message\" && content) {\n eventStream$.sendTextMessageContent(content);\n } else if (mode === \"function\" && toolCall?.function?.arguments) {\n eventStream$.sendActionExecutionArgs(toolCall.function.arguments);\n }\n }\n\n // send the end events\n if (mode === \"message\") {\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\") {\n eventStream$.sendActionExecutionEnd();\n }\n\n eventStream$.complete();\n });\n\n return {\n threadId: threadId || randomId(),\n };\n }\n}\n","import {\n ActionExecutionMessage,\n Message,\n ResultMessage,\n TextMessage,\n} from \"../../graphql/types/converted\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\nimport { ChatCompletionMessageParam, ChatCompletionTool } from \"openai/resources\";\n\nexport function limitMessagesToTokenCount(\n messages: any[],\n tools: any[],\n model: string,\n maxTokens?: number,\n): any[] {\n maxTokens ||= maxTokensForOpenAIModel(model);\n\n const result: any[] = [];\n const toolsNumTokens = countToolsTokens(model, tools);\n if (toolsNumTokens > maxTokens) {\n throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);\n }\n maxTokens -= toolsNumTokens;\n\n for (const message of messages) {\n if (message.role === \"system\") {\n const numTokens = countMessageTokens(model, message);\n maxTokens -= numTokens;\n\n if (maxTokens < 0) {\n throw new Error(\"Not enough tokens for system message.\");\n }\n }\n }\n\n let cutoff: boolean = false;\n\n const reversedMessages = [...messages].reverse();\n for (const message of reversedMessages) {\n if (message.role === \"system\") {\n result.unshift(message);\n continue;\n } else if (cutoff) {\n continue;\n }\n let numTokens = countMessageTokens(model, message);\n if (maxTokens < numTokens) {\n cutoff = true;\n continue;\n }\n result.unshift(message);\n maxTokens -= numTokens;\n }\n\n return result;\n}\n\nexport function maxTokensForOpenAIModel(model: string): number {\n return maxTokensByModel[model] || DEFAULT_MAX_TOKENS;\n}\n\nconst DEFAULT_MAX_TOKENS = 128000;\n\nconst maxTokensByModel: { [key: string]: number } = {\n // GPT-4\n \"gpt-4o\": 128000,\n \"gpt-4o-2024-05-13\": 128000,\n \"gpt-4-turbo\": 128000,\n \"gpt-4-turbo-2024-04-09\": 128000,\n \"gpt-4-0125-preview\": 128000,\n \"gpt-4-turbo-preview\": 128000,\n \"gpt-4-1106-preview\": 128000,\n \"gpt-4-vision-preview\": 128000,\n \"gpt-4-1106-vision-preview\": 128000,\n \"gpt-4-32k\": 32768,\n \"gpt-4-32k-0613\": 32768,\n \"gpt-4-32k-0314\": 32768,\n \"gpt-4\": 8192,\n \"gpt-4-0613\": 8192,\n \"gpt-4-0314\": 8192,\n\n // GPT-3.5\n \"gpt-3.5-turbo-0125\": 16385,\n \"gpt-3.5-turbo\": 16385,\n \"gpt-3.5-turbo-1106\": 16385,\n \"gpt-3.5-turbo-instruct\": 4096,\n \"gpt-3.5-turbo-16k\": 16385,\n \"gpt-3.5-turbo-0613\": 4096,\n \"gpt-3.5-turbo-16k-0613\": 16385,\n \"gpt-3.5-turbo-0301\": 4097,\n};\n\nfunction countToolsTokens(model: string, tools: any[]): number {\n if (tools.length === 0) {\n return 0;\n }\n const json = JSON.stringify(tools);\n return countTokens(model, json);\n}\n\nfunction countMessageTokens(model: string, message: any): number {\n return countTokens(model, message.content || \"\");\n}\n\nfunction countTokens(model: string, text: string): number {\n return text.length / 3;\n}\n\nexport function convertActionInputToOpenAITool(action: ActionInput): ChatCompletionTool {\n return {\n type: \"function\",\n function: {\n name: action.name,\n description: action.description,\n parameters: JSON.parse(action.jsonSchema),\n 
},\n };\n}\n\nexport function convertMessageToOpenAIMessage(message: Message): ChatCompletionMessageParam {\n if (message instanceof TextMessage) {\n return {\n role: message.role,\n content: message.content,\n };\n } else if (message instanceof ActionExecutionMessage) {\n return {\n role: \"assistant\",\n tool_calls: [\n {\n id: message.id,\n type: \"function\",\n function: {\n name: message.name,\n arguments: JSON.stringify(message.arguments),\n },\n },\n ],\n };\n } else if (message instanceof ResultMessage) {\n return {\n role: \"tool\",\n content: message.result,\n tool_call_id: message.actionExecutionId,\n };\n }\n}\n\nexport function convertSystemMessageToAssistantAPI(message: ChatCompletionMessageParam) {\n return {\n ...message,\n ...(message.role === \"system\" && {\n role: \"assistant\",\n content: \"THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: \" + message.content,\n }),\n };\n}\n","/**\n * Copilot Runtime adapter for LangChain.\n *\n * ## Example\n *\n * ```ts\n * import { CopilotRuntime, LangChainAdapter } from \"@copilotkit/runtime\";\n * import { ChatOpenAI } from \"@langchain/openai\";\n *\n * const copilotKit = new CopilotRuntime();\n *\n * const model = new ChatOpenAI({\n * model: \"gpt-4o\",\n * apiKey: \"<your-api-key>\",\n * });\n *\n * const serviceAdapter = new LangChainAdapter({\n * chainFn: async ({ messages, tools }) => {\n * return model.stream(messages, { tools });\n * }\n * });\n *\n * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);\n * ```\n *\n * The asynchronous handler function (`chainFn`) can return any of the following:\n *\n * - A simple `string` response\n * - A LangChain stream (`IterableReadableStream`)\n * - A LangChain `BaseMessageChunk` object\n * - A LangChain `AIMessage` object\n */\n\nimport { BaseMessage } from \"@langchain/core/messages\";\nimport { CopilotServiceAdapter } from \"../service-adapter\";\nimport {\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport {\n convertActionInputToLangChainTool,\n convertMessageToLangChainMessage,\n streamLangChainResponse,\n} from \"./utils\";\nimport { DynamicStructuredTool } from \"@langchain/core/tools\";\nimport { LangChainReturnType } from \"./types\";\nimport { randomId } from \"@copilotkit/shared\";\n\ninterface ChainFnParameters {\n model: string;\n messages: BaseMessage[];\n tools: DynamicStructuredTool[];\n threadId?: string;\n runId?: string;\n}\n\ninterface LangChainAdapterOptions {\n /**\n * A function that uses the LangChain API to generate a response.\n */\n chainFn: (parameters: ChainFnParameters) => Promise<LangChainReturnType>;\n}\n\nexport class LangChainAdapter implements CopilotServiceAdapter {\n /**\n * To use LangChain as a backend, provide a handler function to the adapter with your custom LangChain logic.\n */\n constructor(private options: LangChainAdapterOptions) {}\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const { eventSource, model, actions, messages, threadId, runId } = request;\n const result = await this.options.chainFn({\n messages: messages.map(convertMessageToLangChainMessage),\n tools: actions.map(convertActionInputToLangChainTool),\n model,\n threadId,\n runId,\n });\n\n eventSource.stream(async (eventStream$) => {\n await streamLangChainResponse({\n result,\n eventStream$,\n });\n });\n\n return {\n threadId: threadId || randomId(),\n };\n }\n}\n","/**\n * Copilot Runtime adapter for Google Generative 
AI (e.g. Gemini).\n *\n * ## Example\n *\n * ```ts\n * import { CopilotRuntime, GoogleGenerativeAIAdapter } from \"@copilotkit/runtime\";\n * const { GoogleGenerativeAI } = require(\"@google/generative-ai\");\n *\n * const genAI = new GoogleGenerativeAI(process.env[\"GOOGLE_API_KEY\"]);\n *\n * const copilotKit = new CopilotRuntime();\n *\n * const model = genAI.getGenerativeModel({\n * model: \"gemini-pro\"\n * });\n *\n * const serviceAdapter = new GoogleGenerativeAIAdapter({ model });\n *\n * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);\n * ```\n */\nimport { CopilotServiceAdapter } from \"../service-adapter\";\nimport {\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport { GenerativeModel, GoogleGenerativeAI } from \"@google/generative-ai\";\nimport { TextMessage } from \"../../graphql/types/converted\";\nimport { convertMessageToGoogleGenAIMessage, transformActionToGoogleGenAITool } from \"./utils\";\nimport { randomId } from \"@copilotkit/shared\";\n\ninterface GoogleGenerativeAIAdapterOptions {\n /**\n * A custom Google Generative AI model to use.\n */\n model?: GenerativeModel;\n}\n\nexport class GoogleGenerativeAIAdapter implements CopilotServiceAdapter {\n private model: GenerativeModel;\n\n constructor(options?: GoogleGenerativeAIAdapterOptions) {\n if (options?.model) {\n this.model = options.model;\n } else {\n const genAI = new GoogleGenerativeAI(process.env[\"GOOGLE_API_KEY\"]!);\n this.model = genAI.getGenerativeModel({ model: \"gemini-pro\" });\n }\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const { messages, actions, eventSource } = request;\n\n // get the history (everything except the first and last message)\n const history = messages.slice(1, -1).map(convertMessageToGoogleGenAIMessage);\n\n // get the current message (the last message)\n const currentMessage = convertMessageToGoogleGenAIMessage(messages.at(-1));\n if (!currentMessage) {\n throw new Error(\"No current message\");\n }\n\n let systemMessage: string;\n const firstMessage = messages.at(0);\n if (firstMessage instanceof TextMessage && firstMessage.role === \"system\") {\n systemMessage = firstMessage.content.trim();\n } else {\n throw new Error(\"First message is not a system message\");\n }\n\n const tools = actions.map(transformActionToGoogleGenAITool);\n\n const isFirstGenGeminiPro =\n this.model.model === \"gemini-pro\" || this.model.model === \"models/gemini-pro\";\n\n const chat = this.model.startChat({\n history: [\n ...history,\n // gemini-pro does not support system instructions, so we need to add them to the history\n ...(isFirstGenGeminiPro ? [{ role: \"user\", parts: [{ text: systemMessage }] }] : []),\n ],\n // only gemini-1.5-pro-latest and later supports setting system instructions\n ...(isFirstGenGeminiPro\n ? 
{}\n : { systemInstruction: { role: \"user\", parts: [{ text: systemMessage }] } }),\n tools,\n });\n\n const result = await chat.sendMessageStream(currentMessage.parts);\n\n eventSource.stream(async (eventStream$) => {\n let isTextMessage = false;\n for await (const chunk of result.stream) {\n const chunkText = chunk.text();\n if (chunkText === \"\") {\n continue;\n }\n if (!isTextMessage) {\n isTextMessage = true;\n eventStream$.sendTextMessageStart(randomId());\n }\n eventStream$.sendTextMessageContent(chunkText);\n }\n if (isTextMessage) {\n eventStream$.sendTextMessageEnd();\n }\n\n let calls = (await result.response).functionCalls();\n if (calls) {\n for (let call of calls) {\n eventStream$.sendActionExecution(\n randomId(),\n call.name,\n JSON.stringify(replaceNewlinesInObject(call.args)),\n );\n }\n }\n eventStream$.complete();\n });\n\n return {\n threadId: request.threadId || randomId(),\n };\n }\n}\n\nfunction replaceNewlinesInObject(obj: any): any {\n if (typeof obj === \"string\") {\n return obj.replace(/\\\\\\\\n/g, \"\\n\");\n } else if (Array.isArray(obj)) {\n return obj.map(replaceNewlinesInObject);\n } else if (typeof obj === \"object\" && obj !== null) {\n const newObj: any = {};\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n newObj[key] = replaceNewlinesInObject(obj[key]);\n }\n }\n return newObj;\n }\n return obj;\n}\n","import {\n ActionExecutionMessage,\n Message,\n ResultMessage,\n TextMessage,\n} from \"../../graphql/types/converted\";\nimport { Tool } from \"@google/generative-ai\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\n\nexport function convertMessageToGoogleGenAIMessage(message: Message) {\n if (message instanceof TextMessage) {\n const role = {\n user: \"user\",\n assistant: \"model\",\n system: \"user\",\n }[message.role];\n\n const text =\n message.role === \"system\"\n ? 
\"THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: \" + message.content\n : message.content;\n\n return {\n role,\n parts: [{ text }],\n };\n } else if (message instanceof ActionExecutionMessage) {\n return {\n role: \"model\",\n parts: [\n {\n functionCall: {\n name: message.name,\n args: message.arguments,\n },\n },\n ],\n };\n } else if (message instanceof ResultMessage) {\n return {\n role: \"function\",\n parts: [\n {\n functionResponse: {\n name: message.actionName,\n response: {\n name: message.actionName,\n content: tryParseJson(message.result),\n },\n },\n },\n ],\n };\n }\n}\n\nexport function transformActionToGoogleGenAITool(action: ActionInput): Tool {\n const name = action.name;\n const description = action.description;\n const parameters = JSON.parse(action.jsonSchema);\n\n const transformProperties = (props: any) => {\n for (const key in props) {\n if (props[key].type) {\n props[key].type = props[key].type.toUpperCase();\n }\n if (props[key].properties) {\n transformProperties(props[key].properties);\n }\n }\n };\n transformProperties(parameters);\n\n return {\n functionDeclarations: [\n {\n name,\n description,\n parameters,\n },\n ],\n };\n}\n\nfunction tryParseJson(str?: string) {\n if (!str) {\n return \"\";\n }\n try {\n return JSON.parse(str);\n } catch (e) {\n return str;\n }\n}\n","/**\n * Copilot Runtime adapter for the OpenAI Assistant API.\n *\n * ## Example\n *\n * ```ts\n * import { CopilotRuntime, OpenAIAssistantAdapter } from \"@copilotkit/runtime\";\n * import OpenAI from \"openai\";\n *\n * const copilotKit = new CopilotRuntime();\n *\n * const openai = new OpenAI({\n * organization: \"<your-organization-id>\",\n * apiKey: \"<your-api-key>\",\n * });\n *\n * const serviceAdapter = new OpenAIAssistantAdapter({\n * openai,\n * assistantId: \"<your-assistant-id>\",\n * codeInterpreterEnabled: true,\n * fileSearchEnabled: true,\n * });\n *\n * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);\n * ```\n */\nimport OpenAI from \"openai\";\nimport {\n CopilotServiceAdapter,\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport { Message, ResultMessage, TextMessage } from \"../../graphql/types/converted\";\nimport {\n convertActionInputToOpenAITool,\n convertMessageToOpenAIMessage,\n convertSystemMessageToAssistantAPI,\n} from \"./utils\";\nimport { RunSubmitToolOutputsStreamParams } from \"openai/resources/beta/threads/runs/runs\";\nimport { AssistantStream } from \"openai/lib/AssistantStream\";\nimport { RuntimeEventSource } from \"../events\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\nimport { AssistantStreamEvent, AssistantTool } from \"openai/resources/beta/assistants\";\nimport { ForwardedParametersInput } from \"../../graphql/inputs/forwarded-parameters.input\";\n\nexport interface OpenAIAssistantAdapterParams {\n /**\n * The ID of the assistant to use.\n */\n assistantId: string;\n\n /**\n * An optional OpenAI instance to use. 
If not provided, a new instance will be created.\n */\n openai?: OpenAI;\n\n /**\n * Whether to enable code interpretation.\n * @default true\n */\n codeInterpreterEnabled?: boolean;\n\n /**\n * Whether to enable file search.\n * @default true\n */\n fileSearchEnabled?: boolean;\n\n /**\n * Whether to disable parallel tool calls.\n * You can disable parallel tool calls to force the model to execute tool calls sequentially.\n * This is useful if you want to execute tool calls in a specific order so that the state changes\n * introduced by one tool call are visible to the next tool call. (i.e. new actions or readables)\n *\n * @default false\n */\n disableParallelToolCalls?: boolean;\n}\n\nexport class OpenAIAssistantAdapter implements CopilotServiceAdapter {\n private openai: OpenAI;\n private codeInterpreterEnabled: boolean;\n private assistantId: string;\n private fileSearchEnabled: boolean;\n private disableParallelToolCalls: boolean;\n\n constructor(params: OpenAIAssistantAdapterParams) {\n this.openai = params.openai || new OpenAI({});\n this.codeInterpreterEnabled = params.codeInterpreterEnabled === false || true;\n this.fileSearchEnabled = params.fileSearchEnabled === false || true;\n this.assistantId = params.assistantId;\n this.disableParallelToolCalls = params?.disableParallelToolCalls || false;\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const { messages, actions, eventSource, runId, forwardedParameters } = request;\n // if we don't have a threadId, create a new thread\n let threadId = request.threadId || (await this.openai.beta.threads.create()).id;\n\n const lastMessage = messages.at(-1);\n\n let nextRunId: string | undefined = undefined;\n\n // submit function outputs\n if (lastMessage instanceof ResultMessage && runId) {\n nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);\n }\n // submit user message\n else if (lastMessage instanceof TextMessage) {\n nextRunId = await this.submitUserMessage(\n threadId,\n messages,\n actions,\n eventSource,\n forwardedParameters,\n );\n }\n // unsupported message\n else {\n throw new Error(\"No actionable message found in the messages\");\n }\n\n return {\n threadId,\n runId: nextRunId,\n };\n }\n\n private async submitToolOutputs(\n threadId: string,\n runId: string,\n messages: Message[],\n eventSource: RuntimeEventSource,\n ) {\n let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);\n if (!run.required_action) {\n throw new Error(\"No tool outputs required\");\n }\n\n // get the required tool call ids\n const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map(\n (toolCall) => toolCall.id,\n );\n\n // search for these tool calls\n const resultMessages = messages.filter(\n (message) =>\n message instanceof ResultMessage && toolCallsIds.includes(message.actionExecutionId),\n ) as ResultMessage[];\n\n if (toolCallsIds.length != resultMessages.length) {\n throw new Error(\"Number of function results does not match the number of tool calls\");\n }\n\n // submit the tool outputs\n const toolOutputs: RunSubmitToolOutputsStreamParams.ToolOutput[] = resultMessages.map(\n (message) => {\n return {\n tool_call_id: message.actionExecutionId,\n output: message.result,\n };\n },\n );\n\n const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {\n tool_outputs: toolOutputs,\n ...(this.disableParallelToolCalls && { parallel_tool_calls: false }),\n });\n\n await 
this.streamResponse(stream, eventSource);\n return runId;\n }\n\n private async submitUserMessage(\n threadId: string,\n messages: Message[],\n actions: ActionInput[],\n eventSource: RuntimeEventSource,\n forwardedParameters: ForwardedParametersInput,\n ) {\n messages = [...messages];\n\n // get the instruction message\n const instructionsMessage = messages.shift();\n const instructions =\n instructionsMessage instanceof TextMessage ? instructionsMessage.content : \"\";\n\n // get the latest user message\n const userMessage = messages\n .map(convertMessageToOpenAIMessage)\n .map(convertSystemMessageToAssistantAPI)\n .at(-1);\n\n if (userMessage.role !== \"user\") {\n throw new Error(\"No user message found\");\n }\n\n // create a new message on the thread\n await this.openai.beta.threads.messages.create(threadId, {\n role: \"user\",\n content: userMessage.content,\n });\n\n const openaiTools = actions.map(convertActionInputToOpenAITool);\n\n const tools = [\n ...openaiTools,\n ...(this.codeInterpreterEnabled ? [{ type: \"code_interpreter\" } as AssistantTool] : []),\n ...(this.fileSearchEnabled ? [{ type: \"file_search\" } as AssistantTool] : []),\n ];\n\n // run the thread\n let stream = this.openai.beta.threads.runs.stream(threadId, {\n assistant_id: this.assistantId,\n instructions,\n tools: tools,\n ...(forwardedParameters?.maxTokens && {\n max_completion_tokens: forwardedParameters.maxTokens,\n }),\n ...(this.disableParallelToolCalls && { parallel_tool_calls: false }),\n });\n\n await this.streamResponse(stream, eventSource);\n\n return getRunIdFromStream(stream);\n }\n\n private async streamResponse(stream: AssistantStream, eventSource: RuntimeEventSource) {\n eventSource.stream(async (eventStream$) => {\n let inFunctionCall = false;\n\n for await (const chunk of stream) {\n switch (chunk.event) {\n case \"thread.message.created\":\n if (inFunctionCall) {\n eventStream$.sendActionExecutionEnd();\n }\n eventStream$.sendTextMessageStart(chunk.data.id);\n break;\n case \"thread.message.delta\":\n if (chunk.data.delta.content?.[0].type === \"text\") {\n eventStream$.sendTextMessageContent(chunk.data.delta.content?.[0].text.value);\n }\n break;\n case \"thread.message.completed\":\n eventStream$.sendTextMessageEnd();\n break;\n case \"thread.run.step.delta\":\n let toolCallId: string | undefined;\n let toolCallName: string | undefined;\n let toolCallArgs: string | undefined;\n if (\n chunk.data.delta.step_details.type === \"tool_calls\" &&\n chunk.data.delta.step_details.tool_calls?.[0].type === \"function\"\n ) {\n toolCallId = chunk.data.delta.step_details.tool_calls?.[0].id;\n toolCallName = chunk.data.delta.step_details.tool_calls?.[0].function.name;\n toolCallArgs = chunk.data.delta.step_details.tool_calls?.[0].function.arguments;\n }\n\n if (toolCallName && toolCallId) {\n if (inFunctionCall) {\n eventStream$.sendActionExecutionEnd();\n }\n inFunctionCall = true;\n eventStream$.sendActionExecutionStart(toolCallId, toolCallName);\n } else if (toolCallArgs) {\n eventStream$.sendActionExecutionArgs(toolCallArgs);\n }\n break;\n }\n }\n if (inFunctionCall) {\n eventStream$.sendActionExecutionEnd();\n }\n eventStream$.complete();\n });\n }\n}\n\nfunction getRunIdFromStream(stream: AssistantStream): Promise<string> {\n return new Promise<string>((resolve, reject) => {\n let runIdGetter = (event: AssistantStreamEvent) => {\n if (event.event === \"thread.run.created\") {\n const runId = event.data.id;\n stream.off(\"event\", runIdGetter);\n resolve(runId);\n }\n };\n stream.on(\"event\", 
runIdGetter);\n });\n}\n","/**\n * CopilotKit Adapter for Unify\n *\n * <RequestExample>\n * ```jsx CopilotRuntime Example\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(req, new UnifyAdapter());\n * ```\n * </RequestExample>\n *\n * You can easily set the model to use by passing it to the constructor.\n * ```jsx\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(\n * req,\n * new UnifyAdapter({ model: \"llama-3-8b-chat@fireworks-ai\" }),\n * );\n * ```\n */\nimport { TextMessage } from \"../../graphql/types/converted\";\nimport {\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n CopilotServiceAdapter,\n} from \"../service-adapter\";\nimport OpenAI from \"openai\";\nimport { randomId } from \"@copilotkit/shared\";\nimport { convertActionInputToOpenAITool, convertMessageToOpenAIMessage } from \"../openai/utils\";\n\nexport interface UnifyAdapterParams {\n apiKey?: string;\n model: string;\n}\n\nexport class UnifyAdapter implements CopilotServiceAdapter {\n private apiKey: string;\n private model: string;\n private start: boolean;\n\n constructor(options?: UnifyAdapterParams) {\n if (options?.apiKey) {\n this.apiKey = options.apiKey;\n } else {\n this.apiKey = \"UNIFY_API_KEY\";\n }\n this.model = options?.model;\n this.start = true;\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const tools = request.actions.map(convertActionInputToOpenAITool);\n const openai = new OpenAI({\n apiKey: this.apiKey,\n baseURL: \"https://api.unify.ai/v0/\",\n });\n\n const messages = request.messages.map(convertMessageToOpenAIMessage);\n\n const stream = await openai.chat.completions.create({\n model: this.model,\n messages: messages,\n stream: true,\n ...(tools.length > 0 && { tools }),\n });\n\n let model = null;\n request.eventSource.stream(async (eventStream$) => {\n let mode: \"function\" | \"message\" | null = null;\n for await (const chunk of stream) {\n if (this.start) {\n model = chunk.model;\n eventStream$.sendTextMessageStart(randomId());\n eventStream$.sendTextMessageContent(`Model used: ${model}\\n`);\n eventStream$.sendTextMessageEnd();\n this.start = false;\n }\n const toolCall = chunk.choices[0].delta.tool_calls?.[0];\n const content = chunk.choices[0].delta.content;\n\n // When switching from message to function or vice versa,\n // send the respective end event.\n // If toolCall?.id is defined, it means a new tool call starts.\n if (mode === \"message\" && toolCall?.id) {\n mode = null;\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\" && (toolCall === undefined || toolCall?.id)) {\n mode = null;\n eventStream$.sendActionExecutionEnd();\n }\n\n // If we send a new message type, send the appropriate start event.\n if (mode === null) {\n if (toolCall?.id) {\n mode = \"function\";\n eventStream$.sendActionExecutionStart(toolCall!.id, toolCall!.function!.name);\n } else if (content) {\n mode = \"message\";\n eventStream$.sendTextMessageStart(chunk.id);\n }\n }\n\n // send the content events\n if (mode === \"message\" && content) {\n eventStream$.sendTextMessageContent(content);\n } else if (mode === \"function\" && toolCall?.function?.arguments) {\n eventStream$.sendActionExecutionArgs(toolCall.function.arguments);\n }\n }\n\n // send the end events\n if (mode === \"message\") {\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\") {\n eventStream$.sendActionExecutionEnd();\n }\n\n 
eventStream$.complete();\n });\n\n return {\n threadId: request.threadId || randomId(),\n };\n }\n}\n","/**\n * Copilot Runtime adapter for OpenAI.\n *\n * ## Example\n *\n * ```ts\n * import { CopilotRuntime, GroqAdapter } from \"@copilotkit/runtime\";\n * import { Groq } from \"groq-sdk\";\n *\n * const groq = new Groq({ apiKey: process.env[\"GROQ_API_KEY\"] });\n *\n * const copilotKit = new CopilotRuntime();\n *\n * const serviceAdapter = new GroqAdapter({ groq, model: \"<model-name>\" });\n *\n * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);\n * ```\n */\nimport { Groq } from \"groq-sdk\";\nimport {\n CopilotServiceAdapter,\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport {\n convertActionInputToOpenAITool,\n convertMessageToOpenAIMessage,\n limitMessagesToTokenCount,\n} from \"../openai/utils\";\nimport { randomId } from \"@copilotkit/shared\";\n\nconst DEFAULT_MODEL = \"llama3-groq-70b-8192-tool-use-preview\";\n\nexport interface GroqAdapterParams {\n /**\n * An optional Groq instance to use.\n */\n groq?: Groq;\n\n /**\n * The model to use.\n */\n model?: string;\n\n /**\n * Whether to disable parallel tool calls.\n * You can disable parallel tool calls to force the model to execute tool calls sequentially.\n * This is useful if you want to execute tool calls in a specific order so that the state changes\n * introduced by one tool call are visible to the next tool call. (i.e. new actions or readables)\n *\n * @default false\n */\n disableParallelToolCalls?: boolean;\n}\n\nexport class GroqAdapter implements CopilotServiceAdapter {\n private model: string = DEFAULT_MODEL;\n\n private disableParallelToolCalls: boolean = false;\n private _groq: Groq;\n public get groq(): Groq {\n return this._groq;\n }\n\n constructor(params?: GroqAdapterParams) {\n this._groq = params?.groq || new Groq({});\n if (params?.model) {\n this.model = params.model;\n }\n this.disableParallelToolCalls = params?.disableParallelToolCalls || false;\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const {\n threadId,\n model = this.model,\n messages,\n actions,\n eventSource,\n forwardedParameters,\n } = request;\n const tools = actions.map(convertActionInputToOpenAITool);\n\n let openaiMessages = messages.map(convertMessageToOpenAIMessage);\n openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);\n\n let toolChoice: any = forwardedParameters?.toolChoice;\n if (forwardedParameters?.toolChoice === \"function\") {\n toolChoice = {\n type: \"function\",\n function: { name: forwardedParameters.toolChoiceFunctionName },\n };\n }\n const stream = await this.groq.chat.completions.create({\n model: model,\n stream: true,\n messages: openaiMessages,\n ...(tools.length > 0 && { tools }),\n ...(forwardedParameters?.maxTokens && {\n max_tokens: forwardedParameters.maxTokens,\n }),\n ...(forwardedParameters?.stop && { stop: forwardedParameters.stop }),\n ...(toolChoice && { tool_choice: toolChoice }),\n ...(this.disableParallelToolCalls && { parallel_tool_calls: false }),\n });\n\n eventSource.stream(async (eventStream$) => {\n let mode: \"function\" | \"message\" | null = null;\n for await (const chunk of stream) {\n const toolCall = chunk.choices[0].delta.tool_calls?.[0];\n const content = chunk.choices[0].delta.content;\n\n // When switching from message to function or vice versa,\n // send the respective end event.\n // If 
toolCall?.id is defined, it means a new tool call starts.\n if (mode === \"message\" && toolCall?.id) {\n mode = null;\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\" && (toolCall === undefined || toolCall?.id)) {\n mode = null;\n eventStream$.sendActionExecutionEnd();\n }\n\n // If we send a new message type, send the appropriate start event.\n if (mode === null) {\n if (toolCall?.id) {\n mode = \"function\";\n eventStream$.sendActionExecutionStart(toolCall!.id, toolCall!.function!.name);\n } else if (content) {\n mode = \"message\";\n eventStream$.sendTextMessageStart(chunk.id);\n }\n }\n\n // send the content events\n if (mode === \"message\" && content) {\n eventStream$.sendTextMessageContent(content);\n } else if (mode === \"function\" && toolCall?.function?.arguments) {\n eventStream$.sendActionExecutionArgs(toolCall.function.arguments);\n }\n }\n\n // send the end events\n if (mode === \"message\") {\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\") {\n eventStream$.sendActionExecutionEnd();\n }\n\n eventStream$.complete();\n });\n\n return {\n threadId: threadId || randomId(),\n };\n }\n}\n","/**\n * Copilot Runtime adapter for Anthropic.\n *\n * ## Example\n *\n * ```ts\n * import { CopilotRuntime, AnthropicAdapter } from \"@copilotkit/runtime\";\n * import Anthropic from \"@anthropic-ai/sdk\";\n *\n * const copilotKit = new CopilotRuntime();\n *\n * const anthropic = new Anthropic({\n * apiKey: \"<your-api-key>\",\n * });\n *\n * const serviceAdapter = new AnthropicAdapter({ anthropic });\n *\n * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);\n * ```\n */\nimport Anthropic from \"@anthropic-ai/sdk\";\nimport {\n CopilotServiceAdapter,\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport {\n convertActionInputToAnthropicTool,\n convertMessageToAnthropicMessage,\n groupAnthropicMessagesByRole,\n limitMessagesToTokenCount,\n} from \"./utils\";\n\nimport { randomId } from \"@copilotkit/shared\";\nimport { TextMessage } from \"../../graphql/types/converted\";\n\nconst DEFAULT_MODEL = \"claude-3-opus-20240229\";\n\nexport interface AnthropicAdapterParams {\n /**\n * An optional Anthropic instance to use. If not provided, a new instance will be\n * created.\n */\n anthropic?: Anthropic;\n\n /**\n * The model to use.\n */\n model?: string;\n}\n\nexport class AnthropicAdapter implements CopilotServiceAdapter {\n private model: string = DEFAULT_MODEL;\n\n private _anthropic: Anthropic;\n public get anthropic(): Anthropic {\n return this._anthropic;\n }\n\n constructor(params?: AnthropicAdapterParams) {\n this._anthropic = params?.anthropic || new Anthropic({});\n if (params?.model) {\n this.model = params.model;\n }\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const {\n threadId,\n model = this.model,\n messages: rawMessages,\n actions,\n eventSource,\n forwardedParameters,\n } = request;\n const tools = actions.map(convertActionInputToAnthropicTool);\n\n const messages = [...rawMessages];\n\n // get the instruction message\n const instructionsMessage = messages.shift();\n const instructions =\n instructionsMessage instanceof TextMessage ? 
instructionsMessage.content : \"\";\n\n let anthropicMessages = messages.map(convertMessageToAnthropicMessage);\n anthropicMessages = limitMessagesToTokenCount(anthropicMessages, tools, model);\n anthropicMessages = groupAnthropicMessagesByRole(anthropicMessages);\n\n let toolChoice: any = forwardedParameters?.toolChoice;\n if (forwardedParameters?.toolChoice === \"function\") {\n toolChoice = {\n type: \"tool\",\n name: forwardedParameters.toolChoiceFunctionName,\n };\n }\n\n const stream = this.anthropic.messages.create({\n system: instructions,\n model: this.model,\n messages: anthropicMessages,\n max_tokens: forwardedParameters?.maxTokens || 1024,\n ...(tools.length > 0 && { tools }),\n ...(toolChoice && { tool_choice: toolChoice }),\n stream: true,\n });\n\n eventSource.stream(async (eventStream$) => {\n let mode: \"function\" | \"message\" | null = null;\n let didOutputText = false;\n let currentMessageId = randomId();\n let currentToolCallId = randomId();\n let filterThinkingTextBuffer = new FilterThinkingTextBuffer();\n\n for await (const chunk of await stream) {\n if (chunk.type === \"message_start\") {\n currentMessageId = chunk.message.id;\n } else if (chunk.type === \"content_block_start\") {\n if (chunk.content_block.type === \"text\") {\n didOutputText = false;\n filterThinkingTextBuffer.reset();\n mode = \"message\";\n } else if (chunk.content_block.type === \"tool_use\") {\n currentToolCallId = chunk.content_block.id;\n eventStream$.sendActionExecutionStart(currentToolCallId, chunk.content_block.name);\n mode = \"function\";\n }\n } else if (chunk.type === \"content_block_delta\") {\n if (chunk.delta.type === \"text_delta\") {\n const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);\n if (text.length > 0) {\n if (!didOutputText) {\n eventStream$.sendTextMessageStart(currentMessageId);\n didOutputText = true;\n }\n eventStream$.sendTextMessageContent(text);\n }\n } else if (chunk.delta.type === \"input_json_delta\") {\n eventStream$.sendActionExecutionArgs(chunk.delta.partial_json);\n }\n } else if (chunk.type === \"content_block_stop\") {\n if (mode === \"message\") {\n if (didOutputText) {\n eventStream$.sendTextMessageEnd();\n }\n } else if (mode === \"function\") {\n eventStream$.sendActionExecutionEnd();\n }\n }\n }\n\n eventStream$.complete();\n });\n\n return {\n threadId: threadId || randomId(),\n };\n }\n}\n\nconst THINKING_TAG = \"<thinking>\";\nconst THINKING_TAG_END = \"</thinking>\";\n\nclass FilterThinkingTextBuffer {\n private buffer: string;\n private didFilterThinkingTag: boolean = false;\n\n constructor() {\n this.buffer = \"\";\n }\n\n onTextChunk(text: string): string {\n this.buffer += text;\n if (this.didFilterThinkingTag) {\n return text;\n }\n const potentialTag = this.buffer.slice(0, THINKING_TAG.length);\n if (THINKING_TAG.startsWith(potentialTag)) {\n if (this.buffer.includes(THINKING_TAG_END)) {\n const end = this.buffer.indexOf(THINKING_TAG_END);\n const filteredText = this.buffer.slice(end + THINKING_TAG_END.length);\n this.buffer = filteredText;\n this.didFilterThinkingTag = true;\n return filteredText;\n } else {\n return \"\";\n }\n }\n return text;\n }\n\n reset() {\n this.buffer = \"\";\n this.didFilterThinkingTag = false;\n }\n}\n","import {\n ActionExecutionMessage,\n Message,\n ResultMessage,\n TextMessage,\n} from \"../../graphql/types/converted\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\nimport { Anthropic } from \"@anthropic-ai/sdk\";\n\nexport function limitMessagesToTokenCount(\n messages: 
any[],\n tools: any[],\n model: string,\n maxTokens?: number,\n): any[] {\n maxTokens ||= MAX_TOKENS;\n\n const result: any[] = [];\n const toolsNumTokens = countToolsTokens(model, tools);\n if (toolsNumTokens > maxTokens) {\n throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);\n }\n maxTokens -= toolsNumTokens;\n\n for (const message of messages) {\n if (message.role === \"system\") {\n const numTokens = countMessageTokens(model, message);\n maxTokens -= numTokens;\n\n if (maxTokens < 0) {\n throw new Error(\"Not enough tokens for system message.\");\n }\n }\n }\n\n let cutoff: boolean = false;\n\n const reversedMessages = [...messages].reverse();\n for (const message of reversedMessages) {\n if (message.role === \"system\") {\n result.unshift(message);\n continue;\n } else if (cutoff) {\n continue;\n }\n let numTokens = countMessageTokens(model, message);\n if (maxTokens < numTokens) {\n cutoff = true;\n continue;\n }\n result.unshift(message);\n maxTokens -= numTokens;\n }\n\n return result;\n}\n\nconst MAX_TOKENS = 128000;\n\nfunction countToolsTokens(model: string, tools: any[]): number {\n if (tools.length === 0) {\n return 0;\n }\n const json = JSON.stringify(tools);\n return countTokens(model, json);\n}\n\nfunction countMessageTokens(model: string, message: any): number {\n return countTokens(model, JSON.stringify(message.content) || \"\");\n}\n\nfunction countTokens(model: string, text: string): number {\n return text.length / 3;\n}\n\nexport function convertActionInputToAnthropicTool(action: ActionInput): Anthropic.Messages.Tool {\n return {\n name: action.name,\n description: action.description,\n input_schema: JSON.parse(action.jsonSchema),\n };\n}\n\nexport function convertMessageToAnthropicMessage(\n message: Message,\n): Anthropic.Messages.MessageParam {\n if (message instanceof TextMessage) {\n if (message.role === \"system\") {\n return {\n role: \"assistant\",\n content: [\n { type: \"text\", text: \"THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: \" + message.content },\n ],\n };\n } else {\n return {\n role: message.role === \"user\" ? 
\"user\" : \"assistant\",\n content: [{ type: \"text\", text: message.content }],\n };\n }\n } else if (message instanceof ActionExecutionMessage) {\n return {\n role: \"assistant\",\n content: [\n {\n id: message.id,\n type: \"tool_use\",\n input: message.arguments,\n name: message.name,\n },\n ],\n };\n } else if (message instanceof ResultMessage) {\n return {\n role: \"user\",\n content: [\n {\n type: \"tool_result\",\n content: message.result,\n tool_use_id: message.actionExecutionId,\n },\n ],\n };\n }\n}\n\nexport function groupAnthropicMessagesByRole(\n messageParams: Anthropic.Messages.MessageParam[],\n): Anthropic.Messages.MessageParam[] {\n return messageParams.reduce((acc, message) => {\n const lastGroup = acc[acc.length - 1];\n\n if (lastGroup && lastGroup.role === message.role) {\n lastGroup.content = lastGroup.content.concat(message.content as any);\n } else {\n acc.push({\n role: message.role,\n content: [...(message.content as any)],\n });\n }\n\n return acc;\n }, [] as Anthropic.Messages.MessageParam[]);\n}\n"],"mappings":";;;;;;;;;;;;;;;AACA,SAASA,sBAAsB;AAUxB,IAAMC,cAAN,MAAMA;EACXC;EACAC;EACAC;EACAC;EACAC;EAEAC,YAAYC,SAAgC;AAC1C,SAAKN,OAAOM,QAAQN;AACpB,SAAKC,cAAcK,QAAQL;AAC3B,SAAKC,WAAWI,QAAQJ;AACxB,SAAKC,aAAaG,QAAQH;AAC1B,SAAKC,gBAAgBE,QAAQF,iBAAiB;EAChD;EAEA,MAAMG,WAAiC;AACrC,QAAI,CAAC,KAAKJ,YAAY;AACpB,YAAM,KAAKK,yBAAwB;IACrC;AAEA,WAAO;MACLR,MAAM,KAAKA;MACXC,aAAa,KAAKA;MAClBE,YAAY,KAAKA;MACjBM,SAAS,OAAOC,SAAAA;AACd,cAAMC,WAAW,IAAIC,eAAe;UAAEC,KAAK,KAAKX;QAAS,CAAA;AACzD,YAAIY;AACJ,YAAI,KAAKV,kBAAkB,UAAU;AACnCU,kBAAQJ,KAAKK,OAAOC,KAAKN,IAAAA,EAAM,CAAA,CAAE;QACnC,OAAO;AACLI,kBAAQJ;QACV;AACA,eAAO,MAAMC,SAASM,OAAOH,KAAAA;MAC/B;IACF;EACF;EAEA,MAAMN,2BAA2B;AAC/B,UAAMU,iBAAiB;MAAC;MAAU;MAAU;;AAE5C,QAAIC,YAAY,KAAKjB,SAASkB,QAAQ,QAAQ,EAAA,IAAM;AACpD,QAAIC,SAAS,MAAMC,MAAMH,SAAAA,EACtBI,KAAK,CAACC,QAAQA,IAAIC,KAAI,CAAA,EACtBC,MAAM,MAAA;AACL,YAAM,IAAIC,MAAM,yCAAyCR,SAAAA;IAC3D,CAAA;AAGF,QAAID,eAAeU,SAASP,OAAOQ,IAAI,GAAG;AACxC,WAAKzB,gBAAgB;AACrB,WAAKD,aAAa;QAChB;UACEH,MAAM;UACN6B,MAAMR,OAAOQ;UACb5B,aAAa;QACf;;IAEJ,WAAWoB,OAAOQ,SAAS,UAAU;AACnC,WAAKzB,gBAAgB;AACrB,WAAKD,aAAaY,OAAOC,KAAKK,OAAOS,UAAU,EAAEC,IAAI,CAACC,QAAAA;AArE5D;AAsEQ,YAAIC,WAAWZ,OAAOS,WAAWE,GAAAA;AACjC,YAAI,CAACd,eAAeU,SAASK,SAASJ,IAAI,GAAG;AAC3C,gBAAM,IAAIF,MAAM,yBAAA;QAClB;AACA,eAAO;UACL3B,MAAMgC;UACNH,MAAMI,SAASJ;UACf5B,aAAagC,SAAShC,eAAe;UACrCiC,YAAUb,YAAOa,aAAPb,mBAAiBO,SAASI,SAAQ;QAC9C;MACF,CAAA;IACF,OAAO;AACL,YAAM,IAAIL,MAAM,yBAAA;IAClB;EACF;AACF;AA3Ea5B;;;ACUb,OAAOoC,YAAY;;;ACZZ,SAASC,0BACdC,UACAC,OACAC,OACAC,WAAkB;AAElBA,4BAAcC,wBAAwBF,KAAAA;AAEtC,QAAMG,SAAgB,CAAA;AACtB,QAAMC,iBAAiBC,iBAAiBL,OAAOD,KAAAA;AAC/C,MAAIK,iBAAiBH,WAAW;AAC9B,UAAM,IAAIK,MAAM,4CAA4CF,oBAAoBH,WAAW;EAC7F;AACAA,eAAaG;AAEb,aAAWG,WAAWT,UAAU;AAC9B,QAAIS,QAAQC,SAAS,UAAU;AAC7B,YAAMC,YAAYC,mBAAmBV,OAAOO,OAAAA;AAC5CN,mBAAaQ;AAEb,UAAIR,YAAY,GAAG;AACjB,cAAM,IAAIK,MAAM,uCAAA;MAClB;IACF;EACF;AAEA,MAAIK,SAAkB;AAEtB,QAAMC,mBAAmB;OAAId;IAAUe,QAAO;AAC9C,aAAWN,WAAWK,kBAAkB;AACtC,QAAIL,QAAQC,SAAS,UAAU;AAC7BL,aAAOW,QAAQP,OAAAA;AACf;IACF,WAAWI,QAAQ;AACjB;IACF;AACA,QAAIF,YAAYC,mBAAmBV,OAAOO,OAAAA;AAC1C,QAAIN,YAAYQ,WAAW;AACzBE,eAAS;AACT;IACF;AACAR,WAAOW,QAAQP,OAAAA;AACfN,iBAAaQ;EACf;AAEA,SAAON;AACT;AA9CgBN;AAgDT,SAASK,wBAAwBF,OAAa;AACnD,SAAOe,iBAAiBf,KAAAA,KAAUgB;AACpC;AAFgBd;AAIhB,IAAMc,qBAAqB;AAE3B,IAAMD,mBAA8C;;EAElD,UAAU;EACV,qBAAqB;EACrB,eAAe;EACf,0BAA0B;EAC1B,sBAAsB;EACtB,uBAAuB;EACvB,sBAAsB;EACtB,wBAAwB;EACxB,6BAA6B;EAC7B,aAAa;EACb,kBAAkB;EAClB,kBAAkB;EAClB,SAAS;EACT,cAAc;EACd,cAAc;;EAGd,sBAAsB;EACtB,iBAAiB;EACjB,sBAAsB;EACtB,0BAA0B;EAC1B,qBAAqB;EACrB,
sBAAsB;EACtB,0BAA0B;EAC1B,sBAAsB;AACxB;AAEA,SAASV,iBAAiBL,OAAeD,OAAY;AACnD,MAAIA,MAAMkB,WAAW,GAAG;AACtB,WAAO;EACT;AACA,QAAMC,OAAOC,KAAKC,UAAUrB,KAAAA;AAC5B,SAAOsB,YAAYrB,OAAOkB,IAAAA;AAC5B;AANSb;AAQT,SAASK,mBAAmBV,OAAeO,SAAY;AACrD,SAAOc,YAAYrB,OAAOO,QAAQe,WAAW,EAAA;AAC/C;AAFSZ;AAIT,SAASW,YAAYrB,OAAeuB,MAAY;AAC9C,SAAOA,KAAKN,SAAS;AACvB;AAFSI;AAIF,SAASG,+BAA+BC,QAAmB;AAChE,SAAO;IACLC,MAAM;IACNC,UAAU;MACRC,MAAMH,OAAOG;MACbC,aAAaJ,OAAOI;MACpBC,YAAYX,KAAKY,MAAMN,OAAOO,UAAU;IAC1C;EACF;AACF;AATgBR;AAWT,SAASS,8BAA8B1B,SAAgB;AAC5D,MAAIA,mBAAmB2B,aAAa;AAClC,WAAO;MACL1B,MAAMD,QAAQC;MACdc,SAASf,QAAQe;IACnB;EACF,WAAWf,mBAAmB4B,wBAAwB;AACpD,WAAO;MACL3B,MAAM;MACN4B,YAAY;QACV;UACEC,IAAI9B,QAAQ8B;UACZX,MAAM;UACNC,UAAU;YACRC,MAAMrB,QAAQqB;YACdU,WAAWnB,KAAKC,UAAUb,QAAQ+B,SAAS;UAC7C;QACF;;IAEJ;EACF,WAAW/B,mBAAmBgC,eAAe;AAC3C,WAAO;MACL/B,MAAM;MACNc,SAASf,QAAQJ;MACjBqC,cAAcjC,QAAQkC;IACxB;EACF;AACF;AA3BgBR;AA6BT,SAASS,mCAAmCnC,SAAmC;AACpF,SAAO;IACL,GAAGA;IACH,GAAIA,QAAQC,SAAS,YAAY;MAC/BA,MAAM;MACNc,SAAS,gDAAgDf,QAAQe;IACnE;EACF;AACF;AARgBoB;;;ADpHhB,SAASC,gBAAgB;AAEzB,IAAMC,gBAAgB;AAyBf,IAAMC,gBAAN,MAAMA;EACHC,QAAgBF;EAEhBG,2BAAoC;EACpCC;EACR,IAAWC,SAAiB;AAC1B,WAAO,KAAKD;EACd;EAEAE,YAAYC,QAA8B;AACxC,SAAKH,WAAUG,iCAAQF,WAAU,IAAIG,OAAO,CAAC,CAAA;AAC7C,QAAID,iCAAQL,OAAO;AACjB,WAAKA,QAAQK,OAAOL;IACtB;AACA,SAAKC,4BAA2BI,iCAAQJ,6BAA4B;EACtE;EAEA,MAAMM,QACJC,SAC+C;AAC/C,UAAM,EACJC,UACAT,QAAQ,KAAKA,OACbU,UACAC,SACAC,aACAC,oBAAmB,IACjBL;AACJ,UAAMM,QAAQH,QAAQI,IAAIC,8BAAAA;AAE1B,QAAIC,iBAAiBP,SAASK,IAAIG,6BAAAA;AAClCD,qBAAiBE,0BAA0BF,gBAAgBH,OAAOd,KAAAA;AAElE,QAAIoB,aAAkBP,2DAAqBO;AAC3C,SAAIP,2DAAqBO,gBAAe,YAAY;AAClDA,mBAAa;QACXC,MAAM;QACNC,UAAU;UAAEC,MAAMV,oBAAoBW;QAAuB;MAC/D;IACF;AAEA,UAAMC,SAAS,KAAKtB,OAAOuB,KAAKC,KAAKC,YAAYH,OAAO;MACtDzB;MACAyB,QAAQ;MACRf,UAAUO;MACV,GAAIH,MAAMe,SAAS,KAAK;QAAEf;MAAM;MAChC,IAAID,2DAAqBiB,cAAa;QAAEC,YAAYlB,oBAAoBiB;MAAU;MAClF,IAAIjB,2DAAqBmB,SAAQ;QAAEA,MAAMnB,oBAAoBmB;MAAK;MAClE,GAAIZ,cAAc;QAAEa,aAAab;MAAW;MAC5C,GAAI,KAAKnB,4BAA4B;QAAEiC,qBAAqB;MAAM;IACpE,CAAA;AAEAtB,gBAAYa,OAAO,OAAOU,iBAAAA;AA/G9B;AAgHM,UAAIC,OAAsC;AAC1C,uBAAiBC,SAASZ,QAAQ;AAChC,cAAMa,YAAWD,WAAME,QAAQ,CAAA,EAAGC,MAAMC,eAAvBJ,mBAAoC;AACrD,cAAMK,UAAUL,MAAME,QAAQ,CAAA,EAAGC,MAAME;AAKvC,YAAIN,SAAS,cAAaE,qCAAUK,KAAI;AACtCP,iBAAO;AACPD,uBAAaS,mBAAkB;QACjC,WAAWR,SAAS,eAAeE,aAAaO,WAAaP,qCAAUK,MAAK;AAC1EP,iBAAO;AACPD,uBAAaW,uBAAsB;QACrC;AAGA,YAAIV,SAAS,MAAM;AACjB,cAAIE,qCAAUK,IAAI;AAChBP,mBAAO;AACPD,yBAAaY,yBAAyBT,SAAUK,IAAIL,SAAUhB,SAAUC,IAAI;UAC9E,WAAWmB,SAAS;AAClBN,mBAAO;AACPD,yBAAaa,qBAAqBX,MAAMM,EAAE;UAC5C;QACF;AAGA,YAAIP,SAAS,aAAaM,SAAS;AACjCP,uBAAac,uBAAuBP,OAAAA;QACtC,WAAWN,SAAS,gBAAcE,0CAAUhB,aAAVgB,mBAAoBY,YAAW;AAC/Df,uBAAagB,wBAAwBb,SAAShB,SAAS4B,SAAS;QAClE;MACF;AAGA,UAAId,SAAS,WAAW;AACtBD,qBAAaS,mBAAkB;MACjC,WAAWR,SAAS,YAAY;AAC9BD,qBAAaW,uBAAsB;MACrC;AAEAX,mBAAaiB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACL3C,UAAUA,YAAY4C,SAAAA;IACxB;EACF;AACF;AAtGatD;;;AEbb,SAASuD,YAAAA,iBAAgB;AAiBlB,IAAMC,mBAAN,MAAMA;;;;;EAIXC,YAAoBC,SAAkC;SAAlCA,UAAAA;EAAmC;EAEvD,MAAMC,QACJC,SAC+C;AAC/C,UAAM,EAAEC,aAAaC,OAAOC,SAASC,UAAUC,UAAUC,MAAK,IAAKN;AACnE,UAAMO,SAAS,MAAM,KAAKT,QAAQU,QAAQ;MACxCJ,UAAUA,SAASK,IAAIC,gCAAAA;MACvBC,OAAOR,QAAQM,IAAIG,iCAAAA;MACnBV;MACAG;MACAC;IACF,CAAA;AAEAL,gBAAYY,OAAO,OAAOC,iBAAAA;AACxB,YAAMC,wBAAwB;QAC5BR;QACAO;MACF,CAAA;IACF,CAAA;AAEA,WAAO;MACLT,UAAUA,YAAYW,UAAAA;IACxB;EACF;AACF;AA7BapB;;;ACpCb,SAA0BqB,0BAA0B;;;AClB7C,SAASC,mCAAmCC,SAAgB;AACjE,MAAIA,mBAAmBC,aAAa;AAClC,UAAMC,OAAO;MACXC,MAAM;MACNC,WAAW;MACXC,QAAQ;IACV,EAAEL,QAAQE,IAAI;AAEd,UAAMI,OACJN,QAAQE,SAAS,WACb,gDAAgDF,QAAQO,UACxDP,QAAQO;AAEd,WAAO;MACLL;MACAM,OAAO;QAAC;U
AAEF;QAAK;;IACjB;EACF,WAAWN,mBAAmBS,wBAAwB;AACpD,WAAO;MACLP,MAAM;MACNM,OAAO;QACL;UACEE,cAAc;YACZC,MAAMX,QAAQW;YACdC,MAAMZ,QAAQa;UAChB;QACF;;IAEJ;EACF,WAAWb,mBAAmBc,eAAe;AAC3C,WAAO;MACLZ,MAAM;MACNM,OAAO;QACL;UACEO,kBAAkB;YAChBJ,MAAMX,QAAQgB;YACdC,UAAU;cACRN,MAAMX,QAAQgB;cACdT,SAASW,aAAalB,QAAQmB,MAAM;YACtC;UACF;QACF;;IAEJ;EACF;AACF;AA7CgBpB;AA+CT,SAASqB,iCAAiCC,QAAmB;AAClE,QAAMV,OAAOU,OAAOV;AACpB,QAAMW,cAAcD,OAAOC;AAC3B,QAAMC,aAAaC,KAAKC,MAAMJ,OAAOK,UAAU;AAE/C,QAAMC,sBAAsB,wBAACC,UAAAA;AAC3B,eAAWC,OAAOD,OAAO;AACvB,UAAIA,MAAMC,GAAAA,EAAKC,MAAM;AACnBF,cAAMC,GAAAA,EAAKC,OAAOF,MAAMC,GAAAA,EAAKC,KAAKC,YAAW;MAC/C;AACA,UAAIH,MAAMC,GAAAA,EAAKG,YAAY;AACzBL,4BAAoBC,MAAMC,GAAAA,EAAKG,UAAU;MAC3C;IACF;EACF,GAT4B;AAU5BL,sBAAoBJ,UAAAA;AAEpB,SAAO;IACLU,sBAAsB;MACpB;QACEtB;QACAW;QACAC;MACF;;EAEJ;AACF;AA1BgBH;AA4BhB,SAASF,aAAagB,KAAY;AAChC,MAAI,CAACA,KAAK;AACR,WAAO;EACT;AACA,MAAI;AACF,WAAOV,KAAKC,MAAMS,GAAAA;EACpB,SAASC,GAAP;AACA,WAAOD;EACT;AACF;AATShB;;;ADtDT,SAASkB,YAAAA,iBAAgB;AASlB,IAAMC,4BAAN,MAAMA;EACHC;EAERC,YAAYC,SAA4C;AACtD,QAAIA,mCAASF,OAAO;AAClB,WAAKA,QAAQE,QAAQF;IACvB,OAAO;AACL,YAAMG,QAAQ,IAAIC,mBAAmBC,QAAQC,IAAI,gBAAA,CAAiB;AAClE,WAAKN,QAAQG,MAAMI,mBAAmB;QAAEP,OAAO;MAAa,CAAA;IAC9D;EACF;EAEA,MAAMK,QACJG,SAC+C;AAC/C,UAAM,EAAEC,UAAUC,SAASC,YAAW,IAAKH;AAG3C,UAAMI,UAAUH,SAASI,MAAM,GAAG,EAAC,EAAGC,IAAIC,kCAAAA;AAG1C,UAAMC,iBAAiBD,mCAAmCN,SAASQ,GAAG,EAAC,CAAA;AACvE,QAAI,CAACD,gBAAgB;AACnB,YAAM,IAAIE,MAAM,oBAAA;IAClB;AAEA,QAAIC;AACJ,UAAMC,eAAeX,SAASQ,GAAG,CAAA;AACjC,QAAIG,wBAAwBC,eAAeD,aAAaE,SAAS,UAAU;AACzEH,sBAAgBC,aAAaG,QAAQC,KAAI;IAC3C,OAAO;AACL,YAAM,IAAIN,MAAM,uCAAA;IAClB;AAEA,UAAMO,QAAQf,QAAQI,IAAIY,gCAAAA;AAE1B,UAAMC,sBACJ,KAAK3B,MAAMA,UAAU,gBAAgB,KAAKA,MAAMA,UAAU;AAE5D,UAAM4B,OAAO,KAAK5B,MAAM6B,UAAU;MAChCjB,SAAS;WACJA;;WAECe,sBAAsB;UAAC;YAAEL,MAAM;YAAQQ,OAAO;cAAC;gBAAEC,MAAMZ;cAAc;;UAAG;YAAK,CAAA;;;MAGnF,GAAIQ,sBACA,CAAC,IACD;QAAEK,mBAAmB;UAAEV,MAAM;UAAQQ,OAAO;YAAC;cAAEC,MAAMZ;YAAc;;QAAG;MAAE;MAC5EM;IACF,CAAA;AAEA,UAAMQ,SAAS,MAAML,KAAKM,kBAAkBlB,eAAec,KAAK;AAEhEnB,gBAAYwB,OAAO,OAAOC,iBAAAA;AACxB,UAAIC,gBAAgB;AACpB,uBAAiBC,SAASL,OAAOE,QAAQ;AACvC,cAAMI,YAAYD,MAAMP,KAAI;AAC5B,YAAIQ,cAAc,IAAI;AACpB;QACF;AACA,YAAI,CAACF,eAAe;AAClBA,0BAAgB;AAChBD,uBAAaI,qBAAqBC,UAAAA,CAAAA;QACpC;AACAL,qBAAaM,uBAAuBH,SAAAA;MACtC;AACA,UAAIF,eAAe;AACjBD,qBAAaO,mBAAkB;MACjC;AAEA,UAAIC,SAAS,MAAMX,OAAOY,UAAUC,cAAa;AACjD,UAAIF,OAAO;AACT,iBAASG,QAAQH,OAAO;AACtBR,uBAAaY,oBACXP,UAAAA,GACAM,KAAKE,MACLC,KAAKC,UAAUC,wBAAwBL,KAAKM,IAAI,CAAA,CAAA;QAEpD;MACF;AACAjB,mBAAakB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACLC,UAAU/C,QAAQ+C,YAAYd,UAAAA;IAChC;EACF;AACF;AAxFa1C;AA0Fb,SAASqD,wBAAwBI,KAAQ;AACvC,MAAI,OAAOA,QAAQ,UAAU;AAC3B,WAAOA,IAAIC,QAAQ,UAAU,IAAA;EAC/B,WAAWC,MAAMC,QAAQH,GAAAA,GAAM;AAC7B,WAAOA,IAAI1C,IAAIsC,uBAAAA;EACjB,WAAW,OAAOI,QAAQ,YAAYA,QAAQ,MAAM;AAClD,UAAMI,SAAc,CAAC;AACrB,eAAWC,OAAOL,KAAK;AACrB,UAAIA,IAAIM,eAAeD,GAAAA,GAAM;AAC3BD,eAAOC,GAAAA,IAAOT,wBAAwBI,IAAIK,GAAAA,CAAI;MAChD;IACF;AACA,WAAOD;EACT;AACA,SAAOJ;AACT;AAfSJ;;;AEvGT,OAAOW,aAAY;AAqDZ,IAAMC,yBAAN,MAAMA;EACHC;EACAC;EACAC;EACAC;EACAC;EAERC,YAAYC,QAAsC;AAChD,SAAKN,SAASM,OAAON,UAAU,IAAIO,QAAO,CAAC,CAAA;AAC3C,SAAKN,yBAAyBK,OAAOL,2BAA2B,SAAS;AACzE,SAAKE,oBAAoBG,OAAOH,sBAAsB,SAAS;AAC/D,SAAKD,cAAcI,OAAOJ;AAC1B,SAAKE,4BAA2BE,iCAAQF,6BAA4B;EACtE;EAEA,MAAMI,QACJC,SAC+C;AAC/C,UAAM,EAAEC,UAAUC,SAASC,aAAaC,OAAOC,oBAAmB,IAAKL;AAEvE,QAAIM,WAAWN,QAAQM,aAAa,MAAM,KAAKf,OAAOgB,KAAKC,QAAQC,OAAM,GAAIC;AAE7E,UAAMC,cAAcV,SAASW,GAAG,EAAC;AAEjC,QAAIC,YAAgCC;AAGpC,QAAIH,uBAAuBI,iBAAiBX,OAAO;AACjDS,kBAAY,MAAM,KAAKG,kBAAkBV,UAAUF,OAAOH,UAAUE,WAAAA;IACtE,WAESQ,uBAAuBM,aAAa;AAC3CJ,kBAAY,MAAM,KAAKK,kBACrBZ,UAC
AL,UACAC,SACAC,aACAE,mBAAAA;IAEJ,OAEK;AACH,YAAM,IAAIc,MAAM,6CAAA;IAClB;AAEA,WAAO;MACLb;MACAF,OAAOS;IACT;EACF;EAEA,MAAcG,kBACZV,UACAF,OACAH,UACAE,aACA;AACA,QAAIiB,MAAM,MAAM,KAAK7B,OAAOgB,KAAKC,QAAQa,KAAKC,SAAShB,UAAUF,KAAAA;AACjE,QAAI,CAACgB,IAAIG,iBAAiB;AACxB,YAAM,IAAIJ,MAAM,0BAAA;IAClB;AAGA,UAAMK,eAAeJ,IAAIG,gBAAgBE,oBAAoBC,WAAWC,IACtE,CAACC,aAAaA,SAASlB,EAAE;AAI3B,UAAMmB,iBAAiB5B,SAAS6B,OAC9B,CAACC,YACCA,mBAAmBhB,iBAAiBS,aAAaQ,SAASD,QAAQE,iBAAiB,CAAA;AAGvF,QAAIT,aAAaU,UAAUL,eAAeK,QAAQ;AAChD,YAAM,IAAIf,MAAM,oEAAA;IAClB;AAGA,UAAMgB,cAA6DN,eAAeF,IAChF,CAACI,YAAAA;AACC,aAAO;QACLK,cAAcL,QAAQE;QACtBI,QAAQN,QAAQO;MAClB;IACF,CAAA;AAGF,UAAMC,SAAS,KAAKhD,OAAOgB,KAAKC,QAAQa,KAAKmB,wBAAwBlC,UAAUF,OAAO;MACpFqC,cAAcN;MACd,GAAI,KAAKxC,4BAA4B;QAAE+C,qBAAqB;MAAM;IACpE,CAAA;AAEA,UAAM,KAAKC,eAAeJ,QAAQpC,WAAAA;AAClC,WAAOC;EACT;EAEA,MAAcc,kBACZZ,UACAL,UACAC,SACAC,aACAE,qBACA;AACAJ,eAAW;SAAIA;;AAGf,UAAM2C,sBAAsB3C,SAAS4C,MAAK;AAC1C,UAAMC,eACJF,+BAA+B3B,cAAc2B,oBAAoBG,UAAU;AAG7E,UAAMC,cAAc/C,SACjB0B,IAAIsB,6BAAAA,EACJtB,IAAIuB,kCAAAA,EACJtC,GAAG,EAAC;AAEP,QAAIoC,YAAYG,SAAS,QAAQ;AAC/B,YAAM,IAAIhC,MAAM,uBAAA;IAClB;AAGA,UAAM,KAAK5B,OAAOgB,KAAKC,QAAQP,SAASQ,OAAOH,UAAU;MACvD6C,MAAM;MACNJ,SAASC,YAAYD;IACvB,CAAA;AAEA,UAAMK,cAAclD,QAAQyB,IAAI0B,8BAAAA;AAEhC,UAAMC,QAAQ;SACTF;SACC,KAAK5D,yBAAyB;QAAC;UAAE+D,MAAM;QAAmB;UAAsB,CAAA;SAChF,KAAK7D,oBAAoB;QAAC;UAAE6D,MAAM;QAAc;UAAsB,CAAA;;AAI5E,QAAIhB,SAAS,KAAKhD,OAAOgB,KAAKC,QAAQa,KAAKkB,OAAOjC,UAAU;MAC1DkD,cAAc,KAAK/D;MACnBqD;MACAQ;MACA,IAAIjD,2DAAqBoD,cAAa;QACpCC,uBAAuBrD,oBAAoBoD;MAC7C;MACA,GAAI,KAAK9D,4BAA4B;QAAE+C,qBAAqB;MAAM;IACpE,CAAA;AAEA,UAAM,KAAKC,eAAeJ,QAAQpC,WAAAA;AAElC,WAAOwD,mBAAmBpB,MAAAA;EAC5B;EAEA,MAAcI,eAAeJ,QAAyBpC,aAAiC;AACrFA,gBAAYoC,OAAO,OAAOqB,iBAAAA;AAtO9B;AAuOM,UAAIC,iBAAiB;AAErB,uBAAiBC,SAASvB,QAAQ;AAChC,gBAAQuB,MAAMC,OAAK;UACjB,KAAK;AACH,gBAAIF,gBAAgB;AAClBD,2BAAaI,uBAAsB;YACrC;AACAJ,yBAAaK,qBAAqBH,MAAMI,KAAKxD,EAAE;AAC/C;UACF,KAAK;AACH,kBAAIoD,WAAMI,KAAKC,MAAMpB,YAAjBe,mBAA2B,GAAGP,UAAS,QAAQ;AACjDK,2BAAaQ,wBAAuBN,WAAMI,KAAKC,MAAMpB,YAAjBe,mBAA2B,GAAGO,KAAKC,KAAAA;YACzE;AACA;UACF,KAAK;AACHV,yBAAaW,mBAAkB;AAC/B;UACF,KAAK;AACH,gBAAIC;AACJ,gBAAIC;AACJ,gBAAIC;AACJ,gBACEZ,MAAMI,KAAKC,MAAMQ,aAAapB,SAAS,kBACvCO,WAAMI,KAAKC,MAAMQ,aAAajD,eAA9BoC,mBAA2C,GAAGP,UAAS,YACvD;AACAiB,4BAAaV,WAAMI,KAAKC,MAAMQ,aAAajD,eAA9BoC,mBAA2C,GAAGpD;AAC3D+D,8BAAeX,WAAMI,KAAKC,MAAMQ,aAAajD,eAA9BoC,mBAA2C,GAAGc,SAASC;AACtEH,8BAAeZ,WAAMI,KAAKC,MAAMQ,aAAajD,eAA9BoC,mBAA2C,GAAGc,SAASE;YACxE;AAEA,gBAAIL,gBAAgBD,YAAY;AAC9B,kBAAIX,gBAAgB;AAClBD,6BAAaI,uBAAsB;cACrC;AACAH,+BAAiB;AACjBD,2BAAamB,yBAAyBP,YAAYC,YAAAA;YACpD,WAAWC,cAAc;AACvBd,2BAAaoB,wBAAwBN,YAAAA;YACvC;AACA;QACJ;MACF;AACA,UAAIb,gBAAgB;AAClBD,qBAAaI,uBAAsB;MACrC;AACAJ,mBAAaqB,SAAQ;IACvB,CAAA;EACF;AACF;AAzMa3F;AA2Mb,SAASqE,mBAAmBpB,QAAuB;AACjD,SAAO,IAAI2C,QAAgB,CAACC,SAASC,WAAAA;AACnC,QAAIC,cAAc,wBAACtB,UAAAA;AACjB,UAAIA,MAAMA,UAAU,sBAAsB;AACxC,cAAM3D,QAAQ2D,MAAMG,KAAKxD;AACzB6B,eAAO+C,IAAI,SAASD,WAAAA;AACpBF,gBAAQ/E,KAAAA;MACV;IACF,GANkB;AAOlBmC,WAAOgD,GAAG,SAASF,WAAAA;EACrB,CAAA;AACF;AAXS1B;;;ACjQT,OAAO6B,aAAY;AACnB,SAASC,YAAAA,iBAAgB;AAQlB,IAAMC,eAAN,MAAMA;EACHC;EACAC;EACAC;EAERC,YAAYC,SAA8B;AACxC,QAAIA,mCAASJ,QAAQ;AACnB,WAAKA,SAASI,QAAQJ;IACxB,OAAO;AACL,WAAKA,SAAS;IAChB;AACA,SAAKC,QAAQG,mCAASH;AACtB,SAAKC,QAAQ;EACf;EAEA,MAAMG,QACJC,SAC+C;AAC/C,UAAMC,QAAQD,QAAQE,QAAQC,IAAIC,8BAAAA;AAClC,UAAMC,SAAS,IAAIC,QAAO;MACxBZ,QAAQ,KAAKA;MACba,SAAS;IACX,CAAA;AAEA,UAAMC,WAAWR,QAAQQ,SAASL,IAAIM,6BAAAA;AAEtC,UAAMC,SAAS,MAAML,OAAOM,KAAKC,YAAYC,OAAO;MAClDlB,OAAO,KAAKA;MACZa;MACAE,QAAQ;MACR,GAAIT,MAAMa,SAAS,KAAK;QAAEb;MAAM;IAClC,CAAA;AAEA,QAAIN,
QAAQ;AACZK,YAAQe,YAAYL,OAAO,OAAOM,iBAAAA;AApEtC;AAqEM,UAAIC,OAAsC;AAC1C,uBAAiBC,SAASR,QAAQ;AAChC,YAAI,KAAKd,OAAO;AACdD,kBAAQuB,MAAMvB;AACdqB,uBAAaG,qBAAqBC,UAAAA,CAAAA;AAClCJ,uBAAaK,uBAAuB,eAAe1B;CAAS;AAC5DqB,uBAAaM,mBAAkB;AAC/B,eAAK1B,QAAQ;QACf;AACA,cAAM2B,YAAWL,WAAMM,QAAQ,CAAA,EAAGC,MAAMC,eAAvBR,mBAAoC;AACrD,cAAMS,UAAUT,MAAMM,QAAQ,CAAA,EAAGC,MAAME;AAKvC,YAAIV,SAAS,cAAaM,qCAAUK,KAAI;AACtCX,iBAAO;AACPD,uBAAaM,mBAAkB;QACjC,WAAWL,SAAS,eAAeM,aAAaM,WAAaN,qCAAUK,MAAK;AAC1EX,iBAAO;AACPD,uBAAac,uBAAsB;QACrC;AAGA,YAAIb,SAAS,MAAM;AACjB,cAAIM,qCAAUK,IAAI;AAChBX,mBAAO;AACPD,yBAAae,yBAAyBR,SAAUK,IAAIL,SAAUS,SAAUC,IAAI;UAC9E,WAAWN,SAAS;AAClBV,mBAAO;AACPD,yBAAaG,qBAAqBD,MAAMU,EAAE;UAC5C;QACF;AAGA,YAAIX,SAAS,aAAaU,SAAS;AACjCX,uBAAaK,uBAAuBM,OAAAA;QACtC,WAAWV,SAAS,gBAAcM,0CAAUS,aAAVT,mBAAoBW,YAAW;AAC/DlB,uBAAamB,wBAAwBZ,SAASS,SAASE,SAAS;QAClE;MACF;AAGA,UAAIjB,SAAS,WAAW;AACtBD,qBAAaM,mBAAkB;MACjC,WAAWL,SAAS,YAAY;AAC9BD,qBAAac,uBAAsB;MACrC;AAEAd,mBAAaoB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACLC,UAAUrC,QAAQqC,YAAYjB,UAAAA;IAChC;EACF;AACF;AA3Fa3B;;;AChBb,SAAS6C,YAAY;AAWrB,SAASC,YAAAA,iBAAgB;AAEzB,IAAMC,iBAAgB;AAwBf,IAAMC,cAAN,MAAMA;EACHC,QAAgBF;EAEhBG,2BAAoC;EACpCC;EACR,IAAWC,OAAa;AACtB,WAAO,KAAKD;EACd;EAEAE,YAAYC,QAA4B;AACtC,SAAKH,SAAQG,iCAAQF,SAAQ,IAAIG,KAAK,CAAC,CAAA;AACvC,QAAID,iCAAQL,OAAO;AACjB,WAAKA,QAAQK,OAAOL;IACtB;AACA,SAAKC,4BAA2BI,iCAAQJ,6BAA4B;EACtE;EAEA,MAAMM,QACJC,SAC+C;AAC/C,UAAM,EACJC,UACAT,QAAQ,KAAKA,OACbU,UACAC,SACAC,aACAC,oBAAmB,IACjBL;AACJ,UAAMM,QAAQH,QAAQI,IAAIC,8BAAAA;AAE1B,QAAIC,iBAAiBP,SAASK,IAAIG,6BAAAA;AAClCD,qBAAiBE,0BAA0BF,gBAAgBH,OAAOd,KAAAA;AAElE,QAAIoB,aAAkBP,2DAAqBO;AAC3C,SAAIP,2DAAqBO,gBAAe,YAAY;AAClDA,mBAAa;QACXC,MAAM;QACNC,UAAU;UAAEC,MAAMV,oBAAoBW;QAAuB;MAC/D;IACF;AACA,UAAMC,SAAS,MAAM,KAAKtB,KAAKuB,KAAKC,YAAYC,OAAO;MACrD5B;MACAyB,QAAQ;MACRf,UAAUO;MACV,GAAIH,MAAMe,SAAS,KAAK;QAAEf;MAAM;MAChC,IAAID,2DAAqBiB,cAAa;QACpCC,YAAYlB,oBAAoBiB;MAClC;MACA,IAAIjB,2DAAqBmB,SAAQ;QAAEA,MAAMnB,oBAAoBmB;MAAK;MAClE,GAAIZ,cAAc;QAAEa,aAAab;MAAW;MAC5C,GAAI,KAAKnB,4BAA4B;QAAEiC,qBAAqB;MAAM;IACpE,CAAA;AAEAtB,gBAAYa,OAAO,OAAOU,iBAAAA;AA5G9B;AA6GM,UAAIC,OAAsC;AAC1C,uBAAiBC,SAASZ,QAAQ;AAChC,cAAMa,YAAWD,WAAME,QAAQ,CAAA,EAAGC,MAAMC,eAAvBJ,mBAAoC;AACrD,cAAMK,UAAUL,MAAME,QAAQ,CAAA,EAAGC,MAAME;AAKvC,YAAIN,SAAS,cAAaE,qCAAUK,KAAI;AACtCP,iBAAO;AACPD,uBAAaS,mBAAkB;QACjC,WAAWR,SAAS,eAAeE,aAAaO,WAAaP,qCAAUK,MAAK;AAC1EP,iBAAO;AACPD,uBAAaW,uBAAsB;QACrC;AAGA,YAAIV,SAAS,MAAM;AACjB,cAAIE,qCAAUK,IAAI;AAChBP,mBAAO;AACPD,yBAAaY,yBAAyBT,SAAUK,IAAIL,SAAUhB,SAAUC,IAAI;UAC9E,WAAWmB,SAAS;AAClBN,mBAAO;AACPD,yBAAaa,qBAAqBX,MAAMM,EAAE;UAC5C;QACF;AAGA,YAAIP,SAAS,aAAaM,SAAS;AACjCP,uBAAac,uBAAuBP,OAAAA;QACtC,WAAWN,SAAS,gBAAcE,0CAAUhB,aAAVgB,mBAAoBY,YAAW;AAC/Df,uBAAagB,wBAAwBb,SAAShB,SAAS4B,SAAS;QAClE;MACF;AAGA,UAAId,SAAS,WAAW;AACtBD,qBAAaS,mBAAkB;MACjC,WAAWR,SAAS,YAAY;AAC9BD,qBAAaW,uBAAsB;MACrC;AAEAX,mBAAaiB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACL3C,UAAUA,YAAY4C,UAAAA;IACxB;EACF;AACF;AAvGatD;;;ACnCb,OAAOuD,eAAe;;;ACXf,SAASC,2BACdC,UACAC,OACAC,OACAC,WAAkB;AAElBA,4BAAcC;AAEd,QAAMC,SAAgB,CAAA;AACtB,QAAMC,iBAAiBC,kBAAiBL,OAAOD,KAAAA;AAC/C,MAAIK,iBAAiBH,WAAW;AAC9B,UAAM,IAAIK,MAAM,4CAA4CF,oBAAoBH,WAAW;EAC7F;AACAA,eAAaG;AAEb,aAAWG,WAAWT,UAAU;AAC9B,QAAIS,QAAQC,SAAS,UAAU;AAC7B,YAAMC,YAAYC,oBAAmBV,OAAOO,OAAAA;AAC5CN,mBAAaQ;AAEb,UAAIR,YAAY,GAAG;AACjB,cAAM,IAAIK,MAAM,uCAAA;MAClB;IACF;EACF;AAEA,MAAIK,SAAkB;AAEtB,QAAMC,mBAAmB;OAAId;IAAUe,QAAO;AAC9C,aAAWN,WAAWK,kBAAkB;AACtC,QAAIL,QAAQC,SAAS,UAAU;AAC7BL,aAAOW,QAAQP,OAAAA;AACf;IACF,WAAWI,QAAQ;AACjB;IACF;AACA,QAAIF,YAAYC,oBAAmBV,OAAOO,OAAAA;AAC1C,QAAIN,YAAYQ,WAAW;AACzBE,eAAS;AACT;IACF;AACAR,WAAOW,QAAQP,OAAAA;AACfN,iBAA
aQ;EACf;AAEA,SAAON;AACT;AA9CgBN,OAAAA,4BAAAA;AAgDhB,IAAMK,aAAa;AAEnB,SAASG,kBAAiBL,OAAeD,OAAY;AACnD,MAAIA,MAAMgB,WAAW,GAAG;AACtB,WAAO;EACT;AACA,QAAMC,OAAOC,KAAKC,UAAUnB,KAAAA;AAC5B,SAAOoB,aAAYnB,OAAOgB,IAAAA;AAC5B;AANSX,OAAAA,mBAAAA;AAQT,SAASK,oBAAmBV,OAAeO,SAAY;AACrD,SAAOY,aAAYnB,OAAOiB,KAAKC,UAAUX,QAAQa,OAAO,KAAK,EAAA;AAC/D;AAFSV,OAAAA,qBAAAA;AAIT,SAASS,aAAYnB,OAAeqB,MAAY;AAC9C,SAAOA,KAAKN,SAAS;AACvB;AAFSI,OAAAA,cAAAA;AAIF,SAASG,kCAAkCC,QAAmB;AACnE,SAAO;IACLC,MAAMD,OAAOC;IACbC,aAAaF,OAAOE;IACpBC,cAAcT,KAAKU,MAAMJ,OAAOK,UAAU;EAC5C;AACF;AANgBN;AAQT,SAASO,iCACdtB,SAAgB;AAEhB,MAAIA,mBAAmBuB,aAAa;AAClC,QAAIvB,QAAQC,SAAS,UAAU;AAC7B,aAAO;QACLA,MAAM;QACNY,SAAS;UACP;YAAEW,MAAM;YAAQV,MAAM,gDAAgDd,QAAQa;UAAQ;;MAE1F;IACF,OAAO;AACL,aAAO;QACLZ,MAAMD,QAAQC,SAAS,SAAS,SAAS;QACzCY,SAAS;UAAC;YAAEW,MAAM;YAAQV,MAAMd,QAAQa;UAAQ;;MAClD;IACF;EACF,WAAWb,mBAAmByB,wBAAwB;AACpD,WAAO;MACLxB,MAAM;MACNY,SAAS;QACP;UACEa,IAAI1B,QAAQ0B;UACZF,MAAM;UACNG,OAAO3B,QAAQ4B;UACfX,MAAMjB,QAAQiB;QAChB;;IAEJ;EACF,WAAWjB,mBAAmB6B,eAAe;AAC3C,WAAO;MACL5B,MAAM;MACNY,SAAS;QACP;UACEW,MAAM;UACNX,SAASb,QAAQJ;UACjBkC,aAAa9B,QAAQ+B;QACvB;;IAEJ;EACF;AACF;AAzCgBT;AA2CT,SAASU,6BACdC,eAAgD;AAEhD,SAAOA,cAAcC,OAAO,CAACC,KAAKnC,YAAAA;AAChC,UAAMoC,YAAYD,IAAIA,IAAI3B,SAAS,CAAA;AAEnC,QAAI4B,aAAaA,UAAUnC,SAASD,QAAQC,MAAM;AAChDmC,gBAAUvB,UAAUuB,UAAUvB,QAAQwB,OAAOrC,QAAQa,OAAO;IAC9D,OAAO;AACLsB,UAAIG,KAAK;QACPrC,MAAMD,QAAQC;QACdY,SAAS;aAAKb,QAAQa;;MACxB,CAAA;IACF;AAEA,WAAOsB;EACT,GAAG,CAAA,CAAE;AACP;AAjBgBH;;;AD7FhB,SAASO,YAAAA,iBAAgB;AAGzB,IAAMC,iBAAgB;AAef,IAAMC,mBAAN,MAAMA;EACHC,QAAgBF;EAEhBG;EACR,IAAWC,YAAuB;AAChC,WAAO,KAAKD;EACd;EAEAE,YAAYC,QAAiC;AAC3C,SAAKH,cAAaG,iCAAQF,cAAa,IAAIG,UAAU,CAAC,CAAA;AACtD,QAAID,iCAAQJ,OAAO;AACjB,WAAKA,QAAQI,OAAOJ;IACtB;EACF;EAEA,MAAMM,QACJC,SAC+C;AAC/C,UAAM,EACJC,UACAR,QAAQ,KAAKA,OACbS,UAAUC,aACVC,SACAC,aACAC,oBAAmB,IACjBN;AACJ,UAAMO,QAAQH,QAAQI,IAAIC,iCAAAA;AAE1B,UAAMP,WAAW;SAAIC;;AAGrB,UAAMO,sBAAsBR,SAASS,MAAK;AAC1C,UAAMC,eACJF,+BAA+BG,cAAcH,oBAAoBI,UAAU;AAE7E,QAAIC,oBAAoBb,SAASM,IAAIQ,gCAAAA;AACrCD,wBAAoBE,2BAA0BF,mBAAmBR,OAAOd,KAAAA;AACxEsB,wBAAoBG,6BAA6BH,iBAAAA;AAEjD,QAAII,aAAkBb,2DAAqBa;AAC3C,SAAIb,2DAAqBa,gBAAe,YAAY;AAClDA,mBAAa;QACXC,MAAM;QACNC,MAAMf,oBAAoBgB;MAC5B;IACF;AAEA,UAAMC,SAAS,KAAK5B,UAAUO,SAASsB,OAAO;MAC5CC,QAAQb;MACRnB,OAAO,KAAKA;MACZS,UAAUa;MACVW,aAAYpB,2DAAqBqB,cAAa;MAC9C,GAAIpB,MAAMqB,SAAS,KAAK;QAAErB;MAAM;MAChC,GAAIY,cAAc;QAAEU,aAAaV;MAAW;MAC5CI,QAAQ;IACV,CAAA;AAEAlB,gBAAYkB,OAAO,OAAOO,iBAAAA;AACxB,UAAIC,OAAsC;AAC1C,UAAIC,gBAAgB;AACpB,UAAIC,mBAAmBC,UAAAA;AACvB,UAAIC,oBAAoBD,UAAAA;AACxB,UAAIE,2BAA2B,IAAIC,yBAAAA;AAEnC,uBAAiBC,SAAS,MAAMf,QAAQ;AACtC,YAAIe,MAAMlB,SAAS,iBAAiB;AAClCa,6BAAmBK,MAAMC,QAAQC;QACnC,WAAWF,MAAMlB,SAAS,uBAAuB;AAC/C,cAAIkB,MAAMG,cAAcrB,SAAS,QAAQ;AACvCY,4BAAgB;AAChBI,qCAAyBM,MAAK;AAC9BX,mBAAO;UACT,WAAWO,MAAMG,cAAcrB,SAAS,YAAY;AAClDe,gCAAoBG,MAAMG,cAAcD;AACxCV,yBAAaa,yBAAyBR,mBAAmBG,MAAMG,cAAcpB,IAAI;AACjFU,mBAAO;UACT;QACF,WAAWO,MAAMlB,SAAS,uBAAuB;AAC/C,cAAIkB,MAAMM,MAAMxB,SAAS,cAAc;AACrC,kBAAMyB,OAAOT,yBAAyBU,YAAYR,MAAMM,MAAMC,IAAI;AAClE,gBAAIA,KAAKjB,SAAS,GAAG;AACnB,kBAAI,CAACI,eAAe;AAClBF,6BAAaiB,qBAAqBd,gBAAAA;AAClCD,gCAAgB;cAClB;AACAF,2BAAakB,uBAAuBH,IAAAA;YACtC;UACF,WAAWP,MAAMM,MAAMxB,SAAS,oBAAoB;AAClDU,yBAAamB,wBAAwBX,MAAMM,MAAMM,YAAY;UAC/D;QACF,WAAWZ,MAAMlB,SAAS,sBAAsB;AAC9C,cAAIW,SAAS,WAAW;AACtB,gBAAIC,eAAe;AACjBF,2BAAaqB,mBAAkB;YACjC;UACF,WAAWpB,SAAS,YAAY;AAC9BD,yBAAasB,uBAAsB;UACrC;QACF;MACF;AAEAtB,mBAAauB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACLpD,UAAUA,YAAYiC,UAAAA;IACxB;EACF;AACF;AA5Ga1C;AA8Gb,IAAM8D,eAAe;AACrB,IAAMC,mBAAmB;AAEzB,IAAMlB,2BAAN,6BAAMA,0BAAAA;EACImB;EACAC,uBAAgC;
EAExC7D,cAAc;AACZ,SAAK4D,SAAS;EAChB;EAEAV,YAAYD,MAAsB;AAChC,SAAKW,UAAUX;AACf,QAAI,KAAKY,sBAAsB;AAC7B,aAAOZ;IACT;AACA,UAAMa,eAAe,KAAKF,OAAOG,MAAM,GAAGL,aAAa1B,MAAM;AAC7D,QAAI0B,aAAaM,WAAWF,YAAAA,GAAe;AACzC,UAAI,KAAKF,OAAOK,SAASN,gBAAAA,GAAmB;AAC1C,cAAMO,MAAM,KAAKN,OAAOO,QAAQR,gBAAAA;AAChC,cAAMS,eAAe,KAAKR,OAAOG,MAAMG,MAAMP,iBAAiB3B,MAAM;AACpE,aAAK4B,SAASQ;AACd,aAAKP,uBAAuB;AAC5B,eAAOO;MACT,OAAO;AACL,eAAO;MACT;IACF;AACA,WAAOnB;EACT;EAEAH,QAAQ;AACN,SAAKc,SAAS;AACd,SAAKC,uBAAuB;EAC9B;AACF,GAhCA;","names":["RemoteRunnable","RemoteChain","name","description","chainUrl","parameters","parameterType","constructor","options","toAction","inferLangServeParameters","handler","args","runnable","RemoteRunnable","url","input","Object","keys","invoke","supportedTypes","schemaUrl","replace","schema","fetch","then","res","json","catch","Error","includes","type","properties","map","key","property","required","OpenAI","limitMessagesToTokenCount","messages","tools","model","maxTokens","maxTokensForOpenAIModel","result","toolsNumTokens","countToolsTokens","Error","message","role","numTokens","countMessageTokens","cutoff","reversedMessages","reverse","unshift","maxTokensByModel","DEFAULT_MAX_TOKENS","length","json","JSON","stringify","countTokens","content","text","convertActionInputToOpenAITool","action","type","function","name","description","parameters","parse","jsonSchema","convertMessageToOpenAIMessage","TextMessage","ActionExecutionMessage","tool_calls","id","arguments","ResultMessage","tool_call_id","actionExecutionId","convertSystemMessageToAssistantAPI","randomId","DEFAULT_MODEL","OpenAIAdapter","model","disableParallelToolCalls","_openai","openai","constructor","params","OpenAI","process","request","threadId","messages","actions","eventSource","forwardedParameters","tools","map","convertActionInputToOpenAITool","openaiMessages","convertMessageToOpenAIMessage","limitMessagesToTokenCount","toolChoice","type","function","name","toolChoiceFunctionName","stream","beta","chat","completions","length","maxTokens","max_tokens","stop","tool_choice","parallel_tool_calls","eventStream$","mode","chunk","toolCall","choices","delta","tool_calls","content","id","sendTextMessageEnd","undefined","sendActionExecutionEnd","sendActionExecutionStart","sendTextMessageStart","sendTextMessageContent","arguments","sendActionExecutionArgs","complete","randomId","randomId","LangChainAdapter","constructor","options","process","request","eventSource","model","actions","messages","threadId","runId","result","chainFn","map","convertMessageToLangChainMessage","tools","convertActionInputToLangChainTool","stream","eventStream$","streamLangChainResponse","randomId","GoogleGenerativeAI","convertMessageToGoogleGenAIMessage","message","TextMessage","role","user","assistant","system","text","content","parts","ActionExecutionMessage","functionCall","name","args","arguments","ResultMessage","functionResponse","actionName","response","tryParseJson","result","transformActionToGoogleGenAITool","action","description","parameters","JSON","parse","jsonSchema","transformProperties","props","key","type","toUpperCase","properties","functionDeclarations","str","e","randomId","GoogleGenerativeAIAdapter","model","constructor","options","genAI","GoogleGenerativeAI","process","env","getGenerativeModel","request","messages","actions","eventSource","history","slice","map","convertMessageToGoogleGenAIMessage","currentMessage","at","Error","systemMessage","firstMessage","TextMessage","role","content","trim","tools","transformActionToGoogleGenAITool","isFirstGenGe
miniPro","chat","startChat","parts","text","systemInstruction","result","sendMessageStream","stream","eventStream$","isTextMessage","chunk","chunkText","sendTextMessageStart","randomId","sendTextMessageContent","sendTextMessageEnd","calls","response","functionCalls","call","sendActionExecution","name","JSON","stringify","replaceNewlinesInObject","args","complete","threadId","obj","replace","Array","isArray","newObj","key","hasOwnProperty","OpenAI","OpenAIAssistantAdapter","openai","codeInterpreterEnabled","assistantId","fileSearchEnabled","disableParallelToolCalls","constructor","params","OpenAI","process","request","messages","actions","eventSource","runId","forwardedParameters","threadId","beta","threads","create","id","lastMessage","at","nextRunId","undefined","ResultMessage","submitToolOutputs","TextMessage","submitUserMessage","Error","run","runs","retrieve","required_action","toolCallsIds","submit_tool_outputs","tool_calls","map","toolCall","resultMessages","filter","message","includes","actionExecutionId","length","toolOutputs","tool_call_id","output","result","stream","submitToolOutputsStream","tool_outputs","parallel_tool_calls","streamResponse","instructionsMessage","shift","instructions","content","userMessage","convertMessageToOpenAIMessage","convertSystemMessageToAssistantAPI","role","openaiTools","convertActionInputToOpenAITool","tools","type","assistant_id","maxTokens","max_completion_tokens","getRunIdFromStream","eventStream$","inFunctionCall","chunk","event","sendActionExecutionEnd","sendTextMessageStart","data","delta","sendTextMessageContent","text","value","sendTextMessageEnd","toolCallId","toolCallName","toolCallArgs","step_details","function","name","arguments","sendActionExecutionStart","sendActionExecutionArgs","complete","Promise","resolve","reject","runIdGetter","off","on","OpenAI","randomId","UnifyAdapter","apiKey","model","start","constructor","options","process","request","tools","actions","map","convertActionInputToOpenAITool","openai","OpenAI","baseURL","messages","convertMessageToOpenAIMessage","stream","chat","completions","create","length","eventSource","eventStream$","mode","chunk","sendTextMessageStart","randomId","sendTextMessageContent","sendTextMessageEnd","toolCall","choices","delta","tool_calls","content","id","undefined","sendActionExecutionEnd","sendActionExecutionStart","function","name","arguments","sendActionExecutionArgs","complete","threadId","Groq","randomId","DEFAULT_MODEL","GroqAdapter","model","disableParallelToolCalls","_groq","groq","constructor","params","Groq","process","request","threadId","messages","actions","eventSource","forwardedParameters","tools","map","convertActionInputToOpenAITool","openaiMessages","convertMessageToOpenAIMessage","limitMessagesToTokenCount","toolChoice","type","function","name","toolChoiceFunctionName","stream","chat","completions","create","length","maxTokens","max_tokens","stop","tool_choice","parallel_tool_calls","eventStream$","mode","chunk","toolCall","choices","delta","tool_calls","content","id","sendTextMessageEnd","undefined","sendActionExecutionEnd","sendActionExecutionStart","sendTextMessageStart","sendTextMessageContent","arguments","sendActionExecutionArgs","complete","randomId","Anthropic","limitMessagesToTokenCount","messages","tools","model","maxTokens","MAX_TOKENS","result","toolsNumTokens","countToolsTokens","Error","message","role","numTokens","countMessageTokens","cutoff","reversedMessages","reverse","unshift","length","json","JSON","stringify","countTokens","content","text","convertAction
InputToAnthropicTool","action","name","description","input_schema","parse","jsonSchema","convertMessageToAnthropicMessage","TextMessage","type","ActionExecutionMessage","id","input","arguments","ResultMessage","tool_use_id","actionExecutionId","groupAnthropicMessagesByRole","messageParams","reduce","acc","lastGroup","concat","push","randomId","DEFAULT_MODEL","AnthropicAdapter","model","_anthropic","anthropic","constructor","params","Anthropic","process","request","threadId","messages","rawMessages","actions","eventSource","forwardedParameters","tools","map","convertActionInputToAnthropicTool","instructionsMessage","shift","instructions","TextMessage","content","anthropicMessages","convertMessageToAnthropicMessage","limitMessagesToTokenCount","groupAnthropicMessagesByRole","toolChoice","type","name","toolChoiceFunctionName","stream","create","system","max_tokens","maxTokens","length","tool_choice","eventStream$","mode","didOutputText","currentMessageId","randomId","currentToolCallId","filterThinkingTextBuffer","FilterThinkingTextBuffer","chunk","message","id","content_block","reset","sendActionExecutionStart","delta","text","onTextChunk","sendTextMessageStart","sendTextMessageContent","sendActionExecutionArgs","partial_json","sendTextMessageEnd","sendActionExecutionEnd","complete","THINKING_TAG","THINKING_TAG_END","buffer","didFilterThinkingTag","potentialTag","slice","startsWith","includes","end","indexOf","filteredText"]}
@@ -1,7 +1,7 @@
 import {
   copilotRuntimeNodeHttpEndpoint,
   telemetry_client_default
-} from "./chunk-
+} from "./chunk-IRRAL44O.mjs";
 import {
   __name
 } from "./chunk-44O2JGUY.mjs";
@@ -21,4 +21,4 @@ __name(copilotRuntimeNestEndpoint, "copilotRuntimeNestEndpoint");
 export {
   copilotRuntimeNestEndpoint
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-VWS65V7Y.mjs.map
@@ -1,7 +1,7 @@
 import {
   getCommonConfig,
   telemetry_client_default
-} from "./chunk-
+} from "./chunk-IRRAL44O.mjs";
 import {
   __name
 } from "./chunk-44O2JGUY.mjs";
@@ -76,4 +76,4 @@ export {
   config,
   copilotRuntimeNextJSPagesRouterEndpoint
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-XCGRXAJU.mjs.map
@@ -1,5 +1,5 @@
 import { Parameter, Action } from '@copilotkit/shared';
-import { C as CopilotServiceAdapter, R as RemoteChainParameters, A as ActionInput, F as ForwardedParametersInput,
+import { C as CopilotServiceAdapter, R as RemoteChainParameters, A as ActionInput, F as ForwardedParametersInput, b as RuntimeEventSource } from './langserve-15a1286b.js';
 import { M as MessageInput, a as Message } from './index-0476e4f7.js';
 import * as graphql from 'graphql';
 import * as pino from 'pino';
@@ -1,9 +1,9 @@
-import { C as CopilotServiceAdapter, b as CopilotRuntimeChatCompletionRequest, c as CopilotRuntimeChatCompletionResponse } from './langserve-d6073a3b.js';
 import OpenAI from 'openai';
-import {
+import { C as CopilotServiceAdapter, c as CopilotRuntimeChatCompletionRequest, d as CopilotRuntimeChatCompletionResponse } from './langserve-15a1286b.js';
 import { BaseMessageChunk, AIMessage, AIMessageChunk, BaseMessage } from '@langchain/core/messages';
 import { DynamicStructuredTool } from '@langchain/core/tools';
 import { IterableReadableStream, IterableReadableStreamInterface } from '@langchain/core/utils/stream';
+import { GenerativeModel } from '@google/generative-ai';
 import { Groq } from 'groq-sdk';

 /**
@@ -57,73 +57,63 @@ declare class OpenAIAdapter implements CopilotServiceAdapter {
     process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
 }

+type LangChainBaseMessageChunkStream = IterableReadableStream<BaseMessageChunk>;
+type LangChainAIMessageChunkStream = IterableReadableStreamInterface<AIMessageChunk>;
+type LangChainReturnType = LangChainBaseMessageChunkStream | LangChainAIMessageChunkStream | BaseMessageChunk | string | AIMessage;
+
 /**
- * Copilot Runtime adapter for
+ * Copilot Runtime adapter for LangChain.
  *
  * ## Example
  *
  * ```ts
- * import { CopilotRuntime,
- * import
+ * import { CopilotRuntime, LangChainAdapter } from "@copilotkit/runtime";
+ * import { ChatOpenAI } from "@langchain/openai";
  *
  * const copilotKit = new CopilotRuntime();
  *
- * const
- *
+ * const model = new ChatOpenAI({
+ *   model: "gpt-4o",
  *   apiKey: "<your-api-key>",
  * });
  *
- * const serviceAdapter = new
- *
- *
- *
- *   fileSearchEnabled: true,
+ * const serviceAdapter = new LangChainAdapter({
+ *   chainFn: async ({ messages, tools }) => {
+ *     return model.stream(messages, { tools });
+ *   }
  * });
  *
  * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);
  * ```
+ *
+ * The asynchronous handler function (`chainFn`) can return any of the following:
+ *
+ * - A simple `string` response
+ * - A LangChain stream (`IterableReadableStream`)
+ * - A LangChain `BaseMessageChunk` object
+ * - A LangChain `AIMessage` object
  */

-interface
-
-
-
-
-
-
-
-    openai?: OpenAI;
-    /**
-     * Whether to enable code interpretation.
-     * @default true
-     */
-    codeInterpreterEnabled?: boolean;
+interface ChainFnParameters {
+    model: string;
+    messages: BaseMessage[];
+    tools: DynamicStructuredTool[];
+    threadId?: string;
+    runId?: string;
+}
+interface LangChainAdapterOptions {
     /**
-     *
-     * @default true
+     * A function that uses the LangChain API to generate a response.
      */
-
+    chainFn: (parameters: ChainFnParameters) => Promise<LangChainReturnType>;
+}
+declare class LangChainAdapter implements CopilotServiceAdapter {
+    private options;
     /**
-     *
-     * You can disable parallel tool calls to force the model to execute tool calls sequentially.
-     * This is useful if you want to execute tool calls in a specific order so that the state changes
-     * introduced by one tool call are visible to the next tool call. (i.e. new actions or readables)
-     *
-     * @default false
+     * To use LangChain as a backend, provide a handler function to the adapter with your custom LangChain logic.
      */
-
-}
-declare class OpenAIAssistantAdapter implements CopilotServiceAdapter {
-    private openai;
-    private codeInterpreterEnabled;
-    private assistantId;
-    private fileSearchEnabled;
-    private disableParallelToolCalls;
-    constructor(params: OpenAIAssistantAdapterParams);
+    constructor(options: LangChainAdapterOptions);
     process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
-    private submitToolOutputs;
-    private submitUserMessage;
-    private streamResponse;
 }

 /**
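The `LangChainAdapter` typings added above declare `chainFn: (parameters: ChainFnParameters) => Promise<LangChainReturnType>`, where the return union covers a plain string, a LangChain stream, a `BaseMessageChunk`, or an `AIMessage`. A minimal sketch of the streaming and non-streaming variants, assuming `@langchain/openai` is installed; the model name and the `{ tools }` call option mirror the JSDoc example above rather than anything verified beyond this diff:

```ts
import { LangChainAdapter } from "@copilotkit/runtime";
import { ChatOpenAI } from "@langchain/openai";

const model = new ChatOpenAI({ model: "gpt-4o", apiKey: "<your-api-key>" });

// Streaming variant, as in the JSDoc example above: forward the model's stream.
const streamingAdapter = new LangChainAdapter({
  chainFn: async ({ messages, tools }) => model.stream(messages, { tools }),
});

// Non-streaming variant: `invoke` resolves to a message object, which is also
// covered by the LangChainReturnType union declared in this diff.
const invokeAdapter = new LangChainAdapter({
  chainFn: async ({ messages, tools }) => model.invoke(messages, { tools }),
});

// A plain string is accepted as well, e.g. for canned or fallback replies.
const staticAdapter = new LangChainAdapter({
  chainFn: async () => "The assistant is temporarily unavailable.",
});
```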
@@ -161,63 +151,73 @@ declare class GoogleGenerativeAIAdapter implements CopilotServiceAdapter {
     process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
 }

-type LangChainBaseMessageChunkStream = IterableReadableStream<BaseMessageChunk>;
-type LangChainAIMessageChunkStream = IterableReadableStreamInterface<AIMessageChunk>;
-type LangChainReturnType = LangChainBaseMessageChunkStream | LangChainAIMessageChunkStream | BaseMessageChunk | string | AIMessage;
-
 /**
- * Copilot Runtime adapter for
+ * Copilot Runtime adapter for the OpenAI Assistant API.
  *
  * ## Example
  *
  * ```ts
- * import { CopilotRuntime,
- * import
+ * import { CopilotRuntime, OpenAIAssistantAdapter } from "@copilotkit/runtime";
+ * import OpenAI from "openai";
  *
  * const copilotKit = new CopilotRuntime();
  *
- * const
- *
+ * const openai = new OpenAI({
+ *   organization: "<your-organization-id>",
  *   apiKey: "<your-api-key>",
  * });
  *
- * const serviceAdapter = new
- *
- *
- *
+ * const serviceAdapter = new OpenAIAssistantAdapter({
+ *   openai,
+ *   assistantId: "<your-assistant-id>",
+ *   codeInterpreterEnabled: true,
+ *   fileSearchEnabled: true,
  * });
  *
  * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);
  * ```
- *
- * The asynchronous handler function (`chainFn`) can return any of the following:
- *
- * - A simple `string` response
- * - A LangChain stream (`IterableReadableStream`)
- * - A LangChain `BaseMessageChunk` object
- * - A LangChain `AIMessage` object
  */

-interface
-    model: string;
-    messages: BaseMessage[];
-    tools: DynamicStructuredTool[];
-    threadId?: string;
-    runId?: string;
-}
-interface LangChainAdapterOptions {
+interface OpenAIAssistantAdapterParams {
     /**
-     *
+     * The ID of the assistant to use.
      */
-
-}
-declare class LangChainAdapter implements CopilotServiceAdapter {
-    private options;
+    assistantId: string;
     /**
-     *
+     * An optional OpenAI instance to use. If not provided, a new instance will be created.
      */
-
+    openai?: OpenAI;
+    /**
+     * Whether to enable code interpretation.
+     * @default true
+     */
+    codeInterpreterEnabled?: boolean;
+    /**
+     * Whether to enable file search.
+     * @default true
+     */
+    fileSearchEnabled?: boolean;
+    /**
+     * Whether to disable parallel tool calls.
+     * You can disable parallel tool calls to force the model to execute tool calls sequentially.
+     * This is useful if you want to execute tool calls in a specific order so that the state changes
+     * introduced by one tool call are visible to the next tool call. (i.e. new actions or readables)
+     *
+     * @default false
+     */
+    disableParallelToolCalls?: boolean;
+}
+declare class OpenAIAssistantAdapter implements CopilotServiceAdapter {
+    private openai;
+    private codeInterpreterEnabled;
+    private assistantId;
+    private fileSearchEnabled;
+    private disableParallelToolCalls;
+    constructor(params: OpenAIAssistantAdapterParams);
     process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+    private submitToolOutputs;
+    private submitUserMessage;
+    private streamResponse;
 }

 interface UnifyAdapterParams {
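The relocated `OpenAIAssistantAdapterParams` declarations above also spell out the option defaults: `codeInterpreterEnabled` and `fileSearchEnabled` default to `true`, `disableParallelToolCalls` to `false`, and `openai` falls back to a freshly constructed client. A small sketch based on those typings; the assistant ID and API key are placeholders:

```ts
import OpenAI from "openai";
import { OpenAIAssistantAdapter } from "@copilotkit/runtime";

const openai = new OpenAI({ apiKey: "<your-api-key>" });

// Only `assistantId` is required by the params interface above; the boolean
// flags keep their documented defaults unless overridden.
const serviceAdapter = new OpenAIAssistantAdapter({
  openai,
  assistantId: "<your-assistant-id>",
  // Force sequential tool calls so each call sees the state changes
  // introduced by the previous one (see the JSDoc above).
  disableParallelToolCalls: true,
});
```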
package/dist/index.d.ts CHANGED
@@ -1,22 +1,24 @@
-export { g as CommonConfig, b as CopilotRequestContextProperties, a as CopilotRuntime, C as CopilotRuntimeConstructorParams, c as CreateCopilotRuntimeServerOptions, G as GraphQLContext, e as buildSchema, d as createContext, f as flattenToolCallsNoDuplicates, h as getCommonConfig } from './copilot-runtime-
-export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from './
+export { g as CommonConfig, b as CopilotRequestContextProperties, a as CopilotRuntime, C as CopilotRuntimeConstructorParams, c as CreateCopilotRuntimeServerOptions, G as GraphQLContext, e as buildSchema, d as createContext, f as flattenToolCallsNoDuplicates, h as getCommonConfig } from './copilot-runtime-a1b5f1ce.js';
+export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from './groq-adapter-069ac812.js';
 export { CopilotRuntimeServerInstance, config, copilotRuntimeNextJSAppRouterEndpoint, copilotRuntimeNextJSPagesRouterEndpoint } from './lib/integrations/index.js';
 export { copilotRuntimeNodeHttpEndpoint } from './lib/integrations/node-http/index.js';
 export { copilotRuntimeNodeExpressEndpoint } from './lib/integrations/node-express/index.js';
 export { copilotRuntimeNestEndpoint } from './lib/integrations/nest/index.js';
 export { GuardrailsValidationFailureResponse, MessageStreamInterruptedResponse, UnknownErrorResponse } from './utils/index.js';
+export { C as CopilotServiceAdapter, a as RemoteChain, R as RemoteChainParameters } from './langserve-15a1286b.js';
+export { AnthropicAdapter, AnthropicAdapterParams } from './service-adapters/index.js';
 import '@copilotkit/shared';
-import './langserve-d6073a3b.js';
 import './index-0476e4f7.js';
 import './graphql/types/base/index.js';
-import 'rxjs';
 import 'graphql';
 import 'pino';
 import 'graphql-yoga';
 import './lib/cloud/index.js';
 import 'openai';
-import '@google/generative-ai';
 import '@langchain/core/messages';
 import '@langchain/core/tools';
 import '@langchain/core/utils/stream';
+import '@google/generative-ai';
 import 'groq-sdk';
+import 'rxjs';
+import '@anthropic-ai/sdk';
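Beyond the chunk-hash bumps, the re-exported surface in `index.d.ts` now includes the new Anthropic service adapter (`AnthropicAdapter`, `AnthropicAdapterParams`) and pulls in `@anthropic-ai/sdk` at the type level. A usage sketch; the parameter shape (an optional preconfigured `anthropic` client) is an assumption inferred from the other adapters in this diff, not confirmed by the typings shown in this excerpt:

```ts
import Anthropic from "@anthropic-ai/sdk";
import { CopilotRuntime, AnthropicAdapter } from "@copilotkit/runtime";

const copilotKit = new CopilotRuntime();

// Hypothetical params: an optional Anthropic client, mirroring how the
// OpenAI and Groq adapters accept a preconfigured SDK instance.
const anthropic = new Anthropic({ apiKey: "<your-api-key>" });
const serviceAdapter = new AnthropicAdapter({ anthropic });

// The adapter then plugs into the runtime exactly like the others, e.g.
// copilotKit.streamHttpServerResponse(req, res, serviceAdapter);
```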