@copilotkit/runtime 1.4.0-lgc-alpha3.0 → 1.4.0-pre-1-4-0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/CHANGELOG.md +85 -3
  2. package/dist/{chunk-KTIMUXCE.mjs → chunk-5DNXXJKY.mjs} +2 -2
  3. package/dist/{chunk-WTEY45IF.mjs → chunk-7ASURDKE.mjs} +3 -3
  4. package/dist/{chunk-WTEY45IF.mjs.map → chunk-7ASURDKE.mjs.map} +1 -1
  5. package/dist/{chunk-XE3SYKK4.mjs → chunk-BNQDVBQH.mjs} +1 -1
  6. package/dist/chunk-BNQDVBQH.mjs.map +1 -0
  7. package/dist/{chunk-CMYZ2LM7.mjs → chunk-ITRJ4KUY.mjs} +2 -2
  8. package/dist/{chunk-2EV5DRUI.mjs → chunk-KDMS6EKE.mjs} +27 -47
  9. package/dist/chunk-KDMS6EKE.mjs.map +1 -0
  10. package/dist/{chunk-3CQQ4U77.mjs → chunk-NR7QFSEY.mjs} +2 -2
  11. package/dist/{copilot-runtime-543a59ae.d.ts → copilot-runtime-8d3f40c7.d.ts} +1 -1
  12. package/dist/{groq-adapter-7aa25931.d.ts → groq-adapter-dbfba3eb.d.ts} +33 -0
  13. package/dist/index.d.ts +2 -2
  14. package/dist/index.js +26 -46
  15. package/dist/index.js.map +1 -1
  16. package/dist/index.mjs +6 -6
  17. package/dist/lib/index.d.ts +2 -2
  18. package/dist/lib/index.js +26 -46
  19. package/dist/lib/index.js.map +1 -1
  20. package/dist/lib/index.mjs +6 -6
  21. package/dist/lib/integrations/index.d.ts +2 -2
  22. package/dist/lib/integrations/index.js +1 -1
  23. package/dist/lib/integrations/index.js.map +1 -1
  24. package/dist/lib/integrations/index.mjs +4 -4
  25. package/dist/lib/integrations/nest/index.d.ts +1 -1
  26. package/dist/lib/integrations/nest/index.js +1 -1
  27. package/dist/lib/integrations/nest/index.js.map +1 -1
  28. package/dist/lib/integrations/nest/index.mjs +2 -2
  29. package/dist/lib/integrations/node-express/index.d.ts +1 -1
  30. package/dist/lib/integrations/node-express/index.js +1 -1
  31. package/dist/lib/integrations/node-express/index.js.map +1 -1
  32. package/dist/lib/integrations/node-express/index.mjs +2 -2
  33. package/dist/lib/integrations/node-http/index.d.ts +1 -1
  34. package/dist/lib/integrations/node-http/index.js +1 -1
  35. package/dist/lib/integrations/node-http/index.js.map +1 -1
  36. package/dist/lib/integrations/node-http/index.mjs +1 -1
  37. package/dist/service-adapters/index.d.ts +1 -1
  38. package/dist/service-adapters/index.js.map +1 -1
  39. package/dist/service-adapters/index.mjs +1 -1
  40. package/package.json +4 -4
  41. package/src/lib/runtime/copilot-runtime.ts +2 -2
  42. package/src/lib/runtime/remote-lg-cloud-action.ts +38 -51
  43. package/src/service-adapters/openai/openai-adapter.ts +33 -0
  44. package/dist/chunk-2EV5DRUI.mjs.map +0 -1
  45. package/dist/chunk-XE3SYKK4.mjs.map +0 -1
  46. /package/dist/{chunk-KTIMUXCE.mjs.map → chunk-5DNXXJKY.mjs.map} +0 -0
  47. /package/dist/{chunk-CMYZ2LM7.mjs.map → chunk-ITRJ4KUY.mjs.map} +0 -0
  48. /package/dist/{chunk-3CQQ4U77.mjs.map → chunk-NR7QFSEY.mjs.map} +0 -0
package/CHANGELOG.md CHANGED
@@ -1,15 +1,97 @@
  # @copilotkit/runtime
 
- ## 1.4.0-lgc-alpha3.0
+ ## 1.4.0-pre-1-4-0.10
 
  ### Minor Changes
 
- - lgc alpha 3
+ - LangGraph Cloud and LangGraphJS alpha release
+ - b318b18: Add LangGraph Cloud support
 
  ### Patch Changes
 
  - Updated dependencies
- - @copilotkit/shared@1.4.0-lgc-alpha3.0
+ - Updated dependencies [b318b18]
+ - @copilotkit/shared@1.4.0-pre-1-4-0.10
+
+ ## 1.3.16-mme-lgc-langgraph-package.9
+
+ ### Patch Changes
+
+ - update entry
+ - Updated dependencies
+ - @copilotkit/shared@1.3.16-mme-lgc-langgraph-package.9
+
+ ## 1.3.16-mme-lgc-langgraph-package.8
+
+ ### Patch Changes
+
+ - update entry
+ - Updated dependencies
+ - @copilotkit/shared@1.3.16-mme-lgc-langgraph-package.8
+
+ ## 1.3.16-mme-lgc-langgraph-package.7
+
+ ### Patch Changes
+
+ - update entry in tsup config
+ - Updated dependencies
+ - @copilotkit/shared@1.3.16-mme-lgc-langgraph-package.7
+
+ ## 1.3.16-mme-lgc-langgraph-package.6
+
+ ### Patch Changes
+
+ - Update exports
+ - Updated dependencies
+ - @copilotkit/shared@1.3.16-mme-lgc-langgraph-package.6
+
+ ## 1.3.16-mme-lgc-langgraph-package.5
+
+ ### Patch Changes
+
+ - update tsup config
+ - Updated dependencies
+ - @copilotkit/shared@1.3.16-mme-lgc-langgraph-package.5
+
+ ## 1.3.16-mme-lgc-langgraph-package.4
+
+ ### Patch Changes
+
+ - Update exports
+ - Updated dependencies
+ - @copilotkit/shared@1.3.16-mme-lgc-langgraph-package.4
+
+ ## 1.3.16-mme-lgc-langgraph-package.3
+
+ ### Patch Changes
+
+ - export langchain module
+ - Updated dependencies
+ - @copilotkit/shared@1.3.16-mme-lgc-langgraph-package.3
+
+ ## 1.3.16-mme-sdk-js.2
+
+ ### Patch Changes
+
+ - Ensure intermediate state config is sent as snake case
+ - Updated dependencies
+ - @copilotkit/shared@1.3.16-mme-sdk-js.2
+
+ ## 1.3.16-mme-sdk-js.1
+
+ ### Patch Changes
+
+ - Update lockfile
+ - Updated dependencies
+ - @copilotkit/shared@1.3.16-mme-sdk-js.1
+
+ ## 1.3.16-mme-sdk-js.0
+
+ ### Patch Changes
+
+ - Export LangGraph functions
+ - Updated dependencies
+ - @copilotkit/shared@1.3.16-mme-sdk-js.0
 
  ## 1.3.15
 
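The headline change in this release is LangGraph Cloud support: the updated `copilot-runtime.ts` (embedded in the source maps further down) adds a `langGraphCloudEndpoint` factory, treats an endpoint definition as LangGraph Cloud when it carries `deploymentUrl`, `langsmithApiKey`, and `agents`, and points the deprecated `remoteActions` option at `remoteEndpoints`. Below is a minimal sketch of wiring this up, assuming both factory functions are re-exported from the package root like `CopilotRuntime`; the URLs, key, and the exact shape of the `agents` entries are illustrative placeholders, not taken from this diff.

```ts
import {
  CopilotRuntime,
  copilotKitEndpoint,
  langGraphCloudEndpoint,
} from "@copilotkit/runtime";

// Sketch only: every value below is a placeholder, and the shape of the
// `agents` entries is an assumption (it is not visible in this diff).
const runtime = new CopilotRuntime({
  remoteEndpoints: [
    // Recognized as a LangGraph Cloud endpoint because the definition carries
    // deploymentUrl, langsmithApiKey and agents (see resolveEndpointType).
    langGraphCloudEndpoint({
      deploymentUrl: "<your-langgraph-cloud-deployment-url>",
      langsmithApiKey: "<your-langsmith-api-key>",
      agents: [{ name: "my_agent", description: "An example agent" }],
    }),
    // A plain CopilotKit endpoint via the companion factory;
    // `remoteEndpoints` supersedes the deprecated `remoteActions` option.
    copilotKitEndpoint({ url: "<your-copilotkit-remote-endpoint>" }),
  ],
});
```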
@@ -1,7 +1,7 @@
  import {
  getCommonConfig,
  telemetry_client_default
- } from "./chunk-2EV5DRUI.mjs";
+ } from "./chunk-KDMS6EKE.mjs";
  import {
  __name
  } from "./chunk-44O2JGUY.mjs";
@@ -76,4 +76,4 @@ export {
  config,
  copilotRuntimeNextJSPagesRouterEndpoint
  };
- //# sourceMappingURL=chunk-KTIMUXCE.mjs.map
+ //# sourceMappingURL=chunk-5DNXXJKY.mjs.map
@@ -1,12 +1,12 @@
  import {
  RemoteChain
- } from "./chunk-XE3SYKK4.mjs";
+ } from "./chunk-BNQDVBQH.mjs";
  import {
  EndpointType,
  RuntimeEventSource,
  isLangGraphAgentAction,
  setupRemoteActions
- } from "./chunk-2EV5DRUI.mjs";
+ } from "./chunk-KDMS6EKE.mjs";
  import {
  ActionExecutionMessage,
  AgentStateMessage,
@@ -285,4 +285,4 @@ export {
  copilotKitEndpoint,
  langGraphCloudEndpoint
  };
- //# sourceMappingURL=chunk-WTEY45IF.mjs.map
+ //# sourceMappingURL=chunk-7ASURDKE.mjs.map
@@ -1 +1 @@
- {"version":3,"sources":["../src/lib/runtime/copilot-runtime.ts","../src/service-adapters/conversion.ts"],"sourcesContent":["/**\n * <Callout type=\"info\">\n * This is the reference for the `CopilotRuntime` class. For more information and example code snippets, please see [Concept: Copilot Runtime](/concepts/copilot-runtime).\n * </Callout>\n *\n * ## Usage\n *\n * ```tsx\n * import { CopilotRuntime } from \"@copilotkit/runtime\";\n *\n * const copilotKit = new CopilotRuntime();\n * ```\n */\n\nimport { Action, actionParametersToJsonSchema, Parameter } from \"@copilotkit/shared\";\nimport { CopilotServiceAdapter, RemoteChain, RemoteChainParameters } from \"../../service-adapters\";\nimport { MessageInput } from \"../../graphql/inputs/message.input\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\nimport { RuntimeEventSource } from \"../../service-adapters/events\";\nimport { convertGqlInputToMessages } from \"../../service-adapters/conversion\";\nimport { Message } from \"../../graphql/types/converted\";\nimport { ForwardedParametersInput } from \"../../graphql/inputs/forwarded-parameters.input\";\nimport {\n isLangGraphAgentAction,\n LangGraphAgentAction,\n EndpointType,\n setupRemoteActions,\n EndpointDefinition,\n CopilotKitEndpoint,\n LangGraphCloudEndpoint,\n} from \"./remote-actions\";\nimport { GraphQLContext } from \"../integrations/shared\";\nimport { AgentSessionInput } from \"../../graphql/inputs/agent-session.input\";\nimport { from } from \"rxjs\";\nimport { AgentStateInput } from \"../../graphql/inputs/agent-state.input\";\n\ninterface CopilotRuntimeRequest {\n serviceAdapter: CopilotServiceAdapter;\n messages: MessageInput[];\n actions: ActionInput[];\n agentSession?: AgentSessionInput;\n agentStates?: AgentStateInput[];\n outputMessagesPromise: Promise<Message[]>;\n threadId?: string;\n runId?: string;\n publicApiKey?: string;\n graphqlContext: GraphQLContext;\n forwardedParameters?: ForwardedParametersInput;\n url?: string;\n}\n\ninterface CopilotRuntimeResponse {\n threadId: string;\n runId?: string;\n eventSource: RuntimeEventSource;\n serverSideActions: Action<any>[];\n actionInputsWithoutAgents: ActionInput[];\n}\n\ntype ActionsConfiguration<T extends Parameter[] | [] = []> =\n | Action<T>[]\n | ((ctx: { properties: any; url?: string }) => Action<T>[]);\n\ninterface OnBeforeRequestOptions {\n threadId?: string;\n runId?: string;\n inputMessages: Message[];\n properties: any;\n url?: string;\n}\n\ntype OnBeforeRequestHandler = (options: OnBeforeRequestOptions) => void | Promise<void>;\n\ninterface OnAfterRequestOptions {\n threadId: string;\n runId?: string;\n inputMessages: Message[];\n outputMessages: Message[];\n properties: any;\n url?: string;\n}\n\ntype OnAfterRequestHandler = (options: OnAfterRequestOptions) => void | Promise<void>;\n\ninterface Middleware {\n /**\n * A function that is called before the request is processed.\n */\n onBeforeRequest?: OnBeforeRequestHandler;\n\n /**\n * A function that is called after the request is processed.\n */\n onAfterRequest?: OnAfterRequestHandler;\n}\n\nexport interface CopilotRuntimeConstructorParams<T extends Parameter[] | [] = []> {\n /**\n * Middleware to be used by the runtime.\n *\n * ```ts\n * onBeforeRequest: (options: {\n * threadId?: string;\n * runId?: string;\n * inputMessages: Message[];\n * properties: any;\n * }) => void | Promise<void>;\n * ```\n *\n * ```ts\n * onAfterRequest: (options: {\n * threadId?: string;\n * runId?: string;\n * inputMessages: Message[];\n * outputMessages: 
Message[];\n * properties: any;\n * }) => void | Promise<void>;\n * ```\n */\n middleware?: Middleware;\n\n /*\n * A list of server side actions that can be executed.\n */\n actions?: ActionsConfiguration<T>;\n\n /*\n * Deprecated: See `remoteEndpoints`.\n */\n remoteActions?: EndpointDefinition[];\n\n /*\n * A list of remote actions that can be executed.\n */\n remoteEndpoints?: EndpointDefinition[];\n\n /*\n * An array of LangServer URLs.\n */\n langserve?: RemoteChainParameters[];\n}\n\nexport class CopilotRuntime<const T extends Parameter[] | [] = []> {\n public actions: ActionsConfiguration<T>;\n private remoteEndpointDefinitions: EndpointDefinition[];\n private langserve: Promise<Action<any>>[] = [];\n private onBeforeRequest?: OnBeforeRequestHandler;\n private onAfterRequest?: OnAfterRequestHandler;\n\n constructor(params?: CopilotRuntimeConstructorParams<T>) {\n this.actions = params?.actions || [];\n\n for (const chain of params?.langserve || []) {\n const remoteChain = new RemoteChain(chain);\n this.langserve.push(remoteChain.toAction());\n }\n\n this.remoteEndpointDefinitions = params?.remoteEndpoints || [];\n\n this.onBeforeRequest = params?.middleware?.onBeforeRequest;\n this.onAfterRequest = params?.middleware?.onAfterRequest;\n }\n\n async processRuntimeRequest(request: CopilotRuntimeRequest): Promise<CopilotRuntimeResponse> {\n const {\n serviceAdapter,\n messages: rawMessages,\n actions: clientSideActionsInput,\n threadId,\n runId,\n outputMessagesPromise,\n graphqlContext,\n forwardedParameters,\n agentSession,\n url,\n } = request;\n\n if (agentSession) {\n return this.processAgentRequest(request);\n }\n\n const messages = rawMessages.filter((message) => !message.agentStateMessage);\n\n const inputMessages = convertGqlInputToMessages(messages);\n const serverSideActions = await this.getServerSideActions(request);\n\n const serverSideActionsInput: ActionInput[] = serverSideActions.map((action) => ({\n name: action.name,\n description: action.description,\n jsonSchema: JSON.stringify(actionParametersToJsonSchema(action.parameters)),\n }));\n\n const actionInputs = flattenToolCallsNoDuplicates([\n ...serverSideActionsInput,\n ...clientSideActionsInput,\n ]);\n\n await this.onBeforeRequest?.({\n threadId,\n runId,\n inputMessages,\n properties: graphqlContext.properties,\n url,\n });\n\n try {\n const eventSource = new RuntimeEventSource();\n\n const result = await serviceAdapter.process({\n messages: inputMessages,\n actions: actionInputs,\n threadId,\n runId,\n eventSource,\n forwardedParameters,\n });\n\n outputMessagesPromise\n .then((outputMessages) => {\n this.onAfterRequest?.({\n threadId: result.threadId,\n runId: result.runId,\n inputMessages,\n outputMessages,\n properties: graphqlContext.properties,\n url,\n });\n })\n .catch((_error) => {});\n\n return {\n threadId: result.threadId,\n runId: result.runId,\n eventSource,\n serverSideActions,\n actionInputsWithoutAgents: actionInputs.filter(\n (action) =>\n // TODO-AGENTS: do not exclude ALL server side actions\n !serverSideActions.find((serverSideAction) => serverSideAction.name == action.name),\n // !isLangGraphAgentAction(\n // serverSideActions.find((serverSideAction) => serverSideAction.name == action.name),\n // ),\n ),\n };\n } catch (error) {\n console.error(\"Error getting response:\", error);\n throw error;\n }\n }\n\n private async processAgentRequest(\n request: CopilotRuntimeRequest,\n ): Promise<CopilotRuntimeResponse> {\n const { messages: rawMessages, outputMessagesPromise, graphqlContext, 
agentSession } = request;\n const { threadId, agentName, nodeName } = agentSession;\n const serverSideActions = await this.getServerSideActions(request);\n\n const messages = convertGqlInputToMessages(rawMessages);\n\n const agent = serverSideActions.find(\n (action) => action.name === agentName && isLangGraphAgentAction(action),\n ) as LangGraphAgentAction;\n\n if (!agent) {\n throw new Error(`Agent ${agentName} not found`);\n }\n\n const serverSideActionsInput: ActionInput[] = serverSideActions\n .filter((action) => !isLangGraphAgentAction(action))\n .map((action) => ({\n name: action.name,\n description: action.description,\n jsonSchema: JSON.stringify(actionParametersToJsonSchema(action.parameters)),\n }));\n\n const actionInputsWithoutAgents = flattenToolCallsNoDuplicates([\n ...serverSideActionsInput,\n ...request.actions,\n ]);\n\n await this.onBeforeRequest?.({\n threadId,\n runId: undefined,\n inputMessages: messages,\n properties: graphqlContext.properties,\n });\n try {\n const eventSource = new RuntimeEventSource();\n const stream = await agent.langGraphAgentHandler({\n name: agentName,\n threadId,\n nodeName,\n actionInputsWithoutAgents,\n });\n\n eventSource.stream(async (eventStream$) => {\n from(stream).subscribe({\n next: (event) => eventStream$.next(event),\n error: (err) => console.error(\"Error in stream\", err),\n complete: () => eventStream$.complete(),\n });\n });\n\n outputMessagesPromise\n .then((outputMessages) => {\n this.onAfterRequest?.({\n threadId,\n runId: undefined,\n inputMessages: messages,\n outputMessages,\n properties: graphqlContext.properties,\n });\n })\n .catch((_error) => {});\n\n return {\n threadId,\n runId: undefined,\n eventSource,\n serverSideActions: [],\n actionInputsWithoutAgents,\n };\n } catch (error) {\n console.error(\"Error getting response:\", error);\n throw error;\n }\n }\n\n private async getServerSideActions(request: CopilotRuntimeRequest): Promise<Action<any>[]> {\n const { messages: rawMessages, graphqlContext, agentStates, url } = request;\n const inputMessages = convertGqlInputToMessages(rawMessages);\n const langserveFunctions: Action<any>[] = [];\n\n for (const chainPromise of this.langserve) {\n try {\n const chain = await chainPromise;\n langserveFunctions.push(chain);\n } catch (error) {\n console.error(\"Error loading langserve chain:\", error);\n }\n }\n\n const remoteEndpointDefinitions = this.remoteEndpointDefinitions.map(\n (endpoint) =>\n ({\n ...endpoint,\n type: this.resolveEndpointType(endpoint),\n }) as EndpointDefinition,\n );\n\n const remoteActions = await setupRemoteActions({\n remoteEndpointDefinitions,\n graphqlContext,\n messages: inputMessages,\n agentStates,\n frontendUrl: url,\n });\n\n const configuredActions =\n typeof this.actions === \"function\"\n ? 
this.actions({ properties: graphqlContext.properties, url })\n : this.actions;\n\n return [...configuredActions, ...langserveFunctions, ...remoteActions];\n }\n\n private resolveEndpointType(endpoint: EndpointDefinition) {\n if (\n !endpoint.type &&\n \"langsmithApiKey\" in endpoint &&\n \"deploymentUrl\" in endpoint &&\n \"agents\" in endpoint\n ) {\n return EndpointType.LangGraphCloud;\n }\n\n return endpoint.type;\n }\n}\n\nexport function flattenToolCallsNoDuplicates(toolsByPriority: ActionInput[]): ActionInput[] {\n let allTools: ActionInput[] = [];\n const allToolNames: string[] = [];\n for (const tool of toolsByPriority) {\n if (!allToolNames.includes(tool.name)) {\n allTools.push(tool);\n allToolNames.push(tool.name);\n }\n }\n return allTools;\n}\n\n// The two functions below are \"factory functions\", meant to create the action objects that adhere to the expected interfaces\nexport function copilotKitEndpoint(config: Omit<CopilotKitEndpoint, \"type\">): CopilotKitEndpoint {\n return {\n ...config,\n type: EndpointType.CopilotKit,\n };\n}\n\nexport function langGraphCloudEndpoint(\n config: Omit<LangGraphCloudEndpoint, \"type\">,\n): LangGraphCloudEndpoint {\n return {\n ...config,\n type: EndpointType.LangGraphCloud,\n };\n}\n","import {\n ActionExecutionMessage,\n Message,\n ResultMessage,\n TextMessage,\n AgentStateMessage,\n} from \"../graphql/types/converted\";\nimport { MessageInput } from \"../graphql/inputs/message.input\";\nimport { plainToInstance } from \"class-transformer\";\n\nexport function convertGqlInputToMessages(inputMessages: MessageInput[]): Message[] {\n const messages: Message[] = [];\n\n for (const message of inputMessages) {\n if (message.textMessage) {\n messages.push(\n plainToInstance(TextMessage, {\n id: message.id,\n createdAt: message.createdAt,\n role: message.textMessage.role,\n content: message.textMessage.content,\n }),\n );\n } else if (message.actionExecutionMessage) {\n messages.push(\n plainToInstance(ActionExecutionMessage, {\n id: message.id,\n createdAt: message.createdAt,\n name: message.actionExecutionMessage.name,\n arguments: JSON.parse(message.actionExecutionMessage.arguments),\n scope: message.actionExecutionMessage.scope,\n }),\n );\n } else if (message.resultMessage) {\n messages.push(\n plainToInstance(ResultMessage, {\n id: message.id,\n createdAt: message.createdAt,\n actionExecutionId: message.resultMessage.actionExecutionId,\n actionName: message.resultMessage.actionName,\n result: message.resultMessage.result,\n }),\n );\n } else if (message.agentStateMessage) {\n messages.push(\n plainToInstance(AgentStateMessage, {\n id: message.id,\n threadId: message.agentStateMessage.threadId,\n createdAt: message.createdAt,\n agentName: message.agentStateMessage.agentName,\n nodeName: message.agentStateMessage.nodeName,\n runId: message.agentStateMessage.runId,\n active: message.agentStateMessage.active,\n role: message.agentStateMessage.role,\n state: JSON.parse(message.agentStateMessage.state),\n running: message.agentStateMessage.running,\n }),\n );\n }\n }\n\n return 
messages;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAcA,SAAiBA,oCAA+C;;;ACNhE,SAASC,uBAAuB;AAEzB,SAASC,0BAA0BC,eAA6B;AACrE,QAAMC,WAAsB,CAAA;AAE5B,aAAWC,WAAWF,eAAe;AACnC,QAAIE,QAAQC,aAAa;AACvBF,eAASG,KACPC,gBAAgBC,aAAa;QAC3BC,IAAIL,QAAQK;QACZC,WAAWN,QAAQM;QACnBC,MAAMP,QAAQC,YAAYM;QAC1BC,SAASR,QAAQC,YAAYO;MAC/B,CAAA,CAAA;IAEJ,WAAWR,QAAQS,wBAAwB;AACzCV,eAASG,KACPC,gBAAgBO,wBAAwB;QACtCL,IAAIL,QAAQK;QACZC,WAAWN,QAAQM;QACnBK,MAAMX,QAAQS,uBAAuBE;QACrCC,WAAWC,KAAKC,MAAMd,QAAQS,uBAAuBG,SAAS;QAC9DG,OAAOf,QAAQS,uBAAuBM;MACxC,CAAA,CAAA;IAEJ,WAAWf,QAAQgB,eAAe;AAChCjB,eAASG,KACPC,gBAAgBc,eAAe;QAC7BZ,IAAIL,QAAQK;QACZC,WAAWN,QAAQM;QACnBY,mBAAmBlB,QAAQgB,cAAcE;QACzCC,YAAYnB,QAAQgB,cAAcG;QAClCC,QAAQpB,QAAQgB,cAAcI;MAChC,CAAA,CAAA;IAEJ,WAAWpB,QAAQqB,mBAAmB;AACpCtB,eAASG,KACPC,gBAAgBmB,mBAAmB;QACjCjB,IAAIL,QAAQK;QACZkB,UAAUvB,QAAQqB,kBAAkBE;QACpCjB,WAAWN,QAAQM;QACnBkB,WAAWxB,QAAQqB,kBAAkBG;QACrCC,UAAUzB,QAAQqB,kBAAkBI;QACpCC,OAAO1B,QAAQqB,kBAAkBK;QACjCC,QAAQ3B,QAAQqB,kBAAkBM;QAClCpB,MAAMP,QAAQqB,kBAAkBd;QAChCqB,OAAOf,KAAKC,MAAMd,QAAQqB,kBAAkBO,KAAK;QACjDC,SAAS7B,QAAQqB,kBAAkBQ;MACrC,CAAA,CAAA;IAEJ;EACF;AAEA,SAAO9B;AACT;AApDgBF;;;ADuBhB,SAASiC,YAAY;AA6Gd,IAAMC,iBAAN,MAAMA;EACJC;EACCC;EACAC,YAAoC,CAAA;EACpCC;EACAC;EAERC,YAAYC,QAA6C;AArJ3D;AAsJI,SAAKN,WAAUM,iCAAQN,YAAW,CAAA;AAElC,eAAWO,UAASD,iCAAQJ,cAAa,CAAA,GAAI;AAC3C,YAAMM,cAAc,IAAIC,YAAYF,KAAAA;AACpC,WAAKL,UAAUQ,KAAKF,YAAYG,SAAQ,CAAA;IAC1C;AAEA,SAAKV,6BAA4BK,iCAAQM,oBAAmB,CAAA;AAE5D,SAAKT,mBAAkBG,sCAAQO,eAARP,mBAAoBH;AAC3C,SAAKC,kBAAiBE,sCAAQO,eAARP,mBAAoBF;EAC5C;EAEA,MAAMU,sBAAsBC,SAAiE;AAnK/F;AAoKI,UAAM,EACJC,gBACAC,UAAUC,aACVlB,SAASmB,wBACTC,UACAC,OACAC,uBACAC,gBACAC,qBACAC,cACAC,IAAG,IACDX;AAEJ,QAAIU,cAAc;AAChB,aAAO,KAAKE,oBAAoBZ,OAAAA;IAClC;AAEA,UAAME,WAAWC,YAAYU,OAAO,CAACC,YAAY,CAACA,QAAQC,iBAAiB;AAE3E,UAAMC,gBAAgBC,0BAA0Bf,QAAAA;AAChD,UAAMgB,oBAAoB,MAAM,KAAKC,qBAAqBnB,OAAAA;AAE1D,UAAMoB,yBAAwCF,kBAAkBG,IAAI,CAACC,YAAY;MAC/EC,MAAMD,OAAOC;MACbC,aAAaF,OAAOE;MACpBC,YAAYC,KAAKC,UAAUC,6BAA6BN,OAAOO,UAAU,CAAA;IAC3E,EAAA;AAEA,UAAMC,eAAeC,6BAA6B;SAC7CX;SACAhB;KACJ;AAED,YAAM,UAAKhB,oBAAL,8BAAuB;MAC3BiB;MACAC;MACAU;MACAgB,YAAYxB,eAAewB;MAC3BrB;IACF;AAEA,QAAI;AACF,YAAMsB,cAAc,IAAIC,mBAAAA;AAExB,YAAMC,SAAS,MAAMlC,eAAemC,QAAQ;QAC1ClC,UAAUc;QACV/B,SAAS6C;QACTzB;QACAC;QACA2B;QACAxB;MACF,CAAA;AAEAF,4BACG8B,KAAK,CAACC,mBAAAA;AA1Nf,YAAAC;AA2NU,SAAAA,MAAA,KAAKlD,mBAAL,gBAAAkD,IAAA,WAAsB;UACpBlC,UAAU8B,OAAO9B;UACjBC,OAAO6B,OAAO7B;UACdU;UACAsB;UACAN,YAAYxB,eAAewB;UAC3BrB;QACF;MACF,CAAA,EACC6B,MAAM,CAACC,WAAAA;MAAY,CAAA;AAEtB,aAAO;QACLpC,UAAU8B,OAAO9B;QACjBC,OAAO6B,OAAO7B;QACd2B;QACAf;QACAwB,2BAA2BZ,aAAajB,OACtC,CAACS;;UAEC,CAACJ,kBAAkByB,KAAK,CAACC,qBAAqBA,iBAAiBrB,QAAQD,OAAOC,IAAI;SAAA;MAKxF;IACF,SAASsB,OAAP;AACAC,cAAQD,MAAM,2BAA2BA,KAAAA;AACzC,YAAMA;IACR;EACF;EAEA,MAAcjC,oBACZZ,SACiC;AA5PrC;AA6PI,UAAM,EAAEE,UAAUC,aAAaI,uBAAuBC,gBAAgBE,aAAY,IAAKV;AACvF,UAAM,EAAEK,UAAU0C,WAAWC,SAAQ,IAAKtC;AAC1C,UAAMQ,oBAAoB,MAAM,KAAKC,qBAAqBnB,OAAAA;AAE1D,UAAME,WAAWe,0BAA0Bd,WAAAA;AAE3C,UAAM8C,QAAQ/B,kBAAkByB,KAC9B,CAACrB,WAAWA,OAAOC,SAASwB,aAAaG,uBAAuB5B,MAAAA,CAAAA;AAGlE,QAAI,CAAC2B,OAAO;AACV,YAAM,IAAIE,MAAM,SAASJ,qBAAqB;IAChD;AAEA,UAAM3B,yBAAwCF,kBAC3CL,OAAO,CAACS,WAAW,CAAC4B,uBAAuB5B,MAAAA,CAAAA,EAC3CD,IAAI,CAACC,YAAY;MAChBC,MAAMD,OAAOC;MACbC,aAAaF,OAAOE;MACpBC,YAAYC,KAAKC,UAAUC,6BAA6BN,OAAOO,UAAU,CAAA;IAC3E,EAAA;AAEF,UAAMa,4BAA4BX,6BAA6B;SAC1DX;SACApB,QAAQf;KACZ;AAED,YAAM,UAAKG,oBAAL,8BAAuB;MAC3BiB;MACAC,OAAO8C;MACPpC,eAAed;MACf8B,YAAYxB,eAAewB;IAC7B;AACA,QAAI;AACF,YAAMC,cAAc,IAAIC,mBAAAA;AACxB,YAAMmB,SAAS,MAAMJ,MAAMK,sBAAsB;QAC/C/B,MAAMwB;QACN1C;QACA2C;QACAN;MACF,CAAA;AAEAT,kBAAYoB,OAAO,
OAAOE,iBAAAA;AACxBC,aAAKH,MAAAA,EAAQI,UAAU;UACrBC,MAAM,CAACC,UAAUJ,aAAaG,KAAKC,KAAAA;UACnCd,OAAO,CAACe,QAAQd,QAAQD,MAAM,mBAAmBe,GAAAA;UACjDC,UAAU,MAAMN,aAAaM,SAAQ;QACvC,CAAA;MACF,CAAA;AAEAtD,4BACG8B,KAAK,CAACC,mBAAAA;AAhTf,YAAAC;AAiTU,SAAAA,MAAA,KAAKlD,mBAAL,gBAAAkD,IAAA,WAAsB;UACpBlC;UACAC,OAAO8C;UACPpC,eAAed;UACfoC;UACAN,YAAYxB,eAAewB;QAC7B;MACF,CAAA,EACCQ,MAAM,CAACC,WAAAA;MAAY,CAAA;AAEtB,aAAO;QACLpC;QACAC,OAAO8C;QACPnB;QACAf,mBAAmB,CAAA;QACnBwB;MACF;IACF,SAASG,OAAP;AACAC,cAAQD,MAAM,2BAA2BA,KAAAA;AACzC,YAAMA;IACR;EACF;EAEA,MAAc1B,qBAAqBnB,SAAwD;AACzF,UAAM,EAAEE,UAAUC,aAAaK,gBAAgBsD,aAAanD,IAAG,IAAKX;AACpE,UAAMgB,gBAAgBC,0BAA0Bd,WAAAA;AAChD,UAAM4D,qBAAoC,CAAA;AAE1C,eAAWC,gBAAgB,KAAK7E,WAAW;AACzC,UAAI;AACF,cAAMK,QAAQ,MAAMwE;AACpBD,2BAAmBpE,KAAKH,KAAAA;MAC1B,SAASqD,OAAP;AACAC,gBAAQD,MAAM,kCAAkCA,KAAAA;MAClD;IACF;AAEA,UAAM3D,4BAA4B,KAAKA,0BAA0BmC,IAC/D,CAAC4C,cACE;MACC,GAAGA;MACHC,MAAM,KAAKC,oBAAoBF,QAAAA;IACjC,EAAA;AAGJ,UAAMG,gBAAgB,MAAMC,mBAAmB;MAC7CnF;MACAsB;MACAN,UAAUc;MACV8C;MACAQ,aAAa3D;IACf,CAAA;AAEA,UAAM4D,oBACJ,OAAO,KAAKtF,YAAY,aACpB,KAAKA,QAAQ;MAAE+C,YAAYxB,eAAewB;MAAYrB;IAAI,CAAA,IAC1D,KAAK1B;AAEX,WAAO;SAAIsF;SAAsBR;SAAuBK;;EAC1D;EAEQD,oBAAoBF,UAA8B;AACxD,QACE,CAACA,SAASC,QACV,qBAAqBD,YACrB,mBAAmBA,YACnB,YAAYA,UACZ;AACA,aAAOO,aAAaC;IACtB;AAEA,WAAOR,SAASC;EAClB;AACF;AA5OalF;AA8ON,SAAS+C,6BAA6B2C,iBAA8B;AACzE,MAAIC,WAA0B,CAAA;AAC9B,QAAMC,eAAyB,CAAA;AAC/B,aAAWC,QAAQH,iBAAiB;AAClC,QAAI,CAACE,aAAaE,SAASD,KAAKtD,IAAI,GAAG;AACrCoD,eAAShF,KAAKkF,IAAAA;AACdD,mBAAajF,KAAKkF,KAAKtD,IAAI;IAC7B;EACF;AACA,SAAOoD;AACT;AAVgB5C;AAaT,SAASgD,mBAAmBC,QAAwC;AACzE,SAAO;IACL,GAAGA;IACHd,MAAMM,aAAaS;EACrB;AACF;AALgBF;AAOT,SAASG,uBACdF,QAA4C;AAE5C,SAAO;IACL,GAAGA;IACHd,MAAMM,aAAaC;EACrB;AACF;AAPgBS;","names":["actionParametersToJsonSchema","plainToInstance","convertGqlInputToMessages","inputMessages","messages","message","textMessage","push","plainToInstance","TextMessage","id","createdAt","role","content","actionExecutionMessage","ActionExecutionMessage","name","arguments","JSON","parse","scope","resultMessage","ResultMessage","actionExecutionId","actionName","result","agentStateMessage","AgentStateMessage","threadId","agentName","nodeName","runId","active","state","running","from","CopilotRuntime","actions","remoteEndpointDefinitions","langserve","onBeforeRequest","onAfterRequest","constructor","params","chain","remoteChain","RemoteChain","push","toAction","remoteEndpoints","middleware","processRuntimeRequest","request","serviceAdapter","messages","rawMessages","clientSideActionsInput","threadId","runId","outputMessagesPromise","graphqlContext","forwardedParameters","agentSession","url","processAgentRequest","filter","message","agentStateMessage","inputMessages","convertGqlInputToMessages","serverSideActions","getServerSideActions","serverSideActionsInput","map","action","name","description","jsonSchema","JSON","stringify","actionParametersToJsonSchema","parameters","actionInputs","flattenToolCallsNoDuplicates","properties","eventSource","RuntimeEventSource","result","process","then","outputMessages","_a","catch","_error","actionInputsWithoutAgents","find","serverSideAction","error","console","agentName","nodeName","agent","isLangGraphAgentAction","Error","undefined","stream","langGraphAgentHandler","eventStream$","from","subscribe","next","event","err","complete","agentStates","langserveFunctions","chainPromise","endpoint","type","resolveEndpointType","remoteActions","setupRemoteActions","frontendUrl","configuredActions","EndpointType","LangGraphCloud","toolsByPriority","allTools","allToolNames","tool",
"includes","copilotKitEndpoint","config","CopilotKit","langGraphCloudEndpoint"]}
+ {"version":3,"sources":["../src/lib/runtime/copilot-runtime.ts","../src/service-adapters/conversion.ts"],"sourcesContent":["/**\n * <Callout type=\"info\">\n * This is the reference for the `CopilotRuntime` class. For more information and example code snippets, please see [Concept: Copilot Runtime](/concepts/copilot-runtime).\n * </Callout>\n *\n * ## Usage\n *\n * ```tsx\n * import { CopilotRuntime } from \"@copilotkit/runtime\";\n *\n * const copilotKit = new CopilotRuntime();\n * ```\n */\n\nimport { Action, actionParametersToJsonSchema, Parameter } from \"@copilotkit/shared\";\nimport { CopilotServiceAdapter, RemoteChain, RemoteChainParameters } from \"../../service-adapters\";\nimport { MessageInput } from \"../../graphql/inputs/message.input\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\nimport { RuntimeEventSource } from \"../../service-adapters/events\";\nimport { convertGqlInputToMessages } from \"../../service-adapters/conversion\";\nimport { Message } from \"../../graphql/types/converted\";\nimport { ForwardedParametersInput } from \"../../graphql/inputs/forwarded-parameters.input\";\nimport {\n isLangGraphAgentAction,\n LangGraphAgentAction,\n EndpointType,\n setupRemoteActions,\n EndpointDefinition,\n CopilotKitEndpoint,\n LangGraphCloudEndpoint,\n} from \"./remote-actions\";\nimport { GraphQLContext } from \"../integrations/shared\";\nimport { AgentSessionInput } from \"../../graphql/inputs/agent-session.input\";\nimport { from } from \"rxjs\";\nimport { AgentStateInput } from \"../../graphql/inputs/agent-state.input\";\n\ninterface CopilotRuntimeRequest {\n serviceAdapter: CopilotServiceAdapter;\n messages: MessageInput[];\n actions: ActionInput[];\n agentSession?: AgentSessionInput;\n agentStates?: AgentStateInput[];\n outputMessagesPromise: Promise<Message[]>;\n threadId?: string;\n runId?: string;\n publicApiKey?: string;\n graphqlContext: GraphQLContext;\n forwardedParameters?: ForwardedParametersInput;\n url?: string;\n}\n\ninterface CopilotRuntimeResponse {\n threadId: string;\n runId?: string;\n eventSource: RuntimeEventSource;\n serverSideActions: Action<any>[];\n actionInputsWithoutAgents: ActionInput[];\n}\n\ntype ActionsConfiguration<T extends Parameter[] | [] = []> =\n | Action<T>[]\n | ((ctx: { properties: any; url?: string }) => Action<T>[]);\n\ninterface OnBeforeRequestOptions {\n threadId?: string;\n runId?: string;\n inputMessages: Message[];\n properties: any;\n url?: string;\n}\n\ntype OnBeforeRequestHandler = (options: OnBeforeRequestOptions) => void | Promise<void>;\n\ninterface OnAfterRequestOptions {\n threadId: string;\n runId?: string;\n inputMessages: Message[];\n outputMessages: Message[];\n properties: any;\n url?: string;\n}\n\ntype OnAfterRequestHandler = (options: OnAfterRequestOptions) => void | Promise<void>;\n\ninterface Middleware {\n /**\n * A function that is called before the request is processed.\n */\n onBeforeRequest?: OnBeforeRequestHandler;\n\n /**\n * A function that is called after the request is processed.\n */\n onAfterRequest?: OnAfterRequestHandler;\n}\n\nexport interface CopilotRuntimeConstructorParams<T extends Parameter[] | [] = []> {\n /**\n * Middleware to be used by the runtime.\n *\n * ```ts\n * onBeforeRequest: (options: {\n * threadId?: string;\n * runId?: string;\n * inputMessages: Message[];\n * properties: any;\n * }) => void | Promise<void>;\n * ```\n *\n * ```ts\n * onAfterRequest: (options: {\n * threadId?: string;\n * runId?: string;\n * inputMessages: Message[];\n * outputMessages: 
Message[];\n * properties: any;\n * }) => void | Promise<void>;\n * ```\n */\n middleware?: Middleware;\n\n /*\n * A list of server side actions that can be executed.\n */\n actions?: ActionsConfiguration<T>;\n\n /*\n * Deprecated: Use `remoteEndpoints`.\n */\n remoteActions?: CopilotKitEndpoint[];\n\n /*\n * A list of remote actions that can be executed.\n */\n remoteEndpoints?: EndpointDefinition[];\n\n /*\n * An array of LangServer URLs.\n */\n langserve?: RemoteChainParameters[];\n}\n\nexport class CopilotRuntime<const T extends Parameter[] | [] = []> {\n public actions: ActionsConfiguration<T>;\n private remoteEndpointDefinitions: EndpointDefinition[];\n private langserve: Promise<Action<any>>[] = [];\n private onBeforeRequest?: OnBeforeRequestHandler;\n private onAfterRequest?: OnAfterRequestHandler;\n\n constructor(params?: CopilotRuntimeConstructorParams<T>) {\n this.actions = params?.actions || [];\n\n for (const chain of params?.langserve || []) {\n const remoteChain = new RemoteChain(chain);\n this.langserve.push(remoteChain.toAction());\n }\n\n this.remoteEndpointDefinitions = params?.remoteEndpoints || [];\n\n this.onBeforeRequest = params?.middleware?.onBeforeRequest;\n this.onAfterRequest = params?.middleware?.onAfterRequest;\n }\n\n async processRuntimeRequest(request: CopilotRuntimeRequest): Promise<CopilotRuntimeResponse> {\n const {\n serviceAdapter,\n messages: rawMessages,\n actions: clientSideActionsInput,\n threadId,\n runId,\n outputMessagesPromise,\n graphqlContext,\n forwardedParameters,\n agentSession,\n url,\n } = request;\n\n if (agentSession) {\n return this.processAgentRequest(request);\n }\n\n const messages = rawMessages.filter((message) => !message.agentStateMessage);\n\n const inputMessages = convertGqlInputToMessages(messages);\n const serverSideActions = await this.getServerSideActions(request);\n\n const serverSideActionsInput: ActionInput[] = serverSideActions.map((action) => ({\n name: action.name,\n description: action.description,\n jsonSchema: JSON.stringify(actionParametersToJsonSchema(action.parameters)),\n }));\n\n const actionInputs = flattenToolCallsNoDuplicates([\n ...serverSideActionsInput,\n ...clientSideActionsInput,\n ]);\n\n await this.onBeforeRequest?.({\n threadId,\n runId,\n inputMessages,\n properties: graphqlContext.properties,\n url,\n });\n\n try {\n const eventSource = new RuntimeEventSource();\n\n const result = await serviceAdapter.process({\n messages: inputMessages,\n actions: actionInputs,\n threadId,\n runId,\n eventSource,\n forwardedParameters,\n });\n\n outputMessagesPromise\n .then((outputMessages) => {\n this.onAfterRequest?.({\n threadId: result.threadId,\n runId: result.runId,\n inputMessages,\n outputMessages,\n properties: graphqlContext.properties,\n url,\n });\n })\n .catch((_error) => {});\n\n return {\n threadId: result.threadId,\n runId: result.runId,\n eventSource,\n serverSideActions,\n actionInputsWithoutAgents: actionInputs.filter(\n (action) =>\n // TODO-AGENTS: do not exclude ALL server side actions\n !serverSideActions.find((serverSideAction) => serverSideAction.name == action.name),\n // !isLangGraphAgentAction(\n // serverSideActions.find((serverSideAction) => serverSideAction.name == action.name),\n // ),\n ),\n };\n } catch (error) {\n console.error(\"Error getting response:\", error);\n throw error;\n }\n }\n\n private async processAgentRequest(\n request: CopilotRuntimeRequest,\n ): Promise<CopilotRuntimeResponse> {\n const { messages: rawMessages, outputMessagesPromise, graphqlContext, 
agentSession } = request;\n const { threadId, agentName, nodeName } = agentSession;\n const serverSideActions = await this.getServerSideActions(request);\n\n const messages = convertGqlInputToMessages(rawMessages);\n\n const agent = serverSideActions.find(\n (action) => action.name === agentName && isLangGraphAgentAction(action),\n ) as LangGraphAgentAction;\n\n if (!agent) {\n throw new Error(`Agent ${agentName} not found`);\n }\n\n const serverSideActionsInput: ActionInput[] = serverSideActions\n .filter((action) => !isLangGraphAgentAction(action))\n .map((action) => ({\n name: action.name,\n description: action.description,\n jsonSchema: JSON.stringify(actionParametersToJsonSchema(action.parameters)),\n }));\n\n const actionInputsWithoutAgents = flattenToolCallsNoDuplicates([\n ...serverSideActionsInput,\n ...request.actions,\n ]);\n\n await this.onBeforeRequest?.({\n threadId,\n runId: undefined,\n inputMessages: messages,\n properties: graphqlContext.properties,\n });\n try {\n const eventSource = new RuntimeEventSource();\n const stream = await agent.langGraphAgentHandler({\n name: agentName,\n threadId,\n nodeName,\n actionInputsWithoutAgents,\n });\n\n eventSource.stream(async (eventStream$) => {\n from(stream).subscribe({\n next: (event) => eventStream$.next(event),\n error: (err) => console.error(\"Error in stream\", err),\n complete: () => eventStream$.complete(),\n });\n });\n\n outputMessagesPromise\n .then((outputMessages) => {\n this.onAfterRequest?.({\n threadId,\n runId: undefined,\n inputMessages: messages,\n outputMessages,\n properties: graphqlContext.properties,\n });\n })\n .catch((_error) => {});\n\n return {\n threadId,\n runId: undefined,\n eventSource,\n serverSideActions: [],\n actionInputsWithoutAgents,\n };\n } catch (error) {\n console.error(\"Error getting response:\", error);\n throw error;\n }\n }\n\n private async getServerSideActions(request: CopilotRuntimeRequest): Promise<Action<any>[]> {\n const { messages: rawMessages, graphqlContext, agentStates, url } = request;\n const inputMessages = convertGqlInputToMessages(rawMessages);\n const langserveFunctions: Action<any>[] = [];\n\n for (const chainPromise of this.langserve) {\n try {\n const chain = await chainPromise;\n langserveFunctions.push(chain);\n } catch (error) {\n console.error(\"Error loading langserve chain:\", error);\n }\n }\n\n const remoteEndpointDefinitions = this.remoteEndpointDefinitions.map(\n (endpoint) =>\n ({\n ...endpoint,\n type: this.resolveEndpointType(endpoint),\n }) as EndpointDefinition,\n );\n\n const remoteActions = await setupRemoteActions({\n remoteEndpointDefinitions,\n graphqlContext,\n messages: inputMessages,\n agentStates,\n frontendUrl: url,\n });\n\n const configuredActions =\n typeof this.actions === \"function\"\n ? 
this.actions({ properties: graphqlContext.properties, url })\n : this.actions;\n\n return [...configuredActions, ...langserveFunctions, ...remoteActions];\n }\n\n private resolveEndpointType(endpoint: EndpointDefinition) {\n if (\n !endpoint.type &&\n \"langsmithApiKey\" in endpoint &&\n \"deploymentUrl\" in endpoint &&\n \"agents\" in endpoint\n ) {\n return EndpointType.LangGraphCloud;\n }\n\n return endpoint.type;\n }\n}\n\nexport function flattenToolCallsNoDuplicates(toolsByPriority: ActionInput[]): ActionInput[] {\n let allTools: ActionInput[] = [];\n const allToolNames: string[] = [];\n for (const tool of toolsByPriority) {\n if (!allToolNames.includes(tool.name)) {\n allTools.push(tool);\n allToolNames.push(tool.name);\n }\n }\n return allTools;\n}\n\n// The two functions below are \"factory functions\", meant to create the action objects that adhere to the expected interfaces\nexport function copilotKitEndpoint(config: Omit<CopilotKitEndpoint, \"type\">): CopilotKitEndpoint {\n return {\n ...config,\n type: EndpointType.CopilotKit,\n };\n}\n\nexport function langGraphCloudEndpoint(\n config: Omit<LangGraphCloudEndpoint, \"type\">,\n): LangGraphCloudEndpoint {\n return {\n ...config,\n type: EndpointType.LangGraphCloud,\n };\n}\n","import {\n ActionExecutionMessage,\n Message,\n ResultMessage,\n TextMessage,\n AgentStateMessage,\n} from \"../graphql/types/converted\";\nimport { MessageInput } from \"../graphql/inputs/message.input\";\nimport { plainToInstance } from \"class-transformer\";\n\nexport function convertGqlInputToMessages(inputMessages: MessageInput[]): Message[] {\n const messages: Message[] = [];\n\n for (const message of inputMessages) {\n if (message.textMessage) {\n messages.push(\n plainToInstance(TextMessage, {\n id: message.id,\n createdAt: message.createdAt,\n role: message.textMessage.role,\n content: message.textMessage.content,\n }),\n );\n } else if (message.actionExecutionMessage) {\n messages.push(\n plainToInstance(ActionExecutionMessage, {\n id: message.id,\n createdAt: message.createdAt,\n name: message.actionExecutionMessage.name,\n arguments: JSON.parse(message.actionExecutionMessage.arguments),\n scope: message.actionExecutionMessage.scope,\n }),\n );\n } else if (message.resultMessage) {\n messages.push(\n plainToInstance(ResultMessage, {\n id: message.id,\n createdAt: message.createdAt,\n actionExecutionId: message.resultMessage.actionExecutionId,\n actionName: message.resultMessage.actionName,\n result: message.resultMessage.result,\n }),\n );\n } else if (message.agentStateMessage) {\n messages.push(\n plainToInstance(AgentStateMessage, {\n id: message.id,\n threadId: message.agentStateMessage.threadId,\n createdAt: message.createdAt,\n agentName: message.agentStateMessage.agentName,\n nodeName: message.agentStateMessage.nodeName,\n runId: message.agentStateMessage.runId,\n active: message.agentStateMessage.active,\n role: message.agentStateMessage.role,\n state: JSON.parse(message.agentStateMessage.state),\n running: message.agentStateMessage.running,\n }),\n );\n }\n }\n\n return 
messages;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAcA,SAAiBA,oCAA+C;;;ACNhE,SAASC,uBAAuB;AAEzB,SAASC,0BAA0BC,eAA6B;AACrE,QAAMC,WAAsB,CAAA;AAE5B,aAAWC,WAAWF,eAAe;AACnC,QAAIE,QAAQC,aAAa;AACvBF,eAASG,KACPC,gBAAgBC,aAAa;QAC3BC,IAAIL,QAAQK;QACZC,WAAWN,QAAQM;QACnBC,MAAMP,QAAQC,YAAYM;QAC1BC,SAASR,QAAQC,YAAYO;MAC/B,CAAA,CAAA;IAEJ,WAAWR,QAAQS,wBAAwB;AACzCV,eAASG,KACPC,gBAAgBO,wBAAwB;QACtCL,IAAIL,QAAQK;QACZC,WAAWN,QAAQM;QACnBK,MAAMX,QAAQS,uBAAuBE;QACrCC,WAAWC,KAAKC,MAAMd,QAAQS,uBAAuBG,SAAS;QAC9DG,OAAOf,QAAQS,uBAAuBM;MACxC,CAAA,CAAA;IAEJ,WAAWf,QAAQgB,eAAe;AAChCjB,eAASG,KACPC,gBAAgBc,eAAe;QAC7BZ,IAAIL,QAAQK;QACZC,WAAWN,QAAQM;QACnBY,mBAAmBlB,QAAQgB,cAAcE;QACzCC,YAAYnB,QAAQgB,cAAcG;QAClCC,QAAQpB,QAAQgB,cAAcI;MAChC,CAAA,CAAA;IAEJ,WAAWpB,QAAQqB,mBAAmB;AACpCtB,eAASG,KACPC,gBAAgBmB,mBAAmB;QACjCjB,IAAIL,QAAQK;QACZkB,UAAUvB,QAAQqB,kBAAkBE;QACpCjB,WAAWN,QAAQM;QACnBkB,WAAWxB,QAAQqB,kBAAkBG;QACrCC,UAAUzB,QAAQqB,kBAAkBI;QACpCC,OAAO1B,QAAQqB,kBAAkBK;QACjCC,QAAQ3B,QAAQqB,kBAAkBM;QAClCpB,MAAMP,QAAQqB,kBAAkBd;QAChCqB,OAAOf,KAAKC,MAAMd,QAAQqB,kBAAkBO,KAAK;QACjDC,SAAS7B,QAAQqB,kBAAkBQ;MACrC,CAAA,CAAA;IAEJ;EACF;AAEA,SAAO9B;AACT;AApDgBF;;;ADuBhB,SAASiC,YAAY;AA6Gd,IAAMC,iBAAN,MAAMA;EACJC;EACCC;EACAC,YAAoC,CAAA;EACpCC;EACAC;EAERC,YAAYC,QAA6C;AArJ3D;AAsJI,SAAKN,WAAUM,iCAAQN,YAAW,CAAA;AAElC,eAAWO,UAASD,iCAAQJ,cAAa,CAAA,GAAI;AAC3C,YAAMM,cAAc,IAAIC,YAAYF,KAAAA;AACpC,WAAKL,UAAUQ,KAAKF,YAAYG,SAAQ,CAAA;IAC1C;AAEA,SAAKV,6BAA4BK,iCAAQM,oBAAmB,CAAA;AAE5D,SAAKT,mBAAkBG,sCAAQO,eAARP,mBAAoBH;AAC3C,SAAKC,kBAAiBE,sCAAQO,eAARP,mBAAoBF;EAC5C;EAEA,MAAMU,sBAAsBC,SAAiE;AAnK/F;AAoKI,UAAM,EACJC,gBACAC,UAAUC,aACVlB,SAASmB,wBACTC,UACAC,OACAC,uBACAC,gBACAC,qBACAC,cACAC,IAAG,IACDX;AAEJ,QAAIU,cAAc;AAChB,aAAO,KAAKE,oBAAoBZ,OAAAA;IAClC;AAEA,UAAME,WAAWC,YAAYU,OAAO,CAACC,YAAY,CAACA,QAAQC,iBAAiB;AAE3E,UAAMC,gBAAgBC,0BAA0Bf,QAAAA;AAChD,UAAMgB,oBAAoB,MAAM,KAAKC,qBAAqBnB,OAAAA;AAE1D,UAAMoB,yBAAwCF,kBAAkBG,IAAI,CAACC,YAAY;MAC/EC,MAAMD,OAAOC;MACbC,aAAaF,OAAOE;MACpBC,YAAYC,KAAKC,UAAUC,6BAA6BN,OAAOO,UAAU,CAAA;IAC3E,EAAA;AAEA,UAAMC,eAAeC,6BAA6B;SAC7CX;SACAhB;KACJ;AAED,YAAM,UAAKhB,oBAAL,8BAAuB;MAC3BiB;MACAC;MACAU;MACAgB,YAAYxB,eAAewB;MAC3BrB;IACF;AAEA,QAAI;AACF,YAAMsB,cAAc,IAAIC,mBAAAA;AAExB,YAAMC,SAAS,MAAMlC,eAAemC,QAAQ;QAC1ClC,UAAUc;QACV/B,SAAS6C;QACTzB;QACAC;QACA2B;QACAxB;MACF,CAAA;AAEAF,4BACG8B,KAAK,CAACC,mBAAAA;AA1Nf,YAAAC;AA2NU,SAAAA,MAAA,KAAKlD,mBAAL,gBAAAkD,IAAA,WAAsB;UACpBlC,UAAU8B,OAAO9B;UACjBC,OAAO6B,OAAO7B;UACdU;UACAsB;UACAN,YAAYxB,eAAewB;UAC3BrB;QACF;MACF,CAAA,EACC6B,MAAM,CAACC,WAAAA;MAAY,CAAA;AAEtB,aAAO;QACLpC,UAAU8B,OAAO9B;QACjBC,OAAO6B,OAAO7B;QACd2B;QACAf;QACAwB,2BAA2BZ,aAAajB,OACtC,CAACS;;UAEC,CAACJ,kBAAkByB,KAAK,CAACC,qBAAqBA,iBAAiBrB,QAAQD,OAAOC,IAAI;SAAA;MAKxF;IACF,SAASsB,OAAP;AACAC,cAAQD,MAAM,2BAA2BA,KAAAA;AACzC,YAAMA;IACR;EACF;EAEA,MAAcjC,oBACZZ,SACiC;AA5PrC;AA6PI,UAAM,EAAEE,UAAUC,aAAaI,uBAAuBC,gBAAgBE,aAAY,IAAKV;AACvF,UAAM,EAAEK,UAAU0C,WAAWC,SAAQ,IAAKtC;AAC1C,UAAMQ,oBAAoB,MAAM,KAAKC,qBAAqBnB,OAAAA;AAE1D,UAAME,WAAWe,0BAA0Bd,WAAAA;AAE3C,UAAM8C,QAAQ/B,kBAAkByB,KAC9B,CAACrB,WAAWA,OAAOC,SAASwB,aAAaG,uBAAuB5B,MAAAA,CAAAA;AAGlE,QAAI,CAAC2B,OAAO;AACV,YAAM,IAAIE,MAAM,SAASJ,qBAAqB;IAChD;AAEA,UAAM3B,yBAAwCF,kBAC3CL,OAAO,CAACS,WAAW,CAAC4B,uBAAuB5B,MAAAA,CAAAA,EAC3CD,IAAI,CAACC,YAAY;MAChBC,MAAMD,OAAOC;MACbC,aAAaF,OAAOE;MACpBC,YAAYC,KAAKC,UAAUC,6BAA6BN,OAAOO,UAAU,CAAA;IAC3E,EAAA;AAEF,UAAMa,4BAA4BX,6BAA6B;SAC1DX;SACApB,QAAQf;KACZ;AAED,YAAM,UAAKG,oBAAL,8BAAuB;MAC3BiB;MACAC,OAAO8C;MACPpC,eAAed;MACf8B,YAAYxB,eAAewB;IAC7B;AACA,QAAI;AACF,YAAMC,cAAc,IAAIC,mBAAAA;AACxB,YAAMmB,SAAS,MAAMJ,MAAMK,sBAAsB;QAC/C/B,MAAMwB;QACN1C;QACA2C;QACAN;MACF,CAAA;AAEAT,kBAAYoB,OAAO,
OAAOE,iBAAAA;AACxBC,aAAKH,MAAAA,EAAQI,UAAU;UACrBC,MAAM,CAACC,UAAUJ,aAAaG,KAAKC,KAAAA;UACnCd,OAAO,CAACe,QAAQd,QAAQD,MAAM,mBAAmBe,GAAAA;UACjDC,UAAU,MAAMN,aAAaM,SAAQ;QACvC,CAAA;MACF,CAAA;AAEAtD,4BACG8B,KAAK,CAACC,mBAAAA;AAhTf,YAAAC;AAiTU,SAAAA,MAAA,KAAKlD,mBAAL,gBAAAkD,IAAA,WAAsB;UACpBlC;UACAC,OAAO8C;UACPpC,eAAed;UACfoC;UACAN,YAAYxB,eAAewB;QAC7B;MACF,CAAA,EACCQ,MAAM,CAACC,WAAAA;MAAY,CAAA;AAEtB,aAAO;QACLpC;QACAC,OAAO8C;QACPnB;QACAf,mBAAmB,CAAA;QACnBwB;MACF;IACF,SAASG,OAAP;AACAC,cAAQD,MAAM,2BAA2BA,KAAAA;AACzC,YAAMA;IACR;EACF;EAEA,MAAc1B,qBAAqBnB,SAAwD;AACzF,UAAM,EAAEE,UAAUC,aAAaK,gBAAgBsD,aAAanD,IAAG,IAAKX;AACpE,UAAMgB,gBAAgBC,0BAA0Bd,WAAAA;AAChD,UAAM4D,qBAAoC,CAAA;AAE1C,eAAWC,gBAAgB,KAAK7E,WAAW;AACzC,UAAI;AACF,cAAMK,QAAQ,MAAMwE;AACpBD,2BAAmBpE,KAAKH,KAAAA;MAC1B,SAASqD,OAAP;AACAC,gBAAQD,MAAM,kCAAkCA,KAAAA;MAClD;IACF;AAEA,UAAM3D,4BAA4B,KAAKA,0BAA0BmC,IAC/D,CAAC4C,cACE;MACC,GAAGA;MACHC,MAAM,KAAKC,oBAAoBF,QAAAA;IACjC,EAAA;AAGJ,UAAMG,gBAAgB,MAAMC,mBAAmB;MAC7CnF;MACAsB;MACAN,UAAUc;MACV8C;MACAQ,aAAa3D;IACf,CAAA;AAEA,UAAM4D,oBACJ,OAAO,KAAKtF,YAAY,aACpB,KAAKA,QAAQ;MAAE+C,YAAYxB,eAAewB;MAAYrB;IAAI,CAAA,IAC1D,KAAK1B;AAEX,WAAO;SAAIsF;SAAsBR;SAAuBK;;EAC1D;EAEQD,oBAAoBF,UAA8B;AACxD,QACE,CAACA,SAASC,QACV,qBAAqBD,YACrB,mBAAmBA,YACnB,YAAYA,UACZ;AACA,aAAOO,aAAaC;IACtB;AAEA,WAAOR,SAASC;EAClB;AACF;AA5OalF;AA8ON,SAAS+C,6BAA6B2C,iBAA8B;AACzE,MAAIC,WAA0B,CAAA;AAC9B,QAAMC,eAAyB,CAAA;AAC/B,aAAWC,QAAQH,iBAAiB;AAClC,QAAI,CAACE,aAAaE,SAASD,KAAKtD,IAAI,GAAG;AACrCoD,eAAShF,KAAKkF,IAAAA;AACdD,mBAAajF,KAAKkF,KAAKtD,IAAI;IAC7B;EACF;AACA,SAAOoD;AACT;AAVgB5C;AAaT,SAASgD,mBAAmBC,QAAwC;AACzE,SAAO;IACL,GAAGA;IACHd,MAAMM,aAAaS;EACrB;AACF;AALgBF;AAOT,SAASG,uBACdF,QAA4C;AAE5C,SAAO;IACL,GAAGA;IACHd,MAAMM,aAAaC;EACrB;AACF;AAPgBS;","names":["actionParametersToJsonSchema","plainToInstance","convertGqlInputToMessages","inputMessages","messages","message","textMessage","push","plainToInstance","TextMessage","id","createdAt","role","content","actionExecutionMessage","ActionExecutionMessage","name","arguments","JSON","parse","scope","resultMessage","ResultMessage","actionExecutionId","actionName","result","agentStateMessage","AgentStateMessage","threadId","agentName","nodeName","runId","active","state","running","from","CopilotRuntime","actions","remoteEndpointDefinitions","langserve","onBeforeRequest","onAfterRequest","constructor","params","chain","remoteChain","RemoteChain","push","toAction","remoteEndpoints","middleware","processRuntimeRequest","request","serviceAdapter","messages","rawMessages","clientSideActionsInput","threadId","runId","outputMessagesPromise","graphqlContext","forwardedParameters","agentSession","url","processAgentRequest","filter","message","agentStateMessage","inputMessages","convertGqlInputToMessages","serverSideActions","getServerSideActions","serverSideActionsInput","map","action","name","description","jsonSchema","JSON","stringify","actionParametersToJsonSchema","parameters","actionInputs","flattenToolCallsNoDuplicates","properties","eventSource","RuntimeEventSource","result","process","then","outputMessages","_a","catch","_error","actionInputsWithoutAgents","find","serverSideAction","error","console","agentName","nodeName","agent","isLangGraphAgentAction","Error","undefined","stream","langGraphAgentHandler","eventStream$","from","subscribe","next","event","err","complete","agentStates","langserveFunctions","chainPromise","endpoint","type","resolveEndpointType","remoteActions","setupRemoteActions","frontendUrl","configuredActions","EndpointType","LangGraphCloud","toolsByPriority","allTools","allToolNames","tool",
"includes","copilotKitEndpoint","config","CopilotKit","langGraphCloudEndpoint"]}
@@ -981,4 +981,4 @@ export {
  GroqAdapter,
  AnthropicAdapter
  };
- //# sourceMappingURL=chunk-XE3SYKK4.mjs.map
+ //# sourceMappingURL=chunk-BNQDVBQH.mjs.map
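The source map added in the next hunk embeds the full source of the bundled service adapters, including `openai-adapter.ts` (also touched directly under `package/src`, per the file list above) with its `disableParallelToolCalls` option for forcing sequential tool calls. Below is a minimal sketch of enabling it, following the adapter's own doc comment; the API key handling is illustrative only.

```ts
import { CopilotRuntime, OpenAIAdapter } from "@copilotkit/runtime";
import OpenAI from "openai";

const openai = new OpenAI({ apiKey: "<your-api-key>" });

// disableParallelToolCalls forces the model to execute tool calls sequentially,
// so state changes made by one tool call are visible to the next one.
const serviceAdapter = new OpenAIAdapter({
  openai,
  model: "gpt-4o", // matches the adapter's DEFAULT_MODEL; override if needed
  disableParallelToolCalls: true,
});

const copilotKit = new CopilotRuntime();
// Then hand both to your endpoint handler, e.g.
// copilotKit.streamHttpServerResponse(req, res, serviceAdapter) as in the adapter docs.
```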
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/service-adapters/langchain/langserve.ts","../src/service-adapters/openai/openai-adapter.ts","../src/service-adapters/openai/utils.ts","../src/service-adapters/langchain/langchain-adapter.ts","../src/service-adapters/google/google-genai-adapter.ts","../src/service-adapters/openai/openai-assistant-adapter.ts","../src/service-adapters/unify/unify-adapter.ts","../src/service-adapters/groq/groq-adapter.ts","../src/service-adapters/anthropic/anthropic-adapter.ts","../src/service-adapters/anthropic/utils.ts"],"sourcesContent":["import { Parameter, Action } from \"@copilotkit/shared\";\nimport { RemoteRunnable } from \"langchain/runnables/remote\";\n\nexport interface RemoteChainParameters {\n name: string;\n description: string;\n chainUrl: string;\n parameters?: Parameter[];\n parameterType?: \"single\" | \"multi\";\n}\n\nexport class RemoteChain {\n name: string;\n description: string;\n chainUrl: string;\n parameters?: Parameter[];\n parameterType: \"single\" | \"multi\";\n\n constructor(options: RemoteChainParameters) {\n this.name = options.name;\n this.description = options.description;\n this.chainUrl = options.chainUrl;\n this.parameters = options.parameters;\n this.parameterType = options.parameterType || \"multi\";\n }\n\n async toAction(): Promise<Action<any>> {\n if (!this.parameters) {\n await this.inferLangServeParameters();\n }\n\n return {\n name: this.name,\n description: this.description,\n parameters: this.parameters!,\n handler: async (args: any) => {\n const runnable = new RemoteRunnable({ url: this.chainUrl });\n let input: any;\n if (this.parameterType === \"single\") {\n input = args[Object.keys(args)[0]];\n } else {\n input = args;\n }\n return await runnable.invoke(input);\n },\n };\n }\n\n async inferLangServeParameters() {\n const supportedTypes = [\"string\", \"number\", \"boolean\"];\n\n let schemaUrl = this.chainUrl.replace(/\\/+$/, \"\") + \"/input_schema\";\n let schema = await fetch(schemaUrl)\n .then((res) => res.json())\n .catch(() => {\n throw new Error(\"Failed to fetch langserve schema at \" + schemaUrl);\n });\n // for now, don't use json schema, just do a simple conversion\n\n if (supportedTypes.includes(schema.type)) {\n this.parameterType = \"single\";\n this.parameters = [\n {\n name: \"input\",\n type: schema.type,\n description: \"The input to the chain\",\n },\n ];\n } else if (schema.type === \"object\") {\n this.parameterType = \"multi\";\n this.parameters = Object.keys(schema.properties).map((key) => {\n let property = schema.properties[key];\n if (!supportedTypes.includes(property.type)) {\n throw new Error(\"Unsupported schema type\");\n }\n return {\n name: key,\n type: property.type,\n description: property.description || \"\",\n required: schema.required?.includes(key) || false,\n };\n });\n } else {\n throw new Error(\"Unsupported schema type\");\n }\n }\n}\n","/**\n * Copilot Runtime adapter for OpenAI.\n *\n * ## Example\n *\n * ```ts\n * import { CopilotRuntime, OpenAIAdapter } from \"@copilotkit/runtime\";\n * import OpenAI from \"openai\";\n *\n * const copilotKit = new CopilotRuntime();\n *\n * const openai = new OpenAI({\n * organization: \"<your-organization-id>\", // optional\n * apiKey: \"<your-api-key>\",\n * });\n *\n * const serviceAdapter = new OpenAIAdapter({ openai });\n *\n * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);\n * ```\n *\n * ## Example with Azure OpenAI\n *\n * ```ts\n * import { CopilotRuntime, OpenAIAdapter } from \"@copilotkit/runtime\";\n * import 
OpenAI from \"openai\";\n *\n * // The name of your Azure OpenAI Instance.\n * // https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal#create-a-resource\n * const instance = \"<your instance name>\";\n *\n * // Corresponds to your Model deployment within your OpenAI resource, e.g. my-gpt35-16k-deployment\n * // Navigate to the Azure OpenAI Studio to deploy a model.\n * const model = \"<your model>\";\n *\n * const apiKey = process.env[\"AZURE_OPENAI_API_KEY\"];\n * if (!apiKey) {\n * throw new Error(\"The AZURE_OPENAI_API_KEY environment variable is missing or empty.\");\n * }\n *\n * const copilotKit = new CopilotRuntime();\n *\n * const openai = new OpenAI({\n * apiKey,\n * baseURL: `https://${instance}.openai.azure.com/openai/deployments/${model}`,\n * defaultQuery: { \"api-version\": \"2024-04-01-preview\" },\n * defaultHeaders: { \"api-key\": apiKey },\n * });\n *\n * const serviceAdapter = new OpenAIAdapter({ openai });\n *\n * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);\n * ```\n */\nimport OpenAI from \"openai\";\nimport {\n CopilotServiceAdapter,\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport {\n convertActionInputToOpenAITool,\n convertMessageToOpenAIMessage,\n limitMessagesToTokenCount,\n} from \"./utils\";\nimport { randomId } from \"@copilotkit/shared\";\n\nconst DEFAULT_MODEL = \"gpt-4o\";\n\nexport interface OpenAIAdapterParams {\n /**\n * An optional OpenAI instance to use. If not provided, a new instance will be\n * created.\n */\n openai?: OpenAI;\n\n /**\n * The model to use.\n */\n model?: string;\n\n /**\n * Whether to disable parallel tool calls.\n * You can disable parallel tool calls to force the model to execute tool calls sequentially.\n * This is useful if you want to execute tool calls in a specific order so that the state changes\n * introduced by one tool call are visible to the next tool call. (i.e. 
new actions or readables)\n *\n * @default false\n */\n disableParallelToolCalls?: boolean;\n}\n\nexport class OpenAIAdapter implements CopilotServiceAdapter {\n private model: string = DEFAULT_MODEL;\n\n private disableParallelToolCalls: boolean = false;\n private _openai: OpenAI;\n public get openai(): OpenAI {\n return this._openai;\n }\n\n constructor(params?: OpenAIAdapterParams) {\n this._openai = params?.openai || new OpenAI({});\n if (params?.model) {\n this.model = params.model;\n }\n this.disableParallelToolCalls = params?.disableParallelToolCalls || false;\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const {\n threadId,\n model = this.model,\n messages,\n actions,\n eventSource,\n forwardedParameters,\n } = request;\n const tools = actions.map(convertActionInputToOpenAITool);\n\n let openaiMessages = messages.map(convertMessageToOpenAIMessage);\n openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);\n\n let toolChoice: any = forwardedParameters?.toolChoice;\n if (forwardedParameters?.toolChoice === \"function\") {\n toolChoice = {\n type: \"function\",\n function: { name: forwardedParameters.toolChoiceFunctionName },\n };\n }\n\n const stream = this.openai.beta.chat.completions.stream({\n model: model,\n stream: true,\n messages: openaiMessages,\n ...(tools.length > 0 && { tools }),\n ...(forwardedParameters?.maxTokens && { max_tokens: forwardedParameters.maxTokens }),\n ...(forwardedParameters?.stop && { stop: forwardedParameters.stop }),\n ...(toolChoice && { tool_choice: toolChoice }),\n ...(this.disableParallelToolCalls && { parallel_tool_calls: false }),\n });\n\n eventSource.stream(async (eventStream$) => {\n let mode: \"function\" | \"message\" | null = null;\n for await (const chunk of stream) {\n const toolCall = chunk.choices[0].delta.tool_calls?.[0];\n const content = chunk.choices[0].delta.content;\n\n // When switching from message to function or vice versa,\n // send the respective end event.\n // If toolCall?.id is defined, it means a new tool call starts.\n if (mode === \"message\" && toolCall?.id) {\n mode = null;\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\" && (toolCall === undefined || toolCall?.id)) {\n mode = null;\n eventStream$.sendActionExecutionEnd();\n }\n\n // If we send a new message type, send the appropriate start event.\n if (mode === null) {\n if (toolCall?.id) {\n mode = \"function\";\n eventStream$.sendActionExecutionStart(toolCall!.id, toolCall!.function!.name);\n } else if (content) {\n mode = \"message\";\n eventStream$.sendTextMessageStart(chunk.id);\n }\n }\n\n // send the content events\n if (mode === \"message\" && content) {\n eventStream$.sendTextMessageContent(content);\n } else if (mode === \"function\" && toolCall?.function?.arguments) {\n eventStream$.sendActionExecutionArgs(toolCall.function.arguments);\n }\n }\n\n // send the end events\n if (mode === \"message\") {\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\") {\n eventStream$.sendActionExecutionEnd();\n }\n\n eventStream$.complete();\n });\n\n return {\n threadId: threadId || randomId(),\n };\n }\n}\n","import { Message } from \"../../graphql/types/converted\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\nimport {\n ChatCompletionMessageParam,\n ChatCompletionTool,\n ChatCompletionUserMessageParam,\n ChatCompletionAssistantMessageParam,\n ChatCompletionSystemMessageParam,\n} from 
\"openai/resources\";\n\nexport function limitMessagesToTokenCount(\n messages: any[],\n tools: any[],\n model: string,\n maxTokens?: number,\n): any[] {\n maxTokens ||= maxTokensForOpenAIModel(model);\n\n const result: any[] = [];\n const toolsNumTokens = countToolsTokens(model, tools);\n if (toolsNumTokens > maxTokens) {\n throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);\n }\n maxTokens -= toolsNumTokens;\n\n for (const message of messages) {\n if (message.role === \"system\") {\n const numTokens = countMessageTokens(model, message);\n maxTokens -= numTokens;\n\n if (maxTokens < 0) {\n throw new Error(\"Not enough tokens for system message.\");\n }\n }\n }\n\n let cutoff: boolean = false;\n\n const reversedMessages = [...messages].reverse();\n for (const message of reversedMessages) {\n if (message.role === \"system\") {\n result.unshift(message);\n continue;\n } else if (cutoff) {\n continue;\n }\n let numTokens = countMessageTokens(model, message);\n if (maxTokens < numTokens) {\n cutoff = true;\n continue;\n }\n result.unshift(message);\n maxTokens -= numTokens;\n }\n\n return result;\n}\n\nexport function maxTokensForOpenAIModel(model: string): number {\n return maxTokensByModel[model] || DEFAULT_MAX_TOKENS;\n}\n\nconst DEFAULT_MAX_TOKENS = 128000;\n\nconst maxTokensByModel: { [key: string]: number } = {\n // GPT-4\n \"gpt-4o\": 128000,\n \"gpt-4o-2024-05-13\": 128000,\n \"gpt-4-turbo\": 128000,\n \"gpt-4-turbo-2024-04-09\": 128000,\n \"gpt-4-0125-preview\": 128000,\n \"gpt-4-turbo-preview\": 128000,\n \"gpt-4-1106-preview\": 128000,\n \"gpt-4-vision-preview\": 128000,\n \"gpt-4-1106-vision-preview\": 128000,\n \"gpt-4-32k\": 32768,\n \"gpt-4-32k-0613\": 32768,\n \"gpt-4-32k-0314\": 32768,\n \"gpt-4\": 8192,\n \"gpt-4-0613\": 8192,\n \"gpt-4-0314\": 8192,\n\n // GPT-3.5\n \"gpt-3.5-turbo-0125\": 16385,\n \"gpt-3.5-turbo\": 16385,\n \"gpt-3.5-turbo-1106\": 16385,\n \"gpt-3.5-turbo-instruct\": 4096,\n \"gpt-3.5-turbo-16k\": 16385,\n \"gpt-3.5-turbo-0613\": 4096,\n \"gpt-3.5-turbo-16k-0613\": 16385,\n \"gpt-3.5-turbo-0301\": 4097,\n};\n\nfunction countToolsTokens(model: string, tools: any[]): number {\n if (tools.length === 0) {\n return 0;\n }\n const json = JSON.stringify(tools);\n return countTokens(model, json);\n}\n\nfunction countMessageTokens(model: string, message: any): number {\n return countTokens(model, message.content || \"\");\n}\n\nfunction countTokens(model: string, text: string): number {\n return text.length / 3;\n}\n\nexport function convertActionInputToOpenAITool(action: ActionInput): ChatCompletionTool {\n return {\n type: \"function\",\n function: {\n name: action.name,\n description: action.description,\n parameters: JSON.parse(action.jsonSchema),\n },\n };\n}\n\nexport function convertMessageToOpenAIMessage(message: Message): ChatCompletionMessageParam {\n if (message.isTextMessage()) {\n return {\n role: message.role as ChatCompletionUserMessageParam[\"role\"],\n content: message.content,\n } satisfies\n | ChatCompletionUserMessageParam\n | ChatCompletionAssistantMessageParam\n | ChatCompletionSystemMessageParam;\n } else if (message.isActionExecutionMessage()) {\n return {\n role: \"assistant\",\n tool_calls: [\n {\n id: message.id,\n type: \"function\",\n function: {\n name: message.name,\n arguments: JSON.stringify(message.arguments),\n },\n },\n ],\n };\n } else if (message.isResultMessage()) {\n return {\n role: \"tool\",\n content: message.result,\n tool_call_id: message.actionExecutionId,\n };\n }\n}\n\nexport 
function convertSystemMessageToAssistantAPI(message: ChatCompletionMessageParam) {\n return {\n ...message,\n ...(message.role === \"system\" && {\n role: \"assistant\",\n content: \"THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: \" + message.content,\n }),\n };\n}\n","/**\n * Copilot Runtime adapter for LangChain.\n *\n * ## Example\n *\n * ```ts\n * import { CopilotRuntime, LangChainAdapter } from \"@copilotkit/runtime\";\n * import { ChatOpenAI } from \"@langchain/openai\";\n *\n * const copilotKit = new CopilotRuntime();\n *\n * const model = new ChatOpenAI({\n * model: \"gpt-4o\",\n * apiKey: \"<your-api-key>\",\n * });\n *\n * const serviceAdapter = new LangChainAdapter({\n * chainFn: async ({ messages, tools }) => {\n * return model.bindTools(tools).stream(messages);\n * // or optionally enable strict mode\n * // return model.bindTools(tools, { strict: true }).stream(messages);\n * }\n * });\n *\n * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);\n * ```\n *\n * The asynchronous handler function (`chainFn`) can return any of the following:\n *\n * - A simple `string` response\n * - A LangChain stream (`IterableReadableStream`)\n * - A LangChain `BaseMessageChunk` object\n * - A LangChain `AIMessage` object\n */\n\nimport { BaseMessage } from \"@langchain/core/messages\";\nimport { CopilotServiceAdapter } from \"../service-adapter\";\nimport {\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport {\n convertActionInputToLangChainTool,\n convertMessageToLangChainMessage,\n streamLangChainResponse,\n} from \"./utils\";\nimport { DynamicStructuredTool } from \"@langchain/core/tools\";\nimport { LangChainReturnType } from \"./types\";\nimport { randomId } from \"@copilotkit/shared\";\n\ninterface ChainFnParameters {\n model: string;\n messages: BaseMessage[];\n tools: DynamicStructuredTool[];\n threadId?: string;\n runId?: string;\n}\n\ninterface LangChainAdapterOptions {\n /**\n * A function that uses the LangChain API to generate a response.\n */\n chainFn: (parameters: ChainFnParameters) => Promise<LangChainReturnType>;\n}\n\nexport class LangChainAdapter implements CopilotServiceAdapter {\n /**\n * To use LangChain as a backend, provide a handler function to the adapter with your custom LangChain logic.\n */\n constructor(private options: LangChainAdapterOptions) {}\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const { eventSource, model, actions, messages, threadId, runId } = request;\n const result = await this.options.chainFn({\n messages: messages.map(convertMessageToLangChainMessage),\n tools: actions.map(convertActionInputToLangChainTool),\n model,\n threadId,\n runId,\n });\n\n eventSource.stream(async (eventStream$) => {\n await streamLangChainResponse({\n result,\n eventStream$,\n });\n });\n\n return {\n threadId: threadId || randomId(),\n };\n }\n}\n","/**\n * Copilot Runtime adapter for Google Generative AI (e.g. 
Gemini).\n *\n * ## Example\n *\n * ```ts\n * import { CopilotRuntime, GoogleGenerativeAIAdapter } from \"@copilotkit/runtime\";\n * const { GoogleGenerativeAI } = require(\"@google/generative-ai\");\n *\n * const genAI = new GoogleGenerativeAI(process.env[\"GOOGLE_API_KEY\"]);\n *\n * const copilotKit = new CopilotRuntime();\n *\n * const serviceAdapter = new GoogleGenerativeAIAdapter({ model: \"gemini-1.5-pro\" });\n *\n * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);\n * ```\n */\nimport { ChatGoogle } from \"@langchain/google-gauth\";\nimport { LangChainAdapter } from \"../langchain/langchain-adapter\";\n\ninterface GoogleGenerativeAIAdapterOptions {\n /**\n * A custom Google Generative AI model to use.\n */\n model?: string;\n}\n\nexport class GoogleGenerativeAIAdapter extends LangChainAdapter {\n constructor(options?: GoogleGenerativeAIAdapterOptions) {\n super({\n chainFn: async ({ messages, tools }) => {\n const model = new ChatGoogle({\n modelName: options?.model ?? \"gemini-1.5-pro\",\n apiVersion: \"v1beta\",\n }).bindTools(tools);\n return model.stream(messages);\n },\n });\n }\n}\n","/**\n * Copilot Runtime adapter for the OpenAI Assistant API.\n *\n * ## Example\n *\n * ```ts\n * import { CopilotRuntime, OpenAIAssistantAdapter } from \"@copilotkit/runtime\";\n * import OpenAI from \"openai\";\n *\n * const copilotKit = new CopilotRuntime();\n *\n * const openai = new OpenAI({\n * organization: \"<your-organization-id>\",\n * apiKey: \"<your-api-key>\",\n * });\n *\n * const serviceAdapter = new OpenAIAssistantAdapter({\n * openai,\n * assistantId: \"<your-assistant-id>\",\n * codeInterpreterEnabled: true,\n * fileSearchEnabled: true,\n * });\n *\n * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);\n * ```\n */\nimport OpenAI from \"openai\";\nimport {\n CopilotServiceAdapter,\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport { Message, ResultMessage, TextMessage } from \"../../graphql/types/converted\";\nimport {\n convertActionInputToOpenAITool,\n convertMessageToOpenAIMessage,\n convertSystemMessageToAssistantAPI,\n} from \"./utils\";\nimport { RunSubmitToolOutputsStreamParams } from \"openai/resources/beta/threads/runs/runs\";\nimport { AssistantStream } from \"openai/lib/AssistantStream\";\nimport { RuntimeEventSource } from \"../events\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\nimport { AssistantStreamEvent, AssistantTool } from \"openai/resources/beta/assistants\";\nimport { ForwardedParametersInput } from \"../../graphql/inputs/forwarded-parameters.input\";\n\nexport interface OpenAIAssistantAdapterParams {\n /**\n * The ID of the assistant to use.\n */\n assistantId: string;\n\n /**\n * An optional OpenAI instance to use. If not provided, a new instance will be created.\n */\n openai?: OpenAI;\n\n /**\n * Whether to enable code interpretation.\n * @default true\n */\n codeInterpreterEnabled?: boolean;\n\n /**\n * Whether to enable file search.\n * @default true\n */\n fileSearchEnabled?: boolean;\n\n /**\n * Whether to disable parallel tool calls.\n * You can disable parallel tool calls to force the model to execute tool calls sequentially.\n * This is useful if you want to execute tool calls in a specific order so that the state changes\n * introduced by one tool call are visible to the next tool call. (i.e. 
new actions or readables)\n *\n * @default false\n */\n disableParallelToolCalls?: boolean;\n}\n\nexport class OpenAIAssistantAdapter implements CopilotServiceAdapter {\n private openai: OpenAI;\n private codeInterpreterEnabled: boolean;\n private assistantId: string;\n private fileSearchEnabled: boolean;\n private disableParallelToolCalls: boolean;\n\n constructor(params: OpenAIAssistantAdapterParams) {\n this.openai = params.openai || new OpenAI({});\n this.codeInterpreterEnabled = params.codeInterpreterEnabled === false || true;\n this.fileSearchEnabled = params.fileSearchEnabled === false || true;\n this.assistantId = params.assistantId;\n this.disableParallelToolCalls = params?.disableParallelToolCalls || false;\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const { messages, actions, eventSource, runId, forwardedParameters } = request;\n // if we don't have a threadId, create a new thread\n let threadId = request.threadId || (await this.openai.beta.threads.create()).id;\n\n const lastMessage = messages.at(-1);\n\n let nextRunId: string | undefined = undefined;\n\n // submit function outputs\n if (lastMessage.isResultMessage() && runId) {\n nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);\n }\n // submit user message\n else if (lastMessage.isTextMessage()) {\n nextRunId = await this.submitUserMessage(\n threadId,\n messages,\n actions,\n eventSource,\n forwardedParameters,\n );\n }\n // unsupported message\n else {\n throw new Error(\"No actionable message found in the messages\");\n }\n\n return {\n threadId,\n runId: nextRunId,\n };\n }\n\n private async submitToolOutputs(\n threadId: string,\n runId: string,\n messages: Message[],\n eventSource: RuntimeEventSource,\n ) {\n let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);\n if (!run.required_action) {\n throw new Error(\"No tool outputs required\");\n }\n\n // get the required tool call ids\n const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map(\n (toolCall) => toolCall.id,\n );\n\n // search for these tool calls\n const resultMessages = messages.filter(\n (message) => message.isResultMessage() && toolCallsIds.includes(message.actionExecutionId),\n ) as ResultMessage[];\n\n if (toolCallsIds.length != resultMessages.length) {\n throw new Error(\"Number of function results does not match the number of tool calls\");\n }\n\n // submit the tool outputs\n const toolOutputs: RunSubmitToolOutputsStreamParams.ToolOutput[] = resultMessages.map(\n (message) => {\n return {\n tool_call_id: message.actionExecutionId,\n output: message.result,\n };\n },\n );\n\n const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {\n tool_outputs: toolOutputs,\n ...(this.disableParallelToolCalls && { parallel_tool_calls: false }),\n });\n\n await this.streamResponse(stream, eventSource);\n return runId;\n }\n\n private async submitUserMessage(\n threadId: string,\n messages: Message[],\n actions: ActionInput[],\n eventSource: RuntimeEventSource,\n forwardedParameters: ForwardedParametersInput,\n ) {\n messages = [...messages];\n\n // get the instruction message\n const instructionsMessage = messages.shift();\n const instructions = instructionsMessage.isTextMessage() ? 
instructionsMessage.content : \"\";\n\n // get the latest user message\n const userMessage = messages\n .map(convertMessageToOpenAIMessage)\n .map(convertSystemMessageToAssistantAPI)\n .at(-1);\n\n if (userMessage.role !== \"user\") {\n throw new Error(\"No user message found\");\n }\n\n // create a new message on the thread\n await this.openai.beta.threads.messages.create(threadId, {\n role: \"user\",\n content: userMessage.content,\n });\n\n const openaiTools = actions.map(convertActionInputToOpenAITool);\n\n const tools = [\n ...openaiTools,\n ...(this.codeInterpreterEnabled ? [{ type: \"code_interpreter\" } as AssistantTool] : []),\n ...(this.fileSearchEnabled ? [{ type: \"file_search\" } as AssistantTool] : []),\n ];\n\n // run the thread\n let stream = this.openai.beta.threads.runs.stream(threadId, {\n assistant_id: this.assistantId,\n instructions,\n tools: tools,\n ...(forwardedParameters?.maxTokens && {\n max_completion_tokens: forwardedParameters.maxTokens,\n }),\n ...(this.disableParallelToolCalls && { parallel_tool_calls: false }),\n });\n\n await this.streamResponse(stream, eventSource);\n\n return getRunIdFromStream(stream);\n }\n\n private async streamResponse(stream: AssistantStream, eventSource: RuntimeEventSource) {\n eventSource.stream(async (eventStream$) => {\n let inFunctionCall = false;\n\n for await (const chunk of stream) {\n switch (chunk.event) {\n case \"thread.message.created\":\n if (inFunctionCall) {\n eventStream$.sendActionExecutionEnd();\n }\n eventStream$.sendTextMessageStart(chunk.data.id);\n break;\n case \"thread.message.delta\":\n if (chunk.data.delta.content?.[0].type === \"text\") {\n eventStream$.sendTextMessageContent(chunk.data.delta.content?.[0].text.value);\n }\n break;\n case \"thread.message.completed\":\n eventStream$.sendTextMessageEnd();\n break;\n case \"thread.run.step.delta\":\n let toolCallId: string | undefined;\n let toolCallName: string | undefined;\n let toolCallArgs: string | undefined;\n if (\n chunk.data.delta.step_details.type === \"tool_calls\" &&\n chunk.data.delta.step_details.tool_calls?.[0].type === \"function\"\n ) {\n toolCallId = chunk.data.delta.step_details.tool_calls?.[0].id;\n toolCallName = chunk.data.delta.step_details.tool_calls?.[0].function.name;\n toolCallArgs = chunk.data.delta.step_details.tool_calls?.[0].function.arguments;\n }\n\n if (toolCallName && toolCallId) {\n if (inFunctionCall) {\n eventStream$.sendActionExecutionEnd();\n }\n inFunctionCall = true;\n eventStream$.sendActionExecutionStart(toolCallId, toolCallName);\n } else if (toolCallArgs) {\n eventStream$.sendActionExecutionArgs(toolCallArgs);\n }\n break;\n }\n }\n if (inFunctionCall) {\n eventStream$.sendActionExecutionEnd();\n }\n eventStream$.complete();\n });\n }\n}\n\nfunction getRunIdFromStream(stream: AssistantStream): Promise<string> {\n return new Promise<string>((resolve, reject) => {\n let runIdGetter = (event: AssistantStreamEvent) => {\n if (event.event === \"thread.run.created\") {\n const runId = event.data.id;\n stream.off(\"event\", runIdGetter);\n resolve(runId);\n }\n };\n stream.on(\"event\", runIdGetter);\n });\n}\n","/**\n * CopilotKit Adapter for Unify\n *\n * <RequestExample>\n * ```jsx CopilotRuntime Example\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(req, new UnifyAdapter());\n * ```\n * </RequestExample>\n *\n * You can easily set the model to use by passing it to the constructor.\n * ```jsx\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(\n * req,\n * new 
UnifyAdapter({ model: \"llama-3-8b-chat@fireworks-ai\" }),\n * );\n * ```\n */\nimport { TextMessage } from \"../../graphql/types/converted\";\nimport {\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n CopilotServiceAdapter,\n} from \"../service-adapter\";\nimport OpenAI from \"openai\";\nimport { randomId } from \"@copilotkit/shared\";\nimport { convertActionInputToOpenAITool, convertMessageToOpenAIMessage } from \"../openai/utils\";\n\nexport interface UnifyAdapterParams {\n apiKey?: string;\n model: string;\n}\n\nexport class UnifyAdapter implements CopilotServiceAdapter {\n private apiKey: string;\n private model: string;\n private start: boolean;\n\n constructor(options?: UnifyAdapterParams) {\n if (options?.apiKey) {\n this.apiKey = options.apiKey;\n } else {\n this.apiKey = \"UNIFY_API_KEY\";\n }\n this.model = options?.model;\n this.start = true;\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const tools = request.actions.map(convertActionInputToOpenAITool);\n const openai = new OpenAI({\n apiKey: this.apiKey,\n baseURL: \"https://api.unify.ai/v0/\",\n });\n\n const messages = request.messages.map(convertMessageToOpenAIMessage);\n\n const stream = await openai.chat.completions.create({\n model: this.model,\n messages: messages,\n stream: true,\n ...(tools.length > 0 && { tools }),\n });\n\n let model = null;\n request.eventSource.stream(async (eventStream$) => {\n let mode: \"function\" | \"message\" | null = null;\n for await (const chunk of stream) {\n if (this.start) {\n model = chunk.model;\n eventStream$.sendTextMessageStart(randomId());\n eventStream$.sendTextMessageContent(`Model used: ${model}\\n`);\n eventStream$.sendTextMessageEnd();\n this.start = false;\n }\n const toolCall = chunk.choices[0].delta.tool_calls?.[0];\n const content = chunk.choices[0].delta.content;\n\n // When switching from message to function or vice versa,\n // send the respective end event.\n // If toolCall?.id is defined, it means a new tool call starts.\n if (mode === \"message\" && toolCall?.id) {\n mode = null;\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\" && (toolCall === undefined || toolCall?.id)) {\n mode = null;\n eventStream$.sendActionExecutionEnd();\n }\n\n // If we send a new message type, send the appropriate start event.\n if (mode === null) {\n if (toolCall?.id) {\n mode = \"function\";\n eventStream$.sendActionExecutionStart(toolCall!.id, toolCall!.function!.name);\n } else if (content) {\n mode = \"message\";\n eventStream$.sendTextMessageStart(chunk.id);\n }\n }\n\n // send the content events\n if (mode === \"message\" && content) {\n eventStream$.sendTextMessageContent(content);\n } else if (mode === \"function\" && toolCall?.function?.arguments) {\n eventStream$.sendActionExecutionArgs(toolCall.function.arguments);\n }\n }\n\n // send the end events\n if (mode === \"message\") {\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\") {\n eventStream$.sendActionExecutionEnd();\n }\n\n eventStream$.complete();\n });\n\n return {\n threadId: request.threadId || randomId(),\n };\n }\n}\n","/**\n * Copilot Runtime adapter for Groq.\n *\n * ## Example\n *\n * ```ts\n * import { CopilotRuntime, GroqAdapter } from \"@copilotkit/runtime\";\n * import { Groq } from \"groq-sdk\";\n *\n * const groq = new Groq({ apiKey: process.env[\"GROQ_API_KEY\"] });\n *\n * const copilotKit = new CopilotRuntime();\n *\n * const serviceAdapter = new 
GroqAdapter({ groq, model: \"<model-name>\" });\n *\n * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);\n * ```\n */\nimport { Groq } from \"groq-sdk\";\nimport {\n CopilotServiceAdapter,\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport {\n convertActionInputToOpenAITool,\n convertMessageToOpenAIMessage,\n limitMessagesToTokenCount,\n} from \"../openai/utils\";\nimport { randomId } from \"@copilotkit/shared\";\n\nconst DEFAULT_MODEL = \"llama3-groq-70b-8192-tool-use-preview\";\n\nexport interface GroqAdapterParams {\n /**\n * An optional Groq instance to use.\n */\n groq?: Groq;\n\n /**\n * The model to use.\n */\n model?: string;\n\n /**\n * Whether to disable parallel tool calls.\n * You can disable parallel tool calls to force the model to execute tool calls sequentially.\n * This is useful if you want to execute tool calls in a specific order so that the state changes\n * introduced by one tool call are visible to the next tool call. (i.e. new actions or readables)\n *\n * @default false\n */\n disableParallelToolCalls?: boolean;\n}\n\nexport class GroqAdapter implements CopilotServiceAdapter {\n private model: string = DEFAULT_MODEL;\n\n private disableParallelToolCalls: boolean = false;\n private _groq: Groq;\n public get groq(): Groq {\n return this._groq;\n }\n\n constructor(params?: GroqAdapterParams) {\n this._groq = params?.groq || new Groq({});\n if (params?.model) {\n this.model = params.model;\n }\n this.disableParallelToolCalls = params?.disableParallelToolCalls || false;\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const {\n threadId,\n model = this.model,\n messages,\n actions,\n eventSource,\n forwardedParameters,\n } = request;\n const tools = actions.map(convertActionInputToOpenAITool);\n\n let openaiMessages = messages.map(convertMessageToOpenAIMessage);\n openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);\n\n let toolChoice: any = forwardedParameters?.toolChoice;\n if (forwardedParameters?.toolChoice === \"function\") {\n toolChoice = {\n type: \"function\",\n function: { name: forwardedParameters.toolChoiceFunctionName },\n };\n }\n const stream = await this.groq.chat.completions.create({\n model: model,\n stream: true,\n messages: openaiMessages,\n ...(tools.length > 0 && { tools }),\n ...(forwardedParameters?.maxTokens && {\n max_tokens: forwardedParameters.maxTokens,\n }),\n ...(forwardedParameters?.stop && { stop: forwardedParameters.stop }),\n ...(toolChoice && { tool_choice: toolChoice }),\n ...(this.disableParallelToolCalls && { parallel_tool_calls: false }),\n });\n\n eventSource.stream(async (eventStream$) => {\n let mode: \"function\" | \"message\" | null = null;\n for await (const chunk of stream) {\n const toolCall = chunk.choices[0].delta.tool_calls?.[0];\n const content = chunk.choices[0].delta.content;\n\n // When switching from message to function or vice versa,\n // send the respective end event.\n // If toolCall?.id is defined, it means a new tool call starts.\n if (mode === \"message\" && toolCall?.id) {\n mode = null;\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\" && (toolCall === undefined || toolCall?.id)) {\n mode = null;\n eventStream$.sendActionExecutionEnd();\n }\n\n // If we send a new message type, send the appropriate start event.\n if (mode === null) {\n if (toolCall?.id) {\n mode = \"function\";\n 
eventStream$.sendActionExecutionStart(toolCall!.id, toolCall!.function!.name);\n } else if (content) {\n mode = \"message\";\n eventStream$.sendTextMessageStart(chunk.id);\n }\n }\n\n // send the content events\n if (mode === \"message\" && content) {\n eventStream$.sendTextMessageContent(content);\n } else if (mode === \"function\" && toolCall?.function?.arguments) {\n eventStream$.sendActionExecutionArgs(toolCall.function.arguments);\n }\n }\n\n // send the end events\n if (mode === \"message\") {\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\") {\n eventStream$.sendActionExecutionEnd();\n }\n\n eventStream$.complete();\n });\n\n return {\n threadId: threadId || randomId(),\n };\n }\n}\n","/**\n * Copilot Runtime adapter for Anthropic.\n *\n * ## Example\n *\n * ```ts\n * import { CopilotRuntime, AnthropicAdapter } from \"@copilotkit/runtime\";\n * import Anthropic from \"@anthropic-ai/sdk\";\n *\n * const copilotKit = new CopilotRuntime();\n *\n * const anthropic = new Anthropic({\n * apiKey: \"<your-api-key>\",\n * });\n *\n * const serviceAdapter = new AnthropicAdapter({ anthropic });\n *\n * return copilotKit.streamHttpServerResponse(req, res, serviceAdapter);\n * ```\n */\nimport Anthropic from \"@anthropic-ai/sdk\";\nimport {\n CopilotServiceAdapter,\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport {\n convertActionInputToAnthropicTool,\n convertMessageToAnthropicMessage,\n groupAnthropicMessagesByRole,\n limitMessagesToTokenCount,\n} from \"./utils\";\n\nimport { randomId } from \"@copilotkit/shared\";\nimport { TextMessage } from \"../../graphql/types/converted\";\n\nconst DEFAULT_MODEL = \"claude-3-sonnet-20240229\";\n\nexport interface AnthropicAdapterParams {\n /**\n * An optional Anthropic instance to use. If not provided, a new instance will be\n * created.\n */\n anthropic?: Anthropic;\n\n /**\n * The model to use.\n */\n model?: string;\n}\n\nexport class AnthropicAdapter implements CopilotServiceAdapter {\n private model: string = DEFAULT_MODEL;\n\n private _anthropic: Anthropic;\n public get anthropic(): Anthropic {\n return this._anthropic;\n }\n\n constructor(params?: AnthropicAdapterParams) {\n this._anthropic = params?.anthropic || new Anthropic({});\n if (params?.model) {\n this.model = params.model;\n }\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const {\n threadId,\n model = this.model,\n messages: rawMessages,\n actions,\n eventSource,\n forwardedParameters,\n } = request;\n const tools = actions.map(convertActionInputToAnthropicTool);\n\n const messages = [...rawMessages];\n\n // get the instruction message\n const instructionsMessage = messages.shift();\n const instructions = instructionsMessage.isTextMessage() ? 
instructionsMessage.content : \"\";\n\n let anthropicMessages = messages.map(convertMessageToAnthropicMessage);\n anthropicMessages = limitMessagesToTokenCount(anthropicMessages, tools, model);\n anthropicMessages = groupAnthropicMessagesByRole(anthropicMessages);\n\n let toolChoice: any = forwardedParameters?.toolChoice;\n if (forwardedParameters?.toolChoice === \"function\") {\n toolChoice = {\n type: \"tool\",\n name: forwardedParameters.toolChoiceFunctionName,\n };\n }\n\n const stream = this.anthropic.messages.create({\n system: instructions,\n model: this.model,\n messages: anthropicMessages,\n max_tokens: forwardedParameters?.maxTokens || 1024,\n ...(tools.length > 0 && { tools }),\n ...(toolChoice && { tool_choice: toolChoice }),\n stream: true,\n });\n\n eventSource.stream(async (eventStream$) => {\n let mode: \"function\" | \"message\" | null = null;\n let didOutputText = false;\n let currentMessageId = randomId();\n let currentToolCallId = randomId();\n let filterThinkingTextBuffer = new FilterThinkingTextBuffer();\n\n for await (const chunk of await stream) {\n if (chunk.type === \"message_start\") {\n currentMessageId = chunk.message.id;\n } else if (chunk.type === \"content_block_start\") {\n if (chunk.content_block.type === \"text\") {\n didOutputText = false;\n filterThinkingTextBuffer.reset();\n mode = \"message\";\n } else if (chunk.content_block.type === \"tool_use\") {\n currentToolCallId = chunk.content_block.id;\n eventStream$.sendActionExecutionStart(currentToolCallId, chunk.content_block.name);\n mode = \"function\";\n }\n } else if (chunk.type === \"content_block_delta\") {\n if (chunk.delta.type === \"text_delta\") {\n const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);\n if (text.length > 0) {\n if (!didOutputText) {\n eventStream$.sendTextMessageStart(currentMessageId);\n didOutputText = true;\n }\n eventStream$.sendTextMessageContent(text);\n }\n } else if (chunk.delta.type === \"input_json_delta\") {\n eventStream$.sendActionExecutionArgs(chunk.delta.partial_json);\n }\n } else if (chunk.type === \"content_block_stop\") {\n if (mode === \"message\") {\n if (didOutputText) {\n eventStream$.sendTextMessageEnd();\n }\n } else if (mode === \"function\") {\n eventStream$.sendActionExecutionEnd();\n }\n }\n }\n\n eventStream$.complete();\n });\n\n return {\n threadId: threadId || randomId(),\n };\n }\n}\n\nconst THINKING_TAG = \"<thinking>\";\nconst THINKING_TAG_END = \"</thinking>\";\n\nclass FilterThinkingTextBuffer {\n private buffer: string;\n private didFilterThinkingTag: boolean = false;\n\n constructor() {\n this.buffer = \"\";\n }\n\n onTextChunk(text: string): string {\n this.buffer += text;\n if (this.didFilterThinkingTag) {\n return text;\n }\n const potentialTag = this.buffer.slice(0, THINKING_TAG.length);\n if (THINKING_TAG.startsWith(potentialTag)) {\n if (this.buffer.includes(THINKING_TAG_END)) {\n const end = this.buffer.indexOf(THINKING_TAG_END);\n const filteredText = this.buffer.slice(end + THINKING_TAG_END.length);\n this.buffer = filteredText;\n this.didFilterThinkingTag = true;\n return filteredText;\n } else {\n return \"\";\n }\n }\n return text;\n }\n\n reset() {\n this.buffer = \"\";\n this.didFilterThinkingTag = false;\n }\n}\n","import {\n ActionExecutionMessage,\n Message,\n ResultMessage,\n TextMessage,\n} from \"../../graphql/types/converted\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\nimport { Anthropic } from \"@anthropic-ai/sdk\";\n\nexport function limitMessagesToTokenCount(\n messages: 
any[],\n tools: any[],\n model: string,\n maxTokens?: number,\n): any[] {\n maxTokens ||= MAX_TOKENS;\n\n const result: any[] = [];\n const toolsNumTokens = countToolsTokens(model, tools);\n if (toolsNumTokens > maxTokens) {\n throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);\n }\n maxTokens -= toolsNumTokens;\n\n for (const message of messages) {\n if (message.role === \"system\") {\n const numTokens = countMessageTokens(model, message);\n maxTokens -= numTokens;\n\n if (maxTokens < 0) {\n throw new Error(\"Not enough tokens for system message.\");\n }\n }\n }\n\n let cutoff: boolean = false;\n\n const reversedMessages = [...messages].reverse();\n for (const message of reversedMessages) {\n if (message.role === \"system\") {\n result.unshift(message);\n continue;\n } else if (cutoff) {\n continue;\n }\n let numTokens = countMessageTokens(model, message);\n if (maxTokens < numTokens) {\n cutoff = true;\n continue;\n }\n result.unshift(message);\n maxTokens -= numTokens;\n }\n\n return result;\n}\n\nconst MAX_TOKENS = 128000;\n\nfunction countToolsTokens(model: string, tools: any[]): number {\n if (tools.length === 0) {\n return 0;\n }\n const json = JSON.stringify(tools);\n return countTokens(model, json);\n}\n\nfunction countMessageTokens(model: string, message: any): number {\n return countTokens(model, JSON.stringify(message.content) || \"\");\n}\n\nfunction countTokens(model: string, text: string): number {\n return text.length / 3;\n}\n\nexport function convertActionInputToAnthropicTool(action: ActionInput): Anthropic.Messages.Tool {\n return {\n name: action.name,\n description: action.description,\n input_schema: JSON.parse(action.jsonSchema),\n };\n}\n\nexport function convertMessageToAnthropicMessage(\n message: Message,\n): Anthropic.Messages.MessageParam {\n if (message.isTextMessage()) {\n if (message.role === \"system\") {\n return {\n role: \"assistant\",\n content: [\n { type: \"text\", text: \"THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: \" + message.content },\n ],\n };\n } else {\n return {\n role: message.role === \"user\" ? 
\"user\" : \"assistant\",\n content: [{ type: \"text\", text: message.content }],\n };\n }\n } else if (message.isActionExecutionMessage()) {\n return {\n role: \"assistant\",\n content: [\n {\n id: message.id,\n type: \"tool_use\",\n input: message.arguments,\n name: message.name,\n },\n ],\n };\n } else if (message.isResultMessage()) {\n return {\n role: \"user\",\n content: [\n {\n type: \"tool_result\",\n content: message.result,\n tool_use_id: message.actionExecutionId,\n },\n ],\n };\n }\n}\n\nexport function groupAnthropicMessagesByRole(\n messageParams: Anthropic.Messages.MessageParam[],\n): Anthropic.Messages.MessageParam[] {\n return messageParams.reduce((acc, message) => {\n const lastGroup = acc[acc.length - 1];\n\n if (lastGroup && lastGroup.role === message.role) {\n lastGroup.content = lastGroup.content.concat(message.content as any);\n } else {\n acc.push({\n role: message.role,\n content: [...(message.content as any)],\n });\n }\n\n return acc;\n }, [] as Anthropic.Messages.MessageParam[]);\n}\n"],"mappings":";;;;;;;;;;AACA,SAASA,sBAAsB;AAUxB,IAAMC,cAAN,MAAMA;EACXC;EACAC;EACAC;EACAC;EACAC;EAEAC,YAAYC,SAAgC;AAC1C,SAAKN,OAAOM,QAAQN;AACpB,SAAKC,cAAcK,QAAQL;AAC3B,SAAKC,WAAWI,QAAQJ;AACxB,SAAKC,aAAaG,QAAQH;AAC1B,SAAKC,gBAAgBE,QAAQF,iBAAiB;EAChD;EAEA,MAAMG,WAAiC;AACrC,QAAI,CAAC,KAAKJ,YAAY;AACpB,YAAM,KAAKK,yBAAwB;IACrC;AAEA,WAAO;MACLR,MAAM,KAAKA;MACXC,aAAa,KAAKA;MAClBE,YAAY,KAAKA;MACjBM,SAAS,OAAOC,SAAAA;AACd,cAAMC,WAAW,IAAIC,eAAe;UAAEC,KAAK,KAAKX;QAAS,CAAA;AACzD,YAAIY;AACJ,YAAI,KAAKV,kBAAkB,UAAU;AACnCU,kBAAQJ,KAAKK,OAAOC,KAAKN,IAAAA,EAAM,CAAA,CAAE;QACnC,OAAO;AACLI,kBAAQJ;QACV;AACA,eAAO,MAAMC,SAASM,OAAOH,KAAAA;MAC/B;IACF;EACF;EAEA,MAAMN,2BAA2B;AAC/B,UAAMU,iBAAiB;MAAC;MAAU;MAAU;;AAE5C,QAAIC,YAAY,KAAKjB,SAASkB,QAAQ,QAAQ,EAAA,IAAM;AACpD,QAAIC,SAAS,MAAMC,MAAMH,SAAAA,EACtBI,KAAK,CAACC,QAAQA,IAAIC,KAAI,CAAA,EACtBC,MAAM,MAAA;AACL,YAAM,IAAIC,MAAM,yCAAyCR,SAAAA;IAC3D,CAAA;AAGF,QAAID,eAAeU,SAASP,OAAOQ,IAAI,GAAG;AACxC,WAAKzB,gBAAgB;AACrB,WAAKD,aAAa;QAChB;UACEH,MAAM;UACN6B,MAAMR,OAAOQ;UACb5B,aAAa;QACf;;IAEJ,WAAWoB,OAAOQ,SAAS,UAAU;AACnC,WAAKzB,gBAAgB;AACrB,WAAKD,aAAaY,OAAOC,KAAKK,OAAOS,UAAU,EAAEC,IAAI,CAACC,QAAAA;AArE5D;AAsEQ,YAAIC,WAAWZ,OAAOS,WAAWE,GAAAA;AACjC,YAAI,CAACd,eAAeU,SAASK,SAASJ,IAAI,GAAG;AAC3C,gBAAM,IAAIF,MAAM,yBAAA;QAClB;AACA,eAAO;UACL3B,MAAMgC;UACNH,MAAMI,SAASJ;UACf5B,aAAagC,SAAShC,eAAe;UACrCiC,YAAUb,YAAOa,aAAPb,mBAAiBO,SAASI,SAAQ;QAC9C;MACF,CAAA;IACF,OAAO;AACL,YAAM,IAAIL,MAAM,yBAAA;IAClB;EACF;AACF;AA3Ea5B;;;AC2Cb,OAAOoC,YAAY;;;AC5CZ,SAASC,0BACdC,UACAC,OACAC,OACAC,WAAkB;AAElBA,4BAAcC,wBAAwBF,KAAAA;AAEtC,QAAMG,SAAgB,CAAA;AACtB,QAAMC,iBAAiBC,iBAAiBL,OAAOD,KAAAA;AAC/C,MAAIK,iBAAiBH,WAAW;AAC9B,UAAM,IAAIK,MAAM,4CAA4CF,oBAAoBH,WAAW;EAC7F;AACAA,eAAaG;AAEb,aAAWG,WAAWT,UAAU;AAC9B,QAAIS,QAAQC,SAAS,UAAU;AAC7B,YAAMC,YAAYC,mBAAmBV,OAAOO,OAAAA;AAC5CN,mBAAaQ;AAEb,UAAIR,YAAY,GAAG;AACjB,cAAM,IAAIK,MAAM,uCAAA;MAClB;IACF;EACF;AAEA,MAAIK,SAAkB;AAEtB,QAAMC,mBAAmB;OAAId;IAAUe,QAAO;AAC9C,aAAWN,WAAWK,kBAAkB;AACtC,QAAIL,QAAQC,SAAS,UAAU;AAC7BL,aAAOW,QAAQP,OAAAA;AACf;IACF,WAAWI,QAAQ;AACjB;IACF;AACA,QAAIF,YAAYC,mBAAmBV,OAAOO,OAAAA;AAC1C,QAAIN,YAAYQ,WAAW;AACzBE,eAAS;AACT;IACF;AACAR,WAAOW,QAAQP,OAAAA;AACfN,iBAAaQ;EACf;AAEA,SAAON;AACT;AA9CgBN;AAgDT,SAASK,wBAAwBF,OAAa;AACnD,SAAOe,iBAAiBf,KAAAA,KAAUgB;AACpC;AAFgBd;AAIhB,IAAMc,qBAAqB;AAE3B,IAAMD,mBAA8C;;EAElD,UAAU;EACV,qBAAqB;EACrB,eAAe;EACf,0BAA0B;EAC1B,sBAAsB;EACtB,uBAAuB;EACvB,sBAAsB;EACtB,wBAAwB;EACxB,6BAA6B;EAC7B,aAAa;EACb,kBAAkB;EAClB,kBAAkB;EAClB,SAAS;EACT,cAAc;EACd,cAAc;;EAGd,sBAAsB;EACtB,iBAAiB;EACjB,sBAAsB;EACtB,0BAA0B;EAC1B,qBAAqB;EACrB,sBAAsB;EACtB,0BAA
0B;EAC1B,sBAAsB;AACxB;AAEA,SAASV,iBAAiBL,OAAeD,OAAY;AACnD,MAAIA,MAAMkB,WAAW,GAAG;AACtB,WAAO;EACT;AACA,QAAMC,OAAOC,KAAKC,UAAUrB,KAAAA;AAC5B,SAAOsB,YAAYrB,OAAOkB,IAAAA;AAC5B;AANSb;AAQT,SAASK,mBAAmBV,OAAeO,SAAY;AACrD,SAAOc,YAAYrB,OAAOO,QAAQe,WAAW,EAAA;AAC/C;AAFSZ;AAIT,SAASW,YAAYrB,OAAeuB,MAAY;AAC9C,SAAOA,KAAKN,SAAS;AACvB;AAFSI;AAIF,SAASG,+BAA+BC,QAAmB;AAChE,SAAO;IACLC,MAAM;IACNC,UAAU;MACRC,MAAMH,OAAOG;MACbC,aAAaJ,OAAOI;MACpBC,YAAYX,KAAKY,MAAMN,OAAOO,UAAU;IAC1C;EACF;AACF;AATgBR;AAWT,SAASS,8BAA8B1B,SAAgB;AAC5D,MAAIA,QAAQ2B,cAAa,GAAI;AAC3B,WAAO;MACL1B,MAAMD,QAAQC;MACdc,SAASf,QAAQe;IACnB;EAIF,WAAWf,QAAQ4B,yBAAwB,GAAI;AAC7C,WAAO;MACL3B,MAAM;MACN4B,YAAY;QACV;UACEC,IAAI9B,QAAQ8B;UACZX,MAAM;UACNC,UAAU;YACRC,MAAMrB,QAAQqB;YACdU,WAAWnB,KAAKC,UAAUb,QAAQ+B,SAAS;UAC7C;QACF;;IAEJ;EACF,WAAW/B,QAAQgC,gBAAe,GAAI;AACpC,WAAO;MACL/B,MAAM;MACNc,SAASf,QAAQJ;MACjBqC,cAAcjC,QAAQkC;IACxB;EACF;AACF;AA9BgBR;AAgCT,SAASS,mCAAmCnC,SAAmC;AACpF,SAAO;IACL,GAAGA;IACH,GAAIA,QAAQC,SAAS,YAAY;MAC/BA,MAAM;MACNc,SAAS,gDAAgDf,QAAQe;IACnE;EACF;AACF;AARgBoB;;;ADvFhB,SAASC,gBAAgB;AAEzB,IAAMC,gBAAgB;AAyBf,IAAMC,gBAAN,MAAMA;EACHC,QAAgBF;EAEhBG,2BAAoC;EACpCC;EACR,IAAWC,SAAiB;AAC1B,WAAO,KAAKD;EACd;EAEAE,YAAYC,QAA8B;AACxC,SAAKH,WAAUG,iCAAQF,WAAU,IAAIG,OAAO,CAAC,CAAA;AAC7C,QAAID,iCAAQL,OAAO;AACjB,WAAKA,QAAQK,OAAOL;IACtB;AACA,SAAKC,4BAA2BI,iCAAQJ,6BAA4B;EACtE;EAEA,MAAMM,QACJC,SAC+C;AAC/C,UAAM,EACJC,UACAT,QAAQ,KAAKA,OACbU,UACAC,SACAC,aACAC,oBAAmB,IACjBL;AACJ,UAAMM,QAAQH,QAAQI,IAAIC,8BAAAA;AAE1B,QAAIC,iBAAiBP,SAASK,IAAIG,6BAAAA;AAClCD,qBAAiBE,0BAA0BF,gBAAgBH,OAAOd,KAAAA;AAElE,QAAIoB,aAAkBP,2DAAqBO;AAC3C,SAAIP,2DAAqBO,gBAAe,YAAY;AAClDA,mBAAa;QACXC,MAAM;QACNC,UAAU;UAAEC,MAAMV,oBAAoBW;QAAuB;MAC/D;IACF;AAEA,UAAMC,SAAS,KAAKtB,OAAOuB,KAAKC,KAAKC,YAAYH,OAAO;MACtDzB;MACAyB,QAAQ;MACRf,UAAUO;MACV,GAAIH,MAAMe,SAAS,KAAK;QAAEf;MAAM;MAChC,IAAID,2DAAqBiB,cAAa;QAAEC,YAAYlB,oBAAoBiB;MAAU;MAClF,IAAIjB,2DAAqBmB,SAAQ;QAAEA,MAAMnB,oBAAoBmB;MAAK;MAClE,GAAIZ,cAAc;QAAEa,aAAab;MAAW;MAC5C,GAAI,KAAKnB,4BAA4B;QAAEiC,qBAAqB;MAAM;IACpE,CAAA;AAEAtB,gBAAYa,OAAO,OAAOU,iBAAAA;AAhJ9B;AAiJM,UAAIC,OAAsC;AAC1C,uBAAiBC,SAASZ,QAAQ;AAChC,cAAMa,YAAWD,WAAME,QAAQ,CAAA,EAAGC,MAAMC,eAAvBJ,mBAAoC;AACrD,cAAMK,UAAUL,MAAME,QAAQ,CAAA,EAAGC,MAAME;AAKvC,YAAIN,SAAS,cAAaE,qCAAUK,KAAI;AACtCP,iBAAO;AACPD,uBAAaS,mBAAkB;QACjC,WAAWR,SAAS,eAAeE,aAAaO,WAAaP,qCAAUK,MAAK;AAC1EP,iBAAO;AACPD,uBAAaW,uBAAsB;QACrC;AAGA,YAAIV,SAAS,MAAM;AACjB,cAAIE,qCAAUK,IAAI;AAChBP,mBAAO;AACPD,yBAAaY,yBAAyBT,SAAUK,IAAIL,SAAUhB,SAAUC,IAAI;UAC9E,WAAWmB,SAAS;AAClBN,mBAAO;AACPD,yBAAaa,qBAAqBX,MAAMM,EAAE;UAC5C;QACF;AAGA,YAAIP,SAAS,aAAaM,SAAS;AACjCP,uBAAac,uBAAuBP,OAAAA;QACtC,WAAWN,SAAS,gBAAcE,0CAAUhB,aAAVgB,mBAAoBY,YAAW;AAC/Df,uBAAagB,wBAAwBb,SAAShB,SAAS4B,SAAS;QAClE;MACF;AAGA,UAAId,SAAS,WAAW;AACtBD,qBAAaS,mBAAkB;MACjC,WAAWR,SAAS,YAAY;AAC9BD,qBAAaW,uBAAsB;MACrC;AAEAX,mBAAaiB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACL3C,UAAUA,YAAY4C,SAAAA;IACxB;EACF;AACF;AAtGatD;;;AE5Cb,SAASuD,YAAAA,iBAAgB;AAiBlB,IAAMC,mBAAN,MAAMA;;;;;EAIXC,YAAoBC,SAAkC;SAAlCA,UAAAA;EAAmC;EAEvD,MAAMC,QACJC,SAC+C;AAC/C,UAAM,EAAEC,aAAaC,OAAOC,SAASC,UAAUC,UAAUC,MAAK,IAAKN;AACnE,UAAMO,SAAS,MAAM,KAAKT,QAAQU,QAAQ;MACxCJ,UAAUA,SAASK,IAAIC,gCAAAA;MACvBC,OAAOR,QAAQM,IAAIG,iCAAAA;MACnBV;MACAG;MACAC;IACF,CAAA;AAEAL,gBAAYY,OAAO,OAAOC,iBAAAA;AACxB,YAAMC,wBAAwB;QAC5BR;QACAO;MACF,CAAA;IACF,CAAA;AAEA,WAAO;MACLT,UAAUA,YAAYW,UAAAA;IACxB;EACF;AACF;AA7BapB;;;AC/Cb,SAASqB,kBAAkB;AAUpB,IAAMC,4BAAN,cAAwCC,iBAAAA;EAC7CC,YAAYC,SAA4C;AACtD,UAAM;MACJC,SAAS,OAAO,EAAEC,UAAUC,MAAK,MAAE;AACjC,cAAMC,QAAQ,IAAIC,WAAW;UAC3BC,YAAWN,mCAASI,UAAS;UAC7BG,YAAY;QACd,CAAA,EAAGC,UAAUL,KAAAA;AACb,eAAOC,MAAMK,OAAOP,QAAAA;MACt
B;IACF,CAAA;EACF;AACF;AAZaL;;;ACFb,OAAOa,aAAY;AAqDZ,IAAMC,yBAAN,MAAMA;EACHC;EACAC;EACAC;EACAC;EACAC;EAERC,YAAYC,QAAsC;AAChD,SAAKN,SAASM,OAAON,UAAU,IAAIO,QAAO,CAAC,CAAA;AAC3C,SAAKN,yBAAyBK,OAAOL,2BAA2B,SAAS;AACzE,SAAKE,oBAAoBG,OAAOH,sBAAsB,SAAS;AAC/D,SAAKD,cAAcI,OAAOJ;AAC1B,SAAKE,4BAA2BE,iCAAQF,6BAA4B;EACtE;EAEA,MAAMI,QACJC,SAC+C;AAC/C,UAAM,EAAEC,UAAUC,SAASC,aAAaC,OAAOC,oBAAmB,IAAKL;AAEvE,QAAIM,WAAWN,QAAQM,aAAa,MAAM,KAAKf,OAAOgB,KAAKC,QAAQC,OAAM,GAAIC;AAE7E,UAAMC,cAAcV,SAASW,GAAG,EAAC;AAEjC,QAAIC,YAAgCC;AAGpC,QAAIH,YAAYI,gBAAe,KAAMX,OAAO;AAC1CS,kBAAY,MAAM,KAAKG,kBAAkBV,UAAUF,OAAOH,UAAUE,WAAAA;IACtE,WAESQ,YAAYM,cAAa,GAAI;AACpCJ,kBAAY,MAAM,KAAKK,kBACrBZ,UACAL,UACAC,SACAC,aACAE,mBAAAA;IAEJ,OAEK;AACH,YAAM,IAAIc,MAAM,6CAAA;IAClB;AAEA,WAAO;MACLb;MACAF,OAAOS;IACT;EACF;EAEA,MAAcG,kBACZV,UACAF,OACAH,UACAE,aACA;AACA,QAAIiB,MAAM,MAAM,KAAK7B,OAAOgB,KAAKC,QAAQa,KAAKC,SAAShB,UAAUF,KAAAA;AACjE,QAAI,CAACgB,IAAIG,iBAAiB;AACxB,YAAM,IAAIJ,MAAM,0BAAA;IAClB;AAGA,UAAMK,eAAeJ,IAAIG,gBAAgBE,oBAAoBC,WAAWC,IACtE,CAACC,aAAaA,SAASlB,EAAE;AAI3B,UAAMmB,iBAAiB5B,SAAS6B,OAC9B,CAACC,YAAYA,QAAQhB,gBAAe,KAAMS,aAAaQ,SAASD,QAAQE,iBAAiB,CAAA;AAG3F,QAAIT,aAAaU,UAAUL,eAAeK,QAAQ;AAChD,YAAM,IAAIf,MAAM,oEAAA;IAClB;AAGA,UAAMgB,cAA6DN,eAAeF,IAChF,CAACI,YAAAA;AACC,aAAO;QACLK,cAAcL,QAAQE;QACtBI,QAAQN,QAAQO;MAClB;IACF,CAAA;AAGF,UAAMC,SAAS,KAAKhD,OAAOgB,KAAKC,QAAQa,KAAKmB,wBAAwBlC,UAAUF,OAAO;MACpFqC,cAAcN;MACd,GAAI,KAAKxC,4BAA4B;QAAE+C,qBAAqB;MAAM;IACpE,CAAA;AAEA,UAAM,KAAKC,eAAeJ,QAAQpC,WAAAA;AAClC,WAAOC;EACT;EAEA,MAAcc,kBACZZ,UACAL,UACAC,SACAC,aACAE,qBACA;AACAJ,eAAW;SAAIA;;AAGf,UAAM2C,sBAAsB3C,SAAS4C,MAAK;AAC1C,UAAMC,eAAeF,oBAAoB3B,cAAa,IAAK2B,oBAAoBG,UAAU;AAGzF,UAAMC,cAAc/C,SACjB0B,IAAIsB,6BAAAA,EACJtB,IAAIuB,kCAAAA,EACJtC,GAAG,EAAC;AAEP,QAAIoC,YAAYG,SAAS,QAAQ;AAC/B,YAAM,IAAIhC,MAAM,uBAAA;IAClB;AAGA,UAAM,KAAK5B,OAAOgB,KAAKC,QAAQP,SAASQ,OAAOH,UAAU;MACvD6C,MAAM;MACNJ,SAASC,YAAYD;IACvB,CAAA;AAEA,UAAMK,cAAclD,QAAQyB,IAAI0B,8BAAAA;AAEhC,UAAMC,QAAQ;SACTF;SACC,KAAK5D,yBAAyB;QAAC;UAAE+D,MAAM;QAAmB;UAAsB,CAAA;SAChF,KAAK7D,oBAAoB;QAAC;UAAE6D,MAAM;QAAc;UAAsB,CAAA;;AAI5E,QAAIhB,SAAS,KAAKhD,OAAOgB,KAAKC,QAAQa,KAAKkB,OAAOjC,UAAU;MAC1DkD,cAAc,KAAK/D;MACnBqD;MACAQ;MACA,IAAIjD,2DAAqBoD,cAAa;QACpCC,uBAAuBrD,oBAAoBoD;MAC7C;MACA,GAAI,KAAK9D,4BAA4B;QAAE+C,qBAAqB;MAAM;IACpE,CAAA;AAEA,UAAM,KAAKC,eAAeJ,QAAQpC,WAAAA;AAElC,WAAOwD,mBAAmBpB,MAAAA;EAC5B;EAEA,MAAcI,eAAeJ,QAAyBpC,aAAiC;AACrFA,gBAAYoC,OAAO,OAAOqB,iBAAAA;AApO9B;AAqOM,UAAIC,iBAAiB;AAErB,uBAAiBC,SAASvB,QAAQ;AAChC,gBAAQuB,MAAMC,OAAK;UACjB,KAAK;AACH,gBAAIF,gBAAgB;AAClBD,2BAAaI,uBAAsB;YACrC;AACAJ,yBAAaK,qBAAqBH,MAAMI,KAAKxD,EAAE;AAC/C;UACF,KAAK;AACH,kBAAIoD,WAAMI,KAAKC,MAAMpB,YAAjBe,mBAA2B,GAAGP,UAAS,QAAQ;AACjDK,2BAAaQ,wBAAuBN,WAAMI,KAAKC,MAAMpB,YAAjBe,mBAA2B,GAAGO,KAAKC,KAAAA;YACzE;AACA;UACF,KAAK;AACHV,yBAAaW,mBAAkB;AAC/B;UACF,KAAK;AACH,gBAAIC;AACJ,gBAAIC;AACJ,gBAAIC;AACJ,gBACEZ,MAAMI,KAAKC,MAAMQ,aAAapB,SAAS,kBACvCO,WAAMI,KAAKC,MAAMQ,aAAajD,eAA9BoC,mBAA2C,GAAGP,UAAS,YACvD;AACAiB,4BAAaV,WAAMI,KAAKC,MAAMQ,aAAajD,eAA9BoC,mBAA2C,GAAGpD;AAC3D+D,8BAAeX,WAAMI,KAAKC,MAAMQ,aAAajD,eAA9BoC,mBAA2C,GAAGc,SAASC;AACtEH,8BAAeZ,WAAMI,KAAKC,MAAMQ,aAAajD,eAA9BoC,mBAA2C,GAAGc,SAASE;YACxE;AAEA,gBAAIL,gBAAgBD,YAAY;AAC9B,kBAAIX,gBAAgB;AAClBD,6BAAaI,uBAAsB;cACrC;AACAH,+BAAiB;AACjBD,2BAAamB,yBAAyBP,YAAYC,YAAAA;YACpD,WAAWC,cAAc;AACvBd,2BAAaoB,wBAAwBN,YAAAA;YACvC;AACA;QACJ;MACF;AACA,UAAIb,gBAAgB;AAClBD,qBAAaI,uBAAsB;MACrC;AACAJ,mBAAaqB,SAAQ;IACvB,CAAA;EACF;AACF;AAvMa3F;AAyMb,SAASqE,mBAAmBpB,QAAuB;AACjD,SAAO,IAAI2C,QAAgB,CAACC,SAASC,WAAAA;AACnC,QAAIC,cAAc,wBAACtB,UAAAA;AACjB,UAAIA,MAAMA,UAAU,sBAAsB;AACxC,cAAM3D,QAAQ2D,MAAMG,KAAKxD;AAC
zB6B,eAAO+C,IAAI,SAASD,WAAAA;AACpBF,gBAAQ/E,KAAAA;MACV;IACF,GANkB;AAOlBmC,WAAOgD,GAAG,SAASF,WAAAA;EACrB,CAAA;AACF;AAXS1B;;;AC/PT,OAAO6B,aAAY;AACnB,SAASC,YAAAA,iBAAgB;AAQlB,IAAMC,eAAN,MAAMA;EACHC;EACAC;EACAC;EAERC,YAAYC,SAA8B;AACxC,QAAIA,mCAASJ,QAAQ;AACnB,WAAKA,SAASI,QAAQJ;IACxB,OAAO;AACL,WAAKA,SAAS;IAChB;AACA,SAAKC,QAAQG,mCAASH;AACtB,SAAKC,QAAQ;EACf;EAEA,MAAMG,QACJC,SAC+C;AAC/C,UAAMC,QAAQD,QAAQE,QAAQC,IAAIC,8BAAAA;AAClC,UAAMC,SAAS,IAAIC,QAAO;MACxBZ,QAAQ,KAAKA;MACba,SAAS;IACX,CAAA;AAEA,UAAMC,WAAWR,QAAQQ,SAASL,IAAIM,6BAAAA;AAEtC,UAAMC,SAAS,MAAML,OAAOM,KAAKC,YAAYC,OAAO;MAClDlB,OAAO,KAAKA;MACZa;MACAE,QAAQ;MACR,GAAIT,MAAMa,SAAS,KAAK;QAAEb;MAAM;IAClC,CAAA;AAEA,QAAIN,QAAQ;AACZK,YAAQe,YAAYL,OAAO,OAAOM,iBAAAA;AApEtC;AAqEM,UAAIC,OAAsC;AAC1C,uBAAiBC,SAASR,QAAQ;AAChC,YAAI,KAAKd,OAAO;AACdD,kBAAQuB,MAAMvB;AACdqB,uBAAaG,qBAAqBC,UAAAA,CAAAA;AAClCJ,uBAAaK,uBAAuB,eAAe1B;CAAS;AAC5DqB,uBAAaM,mBAAkB;AAC/B,eAAK1B,QAAQ;QACf;AACA,cAAM2B,YAAWL,WAAMM,QAAQ,CAAA,EAAGC,MAAMC,eAAvBR,mBAAoC;AACrD,cAAMS,UAAUT,MAAMM,QAAQ,CAAA,EAAGC,MAAME;AAKvC,YAAIV,SAAS,cAAaM,qCAAUK,KAAI;AACtCX,iBAAO;AACPD,uBAAaM,mBAAkB;QACjC,WAAWL,SAAS,eAAeM,aAAaM,WAAaN,qCAAUK,MAAK;AAC1EX,iBAAO;AACPD,uBAAac,uBAAsB;QACrC;AAGA,YAAIb,SAAS,MAAM;AACjB,cAAIM,qCAAUK,IAAI;AAChBX,mBAAO;AACPD,yBAAae,yBAAyBR,SAAUK,IAAIL,SAAUS,SAAUC,IAAI;UAC9E,WAAWN,SAAS;AAClBV,mBAAO;AACPD,yBAAaG,qBAAqBD,MAAMU,EAAE;UAC5C;QACF;AAGA,YAAIX,SAAS,aAAaU,SAAS;AACjCX,uBAAaK,uBAAuBM,OAAAA;QACtC,WAAWV,SAAS,gBAAcM,0CAAUS,aAAVT,mBAAoBW,YAAW;AAC/DlB,uBAAamB,wBAAwBZ,SAASS,SAASE,SAAS;QAClE;MACF;AAGA,UAAIjB,SAAS,WAAW;AACtBD,qBAAaM,mBAAkB;MACjC,WAAWL,SAAS,YAAY;AAC9BD,qBAAac,uBAAsB;MACrC;AAEAd,mBAAaoB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACLC,UAAUrC,QAAQqC,YAAYjB,UAAAA;IAChC;EACF;AACF;AA3Fa3B;;;AChBb,SAAS6C,YAAY;AAWrB,SAASC,YAAAA,iBAAgB;AAEzB,IAAMC,iBAAgB;AAwBf,IAAMC,cAAN,MAAMA;EACHC,QAAgBF;EAEhBG,2BAAoC;EACpCC;EACR,IAAWC,OAAa;AACtB,WAAO,KAAKD;EACd;EAEAE,YAAYC,QAA4B;AACtC,SAAKH,SAAQG,iCAAQF,SAAQ,IAAIG,KAAK,CAAC,CAAA;AACvC,QAAID,iCAAQL,OAAO;AACjB,WAAKA,QAAQK,OAAOL;IACtB;AACA,SAAKC,4BAA2BI,iCAAQJ,6BAA4B;EACtE;EAEA,MAAMM,QACJC,SAC+C;AAC/C,UAAM,EACJC,UACAT,QAAQ,KAAKA,OACbU,UACAC,SACAC,aACAC,oBAAmB,IACjBL;AACJ,UAAMM,QAAQH,QAAQI,IAAIC,8BAAAA;AAE1B,QAAIC,iBAAiBP,SAASK,IAAIG,6BAAAA;AAClCD,qBAAiBE,0BAA0BF,gBAAgBH,OAAOd,KAAAA;AAElE,QAAIoB,aAAkBP,2DAAqBO;AAC3C,SAAIP,2DAAqBO,gBAAe,YAAY;AAClDA,mBAAa;QACXC,MAAM;QACNC,UAAU;UAAEC,MAAMV,oBAAoBW;QAAuB;MAC/D;IACF;AACA,UAAMC,SAAS,MAAM,KAAKtB,KAAKuB,KAAKC,YAAYC,OAAO;MACrD5B;MACAyB,QAAQ;MACRf,UAAUO;MACV,GAAIH,MAAMe,SAAS,KAAK;QAAEf;MAAM;MAChC,IAAID,2DAAqBiB,cAAa;QACpCC,YAAYlB,oBAAoBiB;MAClC;MACA,IAAIjB,2DAAqBmB,SAAQ;QAAEA,MAAMnB,oBAAoBmB;MAAK;MAClE,GAAIZ,cAAc;QAAEa,aAAab;MAAW;MAC5C,GAAI,KAAKnB,4BAA4B;QAAEiC,qBAAqB;MAAM;IACpE,CAAA;AAEAtB,gBAAYa,OAAO,OAAOU,iBAAAA;AA5G9B;AA6GM,UAAIC,OAAsC;AAC1C,uBAAiBC,SAASZ,QAAQ;AAChC,cAAMa,YAAWD,WAAME,QAAQ,CAAA,EAAGC,MAAMC,eAAvBJ,mBAAoC;AACrD,cAAMK,UAAUL,MAAME,QAAQ,CAAA,EAAGC,MAAME;AAKvC,YAAIN,SAAS,cAAaE,qCAAUK,KAAI;AACtCP,iBAAO;AACPD,uBAAaS,mBAAkB;QACjC,WAAWR,SAAS,eAAeE,aAAaO,WAAaP,qCAAUK,MAAK;AAC1EP,iBAAO;AACPD,uBAAaW,uBAAsB;QACrC;AAGA,YAAIV,SAAS,MAAM;AACjB,cAAIE,qCAAUK,IAAI;AAChBP,mBAAO;AACPD,yBAAaY,yBAAyBT,SAAUK,IAAIL,SAAUhB,SAAUC,IAAI;UAC9E,WAAWmB,SAAS;AAClBN,mBAAO;AACPD,yBAAaa,qBAAqBX,MAAMM,EAAE;UAC5C;QACF;AAGA,YAAIP,SAAS,aAAaM,SAAS;AACjCP,uBAAac,uBAAuBP,OAAAA;QACtC,WAAWN,SAAS,gBAAcE,0CAAUhB,aAAVgB,mBAAoBY,YAAW;AAC/Df,uBAAagB,wBAAwBb,SAAShB,SAAS4B,SAAS;QAClE;MACF;AAGA,UAAId,SAAS,WAAW;AACtBD,qBAAaS,mBAAkB;MACjC,WAAWR,SAAS,YAAY;AAC9BD,qBAAaW,uBAAsB;MACrC;AAEAX,mBAAaiB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACL3C,UAAUA,YAAY4C,UAAAA;IACxB;EACF;AACF;AAvGatD;;;A
CnCb,OAAOuD,eAAe;;;ACXf,SAASC,2BACdC,UACAC,OACAC,OACAC,WAAkB;AAElBA,4BAAcC;AAEd,QAAMC,SAAgB,CAAA;AACtB,QAAMC,iBAAiBC,kBAAiBL,OAAOD,KAAAA;AAC/C,MAAIK,iBAAiBH,WAAW;AAC9B,UAAM,IAAIK,MAAM,4CAA4CF,oBAAoBH,WAAW;EAC7F;AACAA,eAAaG;AAEb,aAAWG,WAAWT,UAAU;AAC9B,QAAIS,QAAQC,SAAS,UAAU;AAC7B,YAAMC,YAAYC,oBAAmBV,OAAOO,OAAAA;AAC5CN,mBAAaQ;AAEb,UAAIR,YAAY,GAAG;AACjB,cAAM,IAAIK,MAAM,uCAAA;MAClB;IACF;EACF;AAEA,MAAIK,SAAkB;AAEtB,QAAMC,mBAAmB;OAAId;IAAUe,QAAO;AAC9C,aAAWN,WAAWK,kBAAkB;AACtC,QAAIL,QAAQC,SAAS,UAAU;AAC7BL,aAAOW,QAAQP,OAAAA;AACf;IACF,WAAWI,QAAQ;AACjB;IACF;AACA,QAAIF,YAAYC,oBAAmBV,OAAOO,OAAAA;AAC1C,QAAIN,YAAYQ,WAAW;AACzBE,eAAS;AACT;IACF;AACAR,WAAOW,QAAQP,OAAAA;AACfN,iBAAaQ;EACf;AAEA,SAAON;AACT;AA9CgBN,OAAAA,4BAAAA;AAgDhB,IAAMK,aAAa;AAEnB,SAASG,kBAAiBL,OAAeD,OAAY;AACnD,MAAIA,MAAMgB,WAAW,GAAG;AACtB,WAAO;EACT;AACA,QAAMC,OAAOC,KAAKC,UAAUnB,KAAAA;AAC5B,SAAOoB,aAAYnB,OAAOgB,IAAAA;AAC5B;AANSX,OAAAA,mBAAAA;AAQT,SAASK,oBAAmBV,OAAeO,SAAY;AACrD,SAAOY,aAAYnB,OAAOiB,KAAKC,UAAUX,QAAQa,OAAO,KAAK,EAAA;AAC/D;AAFSV,OAAAA,qBAAAA;AAIT,SAASS,aAAYnB,OAAeqB,MAAY;AAC9C,SAAOA,KAAKN,SAAS;AACvB;AAFSI,OAAAA,cAAAA;AAIF,SAASG,kCAAkCC,QAAmB;AACnE,SAAO;IACLC,MAAMD,OAAOC;IACbC,aAAaF,OAAOE;IACpBC,cAAcT,KAAKU,MAAMJ,OAAOK,UAAU;EAC5C;AACF;AANgBN;AAQT,SAASO,iCACdtB,SAAgB;AAEhB,MAAIA,QAAQuB,cAAa,GAAI;AAC3B,QAAIvB,QAAQC,SAAS,UAAU;AAC7B,aAAO;QACLA,MAAM;QACNY,SAAS;UACP;YAAEW,MAAM;YAAQV,MAAM,gDAAgDd,QAAQa;UAAQ;;MAE1F;IACF,OAAO;AACL,aAAO;QACLZ,MAAMD,QAAQC,SAAS,SAAS,SAAS;QACzCY,SAAS;UAAC;YAAEW,MAAM;YAAQV,MAAMd,QAAQa;UAAQ;;MAClD;IACF;EACF,WAAWb,QAAQyB,yBAAwB,GAAI;AAC7C,WAAO;MACLxB,MAAM;MACNY,SAAS;QACP;UACEa,IAAI1B,QAAQ0B;UACZF,MAAM;UACNG,OAAO3B,QAAQ4B;UACfX,MAAMjB,QAAQiB;QAChB;;IAEJ;EACF,WAAWjB,QAAQ6B,gBAAe,GAAI;AACpC,WAAO;MACL5B,MAAM;MACNY,SAAS;QACP;UACEW,MAAM;UACNX,SAASb,QAAQJ;UACjBkC,aAAa9B,QAAQ+B;QACvB;;IAEJ;EACF;AACF;AAzCgBT;AA2CT,SAASU,6BACdC,eAAgD;AAEhD,SAAOA,cAAcC,OAAO,CAACC,KAAKnC,YAAAA;AAChC,UAAMoC,YAAYD,IAAIA,IAAI3B,SAAS,CAAA;AAEnC,QAAI4B,aAAaA,UAAUnC,SAASD,QAAQC,MAAM;AAChDmC,gBAAUvB,UAAUuB,UAAUvB,QAAQwB,OAAOrC,QAAQa,OAAO;IAC9D,OAAO;AACLsB,UAAIG,KAAK;QACPrC,MAAMD,QAAQC;QACdY,SAAS;aAAKb,QAAQa;;MACxB,CAAA;IACF;AAEA,WAAOsB;EACT,GAAG,CAAA,CAAE;AACP;AAjBgBH;;;AD7FhB,SAASO,YAAAA,iBAAgB;AAGzB,IAAMC,iBAAgB;AAef,IAAMC,mBAAN,MAAMA;EACHC,QAAgBF;EAEhBG;EACR,IAAWC,YAAuB;AAChC,WAAO,KAAKD;EACd;EAEAE,YAAYC,QAAiC;AAC3C,SAAKH,cAAaG,iCAAQF,cAAa,IAAIG,UAAU,CAAC,CAAA;AACtD,QAAID,iCAAQJ,OAAO;AACjB,WAAKA,QAAQI,OAAOJ;IACtB;EACF;EAEA,MAAMM,QACJC,SAC+C;AAC/C,UAAM,EACJC,UACAR,QAAQ,KAAKA,OACbS,UAAUC,aACVC,SACAC,aACAC,oBAAmB,IACjBN;AACJ,UAAMO,QAAQH,QAAQI,IAAIC,iCAAAA;AAE1B,UAAMP,WAAW;SAAIC;;AAGrB,UAAMO,sBAAsBR,SAASS,MAAK;AAC1C,UAAMC,eAAeF,oBAAoBG,cAAa,IAAKH,oBAAoBI,UAAU;AAEzF,QAAIC,oBAAoBb,SAASM,IAAIQ,gCAAAA;AACrCD,wBAAoBE,2BAA0BF,mBAAmBR,OAAOd,KAAAA;AACxEsB,wBAAoBG,6BAA6BH,iBAAAA;AAEjD,QAAII,aAAkBb,2DAAqBa;AAC3C,SAAIb,2DAAqBa,gBAAe,YAAY;AAClDA,mBAAa;QACXC,MAAM;QACNC,MAAMf,oBAAoBgB;MAC5B;IACF;AAEA,UAAMC,SAAS,KAAK5B,UAAUO,SAASsB,OAAO;MAC5CC,QAAQb;MACRnB,OAAO,KAAKA;MACZS,UAAUa;MACVW,aAAYpB,2DAAqBqB,cAAa;MAC9C,GAAIpB,MAAMqB,SAAS,KAAK;QAAErB;MAAM;MAChC,GAAIY,cAAc;QAAEU,aAAaV;MAAW;MAC5CI,QAAQ;IACV,CAAA;AAEAlB,gBAAYkB,OAAO,OAAOO,iBAAAA;AACxB,UAAIC,OAAsC;AAC1C,UAAIC,gBAAgB;AACpB,UAAIC,mBAAmBC,UAAAA;AACvB,UAAIC,oBAAoBD,UAAAA;AACxB,UAAIE,2BAA2B,IAAIC,yBAAAA;AAEnC,uBAAiBC,SAAS,MAAMf,QAAQ;AACtC,YAAIe,MAAMlB,SAAS,iBAAiB;AAClCa,6BAAmBK,MAAMC,QAAQC;QACnC,WAAWF,MAAMlB,SAAS,uBAAuB;AAC/C,cAAIkB,MAAMG,cAAcrB,SAAS,QAAQ;AACvCY,4BAAgB;AAChBI,qCAAyBM,MAAK;AAC9BX,mBAAO;UACT,WAAWO,MAAMG,cAAcrB,SAAS,YAAY;AAClDe,gCAAoBG,MAAMG,cAAcD;AACxCV,yBAAaa,yBAAyBR,mBAAmBG,MAAMG,cAAcpB,IAAI;AACjFU,mB
AAO;UACT;QACF,WAAWO,MAAMlB,SAAS,uBAAuB;AAC/C,cAAIkB,MAAMM,MAAMxB,SAAS,cAAc;AACrC,kBAAMyB,OAAOT,yBAAyBU,YAAYR,MAAMM,MAAMC,IAAI;AAClE,gBAAIA,KAAKjB,SAAS,GAAG;AACnB,kBAAI,CAACI,eAAe;AAClBF,6BAAaiB,qBAAqBd,gBAAAA;AAClCD,gCAAgB;cAClB;AACAF,2BAAakB,uBAAuBH,IAAAA;YACtC;UACF,WAAWP,MAAMM,MAAMxB,SAAS,oBAAoB;AAClDU,yBAAamB,wBAAwBX,MAAMM,MAAMM,YAAY;UAC/D;QACF,WAAWZ,MAAMlB,SAAS,sBAAsB;AAC9C,cAAIW,SAAS,WAAW;AACtB,gBAAIC,eAAe;AACjBF,2BAAaqB,mBAAkB;YACjC;UACF,WAAWpB,SAAS,YAAY;AAC9BD,yBAAasB,uBAAsB;UACrC;QACF;MACF;AAEAtB,mBAAauB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACLpD,UAAUA,YAAYiC,UAAAA;IACxB;EACF;AACF;AA3Ga1C;AA6Gb,IAAM8D,eAAe;AACrB,IAAMC,mBAAmB;AAEzB,IAAMlB,2BAAN,6BAAMA,0BAAAA;EACImB;EACAC,uBAAgC;EAExC7D,cAAc;AACZ,SAAK4D,SAAS;EAChB;EAEAV,YAAYD,MAAsB;AAChC,SAAKW,UAAUX;AACf,QAAI,KAAKY,sBAAsB;AAC7B,aAAOZ;IACT;AACA,UAAMa,eAAe,KAAKF,OAAOG,MAAM,GAAGL,aAAa1B,MAAM;AAC7D,QAAI0B,aAAaM,WAAWF,YAAAA,GAAe;AACzC,UAAI,KAAKF,OAAOK,SAASN,gBAAAA,GAAmB;AAC1C,cAAMO,MAAM,KAAKN,OAAOO,QAAQR,gBAAAA;AAChC,cAAMS,eAAe,KAAKR,OAAOG,MAAMG,MAAMP,iBAAiB3B,MAAM;AACpE,aAAK4B,SAASQ;AACd,aAAKP,uBAAuB;AAC5B,eAAOO;MACT,OAAO;AACL,eAAO;MACT;IACF;AACA,WAAOnB;EACT;EAEAH,QAAQ;AACN,SAAKc,SAAS;AACd,SAAKC,uBAAuB;EAC9B;AACF,GAhCA;","names":["RemoteRunnable","RemoteChain","name","description","chainUrl","parameters","parameterType","constructor","options","toAction","inferLangServeParameters","handler","args","runnable","RemoteRunnable","url","input","Object","keys","invoke","supportedTypes","schemaUrl","replace","schema","fetch","then","res","json","catch","Error","includes","type","properties","map","key","property","required","OpenAI","limitMessagesToTokenCount","messages","tools","model","maxTokens","maxTokensForOpenAIModel","result","toolsNumTokens","countToolsTokens","Error","message","role","numTokens","countMessageTokens","cutoff","reversedMessages","reverse","unshift","maxTokensByModel","DEFAULT_MAX_TOKENS","length","json","JSON","stringify","countTokens","content","text","convertActionInputToOpenAITool","action","type","function","name","description","parameters","parse","jsonSchema","convertMessageToOpenAIMessage","isTextMessage","isActionExecutionMessage","tool_calls","id","arguments","isResultMessage","tool_call_id","actionExecutionId","convertSystemMessageToAssistantAPI","randomId","DEFAULT_MODEL","OpenAIAdapter","model","disableParallelToolCalls","_openai","openai","constructor","params","OpenAI","process","request","threadId","messages","actions","eventSource","forwardedParameters","tools","map","convertActionInputToOpenAITool","openaiMessages","convertMessageToOpenAIMessage","limitMessagesToTokenCount","toolChoice","type","function","name","toolChoiceFunctionName","stream","beta","chat","completions","length","maxTokens","max_tokens","stop","tool_choice","parallel_tool_calls","eventStream$","mode","chunk","toolCall","choices","delta","tool_calls","content","id","sendTextMessageEnd","undefined","sendActionExecutionEnd","sendActionExecutionStart","sendTextMessageStart","sendTextMessageContent","arguments","sendActionExecutionArgs","complete","randomId","randomId","LangChainAdapter","constructor","options","process","request","eventSource","model","actions","messages","threadId","runId","result","chainFn","map","convertMessageToLangChainMessage","tools","convertActionInputToLangChainTool","stream","eventStream$","streamLangChainResponse","randomId","ChatGoogle","GoogleGenerativeAIAdapter","LangChainAdapter","constructor","options","chainFn","messages","tools","model","ChatGoogle","modelName","apiVersion","bindTools","stream","OpenAI","OpenAIAssis
tantAdapter","openai","codeInterpreterEnabled","assistantId","fileSearchEnabled","disableParallelToolCalls","constructor","params","OpenAI","process","request","messages","actions","eventSource","runId","forwardedParameters","threadId","beta","threads","create","id","lastMessage","at","nextRunId","undefined","isResultMessage","submitToolOutputs","isTextMessage","submitUserMessage","Error","run","runs","retrieve","required_action","toolCallsIds","submit_tool_outputs","tool_calls","map","toolCall","resultMessages","filter","message","includes","actionExecutionId","length","toolOutputs","tool_call_id","output","result","stream","submitToolOutputsStream","tool_outputs","parallel_tool_calls","streamResponse","instructionsMessage","shift","instructions","content","userMessage","convertMessageToOpenAIMessage","convertSystemMessageToAssistantAPI","role","openaiTools","convertActionInputToOpenAITool","tools","type","assistant_id","maxTokens","max_completion_tokens","getRunIdFromStream","eventStream$","inFunctionCall","chunk","event","sendActionExecutionEnd","sendTextMessageStart","data","delta","sendTextMessageContent","text","value","sendTextMessageEnd","toolCallId","toolCallName","toolCallArgs","step_details","function","name","arguments","sendActionExecutionStart","sendActionExecutionArgs","complete","Promise","resolve","reject","runIdGetter","off","on","OpenAI","randomId","UnifyAdapter","apiKey","model","start","constructor","options","process","request","tools","actions","map","convertActionInputToOpenAITool","openai","OpenAI","baseURL","messages","convertMessageToOpenAIMessage","stream","chat","completions","create","length","eventSource","eventStream$","mode","chunk","sendTextMessageStart","randomId","sendTextMessageContent","sendTextMessageEnd","toolCall","choices","delta","tool_calls","content","id","undefined","sendActionExecutionEnd","sendActionExecutionStart","function","name","arguments","sendActionExecutionArgs","complete","threadId","Groq","randomId","DEFAULT_MODEL","GroqAdapter","model","disableParallelToolCalls","_groq","groq","constructor","params","Groq","process","request","threadId","messages","actions","eventSource","forwardedParameters","tools","map","convertActionInputToOpenAITool","openaiMessages","convertMessageToOpenAIMessage","limitMessagesToTokenCount","toolChoice","type","function","name","toolChoiceFunctionName","stream","chat","completions","create","length","maxTokens","max_tokens","stop","tool_choice","parallel_tool_calls","eventStream$","mode","chunk","toolCall","choices","delta","tool_calls","content","id","sendTextMessageEnd","undefined","sendActionExecutionEnd","sendActionExecutionStart","sendTextMessageStart","sendTextMessageContent","arguments","sendActionExecutionArgs","complete","randomId","Anthropic","limitMessagesToTokenCount","messages","tools","model","maxTokens","MAX_TOKENS","result","toolsNumTokens","countToolsTokens","Error","message","role","numTokens","countMessageTokens","cutoff","reversedMessages","reverse","unshift","length","json","JSON","stringify","countTokens","content","text","convertActionInputToAnthropicTool","action","name","description","input_schema","parse","jsonSchema","convertMessageToAnthropicMessage","isTextMessage","type","isActionExecutionMessage","id","input","arguments","isResultMessage","tool_use_id","actionExecutionId","groupAnthropicMessagesByRole","messageParams","reduce","acc","lastGroup","concat","push","randomId","DEFAULT_MODEL","AnthropicAdapter","model","_anthropic","anthropic","constructor","params","Anthropic","proce
ss","request","threadId","messages","rawMessages","actions","eventSource","forwardedParameters","tools","map","convertActionInputToAnthropicTool","instructionsMessage","shift","instructions","isTextMessage","content","anthropicMessages","convertMessageToAnthropicMessage","limitMessagesToTokenCount","groupAnthropicMessagesByRole","toolChoice","type","name","toolChoiceFunctionName","stream","create","system","max_tokens","maxTokens","length","tool_choice","eventStream$","mode","didOutputText","currentMessageId","randomId","currentToolCallId","filterThinkingTextBuffer","FilterThinkingTextBuffer","chunk","message","id","content_block","reset","sendActionExecutionStart","delta","text","onTextChunk","sendTextMessageStart","sendTextMessageContent","sendActionExecutionArgs","partial_json","sendTextMessageEnd","sendActionExecutionEnd","complete","THINKING_TAG","THINKING_TAG_END","buffer","didFilterThinkingTag","potentialTag","slice","startsWith","includes","end","indexOf","filteredText"]}
@@ -1,7 +1,7 @@
  import {
  copilotRuntimeNodeHttpEndpoint,
  telemetry_client_default
- } from "./chunk-2EV5DRUI.mjs";
+ } from "./chunk-KDMS6EKE.mjs";
  import {
  __name
  } from "./chunk-44O2JGUY.mjs";
@@ -21,4 +21,4 @@ __name(copilotRuntimeNodeExpressEndpoint, "copilotRuntimeNodeExpressEndpoint");
  export {
  copilotRuntimeNodeExpressEndpoint
  };
- //# sourceMappingURL=chunk-CMYZ2LM7.mjs.map
+ //# sourceMappingURL=chunk-ITRJ4KUY.mjs.map
@@ -36,7 +36,7 @@ var require_package = __commonJS({
  publishConfig: {
  access: "public"
  },
- version: "1.4.0-lgc-alpha3.0",
+ version: "1.4.0-pre-1-4-0.10",
  sideEffects: false,
  main: "./dist/index.js",
  module: "./dist/index.mjs",
@@ -1342,7 +1342,7 @@ async function streamEvents(controller, args) {
  let streamingStateExtractor = new StreamingStateExtractor([]);
  let prevNodeName = null;
  let emitIntermediateStateUntilEnd = null;
- let shouldExit = null;
+ let shouldExit = false;
  let externalRunId = null;
  const streamResponse2 = client.runs.stream(threadId, assistantId, {
  input: streamInput,
@@ -1353,6 +1353,7 @@ async function streamEvents(controller, args) {
  });
  const emit = /* @__PURE__ */ __name((message) => controller.enqueue(new TextEncoder().encode(message)), "emit");
  let latestStateValues = {};
+ let updatedState = state;
  try {
  for await (const chunk of streamResponse2) {
  if (![
@@ -1375,28 +1376,29 @@ async function streamEvents(controller, args) {
1375
1376
  const runId = event.metadata.run_id;
1376
1377
  externalRunId = runId;
1377
1378
  const metadata = event.metadata;
1378
- shouldExit = shouldExit != null ? shouldExit : eventType === LangGraphEventTypes.OnCustomEvent && event.name === CustomEventNames.CopilotKitExit;
1379
+ shouldExit = shouldExit || eventType === LangGraphEventTypes.OnCustomEvent && event.name === CustomEventNames.CopilotKitExit;
1379
1380
  const emitIntermediateState = metadata["copilotkit:emit-intermediate-state"];
1380
1381
  const manuallyEmitIntermediateState = eventType === LangGraphEventTypes.OnCustomEvent && event.name === CustomEventNames.CopilotKitManuallyEmitIntermediateState;
1381
1382
  if (graphInfo["nodes"].some((node) => node.id === currentNodeName)) {
1382
1383
  nodeName = currentNodeName;
1384
+ if (eventType === LangGraphEventTypes.OnChainStart || eventType === LangGraphEventTypes.OnChainEnd) {
1385
+ updatedState = latestStateValues;
1386
+ }
1383
1387
  }
1384
1388
  if (!nodeName) {
1385
1389
  continue;
1386
1390
  }
1387
1391
  if (manuallyEmitIntermediateState) {
1388
- if (eventType === LangGraphEventTypes.OnChainEnd) {
1389
- state = event.data.output;
1390
- emit(getStateSyncEvent({
1391
- threadId,
1392
- runId,
1393
- agentName: agent.name,
1394
- nodeName,
1395
- state: event.data.output,
1396
- running: true,
1397
- active: true
1398
- }));
1399
- }
1392
+ updatedState = event.data;
1393
+ emit(getStateSyncEvent({
1394
+ threadId,
1395
+ runId,
1396
+ agentName: agent.name,
1397
+ nodeName,
1398
+ state: updatedState,
1399
+ running: true,
1400
+ active: true
1401
+ }));
1400
1402
  continue;
1401
1403
  }
1402
1404
  if (emitIntermediateState && emitIntermediateStateUntilEnd == null) {
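Taken together with the `shouldExit = false` initialization earlier in this chunk, the `||` form above makes the exit flag sticky: once a CopilotKitExit custom event is observed it stays set, whereas the old `shouldExit != null ? shouldExit : …` ternary froze the flag at whatever the very first event produced. The hunk also threads `updatedState` through the loop, seeding it from `state` and refreshing it from `latestStateValues` on chain start/end events. A minimal sketch of the sticky-flag behavior (the event shape and string values are illustrative, not the runtime's actual enums):

  // Illustrative only: how an OR-accumulated flag differs from the old ternary.
  type StreamEvent = { event: string; name?: string };

  function sawExitEvent(events: StreamEvent[]): boolean {
    let shouldExit = false;
    for (const ev of events) {
      // Once any "exit" custom event is seen, the flag can never flip back.
      shouldExit =
        shouldExit ||
        (ev.event === "on_custom_event" && ev.name === "copilotkit_exit");
    }
    return shouldExit;
  }

  // The old `shouldExit != null ? shouldExit : ...` form latched after the
  // first event, so a later exit event was ignored.
  sawExitEvent([
    { event: "on_chain_start" },
    { event: "on_custom_event", name: "copilotkit_exit" },
  ]); // => true (the old logic would have returned false here)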
@@ -1405,7 +1407,6 @@ async function streamEvents(controller, args) {
       if (emitIntermediateState && eventType === LangGraphEventTypes.OnChatModelStart) {
         streamingStateExtractor = new StreamingStateExtractor(emitIntermediateState);
       }
-      let updatedState = latestStateValues;
       if (emitIntermediateState && eventType === LangGraphEventTypes.OnChatModelStream) {
         streamingStateExtractor.bufferToolCalls(event);
       }
@@ -1486,10 +1487,10 @@ var StreamingStateExtractor = /* @__PURE__ */ __name(class StreamingStateExtract
   bufferToolCalls(event) {
     if (event.data.chunk.tool_call_chunks.length > 0) {
       const chunk = event.data.chunk.tool_call_chunks[0];
-      if (chunk.name !== null) {
+      if (chunk.name !== null && chunk.name !== void 0) {
        this.currentToolCall = chunk.name;
        this.toolCallBuffer[this.currentToolCall] = chunk.args;
-      } else if (this.currentToolCall !== null) {
+      } else if (this.currentToolCall !== null && this.currentToolCall !== void 0) {
        this.toolCallBuffer[this.currentToolCall] += chunk.args;
      }
    }
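The added `!== void 0` guards in `bufferToolCalls` account for streamed tool-call chunks that carry `undefined` rather than `null`: the function name usually appears only on the first chunk, so with a bare `!== null` check an `undefined` name on a continuation chunk would start a bogus new buffer entry instead of appending to the current one. A standalone sketch of the same buffering pattern (the chunk shape and tool name are made up for illustration):

  // Illustrative buffer for streamed tool-call argument chunks.
  type ToolCallChunk = { name?: string | null; args: string };

  class ToolCallBuffer {
    private current: string | undefined;
    readonly byName: Record<string, string> = {};

    push(chunk: ToolCallChunk): void {
      if (chunk.name !== null && chunk.name !== undefined) {
        // First chunk of a tool call: remember the name, start its buffer.
        this.current = chunk.name;
        this.byName[this.current] = chunk.args;
      } else if (this.current !== undefined) {
        // Continuation chunk: append the partial JSON arguments.
        this.byName[this.current] += chunk.args;
      }
    }
  }

  const buf = new ToolCallBuffer();
  buf.push({ name: "set_progress", args: '{"va' }); // name present only here
  buf.push({ args: 'lue": 0.5}' });                 // name is undefined
  // buf.byName.set_progress === '{"value": 0.5}'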
@@ -1565,12 +1566,10 @@ function langGraphDefaultMergeState(state, messages, actions, agentName) {
       mergedMessages.push(message);
     } else {
       for (let i = 0; i < mergedMessages.length; i++) {
-        if (mergedMessages[i].id === message.id) {
-          if ("tool_calls" in message) {
-            if (("tool_calls" in mergedMessages[i] || "additional_kwargs" in mergedMessages[i]) && mergedMessages[i].content) {
-              message.tool_calls = mergedMessages[i]["tool_calls"];
-              message.additional_kwargs = mergedMessages[i].additional_kwargs;
-            }
+        if (mergedMessages[i].id === message.id && message.role === "assistant") {
+          if (("tool_calls" in mergedMessages[i] || "additional_kwargs" in mergedMessages[i]) && mergedMessages[i].content) {
+            message.tool_calls = mergedMessages[i]["tool_calls"];
+            message.additional_kwargs = mergedMessages[i].additional_kwargs;
          }
          mergedMessages[i] = message;
        }
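With the extra `message.role === "assistant"` condition, the id-based merge now only replaces a stored message (and carries over its `tool_calls` / `additional_kwargs`) when the incoming message is an assistant message; incoming tool or user messages that happen to share an id no longer enter this branch at all. A simplified sketch of the new branch (the message shape is reduced for illustration, not the actual LangChain message classes):

  // Reduced message shape, for illustration only.
  type MergedMessage = {
    id: string;
    role: "assistant" | "user" | "tool";
    content?: string;
    tool_calls?: unknown[];
    additional_kwargs?: Record<string, unknown>;
  };

  function mergeIncoming(merged: MergedMessage[], message: MergedMessage): void {
    for (let i = 0; i < merged.length; i++) {
      // Only an incoming assistant message replaces a stored message with the same id.
      if (merged[i].id === message.id && message.role === "assistant") {
        // Keep tool metadata from the stored copy when it also carried text content.
        if ((merged[i].tool_calls || merged[i].additional_kwargs) && merged[i].content) {
          message.tool_calls = merged[i].tool_calls;
          message.additional_kwargs = merged[i].additional_kwargs;
        }
        merged[i] = message;
      }
    }
  }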
@@ -1622,34 +1621,15 @@ function langGraphDefaultMergeState(state, messages, actions, agentName) {
     }
     correctedMessages.push(currentMessage);
   }
-  return deepMerge(state, {
+  return {
+    ...state,
     messages: correctedMessages,
     copilotkit: {
       actions
     }
-  });
-}
-__name(langGraphDefaultMergeState, "langGraphDefaultMergeState");
-function deepMerge(obj1, obj2) {
-  let result = {
-    ...obj1
   };
-  for (let key in obj2) {
-    if (typeof obj2[key] === "object" && !Array.isArray(obj2[key])) {
-      if (obj1[key]) {
-        result[key] = deepMerge(obj1[key], obj2[key]);
-      } else {
-        result[key] = {
-          ...obj2[key]
-        };
-      }
-    } else {
-      result[key] = obj2[key];
-    }
-  }
-  return result;
 }
-__name(deepMerge, "deepMerge");
+__name(langGraphDefaultMergeState, "langGraphDefaultMergeState");
 function formatMessages(messages) {
   return messages.map((message) => {
     if (message.isTextMessage() && message.role === "assistant") {
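Dropping `deepMerge` in favor of a spread changes the merge semantics of `langGraphDefaultMergeState`: top-level keys supplied alongside `...state` now replace the corresponding keys wholesale rather than being merged recursively, which appears to be the intent for `messages` and `copilotkit.actions`. A small before/after illustration with plain objects (not the real agent state type):

  // Illustrative only: shallow spread vs. the removed recursive deepMerge.
  const state = {
    messages: [{ id: "1" }],
    copilotkit: { actions: ["oldAction"], extra: "only-deep-merge-kept-this" },
    other: 42,
  };

  // New behavior: `copilotkit` is replaced as a whole, so `extra` is gone.
  const merged = {
    ...state,
    messages: [{ id: "2" }],
    copilotkit: { actions: ["newAction"] },
  };
  // merged.copilotkit => { actions: ["newAction"] }
  // merged.other      => 42 (untouched top-level keys still carry over)

  // The removed deepMerge would instead have recursed into `copilotkit`:
  // deepMerge(state, { copilotkit: { actions: ["newAction"] } }).copilotkit
  //   => { actions: ["newAction"], extra: "only-deep-merge-kept-this" }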
@@ -2775,4 +2755,4 @@ export {
   getCommonConfig,
   copilotRuntimeNodeHttpEndpoint
 };
-//# sourceMappingURL=chunk-2EV5DRUI.mjs.map
+//# sourceMappingURL=chunk-KDMS6EKE.mjs.map