@copilotkit/runtime 1.0.3 → 1.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/.turbo/turbo-build.log +28 -28
  2. package/CHANGELOG.md +8 -0
  3. package/README.md +1 -1
  4. package/dist/{chunk-FPVMUZ5S.mjs → chunk-67DNPQCA.mjs} +10 -10
  5. package/dist/chunk-67DNPQCA.mjs.map +1 -0
  6. package/dist/{chunk-TTT54UED.mjs → chunk-F7DCXPQ7.mjs} +2 -2
  7. package/dist/{chunk-GTYL57UF.mjs → chunk-FNDYTEEL.mjs} +2 -2
  8. package/dist/{chunk-3SJCLGQE.mjs → chunk-JWUHY4RB.mjs} +2 -2
  9. package/dist/{chunk-L2HCON4L.mjs → chunk-UFAHXJX7.mjs} +3 -3
  10. package/dist/{chunk-DVDKD6F5.mjs → chunk-URMISMK2.mjs} +8 -8
  11. package/dist/chunk-URMISMK2.mjs.map +1 -0
  12. package/dist/{chunk-VIQZS4W6.mjs → chunk-VQM4RIMQ.mjs} +9 -10
  13. package/dist/chunk-VQM4RIMQ.mjs.map +1 -0
  14. package/dist/index.js +26 -27
  15. package/dist/index.js.map +1 -1
  16. package/dist/index.mjs +7 -7
  17. package/dist/lib/index.js +26 -27
  18. package/dist/lib/index.js.map +1 -1
  19. package/dist/lib/index.mjs +7 -7
  20. package/dist/lib/integrations/index.js +7 -8
  21. package/dist/lib/integrations/index.js.map +1 -1
  22. package/dist/lib/integrations/index.mjs +5 -5
  23. package/dist/lib/integrations/nest/index.js +7 -8
  24. package/dist/lib/integrations/nest/index.js.map +1 -1
  25. package/dist/lib/integrations/nest/index.mjs +3 -3
  26. package/dist/lib/integrations/node-express/index.js +7 -8
  27. package/dist/lib/integrations/node-express/index.js.map +1 -1
  28. package/dist/lib/integrations/node-express/index.mjs +3 -3
  29. package/dist/lib/integrations/node-http/index.js +7 -8
  30. package/dist/lib/integrations/node-http/index.js.map +1 -1
  31. package/dist/lib/integrations/node-http/index.mjs +2 -2
  32. package/dist/service-adapters/index.js +15 -15
  33. package/dist/service-adapters/index.js.map +1 -1
  34. package/dist/service-adapters/index.mjs +2 -2
  35. package/package.json +4 -5
  36. package/src/graphql/resolvers/copilot.resolver.ts +6 -6
  37. package/src/service-adapters/experimental/groq/groq-adapter.ts +3 -3
  38. package/src/service-adapters/experimental/ollama/ollama-adapter.ts +3 -5
  39. package/src/service-adapters/google/google-genai-adapter.ts +4 -4
  40. package/src/service-adapters/langchain/langchain-adapter.ts +2 -2
  41. package/src/service-adapters/langchain/utils.ts +7 -7
  42. package/src/service-adapters/openai/openai-adapter.ts +2 -2
  43. package/dist/chunk-DVDKD6F5.mjs.map +0 -1
  44. package/dist/chunk-FPVMUZ5S.mjs.map +0 -1
  45. package/dist/chunk-VIQZS4W6.mjs.map +0 -1
  46. /package/dist/{chunk-TTT54UED.mjs.map → chunk-F7DCXPQ7.mjs.map} +0 -0
  47. /package/dist/{chunk-GTYL57UF.mjs.map → chunk-FNDYTEEL.mjs.map} +0 -0
  48. /package/dist/{chunk-3SJCLGQE.mjs.map → chunk-JWUHY4RB.mjs.map} +0 -0
  49. /package/dist/{chunk-L2HCON4L.mjs.map → chunk-UFAHXJX7.mjs.map} +0 -0
package/.turbo/turbo-build.log CHANGED
@@ -1,5 +1,5 @@
 
- > @copilotkit/runtime@1.0.3 build /home/runner/work/CopilotKit/CopilotKit/CopilotKit/packages/runtime
+ > @copilotkit/runtime@1.0.4 build /home/runner/work/CopilotKit/CopilotKit/CopilotKit/packages/runtime
  > tsup --clean
 
  CLI Building entry: src/index.ts, src/lib/index.ts, src/service-adapters/index.ts, src/utils/index.ts, src/lib/cloud/index.ts, src/lib/integrations/index.ts, src/graphql/types/base/index.ts, src/graphql/types/converted/index.ts, src/lib/integrations/nest/index.ts, src/lib/integrations/node-express/index.ts, src/lib/integrations/node-http/index.ts
@@ -10,45 +10,45 @@
  CLI Cleaning output folder
  ESM Build start
  CJS Build start
- CJS dist/index.js 96.66 KB
- CJS dist/lib/index.js 96.31 KB
- CJS dist/service-adapters/index.js 33.37 KB
+ CJS dist/index.js 96.73 KB
+ CJS dist/lib/index.js 96.37 KB
+ CJS dist/service-adapters/index.js 33.44 KB
  CJS dist/utils/index.js 6.32 KB
  CJS dist/lib/cloud/index.js 787.00 B
  CJS dist/lib/integrations/index.js 58.65 KB
- CJS dist/lib/integrations/nest/index.js 56.02 KB
- CJS dist/lib/integrations/node-express/index.js 56.11 KB
+ CJS dist/lib/integrations/nest/index.js 56.03 KB
+ CJS dist/lib/integrations/node-express/index.js 56.12 KB
  CJS dist/lib/integrations/node-http/index.js 55.65 KB
  CJS dist/graphql/types/base/index.js 2.44 KB
- CJS dist/index.js.map 150.81 KB
- CJS dist/service-adapters/index.js.map 64.52 KB
- CJS dist/lib/index.js.map 151.16 KB
+ CJS dist/index.js.map 150.90 KB
+ CJS dist/lib/index.js.map 151.25 KB
+ CJS dist/service-adapters/index.js.map 64.61 KB
  CJS dist/utils/index.js.map 5.63 KB
  CJS dist/lib/cloud/index.js.map 217.00 B
- CJS dist/lib/integrations/index.js.map 74.23 KB
- CJS dist/lib/integrations/nest/index.js.map 69.69 KB
+ CJS dist/lib/integrations/index.js.map 74.24 KB
  CJS dist/lib/integrations/node-express/index.js.map 69.72 KB
+ CJS dist/lib/integrations/nest/index.js.map 69.69 KB
  CJS dist/lib/integrations/node-http/index.js.map 68.87 KB
  CJS dist/graphql/types/converted/index.js 3.04 KB
  CJS dist/graphql/types/base/index.js.map 645.00 B
  CJS dist/graphql/types/converted/index.js.map 1.76 KB
- CJS ⚡️ Build success in 260ms
+ CJS ⚡️ Build success in 238ms
  ESM dist/lib/integrations/nest/index.mjs 379.00 B
  ESM dist/lib/integrations/node-express/index.mjs 393.00 B
  ESM dist/lib/integrations/node-http/index.mjs 349.00 B
  ESM dist/index.mjs 1.43 KB
  ESM dist/lib/index.mjs 1.19 KB
- ESM dist/chunk-L2HCON4L.mjs 4.67 KB
+ ESM dist/chunk-UFAHXJX7.mjs 4.67 KB
  ESM dist/service-adapters/index.mjs 459.00 B
- ESM dist/chunk-FPVMUZ5S.mjs 21.95 KB
+ ESM dist/chunk-67DNPQCA.mjs 22.01 KB
  ESM dist/utils/index.mjs 315.00 B
  ESM dist/lib/cloud/index.mjs 34.00 B
  ESM dist/lib/integrations/index.mjs 884.00 B
- ESM dist/chunk-3SJCLGQE.mjs 1.80 KB
- ESM dist/chunk-GTYL57UF.mjs 611.00 B
- ESM dist/chunk-TTT54UED.mjs 655.00 B
- ESM dist/chunk-DVDKD6F5.mjs 7.85 KB
- ESM dist/chunk-VIQZS4W6.mjs 49.07 KB
+ ESM dist/chunk-JWUHY4RB.mjs 1.80 KB
+ ESM dist/chunk-FNDYTEEL.mjs 611.00 B
+ ESM dist/chunk-F7DCXPQ7.mjs 655.00 B
+ ESM dist/chunk-VQM4RIMQ.mjs 49.07 KB
+ ESM dist/chunk-URMISMK2.mjs 7.87 KB
  ESM dist/chunk-U3V2BCGI.mjs 4.91 KB
  ESM dist/chunk-GEIBJJQ4.mjs 645.00 B
  ESM dist/chunk-RMZWGQ46.mjs 1.36 KB
@@ -58,17 +58,17 @@
  ESM dist/lib/integrations/node-http/index.mjs.map 71.00 B
  ESM dist/index.mjs.map 222.00 B
  ESM dist/lib/index.mjs.map 71.00 B
+ ESM dist/chunk-UFAHXJX7.mjs.map 13.94 KB
  ESM dist/service-adapters/index.mjs.map 71.00 B
- ESM dist/chunk-L2HCON4L.mjs.map 13.94 KB
- ESM dist/chunk-FPVMUZ5S.mjs.map 47.77 KB
  ESM dist/utils/index.mjs.map 71.00 B
+ ESM dist/chunk-67DNPQCA.mjs.map 47.84 KB
  ESM dist/lib/cloud/index.mjs.map 71.00 B
- ESM dist/chunk-3SJCLGQE.mjs.map 3.55 KB
  ESM dist/lib/integrations/index.mjs.map 71.00 B
- ESM dist/chunk-GTYL57UF.mjs.map 901.00 B
- ESM dist/chunk-TTT54UED.mjs.map 931.00 B
- ESM dist/chunk-DVDKD6F5.mjs.map 14.76 KB
- ESM dist/chunk-VIQZS4W6.mjs.map 65.16 KB
+ ESM dist/chunk-JWUHY4RB.mjs.map 3.55 KB
+ ESM dist/chunk-FNDYTEEL.mjs.map 901.00 B
+ ESM dist/chunk-F7DCXPQ7.mjs.map 931.00 B
+ ESM dist/chunk-VQM4RIMQ.mjs.map 65.15 KB
+ ESM dist/chunk-URMISMK2.mjs.map 14.79 KB
  ESM dist/chunk-U3V2BCGI.mjs.map 5.63 KB
  ESM dist/chunk-GEIBJJQ4.mjs.map 1.18 KB
  ESM dist/chunk-RMZWGQ46.mjs.map 645.00 B
@@ -77,9 +77,9 @@
  ESM dist/graphql/types/converted/index.mjs 283.00 B
  ESM dist/graphql/types/base/index.mjs.map 71.00 B
  ESM dist/graphql/types/converted/index.mjs.map 71.00 B
- ESM ⚡️ Build success in 263ms
+ ESM ⚡️ Build success in 239ms
  DTS Build start
- DTS ⚡️ Build success in 8275ms
+ DTS ⚡️ Build success in 8686ms
  DTS dist/index.d.ts 1.50 KB
  DTS dist/lib/integrations/node-http/index.d.ts 586.00 B
  DTS dist/lib/integrations/node-express/index.d.ts 592.00 B
package/CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
  # @copilotkit/runtime
 
+ ## 1.0.4
+
+ ### Patch Changes
+
+ - Remove nanoid
+ - Updated dependencies
+ - @copilotkit/shared@1.0.4
+
  ## 1.0.3
 
  ### Patch Changes
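
The "Remove nanoid" patch note corresponds to the substitution visible in the chunk diffs below: the service adapters drop their direct `nanoid` import and call `randomId` from `@copilotkit/shared` instead. A minimal sketch of the resulting pattern, assuming only what the diff shows (`randomId()` takes no arguments and returns a string); the wrapper function is hypothetical, not part of the package API:

```typescript
// 1.0.3 adapters imported nanoid directly:
//   import { nanoid } from "nanoid";
//   return { threadId: threadId || nanoid() };

// 1.0.4 uses the shared helper, removing the nanoid dependency:
import { randomId } from "@copilotkit/shared";

// resolveThreadId is an illustrative helper, not an exported API.
function resolveThreadId(threadId?: string): string {
  // Reuse the caller's thread ID when present; otherwise mint a new one.
  return threadId || randomId();
}
```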
package/README.md CHANGED
@@ -41,7 +41,7 @@ in-app AI chatbots, AI agents, and AI Textareas.
  <img src="https://github.com/CopilotKit/CopilotKit/assets/131273140/a4a0fd04-6aee-4e02-9c3e-3f11d60b4e8b" alt="Read the Docs" height="30">
  </a>
  &nbsp;&middot;&nbsp;
- <a href="https://go.copilotkit.ai/kM4Lo86">
+ <a href="https://cloud.copilotkit.ai">
  <img src="https://github.com/CopilotKit/CopilotKit/assets/131273140/28ca62a8-cf93-4d3f-96b0-dc11bf89b734" alt="Try Copilot Cloud" height="30">
  </a>
  </p>
package/dist/{chunk-FPVMUZ5S.mjs → chunk-67DNPQCA.mjs} CHANGED
@@ -2,7 +2,7 @@ import {
  convertActionInputToLangChainTool,
  convertMessageToLangChainMessage,
  streamLangChainResponse
- } from "./chunk-DVDKD6F5.mjs";
+ } from "./chunk-URMISMK2.mjs";
  import {
  ActionExecutionMessage,
  ResultMessage,
@@ -162,7 +162,7 @@ function convertSystemMessageToAssistantAPI(message) {
  __name(convertSystemMessageToAssistantAPI, "convertSystemMessageToAssistantAPI");
 
  // src/service-adapters/openai/openai-adapter.ts
- import { nanoid } from "nanoid";
+ import { randomId } from "@copilotkit/shared";
  var DEFAULT_MODEL = "gpt-4o";
  var OpenAIAdapter = class {
  model = DEFAULT_MODEL;
@@ -225,7 +225,7 @@ var OpenAIAdapter = class {
  eventStream$.complete();
  });
  return {
- threadId: threadId || nanoid()
+ threadId: threadId || randomId()
  };
  }
  };
@@ -471,7 +471,7 @@ function tryParseJson(str) {
  __name(tryParseJson, "tryParseJson");
 
  // src/service-adapters/google/google-genai-adapter.ts
- import { nanoid as nanoid2 } from "nanoid";
+ import { randomId as randomId2 } from "@copilotkit/shared";
  var GoogleGenerativeAIAdapter = class {
  model;
  constructor(options) {
@@ -538,7 +538,7 @@ var GoogleGenerativeAIAdapter = class {
  }
  if (!isTextMessage) {
  isTextMessage = true;
- eventStream$.sendTextMessageStart(nanoid2());
+ eventStream$.sendTextMessageStart(randomId2());
  }
  eventStream$.sendTextMessageContent(chunkText);
  }
@@ -548,13 +548,13 @@
  let calls = (await result.response).functionCalls();
  if (calls) {
  for (let call of calls) {
- eventStream$.sendActionExecution(nanoid2(), call.name, JSON.stringify(replaceNewlinesInObject(call.args)));
+ eventStream$.sendActionExecution(randomId2(), call.name, JSON.stringify(replaceNewlinesInObject(call.args)));
  }
  }
  eventStream$.complete();
  });
  return {
- threadId: request.threadId || nanoid2()
+ threadId: request.threadId || randomId2()
  };
  }
  };
@@ -578,7 +578,7 @@ function replaceNewlinesInObject(obj) {
  __name(replaceNewlinesInObject, "replaceNewlinesInObject");
 
  // src/service-adapters/langchain/langchain-adapter.ts
- import { nanoid as nanoid3 } from "nanoid";
+ import { randomId as randomId3 } from "@copilotkit/shared";
  var LangChainAdapter = class {
  options;
  /**
@@ -603,7 +603,7 @@ var LangChainAdapter = class {
  });
  });
  return {
- threadId: threadId || nanoid3()
+ threadId: threadId || randomId3()
  };
  }
  };
@@ -714,4 +714,4 @@ export {
  RemoteChain,
  UnifyAdapter
  };
- //# sourceMappingURL=chunk-FPVMUZ5S.mjs.map
+ //# sourceMappingURL=chunk-67DNPQCA.mjs.map
package/dist/chunk-67DNPQCA.mjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/service-adapters/openai/openai-adapter.ts","../src/service-adapters/openai/utils.ts","../src/service-adapters/openai/openai-assistant-adapter.ts","../src/service-adapters/google/google-genai-adapter.ts","../src/service-adapters/google/utils.ts","../src/service-adapters/langchain/langchain-adapter.ts","../src/service-adapters/langchain/langserve.ts","../src/service-adapters/unify/unify-adapter.ts"],"sourcesContent":["/**\n * CopilotRuntime Adapter for OpenAI.\n *\n * <RequestExample>\n * ```jsx CopilotRuntime Example\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(req, new OpenAIAdapter());\n * ```\n * </RequestExample>\n *\n * You can easily set the model to use by passing it to the constructor.\n * ```jsx\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(\n * req,\n * new OpenAIAdapter({ model: \"gpt-4o\" }),\n * );\n * ```\n *\n * To use your custom OpenAI instance, pass the `openai` property.\n * ```jsx\n * const openai = new OpenAI({\n * organization: \"your-organization-id\",\n * apiKey: \"your-api-key\"\n * });\n *\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(\n * req,\n * new OpenAIAdapter({ openai }),\n * );\n * ```\n *\n */\nimport OpenAI from \"openai\";\nimport {\n CopilotServiceAdapter,\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport {\n convertActionInputToOpenAITool,\n convertMessageToOpenAIMessage,\n limitMessagesToTokenCount,\n} from \"./utils\";\nimport { randomId } from \"@copilotkit/shared\";\n\nconst DEFAULT_MODEL = \"gpt-4o\";\n\nexport interface OpenAIAdapterParams {\n /**\n * An optional OpenAI instance to use.\n */\n openai?: OpenAI;\n\n /**\n * The model to use.\n */\n model?: string;\n}\n\nexport class OpenAIAdapter implements CopilotServiceAdapter {\n private model: string = DEFAULT_MODEL;\n\n private _openai: OpenAI;\n public get openai(): OpenAI {\n return this._openai;\n }\n\n constructor(params?: OpenAIAdapterParams) {\n this._openai = params?.openai || new OpenAI({});\n if (params?.model) {\n this.model = params.model;\n }\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const { threadId, model = this.model, messages, actions, eventSource } = request;\n const tools = actions.map(convertActionInputToOpenAITool);\n\n let openaiMessages = messages.map(convertMessageToOpenAIMessage);\n openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);\n\n const stream = this.openai.beta.chat.completions.stream({\n model: model,\n stream: true,\n messages: openaiMessages,\n ...(tools.length > 0 && { tools }),\n });\n\n eventSource.stream(async (eventStream$) => {\n let mode: \"function\" | \"message\" | null = null;\n for await (const chunk of stream) {\n const toolCall = chunk.choices[0].delta.tool_calls?.[0];\n const content = chunk.choices[0].delta.content;\n\n // When switching from message to function or vice versa,\n // send the respective end event.\n // If toolCall?.id is defined, it means a new tool call starts.\n if (mode === \"message\" && toolCall?.id) {\n mode = null;\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\" && (toolCall === undefined || toolCall?.id)) {\n mode = null;\n eventStream$.sendActionExecutionEnd();\n }\n\n // If we send a new message type, send the appropriate start event.\n if (mode === null) {\n if (toolCall?.id) {\n mode = 
\"function\";\n eventStream$.sendActionExecutionStart(toolCall!.id, toolCall!.function!.name);\n } else if (content) {\n mode = \"message\";\n eventStream$.sendTextMessageStart(chunk.id);\n }\n }\n\n // send the content events\n if (mode === \"message\" && content) {\n eventStream$.sendTextMessageContent(content);\n } else if (mode === \"function\" && toolCall?.function?.arguments) {\n eventStream$.sendActionExecutionArgs(toolCall.function.arguments);\n }\n }\n\n // send the end events\n if (mode === \"message\") {\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\") {\n eventStream$.sendActionExecutionEnd();\n }\n\n eventStream$.complete();\n });\n\n return {\n threadId: threadId || randomId(),\n };\n }\n}\n","import {\n ActionExecutionMessage,\n Message,\n ResultMessage,\n TextMessage,\n} from \"../../graphql/types/converted\";\nimport { Tiktoken, TiktokenModel, encodingForModel } from \"js-tiktoken\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\nimport { ChatCompletionMessageParam, ChatCompletionTool } from \"openai/resources\";\n\nexport function limitMessagesToTokenCount(\n messages: any[],\n tools: any[],\n model: string,\n maxTokens?: number,\n): any[] {\n maxTokens ||= maxTokensForOpenAIModel(model);\n\n const result: any[] = [];\n const toolsNumTokens = countToolsTokens(model, tools);\n if (toolsNumTokens > maxTokens) {\n throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);\n }\n maxTokens -= toolsNumTokens;\n\n for (const message of messages) {\n if (message.role === \"system\") {\n const numTokens = countMessageTokens(model, message);\n maxTokens -= numTokens;\n\n if (maxTokens < 0) {\n throw new Error(\"Not enough tokens for system message.\");\n }\n }\n }\n\n let cutoff: boolean = false;\n\n const reversedMessages = [...messages].reverse();\n for (const message of reversedMessages) {\n if (message.role === \"system\") {\n result.unshift(message);\n continue;\n } else if (cutoff) {\n continue;\n }\n let numTokens = countMessageTokens(model, message);\n if (maxTokens < numTokens) {\n cutoff = true;\n continue;\n }\n result.unshift(message);\n maxTokens -= numTokens;\n }\n\n return result;\n}\n\nexport function maxTokensForOpenAIModel(model: string): number {\n return maxTokensByModel[model] || DEFAULT_MAX_TOKENS;\n}\n\nconst DEFAULT_MAX_TOKENS = 128000;\n\nconst maxTokensByModel: { [key: string]: number } = {\n // GPT-4\n \"gpt-4o\": 128000,\n \"gpt-4o-2024-05-13\": 128000,\n \"gpt-4-turbo\": 128000,\n \"gpt-4-turbo-2024-04-09\": 128000,\n \"gpt-4-0125-preview\": 128000,\n \"gpt-4-turbo-preview\": 128000,\n \"gpt-4-1106-preview\": 128000,\n \"gpt-4-vision-preview\": 128000,\n \"gpt-4-1106-vision-preview\": 128000,\n \"gpt-4-32k\": 32768,\n \"gpt-4-32k-0613\": 32768,\n \"gpt-4-32k-0314\": 32768,\n \"gpt-4\": 8192,\n \"gpt-4-0613\": 8192,\n \"gpt-4-0314\": 8192,\n\n // GPT-3.5\n \"gpt-3.5-turbo-0125\": 16385,\n \"gpt-3.5-turbo\": 16385,\n \"gpt-3.5-turbo-1106\": 16385,\n \"gpt-3.5-turbo-instruct\": 4096,\n \"gpt-3.5-turbo-16k\": 16385,\n \"gpt-3.5-turbo-0613\": 4096,\n \"gpt-3.5-turbo-16k-0613\": 16385,\n \"gpt-3.5-turbo-0301\": 4097,\n};\n\nfunction countToolsTokens(model: string, tools: any[]): number {\n if (tools.length === 0) {\n return 0;\n }\n const json = JSON.stringify(tools);\n return countTokens(model, json);\n}\n\nfunction countMessageTokens(model: string, message: any): number {\n return countTokens(model, message.content || \"\");\n}\n\nfunction countTokens(model: string, text: 
string): number {\n let enc: Tiktoken;\n try {\n enc = encodingForModel(model as TiktokenModel);\n } catch (e) {\n enc = encodingForModel(\"gpt-4\");\n }\n return enc.encode(text).length;\n}\n\nexport function convertActionInputToOpenAITool(action: ActionInput): ChatCompletionTool {\n return {\n type: \"function\",\n function: {\n name: action.name,\n description: action.description,\n parameters: JSON.parse(action.jsonSchema),\n },\n };\n}\n\nexport function convertMessageToOpenAIMessage(message: Message): ChatCompletionMessageParam {\n if (message instanceof TextMessage) {\n return {\n role: message.role,\n content: message.content,\n };\n } else if (message instanceof ActionExecutionMessage) {\n return {\n role: \"assistant\",\n tool_calls: [\n {\n id: message.id,\n type: \"function\",\n function: {\n name: message.name,\n arguments: JSON.stringify(message.arguments),\n },\n },\n ],\n };\n } else if (message instanceof ResultMessage) {\n return {\n role: \"tool\",\n content: message.result,\n tool_call_id: message.actionExecutionId,\n };\n }\n}\n\nexport function convertSystemMessageToAssistantAPI(message: ChatCompletionMessageParam) {\n return {\n ...message,\n ...(message.role === \"system\" && {\n role: \"assistant\",\n content: \"THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: \" + message.content,\n }),\n };\n}\n","/**\n * CopilotKit Adapter for the OpenAI Assistant API.\n *\n * Use this adapter to get responses from the OpenAI Assistant API.\n *\n * <RequestExample>\n * ```typescript\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(\n * req,\n * new OpenAIAssistantAdapter({\n * assistantId: \"your-assistant-id\"\n * })\n * );\n * ```\n * </RequestExample>\n */\nimport OpenAI from \"openai\";\nimport {\n CopilotServiceAdapter,\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport { Message, ResultMessage, TextMessage } from \"../../graphql/types/converted\";\nimport {\n convertActionInputToOpenAITool,\n convertMessageToOpenAIMessage,\n convertSystemMessageToAssistantAPI,\n} from \"./utils\";\nimport { RunSubmitToolOutputsStreamParams } from \"openai/resources/beta/threads/runs/runs\";\nimport { AssistantStream } from \"openai/lib/AssistantStream\";\nimport { RuntimeEventSource } from \"../events\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\nimport { AssistantStreamEvent, AssistantTool } from \"openai/resources/beta/assistants\";\n\nexport interface OpenAIAssistantAdapterParams {\n /**\n * The ID of the assistant to use.\n */\n assistantId: string;\n\n /**\n * An instance of `OpenAI` to use for the request. If not provided, a new instance will be created.\n */\n openai?: OpenAI;\n\n /**\n * Whether to enable the code interpreter. Defaults to `true`.\n */\n codeInterpreterEnabled?: boolean;\n\n /**\n * Whether to enable retrieval. 
Defaults to `true`.\n */\n fileSearchEnabled?: boolean;\n}\n\nexport class OpenAIAssistantAdapter implements CopilotServiceAdapter {\n private openai: OpenAI;\n private codeInterpreterEnabled: boolean;\n private assistantId: string;\n private fileSearchEnabled: boolean;\n\n constructor(params: OpenAIAssistantAdapterParams) {\n this.openai = params.openai || new OpenAI({});\n this.codeInterpreterEnabled = params.codeInterpreterEnabled === false || true;\n this.fileSearchEnabled = params.fileSearchEnabled === false || true;\n this.assistantId = params.assistantId;\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const { messages, actions, eventSource, runId } = request;\n // if we don't have a threadId, create a new thread\n let threadId = request.threadId || (await this.openai.beta.threads.create()).id;\n\n const lastMessage = messages.at(-1);\n\n let nextRunId: string | undefined = undefined;\n\n // submit function outputs\n if (lastMessage instanceof ResultMessage && runId) {\n nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);\n }\n // submit user message\n else if (lastMessage instanceof TextMessage) {\n nextRunId = await this.submitUserMessage(threadId, messages, actions, eventSource);\n }\n // unsupported message\n else {\n throw new Error(\"No actionable message found in the messages\");\n }\n\n return {\n threadId,\n runId: nextRunId,\n };\n }\n\n private async submitToolOutputs(\n threadId: string,\n runId: string,\n messages: Message[],\n eventSource: RuntimeEventSource,\n ) {\n let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);\n if (!run.required_action) {\n throw new Error(\"No tool outputs required\");\n }\n\n // get the required tool call ids\n const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map(\n (toolCall) => toolCall.id,\n );\n\n // search for these tool calls\n const resultMessages = messages.filter(\n (message) =>\n message instanceof ResultMessage && toolCallsIds.includes(message.actionExecutionId),\n ) as ResultMessage[];\n\n if (toolCallsIds.length != resultMessages.length) {\n throw new Error(\"Number of function results does not match the number of tool calls\");\n }\n\n // submit the tool outputs\n const toolOutputs: RunSubmitToolOutputsStreamParams.ToolOutput[] = resultMessages.map(\n (message) => {\n return {\n tool_call_id: message.actionExecutionId,\n output: message.result,\n };\n },\n );\n\n const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {\n tool_outputs: toolOutputs,\n });\n\n await this.streamResponse(stream, eventSource);\n return runId;\n }\n\n private async submitUserMessage(\n threadId: string,\n messages: Message[],\n actions: ActionInput[],\n eventSource: RuntimeEventSource,\n ) {\n messages = [...messages];\n\n // get the instruction message\n const instructionsMessage = messages.shift();\n const instructions =\n instructionsMessage instanceof TextMessage ? 
instructionsMessage.content : \"\";\n\n // get the latest user message\n const userMessage = messages\n .map(convertMessageToOpenAIMessage)\n .map(convertSystemMessageToAssistantAPI)\n .at(-1);\n\n if (userMessage.role !== \"user\") {\n throw new Error(\"No user message found\");\n }\n\n // create a new message on the thread\n await this.openai.beta.threads.messages.create(threadId, {\n role: \"user\",\n content: userMessage.content,\n });\n\n const openaiTools = actions.map(convertActionInputToOpenAITool);\n\n const tools = [\n ...openaiTools,\n ...(this.codeInterpreterEnabled ? [{ type: \"code_interpreter\" } as AssistantTool] : []),\n ...(this.fileSearchEnabled ? [{ type: \"file_search\" } as AssistantTool] : []),\n ];\n\n // run the thread\n let stream = this.openai.beta.threads.runs.stream(threadId, {\n assistant_id: this.assistantId,\n instructions,\n tools: tools,\n });\n\n await this.streamResponse(stream, eventSource);\n\n return getRunIdFromStream(stream);\n }\n\n private async streamResponse(stream: AssistantStream, eventSource: RuntimeEventSource) {\n eventSource.stream(async (eventStream$) => {\n let inFunctionCall = false;\n\n for await (const chunk of stream) {\n switch (chunk.event) {\n case \"thread.message.created\":\n if (inFunctionCall) {\n eventStream$.sendActionExecutionEnd();\n }\n eventStream$.sendTextMessageStart(chunk.data.id);\n break;\n case \"thread.message.delta\":\n if (chunk.data.delta.content?.[0].type === \"text\") {\n eventStream$.sendTextMessageContent(chunk.data.delta.content?.[0].text.value);\n }\n break;\n case \"thread.message.completed\":\n eventStream$.sendTextMessageEnd();\n break;\n case \"thread.run.step.delta\":\n let toolCallId: string | undefined;\n let toolCallName: string | undefined;\n let toolCallArgs: string | undefined;\n if (\n chunk.data.delta.step_details.type === \"tool_calls\" &&\n chunk.data.delta.step_details.tool_calls?.[0].type === \"function\"\n ) {\n toolCallId = chunk.data.delta.step_details.tool_calls?.[0].id;\n toolCallName = chunk.data.delta.step_details.tool_calls?.[0].function.name;\n toolCallArgs = chunk.data.delta.step_details.tool_calls?.[0].function.arguments;\n }\n\n if (toolCallName && toolCallId) {\n if (inFunctionCall) {\n eventStream$.sendActionExecutionEnd();\n }\n inFunctionCall = true;\n eventStream$.sendActionExecutionStart(toolCallId, toolCallName);\n } else if (toolCallArgs) {\n eventStream$.sendActionExecutionArgs(toolCallArgs);\n }\n break;\n }\n }\n if (inFunctionCall) {\n eventStream$.sendActionExecutionEnd();\n }\n eventStream$.complete();\n });\n }\n}\n\nfunction getRunIdFromStream(stream: AssistantStream): Promise<string> {\n return new Promise<string>((resolve, reject) => {\n let runIdGetter = (event: AssistantStreamEvent) => {\n if (event.event === \"thread.run.created\") {\n const runId = event.data.id;\n stream.off(\"event\", runIdGetter);\n resolve(runId);\n }\n };\n stream.on(\"event\", runIdGetter);\n });\n}\n","/**\n * CopilotKit Adapter for Google Gemini\n *\n * Use this adapter for a Google Gemini backend.\n *\n * <RequestExample>\n * ```typescript\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(\n * req,\n * new GoogleGenerativeAIAdapter()\n * );\n * ```\n * </RequestExample>\n *\n * To set up a different model, pass the model prop:\n *\n * ```typescript\n * const copilotKit = new CopilotRuntime();\n * const genAI = new GoogleGenerativeAI(\n * process.env[\"GOOGLE_API_KEY\"]!\n * );\n * const model = genAI.getGenerativeModel(\n * { model: \"gemini-pro\" }\n * 
);\n * return copilotKit.response(\n * req,\n * new GoogleGenerativeAIAdapter()\n * );\n * ```\n */\nimport { CopilotServiceAdapter } from \"../service-adapter\";\nimport {\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport { GenerativeModel, GoogleGenerativeAI } from \"@google/generative-ai\";\nimport { TextMessage } from \"../../graphql/types/converted\";\nimport { convertMessageToGoogleGenAIMessage, transformActionToGoogleGenAITool } from \"./utils\";\nimport { randomId } from \"@copilotkit/shared\";\n\ninterface GoogleGenerativeAIAdapterOptions {\n /**\n * A custom `GenerativeModel` to use for the request.\n */\n model?: GenerativeModel;\n}\n\nexport class GoogleGenerativeAIAdapter implements CopilotServiceAdapter {\n private model: GenerativeModel;\n\n constructor(options?: GoogleGenerativeAIAdapterOptions) {\n if (options?.model) {\n this.model = options.model;\n } else {\n const genAI = new GoogleGenerativeAI(process.env[\"GOOGLE_API_KEY\"]!);\n this.model = genAI.getGenerativeModel({ model: \"gemini-pro\" });\n }\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const { messages, actions, eventSource } = request;\n\n // get the history (everything except the first and last message)\n const history = messages.slice(1, -1).map(convertMessageToGoogleGenAIMessage);\n\n // get the current message (the last message)\n const currentMessage = convertMessageToGoogleGenAIMessage(messages.at(-1));\n if (!currentMessage) {\n throw new Error(\"No current message\");\n }\n\n let systemMessage: string;\n const firstMessage = messages.at(0);\n if (firstMessage instanceof TextMessage && firstMessage.role === \"system\") {\n systemMessage = firstMessage.content.trim();\n } else {\n throw new Error(\"First message is not a system message\");\n }\n\n const tools = actions.map(transformActionToGoogleGenAITool);\n\n const isFirstGenGeminiPro =\n this.model.model === \"gemini-pro\" || this.model.model === \"models/gemini-pro\";\n\n const chat = this.model.startChat({\n history: [\n ...history,\n // gemini-pro does not support system instructions, so we need to add them to the history\n ...(isFirstGenGeminiPro ? [{ role: \"user\", parts: [{ text: systemMessage }] }] : []),\n ],\n // only gemini-1.5-pro-latest and later supports setting system instructions\n ...(isFirstGenGeminiPro\n ? 
{}\n : { systemInstruction: { role: \"user\", parts: [{ text: systemMessage }] } }),\n tools,\n });\n\n const result = await chat.sendMessageStream(currentMessage.parts);\n\n eventSource.stream(async (eventStream$) => {\n let isTextMessage = false;\n for await (const chunk of result.stream) {\n const chunkText = chunk.text();\n if (chunkText === \"\") {\n continue;\n }\n if (!isTextMessage) {\n isTextMessage = true;\n eventStream$.sendTextMessageStart(randomId());\n }\n eventStream$.sendTextMessageContent(chunkText);\n }\n if (isTextMessage) {\n eventStream$.sendTextMessageEnd();\n }\n\n let calls = (await result.response).functionCalls();\n if (calls) {\n for (let call of calls) {\n eventStream$.sendActionExecution(\n randomId(),\n call.name,\n JSON.stringify(replaceNewlinesInObject(call.args)),\n );\n }\n }\n eventStream$.complete();\n });\n\n return {\n threadId: request.threadId || randomId(),\n };\n }\n}\n\nfunction replaceNewlinesInObject(obj: any): any {\n if (typeof obj === \"string\") {\n return obj.replace(/\\\\\\\\n/g, \"\\n\");\n } else if (Array.isArray(obj)) {\n return obj.map(replaceNewlinesInObject);\n } else if (typeof obj === \"object\" && obj !== null) {\n const newObj: any = {};\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n newObj[key] = replaceNewlinesInObject(obj[key]);\n }\n }\n return newObj;\n }\n return obj;\n}\n","import {\n ActionExecutionMessage,\n Message,\n ResultMessage,\n TextMessage,\n} from \"../../graphql/types/converted\";\nimport { Tool } from \"@google/generative-ai\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\n\nexport function convertMessageToGoogleGenAIMessage(message: Message) {\n if (message instanceof TextMessage) {\n const role = {\n user: \"user\",\n assistant: \"model\",\n system: \"user\",\n }[message.role];\n\n const text =\n message.role === \"system\"\n ? 
\"THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: \" + message.content\n : message.content;\n\n return {\n role,\n parts: [{ text }],\n };\n } else if (message instanceof ActionExecutionMessage) {\n return {\n role: \"model\",\n parts: [\n {\n functionCall: {\n name: message.name,\n args: message.arguments,\n },\n },\n ],\n };\n } else if (message instanceof ResultMessage) {\n return {\n role: \"function\",\n parts: [\n {\n functionResponse: {\n name: message.actionName,\n response: {\n name: message.actionName,\n content: tryParseJson(message.result),\n },\n },\n },\n ],\n };\n }\n}\n\nexport function transformActionToGoogleGenAITool(action: ActionInput): Tool {\n const name = action.name;\n const description = action.description;\n const parameters = JSON.parse(action.jsonSchema);\n\n const transformProperties = (props: any) => {\n for (const key in props) {\n if (props[key].type) {\n props[key].type = props[key].type.toUpperCase();\n }\n if (props[key].properties) {\n transformProperties(props[key].properties);\n }\n }\n };\n transformProperties(parameters);\n\n return {\n functionDeclarations: [\n {\n name,\n description,\n parameters,\n },\n ],\n };\n}\n\nfunction tryParseJson(str?: string) {\n if (!str) {\n return \"\";\n }\n try {\n return JSON.parse(str);\n } catch (e) {\n return str;\n }\n}\n","/**\n * CopilotKit Adapter for LangChain\n *\n * Use this adapter to use LangChain as a backend.\n *\n * ```typescript\n * return copilotKit.response(\n * req,\n * new LangChainAdapter(async (forwardedProps) => {\n * const model = new ChatOpenAI({ modelName: \"gpt-4o\" });\n * return model.stream(forwardedProps.messages, {\n * tools: forwardedProps.tools,\n * });\n * })\n * );\n * ```\n * The async handler function can return:\n *\n * - a simple `string` response\n * - a LangChain stream `IterableReadableStream`\n * - a LangChain `BaseMessageChunk` object\n * - a LangChain `AIMessage` object\n */\n\nimport { BaseMessage } from \"@langchain/core/messages\";\nimport { CopilotServiceAdapter } from \"../service-adapter\";\nimport {\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport {\n convertActionInputToLangChainTool,\n convertMessageToLangChainMessage,\n streamLangChainResponse,\n} from \"./utils\";\nimport { DynamicStructuredTool } from \"@langchain/core/tools\";\nimport { LangChainReturnType } from \"./types\";\nimport { randomId } from \"@copilotkit/shared\";\n\ninterface ChainFnParameters {\n model: string;\n messages: BaseMessage[];\n tools: DynamicStructuredTool[];\n threadId?: string;\n runId?: string;\n}\n\ninterface LangChainAdapterOptions {\n chainFn: (parameters: ChainFnParameters) => Promise<LangChainReturnType>;\n}\n\nexport class LangChainAdapter implements CopilotServiceAdapter {\n /**\n * To use LangChain as a backend, provide a handler function to the adapter with your custom LangChain logic.\n */\n constructor(private options: LangChainAdapterOptions) {}\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const { eventSource, model, actions, messages, threadId, runId } = request;\n const result = await this.options.chainFn({\n messages: messages.map(convertMessageToLangChainMessage),\n tools: actions.map(convertActionInputToLangChainTool),\n model,\n threadId,\n runId,\n });\n\n eventSource.stream(async (eventStream$) => {\n await streamLangChainResponse({\n result,\n eventStream$,\n });\n });\n\n return {\n threadId: threadId || randomId(),\n 
};\n }\n}\n","import { Parameter, Action } from \"@copilotkit/shared\";\nimport { RemoteRunnable } from \"langchain/runnables/remote\";\n\nexport interface RemoteChainParameters {\n name: string;\n description: string;\n chainUrl: string;\n parameters?: Parameter[];\n parameterType?: \"single\" | \"multi\";\n}\n\nexport class RemoteChain {\n name: string;\n description: string;\n chainUrl: string;\n parameters?: Parameter[];\n parameterType: \"single\" | \"multi\";\n\n constructor(options: RemoteChainParameters) {\n this.name = options.name;\n this.description = options.description;\n this.chainUrl = options.chainUrl;\n this.parameters = options.parameters;\n this.parameterType = options.parameterType || \"multi\";\n }\n\n async toAction(): Promise<Action<any>> {\n if (!this.parameters) {\n await this.inferLangServeParameters();\n }\n\n return {\n name: this.name,\n description: this.description,\n parameters: this.parameters!,\n handler: async (args: any) => {\n const runnable = new RemoteRunnable({ url: this.chainUrl });\n let input: any;\n if (this.parameterType === \"single\") {\n input = args[Object.keys(args)[0]];\n } else {\n input = args;\n }\n return await runnable.invoke(input);\n },\n };\n }\n\n async inferLangServeParameters() {\n const supportedTypes = [\"string\", \"number\", \"boolean\"];\n\n let schemaUrl = this.chainUrl.replace(/\\/+$/, \"\") + \"/input_schema\";\n let schema = await fetch(schemaUrl)\n .then((res) => res.json())\n .catch(() => {\n throw new Error(\"Failed to fetch langserve schema at \" + schemaUrl);\n });\n // for now, don't use json schema, just do a simple conversion\n\n if (supportedTypes.includes(schema.type)) {\n this.parameterType = \"single\";\n this.parameters = [\n {\n name: \"input\",\n type: schema.type,\n description: \"The input to the chain\",\n },\n ];\n } else if (schema.type === \"object\") {\n this.parameterType = \"multi\";\n this.parameters = Object.keys(schema.properties).map((key) => {\n let property = schema.properties[key];\n if (!supportedTypes.includes(property.type)) {\n throw new Error(\"Unsupported schema type\");\n }\n return {\n name: key,\n type: property.type,\n description: property.description || \"\",\n required: schema.required?.includes(key) || false,\n };\n });\n } else {\n throw new Error(\"Unsupported schema type\");\n }\n }\n}\n","/**\n * CopilotRuntime Adapter for Unify.\n *\n * <RequestExample>\n * ```jsx CopilotRuntime Example\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(req, new UnifyAdapter());\n * ```\n * </RequestExample>\n *\n * You can easily set the model to use by passing it to the constructor.\n * ```jsx\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(\n * req,\n * new UnifyAdapter({ model: \"llama-3-70b-chat@together-ai\" }),\n * );\n * ```\n *\n * To use a custom OpenAI instance, pass the `openai` property.\n * ```jsx\n * const unifyOpenAi = new OpenAI({\n * apiKey: \"your-api-key\"\n * });\n *\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(\n * req,\n * new UnifyAdapter({ openai: unifyOpenAi }),\n * );\n * ```\n *\n */\nimport { OpenAIAdapter, OpenAIAdapterParams } from \"../openai/openai-adapter\";\nimport {\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n CopilotServiceAdapter,\n} from \"../service-adapter\";\n\nconst UNIFY_BASE_URL = \"https://api.unify.ai/v0/chat/completions\";\nconst UNIFY_API_KEY = \"UNIFY_API_KEY\";\n\nexport interface UnifyAdapterParams extends 
OpenAIAdapterParams {\n apiKey?: string;\n}\n\nexport class UnifyAdapter implements CopilotServiceAdapter {\n private openaiAdapter: OpenAIAdapter;\n\n constructor(params?: UnifyAdapterParams) {\n this.openaiAdapter = new OpenAIAdapter(params);\n this.openaiAdapter.openai.baseURL = UNIFY_BASE_URL;\n\n const unifyApiKeyOverride: string | undefined = process.env[UNIFY_API_KEY] || params?.apiKey;\n if (unifyApiKeyOverride) {\n this.openaiAdapter.openai.apiKey = unifyApiKeyOverride;\n }\n }\n\n process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n return this.openaiAdapter.process(request);\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;AAkCA,OAAOA,YAAY;;;AC5BnB,SAAkCC,wBAAwB;AAInD,SAASC,0BACdC,UACAC,OACAC,OACAC,WAAkB;AAElBA,4BAAcC,wBAAwBF,KAAAA;AAEtC,QAAMG,SAAgB,CAAA;AACtB,QAAMC,iBAAiBC,iBAAiBL,OAAOD,KAAAA;AAC/C,MAAIK,iBAAiBH,WAAW;AAC9B,UAAM,IAAIK,MAAM,4CAA4CF,oBAAoBH,WAAW;EAC7F;AACAA,eAAaG;AAEb,aAAWG,WAAWT,UAAU;AAC9B,QAAIS,QAAQC,SAAS,UAAU;AAC7B,YAAMC,YAAYC,mBAAmBV,OAAOO,OAAAA;AAC5CN,mBAAaQ;AAEb,UAAIR,YAAY,GAAG;AACjB,cAAM,IAAIK,MAAM,uCAAA;MAClB;IACF;EACF;AAEA,MAAIK,SAAkB;AAEtB,QAAMC,mBAAmB;OAAId;IAAUe,QAAO;AAC9C,aAAWN,WAAWK,kBAAkB;AACtC,QAAIL,QAAQC,SAAS,UAAU;AAC7BL,aAAOW,QAAQP,OAAAA;AACf;IACF,WAAWI,QAAQ;AACjB;IACF;AACA,QAAIF,YAAYC,mBAAmBV,OAAOO,OAAAA;AAC1C,QAAIN,YAAYQ,WAAW;AACzBE,eAAS;AACT;IACF;AACAR,WAAOW,QAAQP,OAAAA;AACfN,iBAAaQ;EACf;AAEA,SAAON;AACT;AA9CgBN;AAgDT,SAASK,wBAAwBF,OAAa;AACnD,SAAOe,iBAAiBf,KAAAA,KAAUgB;AACpC;AAFgBd;AAIhB,IAAMc,qBAAqB;AAE3B,IAAMD,mBAA8C;;EAElD,UAAU;EACV,qBAAqB;EACrB,eAAe;EACf,0BAA0B;EAC1B,sBAAsB;EACtB,uBAAuB;EACvB,sBAAsB;EACtB,wBAAwB;EACxB,6BAA6B;EAC7B,aAAa;EACb,kBAAkB;EAClB,kBAAkB;EAClB,SAAS;EACT,cAAc;EACd,cAAc;;EAGd,sBAAsB;EACtB,iBAAiB;EACjB,sBAAsB;EACtB,0BAA0B;EAC1B,qBAAqB;EACrB,sBAAsB;EACtB,0BAA0B;EAC1B,sBAAsB;AACxB;AAEA,SAASV,iBAAiBL,OAAeD,OAAY;AACnD,MAAIA,MAAMkB,WAAW,GAAG;AACtB,WAAO;EACT;AACA,QAAMC,OAAOC,KAAKC,UAAUrB,KAAAA;AAC5B,SAAOsB,YAAYrB,OAAOkB,IAAAA;AAC5B;AANSb;AAQT,SAASK,mBAAmBV,OAAeO,SAAY;AACrD,SAAOc,YAAYrB,OAAOO,QAAQe,WAAW,EAAA;AAC/C;AAFSZ;AAIT,SAASW,YAAYrB,OAAeuB,MAAY;AAC9C,MAAIC;AACJ,MAAI;AACFA,UAAMC,iBAAiBzB,KAAAA;EACzB,SAAS0B,GAAP;AACAF,UAAMC,iBAAiB,OAAA;EACzB;AACA,SAAOD,IAAIG,OAAOJ,IAAAA,EAAMN;AAC1B;AARSI;AAUF,SAASO,+BAA+BC,QAAmB;AAChE,SAAO;IACLC,MAAM;IACNC,UAAU;MACRC,MAAMH,OAAOG;MACbC,aAAaJ,OAAOI;MACpBC,YAAYf,KAAKgB,MAAMN,OAAOO,UAAU;IAC1C;EACF;AACF;AATgBR;AAWT,SAASS,8BAA8B9B,SAAgB;AAC5D,MAAIA,mBAAmB+B,aAAa;AAClC,WAAO;MACL9B,MAAMD,QAAQC;MACdc,SAASf,QAAQe;IACnB;EACF,WAAWf,mBAAmBgC,wBAAwB;AACpD,WAAO;MACL/B,MAAM;MACNgC,YAAY;QACV;UACEC,IAAIlC,QAAQkC;UACZX,MAAM;UACNC,UAAU;YACRC,MAAMzB,QAAQyB;YACdU,WAAWvB,KAAKC,UAAUb,QAAQmC,SAAS;UAC7C;QACF;;IAEJ;EACF,WAAWnC,mBAAmBoC,eAAe;AAC3C,WAAO;MACLnC,MAAM;MACNc,SAASf,QAAQJ;MACjByC,cAAcrC,QAAQsC;IACxB;EACF;AACF;AA3BgBR;AA6BT,SAASS,mCAAmCvC,SAAmC;AACpF,SAAO;IACL,GAAGA;IACH,GAAIA,QAAQC,SAAS,YAAY;MAC/BA,MAAM;MACNc,SAAS,gDAAgDf,QAAQe;IACnE;EACF;AACF;AARgBwB;;;AD9GhB,SAASC,gBAAgB;AAEzB,IAAMC,gBAAgB;AAcf,IAAMC,gBAAN,MAAMA;EACHC,QAAgBF;EAEhBG;EACR,IAAWC,SAAiB;AAC1B,WAAO,KAAKD;EACd;EAEAE,YAAYC,QAA8B;AACxC,SAAKH,WAAUG,iCAAQF,WAAU,IAAIG,OAAO,CAAC,CAAA;AAC7C,QAAID,iCAAQJ,OAAO;AACjB,WAAKA,QAAQI,OAAOJ;IACtB;EACF;EAEA,MAAMM,QACJC,SAC+C;AAC/C,UAAM,EAAEC,UAAUR,QAAQ,KAAKA,OAAOS,UAAUC,SAASC,YAAW,IAAKJ;AACzE,UAAMK,QAAQF,QAAQG,IAAIC,8BAAAA;AAE1B,QAAIC,iBAAiBN,SAASI,IAAIG,6BAAAA;AAClCD,qBAAiBE,0BAA0BF,gBAAgBH,OAAOZ,KAAAA;AAElE,UAAMkB,SAAS,KAAKhB,OAAOiB,KAAKC,KAAKC,YAAYH,OAAO;MACtDlB;MACAkB,QAAQ;MACRT,UAAUM;MACV,GAAIH,MAAMU,SAAS,KAAK;QAAEV;MAAM;IAClC,CAAA;AAEAD,gBAAYO,OAAO,OAAOK,iBAAAA;
AA5F9B;AA6FM,UAAIC,OAAsC;AAC1C,uBAAiBC,SAASP,QAAQ;AAChC,cAAMQ,YAAWD,WAAME,QAAQ,CAAA,EAAGC,MAAMC,eAAvBJ,mBAAoC;AACrD,cAAMK,UAAUL,MAAME,QAAQ,CAAA,EAAGC,MAAME;AAKvC,YAAIN,SAAS,cAAaE,qCAAUK,KAAI;AACtCP,iBAAO;AACPD,uBAAaS,mBAAkB;QACjC,WAAWR,SAAS,eAAeE,aAAaO,WAAaP,qCAAUK,MAAK;AAC1EP,iBAAO;AACPD,uBAAaW,uBAAsB;QACrC;AAGA,YAAIV,SAAS,MAAM;AACjB,cAAIE,qCAAUK,IAAI;AAChBP,mBAAO;AACPD,yBAAaY,yBAAyBT,SAAUK,IAAIL,SAAUU,SAAUC,IAAI;UAC9E,WAAWP,SAAS;AAClBN,mBAAO;AACPD,yBAAae,qBAAqBb,MAAMM,EAAE;UAC5C;QACF;AAGA,YAAIP,SAAS,aAAaM,SAAS;AACjCP,uBAAagB,uBAAuBT,OAAAA;QACtC,WAAWN,SAAS,gBAAcE,0CAAUU,aAAVV,mBAAoBc,YAAW;AAC/DjB,uBAAakB,wBAAwBf,SAASU,SAASI,SAAS;QAClE;MACF;AAGA,UAAIhB,SAAS,WAAW;AACtBD,qBAAaS,mBAAkB;MACjC,WAAWR,SAAS,YAAY;AAC9BD,qBAAaW,uBAAsB;MACrC;AAEAX,mBAAamB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACLlC,UAAUA,YAAYmC,SAAAA;IACxB;EACF;AACF;AAjFa5C;;;AE5Cb,OAAO6C,aAAY;AAwCZ,IAAMC,yBAAN,MAAMA;EACHC;EACAC;EACAC;EACAC;EAERC,YAAYC,QAAsC;AAChD,SAAKL,SAASK,OAAOL,UAAU,IAAIM,QAAO,CAAC,CAAA;AAC3C,SAAKL,yBAAyBI,OAAOJ,2BAA2B,SAAS;AACzE,SAAKE,oBAAoBE,OAAOF,sBAAsB,SAAS;AAC/D,SAAKD,cAAcG,OAAOH;EAC5B;EAEA,MAAMK,QACJC,SAC+C;AAC/C,UAAM,EAAEC,UAAUC,SAASC,aAAaC,MAAK,IAAKJ;AAElD,QAAIK,WAAWL,QAAQK,aAAa,MAAM,KAAKb,OAAOc,KAAKC,QAAQC,OAAM,GAAIC;AAE7E,UAAMC,cAAcT,SAASU,GAAG,EAAC;AAEjC,QAAIC,YAAgCC;AAGpC,QAAIH,uBAAuBI,iBAAiBV,OAAO;AACjDQ,kBAAY,MAAM,KAAKG,kBAAkBV,UAAUD,OAAOH,UAAUE,WAAAA;IACtE,WAESO,uBAAuBM,aAAa;AAC3CJ,kBAAY,MAAM,KAAKK,kBAAkBZ,UAAUJ,UAAUC,SAASC,WAAAA;IACxE,OAEK;AACH,YAAM,IAAIe,MAAM,6CAAA;IAClB;AAEA,WAAO;MACLb;MACAD,OAAOQ;IACT;EACF;EAEA,MAAcG,kBACZV,UACAD,OACAH,UACAE,aACA;AACA,QAAIgB,MAAM,MAAM,KAAK3B,OAAOc,KAAKC,QAAQa,KAAKC,SAAShB,UAAUD,KAAAA;AACjE,QAAI,CAACe,IAAIG,iBAAiB;AACxB,YAAM,IAAIJ,MAAM,0BAAA;IAClB;AAGA,UAAMK,eAAeJ,IAAIG,gBAAgBE,oBAAoBC,WAAWC,IACtE,CAACC,aAAaA,SAASlB,EAAE;AAI3B,UAAMmB,iBAAiB3B,SAAS4B,OAC9B,CAACC,YACCA,mBAAmBhB,iBAAiBS,aAAaQ,SAASD,QAAQE,iBAAiB,CAAA;AAGvF,QAAIT,aAAaU,UAAUL,eAAeK,QAAQ;AAChD,YAAM,IAAIf,MAAM,oEAAA;IAClB;AAGA,UAAMgB,cAA6DN,eAAeF,IAChF,CAACI,YAAAA;AACC,aAAO;QACLK,cAAcL,QAAQE;QACtBI,QAAQN,QAAQO;MAClB;IACF,CAAA;AAGF,UAAMC,SAAS,KAAK9C,OAAOc,KAAKC,QAAQa,KAAKmB,wBAAwBlC,UAAUD,OAAO;MACpFoC,cAAcN;IAChB,CAAA;AAEA,UAAM,KAAKO,eAAeH,QAAQnC,WAAAA;AAClC,WAAOC;EACT;EAEA,MAAca,kBACZZ,UACAJ,UACAC,SACAC,aACA;AACAF,eAAW;SAAIA;;AAGf,UAAMyC,sBAAsBzC,SAAS0C,MAAK;AAC1C,UAAMC,eACJF,+BAA+B1B,cAAc0B,oBAAoBG,UAAU;AAG7E,UAAMC,cAAc7C,SACjByB,IAAIqB,6BAAAA,EACJrB,IAAIsB,kCAAAA,EACJrC,GAAG,EAAC;AAEP,QAAImC,YAAYG,SAAS,QAAQ;AAC/B,YAAM,IAAI/B,MAAM,uBAAA;IAClB;AAGA,UAAM,KAAK1B,OAAOc,KAAKC,QAAQN,SAASO,OAAOH,UAAU;MACvD4C,MAAM;MACNJ,SAASC,YAAYD;IACvB,CAAA;AAEA,UAAMK,cAAchD,QAAQwB,IAAIyB,8BAAAA;AAEhC,UAAMC,QAAQ;SACTF;SACC,KAAKzD,yBAAyB;QAAC;UAAE4D,MAAM;QAAmB;UAAsB,CAAA;SAChF,KAAK1D,oBAAoB;QAAC;UAAE0D,MAAM;QAAc;UAAsB,CAAA;;AAI5E,QAAIf,SAAS,KAAK9C,OAAOc,KAAKC,QAAQa,KAAKkB,OAAOjC,UAAU;MAC1DiD,cAAc,KAAK5D;MACnBkD;MACAQ;IACF,CAAA;AAEA,UAAM,KAAKX,eAAeH,QAAQnC,WAAAA;AAElC,WAAOoD,mBAAmBjB,MAAAA;EAC5B;EAEA,MAAcG,eAAeH,QAAyBnC,aAAiC;AACrFA,gBAAYmC,OAAO,OAAOkB,iBAAAA;AAlM9B;AAmMM,UAAIC,iBAAiB;AAErB,uBAAiBC,SAASpB,QAAQ;AAChC,gBAAQoB,MAAMC,OAAK;UACjB,KAAK;AACH,gBAAIF,gBAAgB;AAClBD,2BAAaI,uBAAsB;YACrC;AACAJ,yBAAaK,qBAAqBH,MAAMI,KAAKrD,EAAE;AAC/C;UACF,KAAK;AACH,kBAAIiD,WAAMI,KAAKC,MAAMlB,YAAjBa,mBAA2B,GAAGL,UAAS,QAAQ;AACjDG,2BAAaQ,wBAAuBN,WAAMI,KAAKC,MAAMlB,YAAjBa,mBAA2B,GAAGO,KAAKC,KAAAA;YACzE;AACA;UACF,KAAK;AACHV,yBAAaW,mBAAkB;AAC/B;UACF,KAAK;AACH,gBAAIC;AACJ,gBAAIC;AACJ,gBAAIC;AACJ,gBACEZ,MAAMI,KAAKC,MAAMQ,aAAalB,SAAS,kBACvCK,WAAMI,KAAKC,MAAMQ,aAAa9C,eAA9BiC,mBAA2C,GAAGL,UAAS,YACvD;AACAe,4BAAaV,WAAMI,KAAKC,MAAMQ,aAAa9C,eAA9BiC,mBAA2C,GAAGjD;AAC3D4
D,8BAAeX,WAAMI,KAAKC,MAAMQ,aAAa9C,eAA9BiC,mBAA2C,GAAGc,SAASC;AACtEH,8BAAeZ,WAAMI,KAAKC,MAAMQ,aAAa9C,eAA9BiC,mBAA2C,GAAGc,SAASE;YACxE;AAEA,gBAAIL,gBAAgBD,YAAY;AAC9B,kBAAIX,gBAAgB;AAClBD,6BAAaI,uBAAsB;cACrC;AACAH,+BAAiB;AACjBD,2BAAamB,yBAAyBP,YAAYC,YAAAA;YACpD,WAAWC,cAAc;AACvBd,2BAAaoB,wBAAwBN,YAAAA;YACvC;AACA;QACJ;MACF;AACA,UAAIb,gBAAgB;AAClBD,qBAAaI,uBAAsB;MACrC;AACAJ,mBAAaqB,SAAQ;IACvB,CAAA;EACF;AACF;AA3LatF;AA6Lb,SAASgE,mBAAmBjB,QAAuB;AACjD,SAAO,IAAIwC,QAAgB,CAACC,SAASC,WAAAA;AACnC,QAAIC,cAAc,wBAACtB,UAAAA;AACjB,UAAIA,MAAMA,UAAU,sBAAsB;AACxC,cAAMvD,QAAQuD,MAAMG,KAAKrD;AACzB6B,eAAO4C,IAAI,SAASD,WAAAA;AACpBF,gBAAQ3E,KAAAA;MACV;IACF,GANkB;AAOlBkC,WAAO6C,GAAG,SAASF,WAAAA;EACrB,CAAA;AACF;AAXS1B;;;AClNT,SAA0B6B,0BAA0B;;;AC3B7C,SAASC,mCAAmCC,SAAgB;AACjE,MAAIA,mBAAmBC,aAAa;AAClC,UAAMC,OAAO;MACXC,MAAM;MACNC,WAAW;MACXC,QAAQ;IACV,EAAEL,QAAQE,IAAI;AAEd,UAAMI,OACJN,QAAQE,SAAS,WACb,gDAAgDF,QAAQO,UACxDP,QAAQO;AAEd,WAAO;MACLL;MACAM,OAAO;QAAC;UAAEF;QAAK;;IACjB;EACF,WAAWN,mBAAmBS,wBAAwB;AACpD,WAAO;MACLP,MAAM;MACNM,OAAO;QACL;UACEE,cAAc;YACZC,MAAMX,QAAQW;YACdC,MAAMZ,QAAQa;UAChB;QACF;;IAEJ;EACF,WAAWb,mBAAmBc,eAAe;AAC3C,WAAO;MACLZ,MAAM;MACNM,OAAO;QACL;UACEO,kBAAkB;YAChBJ,MAAMX,QAAQgB;YACdC,UAAU;cACRN,MAAMX,QAAQgB;cACdT,SAASW,aAAalB,QAAQmB,MAAM;YACtC;UACF;QACF;;IAEJ;EACF;AACF;AA7CgBpB;AA+CT,SAASqB,iCAAiCC,QAAmB;AAClE,QAAMV,OAAOU,OAAOV;AACpB,QAAMW,cAAcD,OAAOC;AAC3B,QAAMC,aAAaC,KAAKC,MAAMJ,OAAOK,UAAU;AAE/C,QAAMC,sBAAsB,wBAACC,UAAAA;AAC3B,eAAWC,OAAOD,OAAO;AACvB,UAAIA,MAAMC,GAAAA,EAAKC,MAAM;AACnBF,cAAMC,GAAAA,EAAKC,OAAOF,MAAMC,GAAAA,EAAKC,KAAKC,YAAW;MAC/C;AACA,UAAIH,MAAMC,GAAAA,EAAKG,YAAY;AACzBL,4BAAoBC,MAAMC,GAAAA,EAAKG,UAAU;MAC3C;IACF;EACF,GAT4B;AAU5BL,sBAAoBJ,UAAAA;AAEpB,SAAO;IACLU,sBAAsB;MACpB;QACEtB;QACAW;QACAC;MACF;;EAEJ;AACF;AA1BgBH;AA4BhB,SAASF,aAAagB,KAAY;AAChC,MAAI,CAACA,KAAK;AACR,WAAO;EACT;AACA,MAAI;AACF,WAAOV,KAAKC,MAAMS,GAAAA;EACpB,SAASC,GAAP;AACA,WAAOD;EACT;AACF;AATShB;;;AD7CT,SAASkB,YAAAA,iBAAgB;AASlB,IAAMC,4BAAN,MAAMA;EACHC;EAERC,YAAYC,SAA4C;AACtD,QAAIA,mCAASF,OAAO;AAClB,WAAKA,QAAQE,QAAQF;IACvB,OAAO;AACL,YAAMG,QAAQ,IAAIC,mBAAmBC,QAAQC,IAAI,gBAAA,CAAiB;AAClE,WAAKN,QAAQG,MAAMI,mBAAmB;QAAEP,OAAO;MAAa,CAAA;IAC9D;EACF;EAEA,MAAMK,QACJG,SAC+C;AAC/C,UAAM,EAAEC,UAAUC,SAASC,YAAW,IAAKH;AAG3C,UAAMI,UAAUH,SAASI,MAAM,GAAG,EAAC,EAAGC,IAAIC,kCAAAA;AAG1C,UAAMC,iBAAiBD,mCAAmCN,SAASQ,GAAG,EAAC,CAAA;AACvE,QAAI,CAACD,gBAAgB;AACnB,YAAM,IAAIE,MAAM,oBAAA;IAClB;AAEA,QAAIC;AACJ,UAAMC,eAAeX,SAASQ,GAAG,CAAA;AACjC,QAAIG,wBAAwBC,eAAeD,aAAaE,SAAS,UAAU;AACzEH,sBAAgBC,aAAaG,QAAQC,KAAI;IAC3C,OAAO;AACL,YAAM,IAAIN,MAAM,uCAAA;IAClB;AAEA,UAAMO,QAAQf,QAAQI,IAAIY,gCAAAA;AAE1B,UAAMC,sBACJ,KAAK3B,MAAMA,UAAU,gBAAgB,KAAKA,MAAMA,UAAU;AAE5D,UAAM4B,OAAO,KAAK5B,MAAM6B,UAAU;MAChCjB,SAAS;WACJA;;WAECe,sBAAsB;UAAC;YAAEL,MAAM;YAAQQ,OAAO;cAAC;gBAAEC,MAAMZ;cAAc;;UAAG;YAAK,CAAA;;;MAGnF,GAAIQ,sBACA,CAAC,IACD;QAAEK,mBAAmB;UAAEV,MAAM;UAAQQ,OAAO;YAAC;cAAEC,MAAMZ;YAAc;;QAAG;MAAE;MAC5EM;IACF,CAAA;AAEA,UAAMQ,SAAS,MAAML,KAAKM,kBAAkBlB,eAAec,KAAK;AAEhEnB,gBAAYwB,OAAO,OAAOC,iBAAAA;AACxB,UAAIC,gBAAgB;AACpB,uBAAiBC,SAASL,OAAOE,QAAQ;AACvC,cAAMI,YAAYD,MAAMP,KAAI;AAC5B,YAAIQ,cAAc,IAAI;AACpB;QACF;AACA,YAAI,CAACF,eAAe;AAClBA,0BAAgB;AAChBD,uBAAaI,qBAAqBC,UAAAA,CAAAA;QACpC;AACAL,qBAAaM,uBAAuBH,SAAAA;MACtC;AACA,UAAIF,eAAe;AACjBD,qBAAaO,mBAAkB;MACjC;AAEA,UAAIC,SAAS,MAAMX,OAAOY,UAAUC,cAAa;AACjD,UAAIF,OAAO;AACT,iBAASG,QAAQH,OAAO;AACtBR,uBAAaY,oBACXP,UAAAA,GACAM,KAAKE,MACLC,KAAKC,UAAUC,wBAAwBL,KAAKM,IAAI,CAAA,CAAA;QAEpD;MACF;AACAjB,mBAAakB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACLC,UAAU/C,QAAQ+C,YAAYd,UAAAA;IAChC;EACF;AACF;AAxFa1C;AA0Fb,SAASqD,wBAAwBI,KAAQ;AACvC,MAAI,OAAOA,QAAQ,UAAU;AAC3B
,WAAOA,IAAIC,QAAQ,UAAU,IAAA;EAC/B,WAAWC,MAAMC,QAAQH,GAAAA,GAAM;AAC7B,WAAOA,IAAI1C,IAAIsC,uBAAAA;EACjB,WAAW,OAAOI,QAAQ,YAAYA,QAAQ,MAAM;AAClD,UAAMI,SAAc,CAAC;AACrB,eAAWC,OAAOL,KAAK;AACrB,UAAIA,IAAIM,eAAeD,GAAAA,GAAM;AAC3BD,eAAOC,GAAAA,IAAOT,wBAAwBI,IAAIK,GAAAA,CAAI;MAChD;IACF;AACA,WAAOD;EACT;AACA,SAAOJ;AACT;AAfSJ;;;AErGT,SAASW,YAAAA,iBAAgB;AAclB,IAAMC,mBAAN,MAAMA;;;;;EAIXC,YAAoBC,SAAkC;SAAlCA,UAAAA;EAAmC;EAEvD,MAAMC,QACJC,SAC+C;AAC/C,UAAM,EAAEC,aAAaC,OAAOC,SAASC,UAAUC,UAAUC,MAAK,IAAKN;AACnE,UAAMO,SAAS,MAAM,KAAKT,QAAQU,QAAQ;MACxCJ,UAAUA,SAASK,IAAIC,gCAAAA;MACvBC,OAAOR,QAAQM,IAAIG,iCAAAA;MACnBV;MACAG;MACAC;IACF,CAAA;AAEAL,gBAAYY,OAAO,OAAOC,iBAAAA;AACxB,YAAMC,wBAAwB;QAC5BR;QACAO;MACF,CAAA;IACF,CAAA;AAEA,WAAO;MACLT,UAAUA,YAAYW,UAAAA;IACxB;EACF;AACF;AA7BapB;;;AClDb,SAASqB,sBAAsB;AAUxB,IAAMC,cAAN,MAAMA;EACXC;EACAC;EACAC;EACAC;EACAC;EAEAC,YAAYC,SAAgC;AAC1C,SAAKN,OAAOM,QAAQN;AACpB,SAAKC,cAAcK,QAAQL;AAC3B,SAAKC,WAAWI,QAAQJ;AACxB,SAAKC,aAAaG,QAAQH;AAC1B,SAAKC,gBAAgBE,QAAQF,iBAAiB;EAChD;EAEA,MAAMG,WAAiC;AACrC,QAAI,CAAC,KAAKJ,YAAY;AACpB,YAAM,KAAKK,yBAAwB;IACrC;AAEA,WAAO;MACLR,MAAM,KAAKA;MACXC,aAAa,KAAKA;MAClBE,YAAY,KAAKA;MACjBM,SAAS,OAAOC,SAAAA;AACd,cAAMC,WAAW,IAAIC,eAAe;UAAEC,KAAK,KAAKX;QAAS,CAAA;AACzD,YAAIY;AACJ,YAAI,KAAKV,kBAAkB,UAAU;AACnCU,kBAAQJ,KAAKK,OAAOC,KAAKN,IAAAA,EAAM,CAAA,CAAE;QACnC,OAAO;AACLI,kBAAQJ;QACV;AACA,eAAO,MAAMC,SAASM,OAAOH,KAAAA;MAC/B;IACF;EACF;EAEA,MAAMN,2BAA2B;AAC/B,UAAMU,iBAAiB;MAAC;MAAU;MAAU;;AAE5C,QAAIC,YAAY,KAAKjB,SAASkB,QAAQ,QAAQ,EAAA,IAAM;AACpD,QAAIC,SAAS,MAAMC,MAAMH,SAAAA,EACtBI,KAAK,CAACC,QAAQA,IAAIC,KAAI,CAAA,EACtBC,MAAM,MAAA;AACL,YAAM,IAAIC,MAAM,yCAAyCR,SAAAA;IAC3D,CAAA;AAGF,QAAID,eAAeU,SAASP,OAAOQ,IAAI,GAAG;AACxC,WAAKzB,gBAAgB;AACrB,WAAKD,aAAa;QAChB;UACEH,MAAM;UACN6B,MAAMR,OAAOQ;UACb5B,aAAa;QACf;;IAEJ,WAAWoB,OAAOQ,SAAS,UAAU;AACnC,WAAKzB,gBAAgB;AACrB,WAAKD,aAAaY,OAAOC,KAAKK,OAAOS,UAAU,EAAEC,IAAI,CAACC,QAAAA;AArE5D;AAsEQ,YAAIC,WAAWZ,OAAOS,WAAWE,GAAAA;AACjC,YAAI,CAACd,eAAeU,SAASK,SAASJ,IAAI,GAAG;AAC3C,gBAAM,IAAIF,MAAM,yBAAA;QAClB;AACA,eAAO;UACL3B,MAAMgC;UACNH,MAAMI,SAASJ;UACf5B,aAAagC,SAAShC,eAAe;UACrCiC,YAAUb,YAAOa,aAAPb,mBAAiBO,SAASI,SAAQ;QAC9C;MACF,CAAA;IACF,OAAO;AACL,YAAM,IAAIL,MAAM,yBAAA;IAClB;EACF;AACF;AA3Ea5B;;;AC6Bb,IAAMoC,iBAAiB;AACvB,IAAMC,gBAAgB;AAMf,IAAMC,eAAN,MAAMA;EACHC;EAERC,YAAYC,QAA6B;AACvC,SAAKF,gBAAgB,IAAIG,cAAcD,MAAAA;AACvC,SAAKF,cAAcI,OAAOC,UAAUR;AAEpC,UAAMS,sBAA0CC,QAAQC,IAAIV,aAAAA,MAAkBI,iCAAQO;AACtF,QAAIH,qBAAqB;AACvB,WAAKN,cAAcI,OAAOK,SAASH;IACrC;EACF;EAEAC,QACEG,SAC+C;AAC/C,WAAO,KAAKV,cAAcO,QAAQG,OAAAA;EACpC;AACF;AAlBaX;","names":["OpenAI","encodingForModel","limitMessagesToTokenCount","messages","tools","model","maxTokens","maxTokensForOpenAIModel","result","toolsNumTokens","countToolsTokens","Error","message","role","numTokens","countMessageTokens","cutoff","reversedMessages","reverse","unshift","maxTokensByModel","DEFAULT_MAX_TOKENS","length","json","JSON","stringify","countTokens","content","text","enc","encodingForModel","e","encode","convertActionInputToOpenAITool","action","type","function","name","description","parameters","parse","jsonSchema","convertMessageToOpenAIMessage","TextMessage","ActionExecutionMessage","tool_calls","id","arguments","ResultMessage","tool_call_id","actionExecutionId","convertSystemMessageToAssistantAPI","randomId","DEFAULT_MODEL","OpenAIAdapter","model","_openai","openai","constructor","params","OpenAI","process","request","threadId","messages","actions","eventSource","tools","map","convertActionInputToOpenAITool","openaiMessages","convertMessageToOpenAIMessage","limitMessagesToTokenCount","stream","beta","chat",
"completions","length","eventStream$","mode","chunk","toolCall","choices","delta","tool_calls","content","id","sendTextMessageEnd","undefined","sendActionExecutionEnd","sendActionExecutionStart","function","name","sendTextMessageStart","sendTextMessageContent","arguments","sendActionExecutionArgs","complete","randomId","OpenAI","OpenAIAssistantAdapter","openai","codeInterpreterEnabled","assistantId","fileSearchEnabled","constructor","params","OpenAI","process","request","messages","actions","eventSource","runId","threadId","beta","threads","create","id","lastMessage","at","nextRunId","undefined","ResultMessage","submitToolOutputs","TextMessage","submitUserMessage","Error","run","runs","retrieve","required_action","toolCallsIds","submit_tool_outputs","tool_calls","map","toolCall","resultMessages","filter","message","includes","actionExecutionId","length","toolOutputs","tool_call_id","output","result","stream","submitToolOutputsStream","tool_outputs","streamResponse","instructionsMessage","shift","instructions","content","userMessage","convertMessageToOpenAIMessage","convertSystemMessageToAssistantAPI","role","openaiTools","convertActionInputToOpenAITool","tools","type","assistant_id","getRunIdFromStream","eventStream$","inFunctionCall","chunk","event","sendActionExecutionEnd","sendTextMessageStart","data","delta","sendTextMessageContent","text","value","sendTextMessageEnd","toolCallId","toolCallName","toolCallArgs","step_details","function","name","arguments","sendActionExecutionStart","sendActionExecutionArgs","complete","Promise","resolve","reject","runIdGetter","off","on","GoogleGenerativeAI","convertMessageToGoogleGenAIMessage","message","TextMessage","role","user","assistant","system","text","content","parts","ActionExecutionMessage","functionCall","name","args","arguments","ResultMessage","functionResponse","actionName","response","tryParseJson","result","transformActionToGoogleGenAITool","action","description","parameters","JSON","parse","jsonSchema","transformProperties","props","key","type","toUpperCase","properties","functionDeclarations","str","e","randomId","GoogleGenerativeAIAdapter","model","constructor","options","genAI","GoogleGenerativeAI","process","env","getGenerativeModel","request","messages","actions","eventSource","history","slice","map","convertMessageToGoogleGenAIMessage","currentMessage","at","Error","systemMessage","firstMessage","TextMessage","role","content","trim","tools","transformActionToGoogleGenAITool","isFirstGenGeminiPro","chat","startChat","parts","text","systemInstruction","result","sendMessageStream","stream","eventStream$","isTextMessage","chunk","chunkText","sendTextMessageStart","randomId","sendTextMessageContent","sendTextMessageEnd","calls","response","functionCalls","call","sendActionExecution","name","JSON","stringify","replaceNewlinesInObject","args","complete","threadId","obj","replace","Array","isArray","newObj","key","hasOwnProperty","randomId","LangChainAdapter","constructor","options","process","request","eventSource","model","actions","messages","threadId","runId","result","chainFn","map","convertMessageToLangChainMessage","tools","convertActionInputToLangChainTool","stream","eventStream$","streamLangChainResponse","randomId","RemoteRunnable","RemoteChain","name","description","chainUrl","parameters","parameterType","constructor","options","toAction","inferLangServeParameters","handler","args","runnable","RemoteRunnable","url","input","Object","keys","invoke","supportedTypes","schemaUrl","replace","schema","fetch","then","res","json","catc
h","Error","includes","type","properties","map","key","property","required","UNIFY_BASE_URL","UNIFY_API_KEY","UnifyAdapter","openaiAdapter","constructor","params","OpenAIAdapter","openai","baseURL","unifyApiKeyOverride","process","env","apiKey","request"]}
@@ -1,7 +1,7 @@
  import {
  copilotRuntimeNodeHttpEndpoint,
  telemetry_client_default
- } from "./chunk-VIQZS4W6.mjs";
+ } from "./chunk-VQM4RIMQ.mjs";
  import {
  __name
  } from "./chunk-44O2JGUY.mjs";
@@ -21,4 +21,4 @@ __name(copilotRuntimeNodeExpressEndpoint, "copilotRuntimeNodeExpressEndpoint");
  export {
  copilotRuntimeNodeExpressEndpoint
  };
- //# sourceMappingURL=chunk-TTT54UED.mjs.map
+ //# sourceMappingURL=chunk-F7DCXPQ7.mjs.map
@@ -1,7 +1,7 @@
  import {
  copilotRuntimeNodeHttpEndpoint,
  telemetry_client_default
- } from "./chunk-VIQZS4W6.mjs";
+ } from "./chunk-VQM4RIMQ.mjs";
  import {
  __name
  } from "./chunk-44O2JGUY.mjs";
@@ -21,4 +21,4 @@ __name(copilotRuntimeNestEndpoint, "copilotRuntimeNestEndpoint");
  export {
  copilotRuntimeNestEndpoint
  };
- //# sourceMappingURL=chunk-GTYL57UF.mjs.map
+ //# sourceMappingURL=chunk-FNDYTEEL.mjs.map
@@ -1,7 +1,7 @@
  import {
  getCommonConfig,
  telemetry_client_default
- } from "./chunk-VIQZS4W6.mjs";
+ } from "./chunk-VQM4RIMQ.mjs";
  import {
  __name
  } from "./chunk-44O2JGUY.mjs";
@@ -64,4 +64,4 @@ export {
  config,
  copilotRuntimeNextJSPagesRouterEndpoint
  };
- //# sourceMappingURL=chunk-3SJCLGQE.mjs.map
+ //# sourceMappingURL=chunk-JWUHY4RB.mjs.map
@@ -1,9 +1,9 @@
  import {
  RemoteChain
- } from "./chunk-FPVMUZ5S.mjs";
+ } from "./chunk-67DNPQCA.mjs";
  import {
  RuntimeEventSource
- } from "./chunk-VIQZS4W6.mjs";
+ } from "./chunk-VQM4RIMQ.mjs";
  import {
  ActionExecutionMessage,
  ResultMessage,
@@ -151,4 +151,4 @@ export {
  CopilotRuntime,
  flattenToolCallsNoDuplicates
  };
- //# sourceMappingURL=chunk-L2HCON4L.mjs.map
+ //# sourceMappingURL=chunk-UFAHXJX7.mjs.map
@@ -11,7 +11,7 @@ import {
  import { AIMessage, HumanMessage, SystemMessage, ToolMessage } from "@langchain/core/messages";
  import { DynamicStructuredTool } from "@langchain/core/tools";
  import { z } from "zod";
- import { nanoid } from "nanoid";
+ import { randomId } from "@copilotkit/shared";
  function convertMessageToLangChainMessage(message) {
  if (message instanceof TextMessage) {
  if (message.role == "user") {
@@ -104,26 +104,26 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  var _a, _b, _c, _d, _e, _f, _g, _h;
  if (typeof result === "string") {
  if (!actionExecution) {
- eventStream$.sendTextMessage(nanoid(), result);
+ eventStream$.sendTextMessage(randomId(), result);
  } else {
  eventStream$.sendActionExecutionResult(actionExecution.id, actionExecution.name, result);
  }
  } else if (isAIMessage(result)) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
  if (result.content) {
- eventStream$.sendTextMessage(nanoid(), result.content);
+ eventStream$.sendTextMessage(randomId(), result.content);
  }
  for (const toolCall of result.tool_calls) {
- eventStream$.sendActionExecution(toolCall.id || nanoid(), toolCall.name, JSON.stringify(toolCall.args));
+ eventStream$.sendActionExecution(toolCall.id || randomId(), toolCall.name, JSON.stringify(toolCall.args));
  }
  } else if (isBaseMessageChunk(result)) {
  maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
  if ((_a = result.lc_kwargs) == null ? void 0 : _a.content) {
- eventStream$.sendTextMessage(nanoid(), result.content);
+ eventStream$.sendTextMessage(randomId(), result.content);
  }
  if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
  for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
- eventStream$.sendActionExecution(toolCall.id || nanoid(), toolCall.name, JSON.stringify(toolCall.args));
+ eventStream$.sendActionExecution(toolCall.id || randomId(), toolCall.name, JSON.stringify(toolCall.args));
  }
  }
  } else if (result && "getReader" in result) {
@@ -167,7 +167,7 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
  eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
  } else if (content) {
  mode = "message";
- eventStream$.sendTextMessageStart(nanoid());
+ eventStream$.sendTextMessageStart(randomId());
  }
  }
  if (mode === "message" && content) {
@@ -204,4 +204,4 @@ export {
  convertActionInputToLangChainTool,
  streamLangChainResponse
  };
- //# sourceMappingURL=chunk-DVDKD6F5.mjs.map
+ //# sourceMappingURL=chunk-URMISMK2.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/service-adapters/langchain/utils.ts"],"sourcesContent":["import {\n ActionExecutionMessage,\n Message,\n ResultMessage,\n TextMessage,\n} from \"../../graphql/types/converted\";\nimport {\n AIMessage,\n AIMessageChunk,\n BaseMessage,\n BaseMessageChunk,\n HumanMessage,\n SystemMessage,\n ToolMessage,\n} from \"@langchain/core/messages\";\nimport { DynamicStructuredTool } from \"@langchain/core/tools\";\nimport { z } from \"zod\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\nimport { LangChainReturnType } from \"./types\";\nimport { RuntimeEventSubject } from \"../events\";\nimport { randomId } from \"@copilotkit/shared\";\n\nexport function convertMessageToLangChainMessage(message: Message): BaseMessage {\n if (message instanceof TextMessage) {\n if (message.role == \"user\") {\n return new HumanMessage(message.content);\n } else if (message.role == \"assistant\") {\n return new AIMessage(message.content);\n } else if (message.role === \"system\") {\n return new SystemMessage(message.content);\n }\n } else if (message instanceof ActionExecutionMessage) {\n return new AIMessage({\n content: \"\",\n tool_calls: [\n {\n id: message.id,\n args: message.arguments,\n name: message.name,\n },\n ],\n });\n } else if (message instanceof ResultMessage) {\n return new ToolMessage({\n content: message.result,\n tool_call_id: message.actionExecutionId,\n });\n }\n}\n\nexport function convertJsonSchemaToZodSchema(jsonSchema: any, required: boolean): z.ZodSchema {\n if (jsonSchema.type === \"object\") {\n const spec: { [key: string]: z.ZodSchema } = {};\n for (const [key, value] of Object.entries(jsonSchema.properties)) {\n spec[key] = convertJsonSchemaToZodSchema(\n value,\n jsonSchema.required ? jsonSchema.required.includes(key) : false,\n );\n }\n let schema = z.object(spec);\n return !required ? schema.optional() : schema;\n } else if (jsonSchema.type === \"string\") {\n let schema = z.string().describe(jsonSchema.description);\n return !required ? schema.optional() : schema;\n } else if (jsonSchema.type === \"number\") {\n let schema = z.number().describe(jsonSchema.description);\n return !required ? schema.optional() : schema;\n } else if (jsonSchema.type === \"boolean\") {\n let schema = z.boolean().describe(jsonSchema.description);\n return !required ? schema.optional() : schema;\n } else if (jsonSchema.type === \"array\") {\n let itemSchema = convertJsonSchemaToZodSchema(jsonSchema.items, false);\n let schema = z.array(itemSchema);\n return !required ? 
schema.optional() : schema;\n }\n}\n\nexport function convertActionInputToLangChainTool(actionInput: ActionInput): any {\n return new DynamicStructuredTool({\n name: actionInput.name,\n description: actionInput.description,\n schema: convertJsonSchemaToZodSchema(\n JSON.parse(actionInput.jsonSchema),\n true,\n ) as z.ZodObject<any>,\n func: async () => {\n return \"\";\n },\n });\n}\n\ninterface StreamLangChainResponseParams {\n result: LangChainReturnType;\n eventStream$: RuntimeEventSubject;\n actionExecution?: {\n id: string;\n name: string;\n };\n}\n\nfunction getConstructorName(object: any): string {\n if (object && typeof object === \"object\" && object.constructor && object.constructor.name) {\n return object.constructor.name;\n }\n return \"\";\n}\n\nfunction isAIMessage(message: any): message is AIMessage {\n return getConstructorName(message) === \"AIMessage\";\n}\n\nfunction isAIMessageChunk(message: any): message is AIMessageChunk {\n return getConstructorName(message) === \"AIMessageChunk\";\n}\n\nfunction isBaseMessageChunk(message: any): message is BaseMessageChunk {\n return getConstructorName(message) === \"BaseMessageChunk\";\n}\n\nfunction maybeSendActionExecutionResultIsMessage(\n eventStream$: RuntimeEventSubject,\n actionExecution?: { id: string; name: string },\n) {\n // language models need a result after the function call\n // we simply let them know that we are sending a message\n if (actionExecution) {\n eventStream$.sendActionExecutionResult(\n actionExecution.id,\n actionExecution.name,\n \"Sending a message\",\n );\n }\n}\n\nexport async function streamLangChainResponse({\n result,\n eventStream$,\n actionExecution,\n}: StreamLangChainResponseParams) {\n // We support several types of return values from LangChain functions:\n\n // 1. string\n\n if (typeof result === \"string\") {\n if (!actionExecution) {\n // Just send one chunk with the string as the content.\n eventStream$.sendTextMessage(randomId(), result);\n } else {\n // Send as a result\n eventStream$.sendActionExecutionResult(actionExecution.id, actionExecution.name, result);\n }\n }\n\n // 2. AIMessage\n // Send the content and function call of the AIMessage as the content of the chunk.\n else if (isAIMessage(result)) {\n maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);\n\n if (result.content) {\n eventStream$.sendTextMessage(randomId(), result.content as string);\n }\n for (const toolCall of result.tool_calls) {\n eventStream$.sendActionExecution(\n toolCall.id || randomId(),\n toolCall.name,\n JSON.stringify(toolCall.args),\n );\n }\n }\n\n // 3. BaseMessageChunk\n // Send the content and function call of the AIMessage as the content of the chunk.\n else if (isBaseMessageChunk(result)) {\n maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);\n\n if (result.lc_kwargs?.content) {\n eventStream$.sendTextMessage(randomId(), result.content as string);\n }\n if (result.lc_kwargs?.tool_calls) {\n for (const toolCall of result.lc_kwargs?.tool_calls) {\n eventStream$.sendActionExecution(\n toolCall.id || randomId(),\n toolCall.name,\n JSON.stringify(toolCall.args),\n );\n }\n }\n }\n\n // 4. 
IterableReadableStream\n // Stream the result of the LangChain function.\n else if (result && \"getReader\" in result) {\n maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);\n\n let reader = result.getReader();\n\n let mode: \"function\" | \"message\" | null = null;\n\n while (true) {\n try {\n const { done, value } = await reader.read();\n\n let toolCallName: string | undefined = undefined;\n let toolCallId: string | undefined = undefined;\n let toolCallArgs: string | undefined = undefined;\n let hasToolCall: boolean = false;\n let content = value?.content as string;\n\n if (isAIMessageChunk(value)) {\n let chunk = value.tool_call_chunks?.[0];\n toolCallName = chunk?.name;\n toolCallId = chunk?.id;\n toolCallArgs = chunk?.args;\n hasToolCall = chunk != undefined;\n } else if (isBaseMessageChunk(value)) {\n let chunk = value.additional_kwargs?.tool_calls?.[0];\n toolCallName = chunk?.function?.name;\n toolCallId = chunk?.id;\n toolCallArgs = chunk?.function?.arguments;\n hasToolCall = chunk?.function != undefined;\n }\n\n // When switching from message to function or vice versa,\n // send the respective end event.\n // If toolCallName is defined, it means a new tool call starts.\n if (mode === \"message\" && (toolCallId || done)) {\n mode = null;\n eventStream$.sendTextMessageEnd();\n } else if (mode === \"function\" && (!hasToolCall || done)) {\n mode = null;\n eventStream$.sendActionExecutionEnd();\n }\n\n if (done) {\n break;\n }\n\n // If we send a new message type, send the appropriate start event.\n if (mode === null) {\n if (hasToolCall) {\n mode = \"function\";\n eventStream$.sendActionExecutionStart(toolCallId, toolCallName);\n } else if (content) {\n mode = \"message\";\n eventStream$.sendTextMessageStart(randomId());\n }\n }\n\n // send the content events\n if (mode === \"message\" && content) {\n eventStream$.sendTextMessageContent(content);\n } else if (mode === \"function\" && toolCallArgs) {\n eventStream$.sendActionExecutionArgs(toolCallArgs);\n }\n } catch (error) {\n console.error(\"Error reading from stream\", error);\n break;\n }\n }\n } else if (actionExecution) {\n eventStream$.sendActionExecutionResult(\n actionExecution.id,\n actionExecution.name,\n encodeResult(result),\n );\n }\n\n // unsupported type\n else {\n throw new Error(\"Invalid return type from LangChain function.\");\n }\n\n eventStream$.complete();\n}\n\nfunction encodeResult(result: any): string {\n if (result === undefined) {\n return \"\";\n } else if (typeof result === \"string\") {\n return result;\n } else {\n return JSON.stringify(result);\n 
}\n}\n"],"mappings":";;;;;;;;;;AAMA,SACEA,WAIAC,cACAC,eACAC,mBACK;AACP,SAASC,6BAA6B;AACtC,SAASC,SAAS;AAIlB,SAASC,gBAAgB;AAElB,SAASC,iCAAiCC,SAAgB;AAC/D,MAAIA,mBAAmBC,aAAa;AAClC,QAAID,QAAQE,QAAQ,QAAQ;AAC1B,aAAO,IAAIC,aAAaH,QAAQI,OAAO;IACzC,WAAWJ,QAAQE,QAAQ,aAAa;AACtC,aAAO,IAAIG,UAAUL,QAAQI,OAAO;IACtC,WAAWJ,QAAQE,SAAS,UAAU;AACpC,aAAO,IAAII,cAAcN,QAAQI,OAAO;IAC1C;EACF,WAAWJ,mBAAmBO,wBAAwB;AACpD,WAAO,IAAIF,UAAU;MACnBD,SAAS;MACTI,YAAY;QACV;UACEC,IAAIT,QAAQS;UACZC,MAAMV,QAAQW;UACdC,MAAMZ,QAAQY;QAChB;;IAEJ,CAAA;EACF,WAAWZ,mBAAmBa,eAAe;AAC3C,WAAO,IAAIC,YAAY;MACrBV,SAASJ,QAAQe;MACjBC,cAAchB,QAAQiB;IACxB,CAAA;EACF;AACF;AA1BgBlB;AA4BT,SAASmB,6BAA6BC,YAAiBC,UAAiB;AAC7E,MAAID,WAAWE,SAAS,UAAU;AAChC,UAAMC,OAAuC,CAAC;AAC9C,eAAW,CAACC,KAAKC,KAAAA,KAAUC,OAAOC,QAAQP,WAAWQ,UAAU,GAAG;AAChEL,WAAKC,GAAAA,IAAOL,6BACVM,OACAL,WAAWC,WAAWD,WAAWC,SAASQ,SAASL,GAAAA,IAAO,KAAA;IAE9D;AACA,QAAIM,SAASC,EAAEC,OAAOT,IAAAA;AACtB,WAAO,CAACF,WAAWS,OAAOG,SAAQ,IAAKH;EACzC,WAAWV,WAAWE,SAAS,UAAU;AACvC,QAAIQ,SAASC,EAAEG,OAAM,EAAGC,SAASf,WAAWgB,WAAW;AACvD,WAAO,CAACf,WAAWS,OAAOG,SAAQ,IAAKH;EACzC,WAAWV,WAAWE,SAAS,UAAU;AACvC,QAAIQ,SAASC,EAAEM,OAAM,EAAGF,SAASf,WAAWgB,WAAW;AACvD,WAAO,CAACf,WAAWS,OAAOG,SAAQ,IAAKH;EACzC,WAAWV,WAAWE,SAAS,WAAW;AACxC,QAAIQ,SAASC,EAAEO,QAAO,EAAGH,SAASf,WAAWgB,WAAW;AACxD,WAAO,CAACf,WAAWS,OAAOG,SAAQ,IAAKH;EACzC,WAAWV,WAAWE,SAAS,SAAS;AACtC,QAAIiB,aAAapB,6BAA6BC,WAAWoB,OAAO,KAAA;AAChE,QAAIV,SAASC,EAAEU,MAAMF,UAAAA;AACrB,WAAO,CAAClB,WAAWS,OAAOG,SAAQ,IAAKH;EACzC;AACF;AAzBgBX;AA2BT,SAASuB,kCAAkCC,aAAwB;AACxE,SAAO,IAAIC,sBAAsB;IAC/B/B,MAAM8B,YAAY9B;IAClBuB,aAAaO,YAAYP;IACzBN,QAAQX,6BACN0B,KAAKC,MAAMH,YAAYvB,UAAU,GACjC,IAAA;IAEF2B,MAAM,YAAA;AACJ,aAAO;IACT;EACF,CAAA;AACF;AAZgBL;AAuBhB,SAASM,mBAAmBhB,QAAW;AACrC,MAAIA,UAAU,OAAOA,WAAW,YAAYA,OAAOiB,eAAejB,OAAOiB,YAAYpC,MAAM;AACzF,WAAOmB,OAAOiB,YAAYpC;EAC5B;AACA,SAAO;AACT;AALSmC;AAOT,SAASE,YAAYjD,SAAY;AAC/B,SAAO+C,mBAAmB/C,OAAAA,MAAa;AACzC;AAFSiD;AAIT,SAASC,iBAAiBlD,SAAY;AACpC,SAAO+C,mBAAmB/C,OAAAA,MAAa;AACzC;AAFSkD;AAIT,SAASC,mBAAmBnD,SAAY;AACtC,SAAO+C,mBAAmB/C,OAAAA,MAAa;AACzC;AAFSmD;AAIT,SAASC,wCACPC,cACAC,iBAA8C;AAI9C,MAAIA,iBAAiB;AACnBD,iBAAaE,0BACXD,gBAAgB7C,IAChB6C,gBAAgB1C,MAChB,mBAAA;EAEJ;AACF;AAbSwC;AAeT,eAAsBI,wBAAwB,EAC5CzC,QACAsC,cACAC,gBAAe,GACe;AA1IhC;AA+IE,MAAI,OAAOvC,WAAW,UAAU;AAC9B,QAAI,CAACuC,iBAAiB;AAEpBD,mBAAaI,gBAAgBC,SAAAA,GAAY3C,MAAAA;IAC3C,OAAO;AAELsC,mBAAaE,0BAA0BD,gBAAgB7C,IAAI6C,gBAAgB1C,MAAMG,MAAAA;IACnF;EACF,WAISkC,YAAYlC,MAAAA,GAAS;AAC5BqC,4CAAwCC,cAAcC,eAAAA;AAEtD,QAAIvC,OAAOX,SAAS;AAClBiD,mBAAaI,gBAAgBC,SAAAA,GAAY3C,OAAOX,OAAO;IACzD;AACA,eAAWuD,YAAY5C,OAAOP,YAAY;AACxC6C,mBAAaO,oBACXD,SAASlD,MAAMiD,SAAAA,GACfC,SAAS/C,MACTgC,KAAKiB,UAAUF,SAASjD,IAAI,CAAA;IAEhC;EACF,WAISyC,mBAAmBpC,MAAAA,GAAS;AACnCqC,4CAAwCC,cAAcC,eAAAA;AAEtD,SAAIvC,YAAO+C,cAAP/C,mBAAkBX,SAAS;AAC7BiD,mBAAaI,gBAAgBC,SAAAA,GAAY3C,OAAOX,OAAO;IACzD;AACA,SAAIW,YAAO+C,cAAP/C,mBAAkBP,YAAY;AAChC,iBAAWmD,aAAY5C,YAAO+C,cAAP/C,mBAAkBP,YAAY;AACnD6C,qBAAaO,oBACXD,SAASlD,MAAMiD,SAAAA,GACfC,SAAS/C,MACTgC,KAAKiB,UAAUF,SAASjD,IAAI,CAAA;MAEhC;IACF;EACF,WAISK,UAAU,eAAeA,QAAQ;AACxCqC,4CAAwCC,cAAcC,eAAAA;AAEtD,QAAIS,SAAShD,OAAOiD,UAAS;AAE7B,QAAIC,OAAsC;AAE1C,WAAO,MAAM;AACX,UAAI;AACF,cAAM,EAAEC,MAAM1C,MAAK,IAAK,MAAMuC,OAAOI,KAAI;AAEzC,YAAIC,eAAmCC;AACvC,YAAIC,aAAiCD;AACrC,YAAIE,eAAmCF;AACvC,YAAIG,cAAuB;AAC3B,YAAIpE,UAAUoB,+BAAOpB;AAErB,YAAI8C,iBAAiB1B,KAAAA,GAAQ;AAC3B,cAAIiD,SAAQjD,WAAMkD,qBAANlD,mBAAyB;AACrC4C,yBAAeK,+BAAO7D;AACtB0D,uBAAaG,+BAAOhE;AACpB8D,yBAAeE,+BAAO/D;AACtB8D,wBAAcC,SAASJ;QACzB,WAAWlB,mBAAmB3B,KAAAA,GAAQ;AACpC,cAAIiD,SAAQjD,iBAAMmD,sBAANnD,mBAAyBhB,eAAzBgB,mBAAsC;AAClD4C,0BAAeK,oCAAO
G,aAAPH,mBAAiB7D;AAChC0D,uBAAaG,+BAAOhE;AACpB8D,0BAAeE,oCAAOG,aAAPH,mBAAiB9D;AAChC6D,yBAAcC,+BAAOG,aAAYP;QACnC;AAKA,YAAIJ,SAAS,cAAcK,cAAcJ,OAAO;AAC9CD,iBAAO;AACPZ,uBAAawB,mBAAkB;QACjC,WAAWZ,SAAS,eAAe,CAACO,eAAeN,OAAO;AACxDD,iBAAO;AACPZ,uBAAayB,uBAAsB;QACrC;AAEA,YAAIZ,MAAM;AACR;QACF;AAGA,YAAID,SAAS,MAAM;AACjB,cAAIO,aAAa;AACfP,mBAAO;AACPZ,yBAAa0B,yBAAyBT,YAAYF,YAAAA;UACpD,WAAWhE,SAAS;AAClB6D,mBAAO;AACPZ,yBAAa2B,qBAAqBtB,SAAAA,CAAAA;UACpC;QACF;AAGA,YAAIO,SAAS,aAAa7D,SAAS;AACjCiD,uBAAa4B,uBAAuB7E,OAAAA;QACtC,WAAW6D,SAAS,cAAcM,cAAc;AAC9ClB,uBAAa6B,wBAAwBX,YAAAA;QACvC;MACF,SAASY,OAAP;AACAC,gBAAQD,MAAM,6BAA6BA,KAAAA;AAC3C;MACF;IACF;EACF,WAAW7B,iBAAiB;AAC1BD,iBAAaE,0BACXD,gBAAgB7C,IAChB6C,gBAAgB1C,MAChByE,aAAatE,MAAAA,CAAAA;EAEjB,OAGK;AACH,UAAM,IAAIuE,MAAM,8CAAA;EAClB;AAEAjC,eAAakC,SAAQ;AACvB;AA3IsB/B;AA6ItB,SAAS6B,aAAatE,QAAW;AAC/B,MAAIA,WAAWsD,QAAW;AACxB,WAAO;EACT,WAAW,OAAOtD,WAAW,UAAU;AACrC,WAAOA;EACT,OAAO;AACL,WAAO6B,KAAKiB,UAAU9C,MAAAA;EACxB;AACF;AARSsE;","names":["AIMessage","HumanMessage","SystemMessage","ToolMessage","DynamicStructuredTool","z","randomId","convertMessageToLangChainMessage","message","TextMessage","role","HumanMessage","content","AIMessage","SystemMessage","ActionExecutionMessage","tool_calls","id","args","arguments","name","ResultMessage","ToolMessage","result","tool_call_id","actionExecutionId","convertJsonSchemaToZodSchema","jsonSchema","required","type","spec","key","value","Object","entries","properties","includes","schema","z","object","optional","string","describe","description","number","boolean","itemSchema","items","array","convertActionInputToLangChainTool","actionInput","DynamicStructuredTool","JSON","parse","func","getConstructorName","constructor","isAIMessage","isAIMessageChunk","isBaseMessageChunk","maybeSendActionExecutionResultIsMessage","eventStream$","actionExecution","sendActionExecutionResult","streamLangChainResponse","sendTextMessage","randomId","toolCall","sendActionExecution","stringify","lc_kwargs","reader","getReader","mode","done","read","toolCallName","undefined","toolCallId","toolCallArgs","hasToolCall","chunk","tool_call_chunks","additional_kwargs","function","sendTextMessageEnd","sendActionExecutionEnd","sendActionExecutionStart","sendTextMessageStart","sendTextMessageContent","sendActionExecutionArgs","error","console","encodeResult","Error","complete"]}
@@ -1,6 +1,6 @@
  import {
  streamLangChainResponse
- } from "./chunk-DVDKD6F5.mjs";
+ } from "./chunk-URMISMK2.mjs";
  import {
  GuardrailsValidationFailureResponse,
  MessageStreamInterruptedResponse,
@@ -35,7 +35,7 @@ var require_package = __commonJS({
  publishConfig: {
  access: "public"
  },
- version: "1.0.3",
+ version: "1.0.4",
  sideEffects: false,
  main: "./dist/index.js",
  module: "./dist/index.mjs",
@@ -83,7 +83,6 @@ var require_package = __commonJS({
  "groq-sdk": "^0.5.0",
  "js-tiktoken": "^1.0.8",
  langchain: "^0.1.36",
- nanoid: "3.3.4",
  openai: "^4.50.0",
  pino: "^9.2.0",
  "pino-pretty": "^11.2.1",
@@ -734,7 +733,6 @@ CopilotResponse = _ts_decorate8([
 
  // src/graphql/resolvers/copilot.resolver.ts
  import { Repeater } from "graphql-yoga";
- import { nanoid } from "nanoid";
 
  // src/service-adapters/events.ts
  import { of, concat, map, scan, concatMap, ReplaySubject, firstValueFrom } from "rxjs";
@@ -900,6 +898,7 @@ var telemetryClient = new TelemetryClient({
  var telemetry_client_default = telemetryClient;
 
  // src/graphql/resolvers/copilot.resolver.ts
+ import { randomId } from "@copilotkit/shared";
  function _ts_decorate9(decorators, target, key, desc) {
  var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
  if (typeof Reflect === "object" && typeof Reflect.decorate === "function")
@@ -1017,7 +1016,7 @@ var CopilotResolver = class {
  rejectOutputMessagesPromise = reject;
  });
  logger2.debug("Processing");
- const { eventSource, threadId = nanoid(), runId, actions } = await copilotRuntime.process({
+ const { eventSource, threadId = randomId(), runId, actions } = await copilotRuntime.process({
  serviceAdapter,
  messages: data.messages,
  actions: data.frontend.actions,
@@ -1058,7 +1057,7 @@ var CopilotResolver = class {
  });
  outputMessages = [
  plainToInstance(TextMessage, {
- id: nanoid(),
+ id: randomId(),
  createdAt: /* @__PURE__ */ new Date(),
  content: result.reason,
  role: MessageRole.assistant
@@ -1105,7 +1104,7 @@ var CopilotResolver = class {
  takeWhile((e) => e.type != RuntimeEventTypes.TextMessageEnd)
  );
  const streamingTextStatus = new Subject();
- const messageId = nanoid();
+ const messageId = randomId();
  pushMessage({
  id: messageId,
  status: firstValueFrom2(streamingTextStatus),
@@ -1217,7 +1216,7 @@ var CopilotResolver = class {
  result: event.result
  }, "Action execution result event received");
  pushMessage({
- id: nanoid(),
+ id: randomId(),
  status: new SuccessMessageStatus(),
  createdAt: /* @__PURE__ */ new Date(),
  actionExecutionId: event.actionExecutionId,
@@ -1225,7 +1224,7 @@ var CopilotResolver = class {
  result: event.result
  });
  outputMessages.push(plainToInstance(ResultMessage, {
- id: nanoid(),
+ id: randomId(),
  createdAt: /* @__PURE__ */ new Date(),
  actionExecutionId: event.actionExecutionId,
  actionName: event.actionName,
@@ -1412,4 +1411,4 @@ export {
  getCommonConfig,
  copilotRuntimeNodeHttpEndpoint
  };
- //# sourceMappingURL=chunk-VIQZS4W6.mjs.map
+ //# sourceMappingURL=chunk-VQM4RIMQ.mjs.map
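
Aside from the chunk-hash renames and the version bump to 1.0.4, the substantive change across these bundles is that nanoid is dropped as a dependency and every ID-minting call site now uses randomId from @copilotkit/shared. The sketch below is illustrative only and assumes nothing beyond what the diff shows, namely that randomId() takes no arguments and returns a unique string; resolveThreadId is a hypothetical helper mirroring the resolver's threadId = randomId() default, not part of the CopilotKit runtime API.

// A minimal sketch, assuming only what this diff shows: randomId() is a
// zero-argument helper exported by @copilotkit/shared that returns a unique
// string, used in 1.0.4 wherever 1.0.3 called nanoid().
import { randomId } from "@copilotkit/shared";

// Hypothetical wrapper mirroring the resolver's `threadId = randomId()` default.
function resolveThreadId(provided?: string): string {
  return provided ?? randomId();
}

console.log(resolveThreadId());      // a freshly generated id
console.log(resolveThreadId("t-1")); // a caller-supplied id is kept as-is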