@copilotkit/runtime 1.5.12-next.6 → 1.5.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
- package/CHANGELOG.md +22 -0
- package/__snapshots__/schema/schema.graphql +33 -0
- package/dist/{chunk-XM2VJFL6.mjs → chunk-4LRVKGBI.mjs} +2 -2
- package/dist/{chunk-YGXAWYRB.mjs → chunk-IS3QAGOU.mjs} +2 -2
- package/dist/{chunk-ON4AESON.mjs → chunk-PFELVFS7.mjs} +540 -221
- package/dist/chunk-PFELVFS7.mjs.map +1 -0
- package/dist/{chunk-TPTCSIAR.mjs → chunk-S3KKBII4.mjs} +42 -30
- package/dist/chunk-S3KKBII4.mjs.map +1 -0
- package/dist/{chunk-BKIGYRXE.mjs → chunk-W6EE6OTN.mjs} +2 -2
- package/dist/{copilot-runtime-da917bd5.d.ts → copilot-runtime-8c442d65.d.ts} +16 -5
- package/dist/graphql/types/converted/index.d.ts +1 -1
- package/dist/{groq-adapter-c35c5374.d.ts → groq-adapter-7a82cd22.d.ts} +21 -1
- package/dist/{index-24315d90.d.ts → index-a7f37670.d.ts} +1 -1
- package/dist/index.d.ts +4 -4
- package/dist/index.js +628 -297
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +10 -10
- package/dist/{langserve-a16ef8f4.d.ts → langserve-e308c437.d.ts} +32 -3
- package/dist/lib/index.d.ts +4 -4
- package/dist/lib/index.js +626 -295
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/index.mjs +6 -6
- package/dist/lib/integrations/index.d.ts +4 -4
- package/dist/lib/integrations/index.js +495 -237
- package/dist/lib/integrations/index.js.map +1 -1
- package/dist/lib/integrations/index.mjs +5 -5
- package/dist/lib/integrations/nest/index.d.ts +3 -3
- package/dist/lib/integrations/nest/index.js +495 -237
- package/dist/lib/integrations/nest/index.js.map +1 -1
- package/dist/lib/integrations/nest/index.mjs +3 -3
- package/dist/lib/integrations/node-express/index.d.ts +3 -3
- package/dist/lib/integrations/node-express/index.js +495 -237
- package/dist/lib/integrations/node-express/index.js.map +1 -1
- package/dist/lib/integrations/node-express/index.mjs +3 -3
- package/dist/lib/integrations/node-http/index.d.ts +3 -3
- package/dist/lib/integrations/node-http/index.js +495 -237
- package/dist/lib/integrations/node-http/index.js.map +1 -1
- package/dist/lib/integrations/node-http/index.mjs +2 -2
- package/dist/service-adapters/index.d.ts +4 -4
- package/dist/service-adapters/index.js +23 -11
- package/dist/service-adapters/index.js.map +1 -1
- package/dist/service-adapters/index.mjs +1 -1
- package/package.json +2 -2
- package/src/graphql/inputs/extensions.input.ts +21 -0
- package/src/graphql/inputs/generate-copilot-response.input.ts +4 -0
- package/src/graphql/inputs/load-agent-state.input.ts +10 -0
- package/src/graphql/resolvers/copilot.resolver.ts +8 -3
- package/src/graphql/resolvers/state.resolver.ts +23 -0
- package/src/graphql/types/agents-response.type.ts +1 -4
- package/src/graphql/types/copilot-response.type.ts +5 -1
- package/src/graphql/types/extensions-response.type.ts +23 -0
- package/src/graphql/types/load-agent-state-response.type.ts +17 -0
- package/src/lib/integrations/shared.ts +2 -1
- package/src/lib/runtime/copilot-runtime.ts +102 -9
- package/src/lib/runtime/remote-lg-action.ts +24 -12
- package/src/service-adapters/anthropic/anthropic-adapter.ts +2 -3
- package/src/service-adapters/empty/empty-adapter.ts +2 -2
- package/src/service-adapters/events.ts +5 -0
- package/src/service-adapters/experimental/ollama/ollama-adapter.ts +2 -2
- package/src/service-adapters/groq/groq-adapter.ts +2 -2
- package/src/service-adapters/langchain/langchain-adapter.ts +10 -3
- package/src/service-adapters/openai/openai-adapter.ts +4 -3
- package/src/service-adapters/openai/openai-assistant-adapter.ts +15 -4
- package/src/service-adapters/service-adapter.ts +4 -0
- package/src/service-adapters/unify/unify-adapter.ts +2 -3
- package/dist/chunk-ON4AESON.mjs.map +0 -1
- package/dist/chunk-TPTCSIAR.mjs.map +0 -1
- /package/dist/{chunk-XM2VJFL6.mjs.map → chunk-4LRVKGBI.mjs.map} +0 -0
- /package/dist/{chunk-YGXAWYRB.mjs.map → chunk-IS3QAGOU.mjs.map} +0 -0
- /package/dist/{chunk-BKIGYRXE.mjs.map → chunk-W6EE6OTN.mjs.map} +0 -0
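
The dominant change across the adapter hunks below is thread-ID handling: every service adapter now guarantees a `threadId` on its response, falling back to a generated UUID from `@copilotkit/shared` when the request does not supply one. A minimal sketch of the pattern, assuming `CopilotServiceAdapter` and the request/response types are importable from the package as the `.d.ts` files above suggest (the adapter itself is illustrative, not part of the package):

```ts
import { randomUUID } from "@copilotkit/shared";
import type {
  CopilotServiceAdapter,
  CopilotRuntimeChatCompletionRequest,
  CopilotRuntimeChatCompletionResponse,
} from "@copilotkit/runtime";

// Illustrative adapter showing the 1.5.12 pattern: reuse the caller's
// threadId when present, otherwise mint a fresh UUID so the response
// always carries a usable thread identifier.
class ExampleAdapter implements CopilotServiceAdapter {
  async process(
    request: CopilotRuntimeChatCompletionRequest,
  ): Promise<CopilotRuntimeChatCompletionResponse> {
    const threadId = request.threadId ?? randomUUID();
    // ... invoke the model and emit events via request.eventSource ...
    return { threadId };
  }
}
```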
@@ -223,7 +223,7 @@ function convertSystemMessageToAssistantAPI(message) {
 __name(convertSystemMessageToAssistantAPI, "convertSystemMessageToAssistantAPI");
 
 // src/service-adapters/openai/openai-adapter.ts
-import {
+import { randomUUID } from "@copilotkit/shared";
 var DEFAULT_MODEL = "gpt-4o";
 var OpenAIAdapter = class {
   model = DEFAULT_MODEL;
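
Throughout the bundled output below, `randomUUID`, `randomUUID2` … `randomUUID7` and `randomId`, `randomId2` … `randomId4` all refer to the same two exports of `@copilotkit/shared`; the numeric suffixes are just the bundler's collision-avoiding local aliases, one per bundled source file:

```ts
// Both bind the same export from "@copilotkit/shared"; the alias only
// renames it locally, which is how the bundler avoids name collisions.
import { randomUUID } from "@copilotkit/shared";
import { randomUUID as randomUUID2 } from "@copilotkit/shared";
```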
@@ -240,8 +240,9 @@ var OpenAIAdapter = class {
     this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
   }
   async process(request) {
-    const { threadId, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
+    const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
     const tools = actions.map(convertActionInputToOpenAITool);
+    const threadId = threadIdFromRequest ?? randomUUID();
     let openaiMessages = messages.map(convertMessageToOpenAIMessage);
     openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
     let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
@@ -339,7 +340,7 @@ var OpenAIAdapter = class {
       eventStream$.complete();
     });
     return {
-      threadId
+      threadId
     };
   }
 };
@@ -348,7 +349,7 @@ __name(OpenAIAdapter, "OpenAIAdapter");
 // src/service-adapters/langchain/utils.ts
 import { AIMessage, HumanMessage, SystemMessage, ToolMessage } from "@langchain/core/messages";
 import { DynamicStructuredTool } from "@langchain/core/tools";
-import { randomId
+import { randomId, convertJsonSchemaToZodSchema } from "@copilotkit/shared";
 function convertMessageToLangChainMessage(message) {
   if (message.isTextMessage()) {
     if (message.role == "user") {
@@ -414,7 +415,7 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
   var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
   if (typeof result === "string") {
     if (!actionExecution) {
-      eventStream$.sendTextMessage(
+      eventStream$.sendTextMessage(randomId(), result);
     } else {
       eventStream$.sendActionExecutionResult({
         actionExecutionId: actionExecution.id,
@@ -425,11 +426,11 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
   } else if (isAIMessage(result)) {
     maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
     if (result.content) {
-      eventStream$.sendTextMessage(
+      eventStream$.sendTextMessage(randomId(), result.content);
     }
     for (const toolCall of result.tool_calls) {
       eventStream$.sendActionExecution({
-        actionExecutionId: toolCall.id ||
+        actionExecutionId: toolCall.id || randomId(),
         actionName: toolCall.name,
         args: JSON.stringify(toolCall.args)
       });
@@ -437,12 +438,12 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
   } else if (isBaseMessageChunk(result)) {
     maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
     if ((_a = result.lc_kwargs) == null ? void 0 : _a.content) {
-      eventStream$.sendTextMessage(
+      eventStream$.sendTextMessage(randomId(), result.content);
     }
     if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
       for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
         eventStream$.sendActionExecution({
-          actionExecutionId: toolCall.id ||
+          actionExecutionId: toolCall.id || randomId(),
           actionName: toolCall.name,
           args: JSON.stringify(toolCall.args)
         });
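
The `sendTextMessage` hunks above make the previously truncated call sites explicit: the event stream's `sendTextMessage` takes a message ID first and the content second, and the runtime now mints an ID with `randomId()` whenever LangChain does not provide one. A hedged sketch of the call shape — the helper is hypothetical; only the `sendTextMessage(id, content)` signature and the `randomId` import are taken from the diff:

```ts
import { randomId } from "@copilotkit/shared";

// Hypothetical wrapper around the pattern visible above: emit a complete
// text message, preferring an upstream-provided ID and falling back to a
// freshly generated one.
function emitText(
  eventStream$: { sendTextMessage(messageId: string, content: string): void },
  content: string,
  upstreamId?: string,
) {
  eventStream$.sendTextMessage(upstreamId ?? randomId(), content);
}
```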
@@ -516,7 +517,7 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
           });
         } else if (content) {
           mode = "message";
-          currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) ||
+          currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || randomId();
           eventStream$.sendTextMessageStart({
             messageId: currentMessageId
           });
@@ -573,7 +574,7 @@ function encodeResult(result) {
 __name(encodeResult, "encodeResult");
 
 // src/service-adapters/langchain/langchain-adapter.ts
-import {
+import { randomUUID as randomUUID2 } from "@copilotkit/shared";
 import { awaitAllCallbacks } from "@langchain/core/callbacks/promises";
 var LangChainAdapter = class {
   options;
@@ -585,8 +586,8 @@ var LangChainAdapter = class {
   }
   async process(request) {
     try {
-      const { eventSource, model, actions, messages, runId } = request;
-      const threadId =
+      const { eventSource, model, actions, messages, runId, threadId: threadIdFromRequest } = request;
+      const threadId = threadIdFromRequest ?? randomUUID2();
       const result = await this.options.chainFn({
         messages: messages.map(convertMessageToLangChainMessage),
         tools: actions.map(convertActionInputToLangChainTool),
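
Note the two fallback styles this release uses: the OpenAI and LangChain adapters coalesce with `??`, while the Unify, Groq, Anthropic, Ollama, and Empty adapters below use `||`. The two only diverge for an empty-string `threadId`, which `??` preserves and `||` replaces:

```ts
const threadIdFromRequest = ""; // hypothetical edge case
threadIdFromRequest ?? "generated"; // "" — ?? only falls back on null/undefined
threadIdFromRequest || "generated"; // "generated" — || also falls back on ""
```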
@@ -647,8 +648,12 @@ var OpenAIAssistantAdapter = class {
     this.disableParallelToolCalls = (params == null ? void 0 : params.disableParallelToolCalls) || false;
   }
   async process(request) {
+    var _a, _b;
     const { messages, actions, eventSource, runId, forwardedParameters } = request;
-    let threadId = request.
+    let threadId = (_b = (_a = request.extensions) == null ? void 0 : _a.openaiAssistantAPI) == null ? void 0 : _b.threadId;
+    if (!threadId) {
+      threadId = (await this.openai.beta.threads.create()).id;
+    }
     const lastMessage = messages.at(-1);
     let nextRunId = void 0;
     if (lastMessage.isResultMessage() && runId) {
@@ -659,8 +664,15 @@ var OpenAIAssistantAdapter = class {
       throw new Error("No actionable message found in the messages");
     }
     return {
+      runId: nextRunId,
       threadId,
-
+      extensions: {
+        ...request.extensions,
+        openaiAssistantAPI: {
+          threadId,
+          runId: nextRunId
+        }
+      }
     };
   }
   async submitToolOutputs(threadId, runId, messages, eventSource) {
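
Taken together, the two assistant-adapter hunks replace the old request-level thread lookup with a round trip through the new `extensions` field (added in this release's GraphQL inputs and response types): the adapter reads `extensions.openaiAssistantAPI.threadId`, creates an OpenAI thread when it is absent, and echoes both `threadId` and `runId` back under the same key. A sketch of the shapes involved — the field names come from the diff, but these interfaces are illustrative, not the package's published types:

```ts
// Illustrative shapes for the extensions round trip.
interface OpenAIAssistantAPIExtension {
  threadId?: string;
  runId?: string;
}

interface AdapterExtensions {
  openaiAssistantAPI?: OpenAIAssistantAPIExtension;
}

// Request side: recover the assistant thread from the previous turn, if any.
function readThreadId(extensions?: AdapterExtensions): string | undefined {
  return extensions?.openaiAssistantAPI?.threadId;
}

// Response side: echo the (possibly newly created) thread and run back so
// the client can send them on its next request.
function buildExtensions(
  extensions: AdapterExtensions | undefined,
  threadId: string,
  runId?: string,
): AdapterExtensions {
  return { ...extensions, openaiAssistantAPI: { threadId, runId } };
}
```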
@@ -819,7 +831,7 @@ __name(getRunIdFromStream, "getRunIdFromStream");
 
 // src/service-adapters/unify/unify-adapter.ts
 import OpenAI3 from "openai";
-import { randomId as
+import { randomId as randomId2, randomUUID as randomUUID3 } from "@copilotkit/shared";
 var UnifyAdapter = class {
   apiKey;
   model;
@@ -861,7 +873,7 @@ var UnifyAdapter = class {
       for await (const chunk of stream) {
         if (this.start) {
           model = chunk.model;
-          currentMessageId =
+          currentMessageId = randomId2();
           eventStream$.sendTextMessageStart({
             messageId: currentMessageId
           });
@@ -928,7 +940,7 @@ var UnifyAdapter = class {
      eventStream$.complete();
    });
    return {
-     threadId: request.threadId ||
+     threadId: request.threadId || randomUUID3()
    };
  }
};
@@ -936,7 +948,7 @@ __name(UnifyAdapter, "UnifyAdapter");
 
 // src/service-adapters/groq/groq-adapter.ts
 import { Groq } from "groq-sdk";
-import {
+import { randomUUID as randomUUID4 } from "@copilotkit/shared";
 var DEFAULT_MODEL2 = "llama3-groq-70b-8192-tool-use-preview";
 var GroqAdapter = class {
   model = DEFAULT_MODEL2;
@@ -1049,7 +1061,7 @@ var GroqAdapter = class {
       eventStream$.complete();
     });
     return {
-      threadId: threadId ||
+      threadId: request.threadId || randomUUID4()
     };
   }
 };
@@ -1191,7 +1203,7 @@ function groupAnthropicMessagesByRole(messageParams) {
 __name(groupAnthropicMessagesByRole, "groupAnthropicMessagesByRole");
 
 // src/service-adapters/anthropic/anthropic-adapter.ts
-import { randomId as
+import { randomId as randomId3, randomUUID as randomUUID5 } from "@copilotkit/shared";
 var DEFAULT_MODEL3 = "claude-3-sonnet-20240229";
 var AnthropicAdapter = class {
   model = DEFAULT_MODEL3;
@@ -1242,8 +1254,8 @@ var AnthropicAdapter = class {
     eventSource.stream(async (eventStream$) => {
       let mode = null;
       let didOutputText = false;
-      let currentMessageId =
-      let currentToolCallId =
+      let currentMessageId = randomId3();
+      let currentToolCallId = randomId3();
       let filterThinkingTextBuffer = new FilterThinkingTextBuffer();
       for await (const chunk of await stream) {
         if (chunk.type === "message_start") {
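
The Anthropic adapter's stream handler now mints both stream IDs with `randomId3()` (the bundled alias for `randomId`) before the chunk loop; the old initializers are cut off by the diff viewer, but the new form guarantees a valid ID exists before any chunk is processed. In source terms:

```ts
import { randomId } from "@copilotkit/shared";

// IDs exist before the first chunk arrives; the handlers may still
// overwrite them per message or per tool call as the stream progresses.
let currentMessageId = randomId();
let currentToolCallId = randomId();
```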
@@ -1300,7 +1312,7 @@ var AnthropicAdapter = class {
       eventStream$.complete();
     });
     return {
-      threadId: threadId ||
+      threadId: threadId || randomUUID5()
     };
   }
 };
@@ -1340,7 +1352,7 @@ var FilterThinkingTextBuffer = /* @__PURE__ */ __name(class FilterThinkingTextBu
 
 // src/service-adapters/experimental/ollama/ollama-adapter.ts
 import { Ollama } from "@langchain/community/llms/ollama";
-import { randomId as
+import { randomId as randomId4, randomUUID as randomUUID6 } from "@copilotkit/shared";
 var DEFAULT_MODEL4 = "llama3:latest";
 var ExperimentalOllamaAdapter = class {
   model;
@@ -1359,7 +1371,7 @@ var ExperimentalOllamaAdapter = class {
     const contents = messages.filter((m) => m.isTextMessage()).map((m) => m.content);
     const _stream = await ollama.stream(contents);
     eventSource.stream(async (eventStream$) => {
-      const currentMessageId =
+      const currentMessageId = randomId4();
       eventStream$.sendTextMessageStart({
         messageId: currentMessageId
       });
@@ -1375,18 +1387,18 @@ var ExperimentalOllamaAdapter = class {
       eventStream$.complete();
     });
     return {
-      threadId: request.threadId ||
+      threadId: request.threadId || randomUUID6()
     };
   }
 };
 __name(ExperimentalOllamaAdapter, "ExperimentalOllamaAdapter");
 
 // src/service-adapters/empty/empty-adapter.ts
-import {
+import { randomUUID as randomUUID7 } from "@copilotkit/shared";
 var EmptyAdapter = class {
   async process(request) {
     return {
-      threadId: request.threadId ||
+      threadId: request.threadId || randomUUID7()
     };
   }
 };
@@ -1407,4 +1419,4 @@ export {
   EmptyAdapter,
   ExperimentalEmptyAdapter
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-S3KKBII4.mjs.map
@@ -0,0 +1 @@
+{"version":3,"sources":["../src/service-adapters/langchain/langserve.ts","../src/service-adapters/openai/openai-adapter.ts","../src/service-adapters/openai/utils.ts","../src/service-adapters/langchain/utils.ts","../src/service-adapters/langchain/langchain-adapter.ts","../src/service-adapters/google/google-genai-adapter.ts","../src/service-adapters/openai/openai-assistant-adapter.ts","../src/service-adapters/unify/unify-adapter.ts","../src/service-adapters/groq/groq-adapter.ts","../src/service-adapters/anthropic/anthropic-adapter.ts","../src/service-adapters/anthropic/utils.ts","../src/service-adapters/experimental/ollama/ollama-adapter.ts","../src/service-adapters/empty/empty-adapter.ts"],"sourcesContent":[…]}
{ temperature: forwardedParameters.temperature } : {}),\n ...(tools.length > 0 && { tools }),\n ...(toolChoice && { tool_choice: toolChoice }),\n stream: true,\n });\n\n eventSource.stream(async (eventStream$) => {\n let mode: \"function\" | \"message\" | null = null;\n let didOutputText = false;\n let currentMessageId = randomId();\n let currentToolCallId = randomId();\n let filterThinkingTextBuffer = new FilterThinkingTextBuffer();\n\n for await (const chunk of await stream) {\n if (chunk.type === \"message_start\") {\n currentMessageId = chunk.message.id;\n } else if (chunk.type === \"content_block_start\") {\n if (chunk.content_block.type === \"text\") {\n didOutputText = false;\n filterThinkingTextBuffer.reset();\n mode = \"message\";\n } else if (chunk.content_block.type === \"tool_use\") {\n currentToolCallId = chunk.content_block.id;\n eventStream$.sendActionExecutionStart({\n actionExecutionId: currentToolCallId,\n actionName: chunk.content_block.name,\n parentMessageId: currentMessageId,\n });\n mode = \"function\";\n }\n } else if (chunk.type === \"content_block_delta\") {\n if (chunk.delta.type === \"text_delta\") {\n const text = filterThinkingTextBuffer.onTextChunk(chunk.delta.text);\n if (text.length > 0) {\n if (!didOutputText) {\n eventStream$.sendTextMessageStart({ messageId: currentMessageId });\n didOutputText = true;\n }\n eventStream$.sendTextMessageContent({\n messageId: currentMessageId,\n content: text,\n });\n }\n } else if (chunk.delta.type === \"input_json_delta\") {\n eventStream$.sendActionExecutionArgs({\n actionExecutionId: currentToolCallId,\n args: chunk.delta.partial_json,\n });\n }\n } else if (chunk.type === \"content_block_stop\") {\n if (mode === \"message\") {\n if (didOutputText) {\n eventStream$.sendTextMessageEnd({ messageId: currentMessageId });\n }\n } else if (mode === \"function\") {\n eventStream$.sendActionExecutionEnd({ actionExecutionId: currentToolCallId });\n }\n }\n }\n\n eventStream$.complete();\n });\n\n return {\n threadId: threadId || randomUUID(),\n };\n }\n}\n\nconst THINKING_TAG = \"<thinking>\";\nconst THINKING_TAG_END = \"</thinking>\";\n\nclass FilterThinkingTextBuffer {\n private buffer: string;\n private didFilterThinkingTag: boolean = false;\n\n constructor() {\n this.buffer = \"\";\n }\n\n onTextChunk(text: string): string {\n this.buffer += text;\n if (this.didFilterThinkingTag) {\n return text;\n }\n const potentialTag = this.buffer.slice(0, THINKING_TAG.length);\n if (THINKING_TAG.startsWith(potentialTag)) {\n if (this.buffer.includes(THINKING_TAG_END)) {\n const end = this.buffer.indexOf(THINKING_TAG_END);\n const filteredText = this.buffer.slice(end + THINKING_TAG_END.length);\n this.buffer = filteredText;\n this.didFilterThinkingTag = true;\n return filteredText;\n } else {\n return \"\";\n }\n }\n return text;\n }\n\n reset() {\n this.buffer = \"\";\n this.didFilterThinkingTag = false;\n }\n}\n","import {\n ActionExecutionMessage,\n Message,\n ResultMessage,\n TextMessage,\n} from \"../../graphql/types/converted\";\nimport { ActionInput } from \"../../graphql/inputs/action.input\";\nimport { Anthropic } from \"@anthropic-ai/sdk\";\n\nexport function limitMessagesToTokenCount(\n messages: any[],\n tools: any[],\n model: string,\n maxTokens?: number,\n): any[] {\n maxTokens ||= MAX_TOKENS;\n\n const result: any[] = [];\n const toolsNumTokens = countToolsTokens(model, tools);\n if (toolsNumTokens > maxTokens) {\n throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);\n }\n 
maxTokens -= toolsNumTokens;\n\n for (const message of messages) {\n if (message.role === \"system\") {\n const numTokens = countMessageTokens(model, message);\n maxTokens -= numTokens;\n\n if (maxTokens < 0) {\n throw new Error(\"Not enough tokens for system message.\");\n }\n }\n }\n\n let cutoff: boolean = false;\n\n const reversedMessages = [...messages].reverse();\n for (const message of reversedMessages) {\n if (message.role === \"system\") {\n result.unshift(message);\n continue;\n } else if (cutoff) {\n continue;\n }\n let numTokens = countMessageTokens(model, message);\n if (maxTokens < numTokens) {\n cutoff = true;\n continue;\n }\n result.unshift(message);\n maxTokens -= numTokens;\n }\n\n return result;\n}\n\nconst MAX_TOKENS = 128000;\n\nfunction countToolsTokens(model: string, tools: any[]): number {\n if (tools.length === 0) {\n return 0;\n }\n const json = JSON.stringify(tools);\n return countTokens(model, json);\n}\n\nfunction countMessageTokens(model: string, message: any): number {\n return countTokens(model, JSON.stringify(message.content) || \"\");\n}\n\nfunction countTokens(model: string, text: string): number {\n return text.length / 3;\n}\n\nexport function convertActionInputToAnthropicTool(action: ActionInput): Anthropic.Messages.Tool {\n return {\n name: action.name,\n description: action.description,\n input_schema: JSON.parse(action.jsonSchema),\n };\n}\n\nexport function convertMessageToAnthropicMessage(\n message: Message,\n): Anthropic.Messages.MessageParam {\n if (message.isTextMessage()) {\n if (message.role === \"system\") {\n return {\n role: \"assistant\",\n content: [\n { type: \"text\", text: \"THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: \" + message.content },\n ],\n };\n } else {\n return {\n role: message.role === \"user\" ? 
\"user\" : \"assistant\",\n content: [{ type: \"text\", text: message.content }],\n };\n }\n } else if (message.isActionExecutionMessage()) {\n return {\n role: \"assistant\",\n content: [\n {\n id: message.id,\n type: \"tool_use\",\n input: message.arguments,\n name: message.name,\n },\n ],\n };\n } else if (message.isResultMessage()) {\n return {\n role: \"user\",\n content: [\n {\n type: \"tool_result\",\n content: message.result,\n tool_use_id: message.actionExecutionId,\n },\n ],\n };\n }\n}\n\nexport function groupAnthropicMessagesByRole(\n messageParams: Anthropic.Messages.MessageParam[],\n): Anthropic.Messages.MessageParam[] {\n return messageParams.reduce((acc, message) => {\n const lastGroup = acc[acc.length - 1];\n\n if (lastGroup && lastGroup.role === message.role) {\n lastGroup.content = lastGroup.content.concat(message.content as any);\n } else {\n acc.push({\n role: message.role,\n content: [...(message.content as any)],\n });\n }\n\n return acc;\n }, [] as Anthropic.Messages.MessageParam[]);\n}\n","/**\n * CopilotKit Adapter for Ollama\n *\n * <RequestExample>\n * ```jsx CopilotRuntime Example\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(req, new OllamaAdapter());\n * ```\n * </RequestExample>\n *\n * You can easily set the model to use by passing it to the constructor.\n * ```jsx\n * const copilotKit = new CopilotRuntime();\n * return copilotKit.response(\n * req,\n * new OllamaAdapter({ model: \"llama3-70b-8192\" }),\n * );\n * ```\n */\nimport { TextMessage } from \"../../../graphql/types/converted\";\nimport {\n CopilotServiceAdapter,\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../../service-adapter\";\nimport { Ollama } from \"@langchain/community/llms/ollama\";\nimport { randomId, randomUUID } from \"@copilotkit/shared\";\n\nconst DEFAULT_MODEL = \"llama3:latest\";\n\ninterface OllamaAdapterOptions {\n model?: string;\n}\n\nexport class ExperimentalOllamaAdapter implements CopilotServiceAdapter {\n private model: string;\n\n constructor(options?: OllamaAdapterOptions) {\n if (options?.model) {\n this.model = options.model;\n } else {\n this.model = DEFAULT_MODEL;\n }\n }\n\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n const { messages, actions, eventSource } = request;\n // const messages = this.transformMessages(forwardedProps.messages);\n\n const ollama = new Ollama({\n model: this.model,\n });\n const contents = (messages.filter((m) => m.isTextMessage()) as TextMessage[]).map(\n (m) => m.content,\n );\n const _stream = await ollama.stream(contents); // [TODO] role info is dropped...\n\n eventSource.stream(async (eventStream$) => {\n const currentMessageId = randomId();\n eventStream$.sendTextMessageStart({ messageId: currentMessageId });\n for await (const chunkText of _stream) {\n eventStream$.sendTextMessageContent({\n messageId: currentMessageId,\n content: chunkText,\n });\n }\n eventStream$.sendTextMessageEnd({ messageId: currentMessageId });\n // we may need to add this later.. 
[nc]\n // let calls = (await result.response).functionCalls();\n\n eventStream$.complete();\n });\n return {\n threadId: request.threadId || randomUUID(),\n };\n }\n}\n","/**\n * CopilotKit Empty Adapter\n *\n * This adapter is meant to preserve adherence to runtime requirements, while doing nothing\n * Ideal if you don't want to connect an LLM the to the runtime, and only use your LangGraph agent.\n * Be aware that Copilot Suggestions will not work if you use this adapter\n *\n * ## Example\n *\n * ```ts\n * import { CopilotRuntime, EmptyAdapter } from \"@copilotkit/runtime\";\n *\n * const copilotKit = new CopilotRuntime();\n *\n * return new EmptyAdapter();\n * ```\n */\nimport {\n CopilotServiceAdapter,\n CopilotRuntimeChatCompletionRequest,\n CopilotRuntimeChatCompletionResponse,\n} from \"../service-adapter\";\nimport { randomUUID } from \"@copilotkit/shared\";\n\nexport class EmptyAdapter implements CopilotServiceAdapter {\n async process(\n request: CopilotRuntimeChatCompletionRequest,\n ): Promise<CopilotRuntimeChatCompletionResponse> {\n return {\n threadId: request.threadId || randomUUID(),\n };\n }\n}\n\nexport const ExperimentalEmptyAdapter = EmptyAdapter;\n"],"mappings":";;;;;AACA,SAASA,sBAAsB;AAUxB,IAAMC,cAAN,MAAMA;EACXC;EACAC;EACAC;EACAC;EACAC;EAEAC,YAAYC,SAAgC;AAC1C,SAAKN,OAAOM,QAAQN;AACpB,SAAKC,cAAcK,QAAQL;AAC3B,SAAKC,WAAWI,QAAQJ;AACxB,SAAKC,aAAaG,QAAQH;AAC1B,SAAKC,gBAAgBE,QAAQF,iBAAiB;EAChD;EAEA,MAAMG,WAAiC;AACrC,QAAI,CAAC,KAAKJ,YAAY;AACpB,YAAM,KAAKK,yBAAwB;IACrC;AAEA,WAAO;MACLR,MAAM,KAAKA;MACXC,aAAa,KAAKA;MAClBE,YAAY,KAAKA;MACjBM,SAAS,OAAOC,SAAAA;AACd,cAAMC,WAAW,IAAIC,eAAe;UAAEC,KAAK,KAAKX;QAAS,CAAA;AACzD,YAAIY;AACJ,YAAI,KAAKV,kBAAkB,UAAU;AACnCU,kBAAQJ,KAAKK,OAAOC,KAAKN,IAAAA,EAAM,CAAA,CAAE;QACnC,OAAO;AACLI,kBAAQJ;QACV;AACA,eAAO,MAAMC,SAASM,OAAOH,KAAAA;MAC/B;IACF;EACF;EAEA,MAAMN,2BAA2B;AAC/B,UAAMU,iBAAiB;MAAC;MAAU;MAAU;;AAE5C,QAAIC,YAAY,KAAKjB,SAASkB,QAAQ,QAAQ,EAAA,IAAM;AACpD,QAAIC,SAAS,MAAMC,MAAMH,SAAAA,EACtBI,KAAK,CAACC,QAAQA,IAAIC,KAAI,CAAA,EACtBC,MAAM,MAAA;AACL,YAAM,IAAIC,MAAM,yCAAyCR,SAAAA;IAC3D,CAAA;AAGF,QAAID,eAAeU,SAASP,OAAOQ,IAAI,GAAG;AACxC,WAAKzB,gBAAgB;AACrB,WAAKD,aAAa;QAChB;UACEH,MAAM;UACN6B,MAAMR,OAAOQ;UACb5B,aAAa;QACf;;IAEJ,WAAWoB,OAAOQ,SAAS,UAAU;AACnC,WAAKzB,gBAAgB;AACrB,WAAKD,aAAaY,OAAOC,KAAKK,OAAOS,UAAU,EAAEC,IAAI,CAACC,QAAAA;AArE5D;AAsEQ,YAAIC,WAAWZ,OAAOS,WAAWE,GAAAA;AACjC,YAAI,CAACd,eAAeU,SAASK,SAASJ,IAAI,GAAG;AAC3C,gBAAM,IAAIF,MAAM,yBAAA;QAClB;AACA,eAAO;UACL3B,MAAMgC;UACNH,MAAMI,SAASJ;UACf5B,aAAagC,SAAShC,eAAe;UACrCiC,YAAUb,YAAOa,aAAPb,mBAAiBO,SAASI,SAAQ;QAC9C;MACF,CAAA;IACF,OAAO;AACL,YAAM,IAAIL,MAAM,yBAAA;IAClB;EACF;AACF;AA3Ea5B;;;ACuCb,OAAOoC,YAAY;;;ACxCZ,SAASC,0BACdC,UACAC,OACAC,OACAC,WAAkB;AAElBA,4BAAcC,wBAAwBF,KAAAA;AAEtC,QAAMG,SAAgB,CAAA;AACtB,QAAMC,iBAAiBC,iBAAiBL,OAAOD,KAAAA;AAC/C,MAAIK,iBAAiBH,WAAW;AAC9B,UAAM,IAAIK,MAAM,4CAA4CF,oBAAoBH,WAAW;EAC7F;AACAA,eAAaG;AAEb,aAAWG,WAAWT,UAAU;AAC9B,QAAIS,QAAQC,SAAS,UAAU;AAC7B,YAAMC,YAAYC,mBAAmBV,OAAOO,OAAAA;AAC5CN,mBAAaQ;AAEb,UAAIR,YAAY,GAAG;AACjB,cAAM,IAAIK,MAAM,uCAAA;MAClB;IACF;EACF;AAEA,MAAIK,SAAkB;AAEtB,QAAMC,mBAAmB;OAAId;IAAUe,QAAO;AAC9C,aAAWN,WAAWK,kBAAkB;AACtC,QAAIL,QAAQC,SAAS,UAAU;AAC7BL,aAAOW,QAAQP,OAAAA;AACf;IACF,WAAWI,QAAQ;AACjB;IACF;AACA,QAAIF,YAAYC,mBAAmBV,OAAOO,OAAAA;AAC1C,QAAIN,YAAYQ,WAAW;AACzBE,eAAS;AACT;IACF;AACAR,WAAOW,QAAQP,OAAAA;AACfN,iBAAaQ;EACf;AAEA,SAAON;AACT;AA9CgBN;AAgDT,SAASK,wBAAwBF,OAAa;AACnD,SAAOe,iBAAiBf,KAAAA,KAAUgB;AACpC;AAFgBd;AAIhB,IAAMc,qBAAqB;AAE3B,IAAMD,mBAA8C;;EAElD,UAAU;EACV,qBAAqB;EACrB,eAAe;EACf,0BAA0B;EAC1B,sBAAsB;EACtB,uBAAuB;EACvB,sBAAsB;EACtB,wBAAwB;EACxB,6BAA6
B;EAC7B,aAAa;EACb,kBAAkB;EAClB,kBAAkB;EAClB,SAAS;EACT,cAAc;EACd,cAAc;;EAGd,sBAAsB;EACtB,iBAAiB;EACjB,sBAAsB;EACtB,0BAA0B;EAC1B,qBAAqB;EACrB,sBAAsB;EACtB,0BAA0B;EAC1B,sBAAsB;AACxB;AAEA,SAASV,iBAAiBL,OAAeD,OAAY;AACnD,MAAIA,MAAMkB,WAAW,GAAG;AACtB,WAAO;EACT;AACA,QAAMC,OAAOC,KAAKC,UAAUrB,KAAAA;AAC5B,SAAOsB,YAAYrB,OAAOkB,IAAAA;AAC5B;AANSb;AAQT,SAASK,mBAAmBV,OAAeO,SAAY;AACrD,SAAOc,YAAYrB,OAAOO,QAAQe,WAAW,EAAA;AAC/C;AAFSZ;AAIT,SAASW,YAAYrB,OAAeuB,MAAY;AAC9C,SAAOA,KAAKN,SAAS;AACvB;AAFSI;AAIF,SAASG,+BAA+BC,QAAmB;AAChE,SAAO;IACLC,MAAM;IACNC,UAAU;MACRC,MAAMH,OAAOG;MACbC,aAAaJ,OAAOI;MACpBC,YAAYX,KAAKY,MAAMN,OAAOO,UAAU;IAC1C;EACF;AACF;AATgBR;AAWT,SAASS,8BAA8B1B,SAAgB;AAC5D,MAAIA,QAAQ2B,cAAa,GAAI;AAC3B,WAAO;MACL1B,MAAMD,QAAQC;MACdc,SAASf,QAAQe;IACnB;EAIF,WAAWf,QAAQ4B,yBAAwB,GAAI;AAC7C,WAAO;MACL3B,MAAM;MACN4B,YAAY;QACV;UACEC,IAAI9B,QAAQ8B;UACZX,MAAM;UACNC,UAAU;YACRC,MAAMrB,QAAQqB;YACdU,WAAWnB,KAAKC,UAAUb,QAAQ+B,SAAS;UAC7C;QACF;;IAEJ;EACF,WAAW/B,QAAQgC,gBAAe,GAAI;AACpC,WAAO;MACL/B,MAAM;MACNc,SAASf,QAAQJ;MACjBqC,cAAcjC,QAAQkC;IACxB;EACF;AACF;AA9BgBR;AAgCT,SAASS,mCAAmCnC,SAAmC;AACpF,SAAO;IACL,GAAGA;IACH,GAAIA,QAAQC,SAAS,YAAY;MAC/BA,MAAM;MACNc,SAAS,gDAAgDf,QAAQe;IACnE;EACF;AACF;AARgBoB;;;AD3FhB,SAASC,kBAAkB;AAE3B,IAAMC,gBAAgB;AAyBf,IAAMC,gBAAN,MAAMA;EACHC,QAAgBF;EAEhBG,2BAAoC;EACpCC;EACR,IAAWC,SAAiB;AAC1B,WAAO,KAAKD;EACd;EAEAE,YAAYC,QAA8B;AACxC,SAAKH,WAAUG,iCAAQF,WAAU,IAAIG,OAAO,CAAC,CAAA;AAC7C,QAAID,iCAAQL,OAAO;AACjB,WAAKA,QAAQK,OAAOL;IACtB;AACA,SAAKC,4BAA2BI,iCAAQJ,6BAA4B;EACtE;EAEA,MAAMM,QACJC,SAC+C;AAC/C,UAAM,EACJC,UAAUC,qBACVV,QAAQ,KAAKA,OACbW,UACAC,SACAC,aACAC,oBAAmB,IACjBN;AACJ,UAAMO,QAAQH,QAAQI,IAAIC,8BAAAA;AAC1B,UAAMR,WAAWC,uBAAuBQ,WAAAA;AAExC,QAAIC,iBAAiBR,SAASK,IAAII,6BAAAA;AAClCD,qBAAiBE,0BAA0BF,gBAAgBJ,OAAOf,KAAAA;AAElE,QAAIsB,aAAkBR,2DAAqBQ;AAC3C,SAAIR,2DAAqBQ,gBAAe,YAAY;AAClDA,mBAAa;QACXC,MAAM;QACNC,UAAU;UAAEC,MAAMX,oBAAoBY;QAAuB;MAC/D;IACF;AAEA,UAAMC,SAAS,KAAKxB,OAAOyB,KAAKC,KAAKC,YAAYH,OAAO;MACtD3B;MACA2B,QAAQ;MACRhB,UAAUQ;MACV,GAAIJ,MAAMgB,SAAS,KAAK;QAAEhB;MAAM;MAChC,IAAID,2DAAqBkB,cAAa;QAAEC,YAAYnB,oBAAoBkB;MAAU;MAClF,IAAIlB,2DAAqBoB,SAAQ;QAAEA,MAAMpB,oBAAoBoB;MAAK;MAClE,GAAIZ,cAAc;QAAEa,aAAab;MAAW;MAC5C,GAAI,KAAKrB,4BAA4B;QAAEmC,qBAAqB;MAAM;MAClE,IAAItB,2DAAqBuB,gBAAe;QAAEA,aAAavB,oBAAoBuB;MAAY;IACzF,CAAA;AAEAxB,gBAAYc,OAAO,OAAOW,iBAAAA;AA9I9B;AA+IM,UAAIC,OAAsC;AAC1C,UAAIC;AACJ,UAAIC;AACJ,uBAAiBC,SAASf,QAAQ;AAChC,YAAIe,MAAMC,QAAQZ,WAAW,GAAG;AAC9B;QACF;AAEA,cAAMa,YAAWF,WAAMC,QAAQ,CAAA,EAAGE,MAAMC,eAAvBJ,mBAAoC;AACrD,cAAMK,UAAUL,MAAMC,QAAQ,CAAA,EAAGE,MAAME;AAKvC,YAAIR,SAAS,cAAaK,qCAAUI,KAAI;AACtCT,iBAAO;AACPD,uBAAaW,mBAAmB;YAAEC,WAAWV;UAAiB,CAAA;QAChE,WAAWD,SAAS,eAAeK,aAAaO,WAAaP,qCAAUI,MAAK;AAC1ET,iBAAO;AACPD,uBAAac,uBAAuB;YAAEC,mBAAmBZ;UAAkB,CAAA;QAC7E;AAGA,YAAIF,SAAS,MAAM;AACjB,cAAIK,qCAAUI,IAAI;AAChBT,mBAAO;AACPE,gCAAoBG,SAAUI;AAC9BV,yBAAagB,yBAAyB;cACpCD,mBAAmBZ;cACnBc,iBAAiBb,MAAMM;cACvBQ,YAAYZ,SAAUpB,SAAUC;YAClC,CAAA;UACF,WAAWsB,SAAS;AAClBR,mBAAO;AACPC,+BAAmBE,MAAMM;AACzBV,yBAAamB,qBAAqB;cAAEP,WAAWV;YAAiB,CAAA;UAClE;QACF;AAGA,YAAID,SAAS,aAAaQ,SAAS;AACjCT,uBAAaoB,uBAAuB;YAClCR,WAAWV;YACXO;UACF,CAAA;QACF,WAAWR,SAAS,gBAAcK,0CAAUpB,aAAVoB,mBAAoBe,YAAW;AAC/DrB,uBAAasB,wBAAwB;YACnCP,mBAAmBZ;YACnBoB,MAAMjB,SAASpB,SAASmC;UAC1B,CAAA;QACF;MACF;AAGA,UAAIpB,SAAS,WAAW;AACtBD,qBAAaW,mBAAmB;UAAEC,WAAWV;QAAiB,CAAA;MAChE,WAAWD,SAAS,YAAY;AAC9BD,qBAAac,uBAAuB;UAAEC,mBAAmBZ;QAAkB,CAAA;MAC7E;AAEAH,mBAAawB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACLrD;IACF;EACF;AACF;AA1HaV;;;AElFb,SACEgE,WAIAC,cACAC,eACAC,mBACK;AACP,SAASC,6BAA6B;AAKtC,SAASC,UAAUC,oCAAoC;AAEhD,SAASC,iCAAiCC,SAAgB;AAC/D,MAAIA,QAAQC,cAAa,GAAI;AAC3B,QAAID,Q
AAQE,QAAQ,QAAQ;AAC1B,aAAO,IAAIC,aAAaH,QAAQI,OAAO;IACzC,WAAWJ,QAAQE,QAAQ,aAAa;AACtC,aAAO,IAAIG,UAAUL,QAAQI,OAAO;IACtC,WAAWJ,QAAQE,SAAS,UAAU;AACpC,aAAO,IAAII,cAAcN,QAAQI,OAAO;IAC1C;EACF,WAAWJ,QAAQO,yBAAwB,GAAI;AAC7C,WAAO,IAAIF,UAAU;MACnBD,SAAS;MACTI,YAAY;QACV;UACEC,IAAIT,QAAQS;UACZC,MAAMV,QAAQW;UACdC,MAAMZ,QAAQY;QAChB;;IAEJ,CAAA;EACF,WAAWZ,QAAQa,gBAAe,GAAI;AACpC,WAAO,IAAIC,YAAY;MACrBV,SAASJ,QAAQe;MACjBC,cAAchB,QAAQiB;IACxB,CAAA;EACF;AACF;AA1BgBlB;AA4BT,SAASmB,kCAAkCC,aAAwB;AACxE,SAAO,IAAIC,sBAAsB;IAC/BR,MAAMO,YAAYP;IAClBS,aAAaF,YAAYE;IACzBC,QAAQC,6BACNC,KAAKC,MAAMN,YAAYO,UAAU,GACjC,IAAA;IAEFC,MAAM,YAAA;AACJ,aAAO;IACT;EACF,CAAA;AACF;AAZgBT;AA8BhB,SAASU,YAAYC,SAAY;AAC/B,SAAOC,OAAOC,UAAUC,SAASC,KAAKJ,OAAAA,MAAa;AACrD;AAFSD;AAIT,SAASM,iBAAiBL,SAAY;AACpC,SAAOC,OAAOC,UAAUC,SAASC,KAAKJ,OAAAA,MAAa;AACrD;AAFSK;AAIT,SAASC,mBAAmBN,SAAY;AACtC,SAAOC,OAAOC,UAAUC,SAASC,KAAKJ,OAAAA,MAAa;AACrD;AAFSM;AAIT,SAASC,wCACPC,cACAC,iBAA8C;AAI9C,MAAIA,iBAAiB;AACnBD,iBAAaE,0BAA0B;MACrCC,mBAAmBF,gBAAgBG;MACnCC,YAAYJ,gBAAgBK;MAC5BC,QAAQ;IACV,CAAA;EACF;AACF;AAbSR;AAeT,eAAsBS,wBAAwB,EAC5CD,QACAP,cACAC,gBAAe,GACe;AAzGhC;AA8GE,MAAI,OAAOM,WAAW,UAAU;AAC9B,QAAI,CAACN,iBAAiB;AAEpBD,mBAAaS,gBAAgBC,SAAAA,GAAYH,MAAAA;IAC3C,OAAO;AAELP,mBAAaE,0BAA0B;QACrCC,mBAAmBF,gBAAgBG;QACnCC,YAAYJ,gBAAgBK;QAC5BC;MACF,CAAA;IACF;EACF,WAIShB,YAAYgB,MAAAA,GAAS;AAC5BR,4CAAwCC,cAAcC,eAAAA;AAEtD,QAAIM,OAAOI,SAAS;AAClBX,mBAAaS,gBAAgBC,SAAAA,GAAYH,OAAOI,OAAO;IACzD;AACA,eAAWC,YAAYL,OAAOM,YAAY;AACxCb,mBAAac,oBAAoB;QAC/BX,mBAAmBS,SAASR,MAAMM,SAAAA;QAClCL,YAAYO,SAASN;QACrBS,MAAMC,KAAKC,UAAUL,SAASG,IAAI;MACpC,CAAA;IACF;EACF,WAISjB,mBAAmBS,MAAAA,GAAS;AACnCR,4CAAwCC,cAAcC,eAAAA;AAEtD,SAAIM,YAAOW,cAAPX,mBAAkBI,SAAS;AAC7BX,mBAAaS,gBAAgBC,SAAAA,GAAYH,OAAOI,OAAO;IACzD;AACA,SAAIJ,YAAOW,cAAPX,mBAAkBM,YAAY;AAChC,iBAAWD,aAAYL,YAAOW,cAAPX,mBAAkBM,YAAY;AACnDb,qBAAac,oBAAoB;UAC/BX,mBAAmBS,SAASR,MAAMM,SAAAA;UAClCL,YAAYO,SAASN;UACrBS,MAAMC,KAAKC,UAAUL,SAASG,IAAI;QACpC,CAAA;MACF;IACF;EACF,WAISR,UAAU,eAAeA,QAAQ;AACxCR,4CAAwCC,cAAcC,eAAAA;AAEtD,QAAIkB,SAASZ,OAAOa,UAAS;AAE7B,QAAIC,OAAsC;AAC1C,QAAIC;AAEJ,UAAMC,kBAAkB;MACtBjB,MAAM;MACNF,IAAI;MACJoB,OAAO;MACPC,WAAW;IACb;AAEA,WAAO,MAAM;AACX,UAAI;AACF,cAAM,EAAEC,MAAMC,MAAK,IAAK,MAAMR,OAAOS,KAAI;AAEzC,YAAIC,eAAmCC;AACvC,YAAIC,aAAiCD;AACrC,YAAIE,eAAmCF;AACvC,YAAIG,cAAuB;AAC3B,YAAItB,UAAU;AACd,YAAIgB,SAASA,MAAMhB,SAAS;AAC1BA,oBAAUuB,MAAMC,QAAQR,MAAMhB,OAAO,MAC9BgB,WAAMhB,QAAQ,CAAA,MAAdgB,mBAA0BS,SAAQ,KACrCT,MAAMhB;QACZ;AAEA,YAAId,iBAAiB8B,KAAAA,GAAQ;AAC3B,cAAIU,SAAQV,WAAMW,qBAANX,mBAAyB;AACrCK,yBAAeK,+BAAOtB;AACtBkB,wBAAcI,SAASP;AACvB,cAAIO,+BAAO/B;AAAMiB,4BAAgBjB,OAAO+B,MAAM/B;AAE9C,eAAI+B,+BAAOb,UAAS,MAAM;AACxBD,4BAAgBC,QAAQa,MAAMb;AAC9B,gBAAID,gBAAgBE,aAAa;AAAMF,8BAAgBE,YAAYY,MAAMb;UAC3E;AAEA,cAAIa,+BAAOjC;AACTmB,4BAAgBnB,KAAKiC,MAAMb,SAAS,OAAO,GAAGa,MAAMjC,UAAUiC,MAAMb,UAAUa,MAAMjC;AAGtFyB,yBAAeN,gBAAgBjB;AAC/ByB,uBAAaR,gBAAgBnB;QAC/B,WAAWN,mBAAmB6B,KAAAA,GAAQ;AACpC,cAAIU,SAAQV,iBAAMY,sBAANZ,mBAAyBd,eAAzBc,mBAAsC;AAClDE,0BAAeQ,oCAAOG,aAAPH,mBAAiB/B;AAChCyB,uBAAaM,+BAAOjC;AACpB4B,0BAAeK,oCAAOG,aAAPH,mBAAiBI;AAChCR,yBAAcI,+BAAOG,aAAYV;QACnC;AAKA,YAAIT,SAAS,cAAcU,cAAcL,OAAO;AAC9CL,iBAAO;AACPrB,uBAAa0C,mBAAmB;YAAEC,WAAWrB;UAAiB,CAAA;QAChE,WAAWD,SAAS,eAAe,CAACY,eAAeP,OAAO;AACxDL,iBAAO;AACPrB,uBAAa4C,uBAAuB;YAAEzC,mBAAmB4B;UAAW,CAAA;QACtE;AAEA,YAAIL,MAAM;AACR;QACF;AAGA,YAAIL,SAAS,MAAM;AACjB,cAAIY,eAAeF,cAAcF,cAAc;AAC7CR,mBAAO;AACPrB,yBAAa6C,yBAAyB;cACpC1C,mBAAmB4B;cACnB1B,YAAYwB;cACZiB,kBAAiBnB,WAAMT,cAANS,mBAAiBvB;YACpC,CAAA;UACF,WAAWO,SAAS;AAClBU,mBAAO;AACPC,iCAAmBK,WAAMT,cAANS,mBAAiBvB,OAAMM,SAAAA;AAC1CV,yBAAa+C,qBAAqB;cAAEJ,WAAWrB;YAAi
B,CAAA;UAClE;QACF;AAGA,YAAID,SAAS,aAAaV,SAAS;AACjCX,uBAAagD,uBAAuB;YAClCL,WAAWrB;YACXX;UACF,CAAA;QACF,WAAWU,SAAS,cAAcW,cAAc;AAE9C,cAAIT,gBAAgBC,UAAUD,gBAAgBE,WAAW;AACvDzB,yBAAa4C,uBAAuB;cAAEzC,mBAAmB4B;YAAW,CAAA;AACpE/B,yBAAa6C,yBAAyB;cACpC1C,mBAAmB4B;cACnB1B,YAAYwB;cACZiB,kBAAiBnB,WAAMT,cAANS,mBAAiBvB;YACpC,CAAA;AACAmB,4BAAgBE,YAAYF,gBAAgBC;UAC9C;AACAxB,uBAAaiD,wBAAwB;YACnC9C,mBAAmB4B;YACnBhB,MAAMiB;UACR,CAAA;QACF;MACF,SAASkB,OAAP;AACAC,gBAAQD,MAAM,6BAA6BA,KAAAA;AAC3C;MACF;IACF;EACF,WAAWjD,iBAAiB;AAC1BD,iBAAaE,0BAA0B;MACrCC,mBAAmBF,gBAAgBG;MACnCC,YAAYJ,gBAAgBK;MAC5BC,QAAQ6C,aAAa7C,MAAAA;IACvB,CAAA;EACF,OAGK;AACH,UAAM,IAAI8C,MAAM,8CAAA;EAClB;AAEArD,eAAasD,SAAQ;AACvB;AA5LsB9C;AA8LtB,SAAS4C,aAAa7C,QAAW;AAC/B,MAAIA,WAAWuB,QAAW;AACxB,WAAO;EACT,WAAW,OAAOvB,WAAW,UAAU;AACrC,WAAOA;EACT,OAAO;AACL,WAAOS,KAAKC,UAAUV,MAAAA;EACxB;AACF;AARS6C;;;AC3PT,SAASG,cAAAA,mBAAkB;AAC3B,SAASC,yBAAyB;AAiB3B,IAAMC,mBAAN,MAAMA;;;;;EAIXC,YAAoBC,SAAkC;SAAlCA,UAAAA;EAAmC;EAEvD,MAAMC,QACJC,SAC+C;AAC/C,QAAI;AACF,YAAM,EACJC,aACAC,OACAC,SACAC,UACAC,OACAC,UAAUC,oBAAmB,IAC3BP;AACJ,YAAMM,WAAWC,uBAAuBC,YAAAA;AACxC,YAAMC,SAAS,MAAM,KAAKX,QAAQY,QAAQ;QACxCN,UAAUA,SAASO,IAAIC,gCAAAA;QACvBC,OAAOV,QAAQQ,IAAIG,iCAAAA;QACnBZ;QACAI;QACAD;MACF,CAAA;AAEAJ,kBAAYc,OAAO,OAAOC,iBAAAA;AACxB,cAAMC,wBAAwB;UAC5BR;UACAO;QACF,CAAA;MACF,CAAA;AAEA,aAAO;QACLV;MACF;IACF,UAAA;AACE,YAAMY,kBAAAA;IACR;EACF;AACF;AAzCatB;;;AChDb,SAASuB,kBAAkB;AAUpB,IAAMC,4BAAN,cAAwCC,iBAAAA;EAC7CC,YAAYC,SAA4C;AACtD,UAAM;MACJC,SAAS,OAAO,EAAEC,UAAUC,OAAOC,SAAQ,MAAE;AAC3C,cAAMC,QAAQ,IAAIC,WAAW;UAC3BC,YAAWP,mCAASK,UAAS;UAC7BG,YAAY;QACd,CAAA,EAAGC,UAAUN,KAAAA;AACb,eAAOE,MAAMK,OAAOR,UAAU;UAAES,UAAU;YAAEC,iBAAiBR;UAAS;QAAE,CAAA;MAC1E;IACF,CAAA;EACF;AACF;AAZaP;;;ACFb,OAAOgB,aAAY;AAqDZ,IAAMC,yBAAN,MAAMA;EACHC;EACAC;EACAC;EACAC;EACAC;EAERC,YAAYC,QAAsC;AAChD,SAAKN,SAASM,OAAON,UAAU,IAAIO,QAAO,CAAC,CAAA;AAC3C,SAAKN,yBAAyBK,OAAOL,2BAA2B,SAAS;AACzE,SAAKE,oBAAoBG,OAAOH,sBAAsB,SAAS;AAC/D,SAAKD,cAAcI,OAAOJ;AAC1B,SAAKE,4BAA2BE,iCAAQF,6BAA4B;EACtE;EAEA,MAAMI,QACJC,SAC+C;AA9FnD;AA+FI,UAAM,EAAEC,UAAUC,SAASC,aAAaC,OAAOC,oBAAmB,IAAKL;AAGvE,QAAIM,YAAWN,mBAAQO,eAARP,mBAAoBQ,uBAApBR,mBAAwCM;AAEvD,QAAI,CAACA,UAAU;AACbA,kBAAY,MAAM,KAAKf,OAAOkB,KAAKC,QAAQC,OAAM,GAAIC;IACvD;AAEA,UAAMC,cAAcZ,SAASa,GAAG,EAAC;AAEjC,QAAIC,YAAgCC;AAGpC,QAAIH,YAAYI,gBAAe,KAAMb,OAAO;AAC1CW,kBAAY,MAAM,KAAKG,kBAAkBZ,UAAUF,OAAOH,UAAUE,WAAAA;IACtE,WAESU,YAAYM,cAAa,GAAI;AACpCJ,kBAAY,MAAM,KAAKK,kBACrBd,UACAL,UACAC,SACAC,aACAE,mBAAAA;IAEJ,OAEK;AACH,YAAM,IAAIgB,MAAM,6CAAA;IAClB;AAEA,WAAO;MACLjB,OAAOW;MACPT;MACAC,YAAY;QACV,GAAGP,QAAQO;QACXC,oBAAoB;UAClBF;UACAF,OAAOW;QACT;MACF;IACF;EACF;EAEA,MAAcG,kBACZZ,UACAF,OACAH,UACAE,aACA;AACA,QAAImB,MAAM,MAAM,KAAK/B,OAAOkB,KAAKC,QAAQa,KAAKC,SAASlB,UAAUF,KAAAA;AAEjE,QAAI,CAACkB,IAAIG,iBAAiB;AACxB,YAAM,IAAIJ,MAAM,0BAAA;IAClB;AAGA,UAAMK,eAAeJ,IAAIG,gBAAgBE,oBAAoBC,WAAWC,IACtE,CAACC,aAAaA,SAASlB,EAAE;AAI3B,UAAMmB,iBAAiB9B,SAAS+B,OAC9B,CAACC,YAAYA,QAAQhB,gBAAe,KAAMS,aAAaQ,SAASD,QAAQE,iBAAiB,CAAA;AAG3F,QAAIT,aAAaU,UAAUL,eAAeK,QAAQ;AAChD,YAAM,IAAIf,MAAM,oEAAA;IAClB;AAGA,UAAMgB,cAA6DN,eAAeF,IAChF,CAACI,YAAAA;AACC,aAAO;QACLK,cAAcL,QAAQE;QACtBI,QAAQN,QAAQO;MAClB;IACF,CAAA;AAGF,UAAMC,SAAS,KAAKlD,OAAOkB,KAAKC,QAAQa,KAAKmB,wBAAwBpC,UAAUF,OAAO;MACpFuC,cAAcN;MACd,GAAI,KAAK1C,4BAA4B;QAAEiD,qBAAqB;MAAM;IACpE,CAAA;AAEA,UAAM,KAAKC,eAAeJ,QAAQtC,WAAAA;AAClC,WAAOC;EACT;EAEA,MAAcgB,kBACZd,UACAL,UACAC,SACAC,aACAE,qBACA;AACAJ,eAAW;SAAIA;;AAGf,UAAM6C,sBAAsB7C,SAAS8C,MAAK;AAC1C,UAAMC,eAAeF,oBAAoB3B,cAAa,IAAK2B,oBAAoBG,UAAU;AAGzF,UAAMC,cAAcjD,SACjB4B,IAAIsB,6BAAAA,EACJtB,IAAIuB,kCAAAA,EACJtC,GAAG,EAAC;AAEP,QAAIoC,YAAYG,SAAS,QAAQ;
AAC/B,YAAM,IAAIhC,MAAM,uBAAA;IAClB;AAEA,UAAM,KAAK9B,OAAOkB,KAAKC,QAAQT,SAASU,OAAOL,UAAU;MACvD+C,MAAM;MACNJ,SAASC,YAAYD;IACvB,CAAA;AAEA,UAAMK,cAAcpD,QAAQ2B,IAAI0B,8BAAAA;AAEhC,UAAMC,QAAQ;SACTF;SACC,KAAK9D,yBAAyB;QAAC;UAAEiE,MAAM;QAAmB;UAAsB,CAAA;SAChF,KAAK/D,oBAAoB;QAAC;UAAE+D,MAAM;QAAc;UAAsB,CAAA;;AAG5E,QAAIhB,SAAS,KAAKlD,OAAOkB,KAAKC,QAAQa,KAAKkB,OAAOnC,UAAU;MAC1DoD,cAAc,KAAKjE;MACnBuD;MACAQ;MACA,IAAInD,2DAAqBsD,cAAa;QACpCC,uBAAuBvD,oBAAoBsD;MAC7C;MACA,GAAI,KAAKhE,4BAA4B;QAAEiD,qBAAqB;MAAM;IACpE,CAAA;AAEA,UAAM,KAAKC,eAAeJ,QAAQtC,WAAAA;AAElC,WAAO0D,mBAAmBpB,MAAAA;EAC5B;EAEA,MAAcI,eAAeJ,QAAyBtC,aAAiC;AACrFA,gBAAYsC,OAAO,OAAOqB,iBAAAA;AA7O9B;AA8OM,UAAIC,iBAAiB;AACrB,UAAIC;AACJ,UAAIC;AAEJ,uBAAiBC,SAASzB,QAAQ;AAChC,gBAAQyB,MAAMC,OAAK;UACjB,KAAK;AACH,gBAAIJ,gBAAgB;AAClBD,2BAAaM,uBAAuB;gBAAEjC,mBAAmB8B;cAAkB,CAAA;YAC7E;AACAD,+BAAmBE,MAAMG,KAAKzD;AAC9BkD,yBAAaQ,qBAAqB;cAAEC,WAAWP;YAAiB,CAAA;AAChE;UACF,KAAK;AACH,kBAAIE,WAAMG,KAAKG,MAAMvB,YAAjBiB,mBAA2B,GAAGT,UAAS,QAAQ;AACjDK,2BAAaW,uBAAuB;gBAClCF,WAAWP;gBACXf,UAASiB,WAAMG,KAAKG,MAAMvB,YAAjBiB,mBAA2B,GAAGQ,KAAKC;cAC9C,CAAA;YACF;AACA;UACF,KAAK;AACHb,yBAAac,mBAAmB;cAAEL,WAAWP;YAAiB,CAAA;AAC9D;UACF,KAAK;AACH,gBAAIa;AACJ,gBAAIC;AACJ,gBAAIC;AACJ,gBACEb,MAAMG,KAAKG,MAAMQ,aAAavB,SAAS,kBACvCS,WAAMG,KAAKG,MAAMQ,aAAapD,eAA9BsC,mBAA2C,GAAGT,UAAS,YACvD;AACAoB,4BAAaX,WAAMG,KAAKG,MAAMQ,aAAapD,eAA9BsC,mBAA2C,GAAGtD;AAC3DkE,8BAAeZ,WAAMG,KAAKG,MAAMQ,aAAapD,eAA9BsC,mBAA2C,GAAGe,SAASC;AACtEH,8BAAeb,WAAMG,KAAKG,MAAMQ,aAAapD,eAA9BsC,mBAA2C,GAAGe,SAASE;YACxE;AAEA,gBAAIL,gBAAgBD,YAAY;AAC9B,kBAAId,gBAAgB;AAClBD,6BAAaM,uBAAuB;kBAAEjC,mBAAmB8B;gBAAkB,CAAA;cAC7E;AACAF,+BAAiB;AACjBE,kCAAoBY;AACpBf,2BAAasB,yBAAyB;gBACpCjD,mBAAmB8B;gBACnBoB,iBAAiBnB,MAAMG,KAAKzD;gBAC5B0E,YAAYR;cACd,CAAA;YACF,WAAWC,cAAc;AACvBjB,2BAAayB,wBAAwB;gBACnCpD,mBAAmB8B;gBACnBuB,MAAMT;cACR,CAAA;YACF;AACA;QACJ;MACF;AACA,UAAIhB,gBAAgB;AAClBD,qBAAaM,uBAAuB;UAAEjC,mBAAmB8B;QAAkB,CAAA;MAC7E;AACAH,mBAAa2B,SAAQ;IACvB,CAAA;EACF;AACF;AAhOanG;AAkOb,SAASuE,mBAAmBpB,QAAuB;AACjD,SAAO,IAAIiD,QAAgB,CAACC,SAASC,WAAAA;AACnC,QAAIC,cAAc,wBAAC1B,UAAAA;AACjB,UAAIA,MAAMA,UAAU,sBAAsB;AACxC,cAAM/D,QAAQ+D,MAAME,KAAKzD;AACzB6B,eAAOqD,IAAI,SAASD,WAAAA;AACpBF,gBAAQvF,KAAAA;MACV;IACF,GANkB;AAOlBqC,WAAOsD,GAAG,SAASF,WAAAA;EACrB,CAAA;AACF;AAXShC;;;ACvRT,OAAOmC,aAAY;AACnB,SAASC,YAAAA,WAAUC,cAAAA,mBAAkB;AAQ9B,IAAMC,eAAN,MAAMA;EACHC;EACAC;EACAC;EAERC,YAAYC,SAA8B;AACxC,QAAIA,mCAASJ,QAAQ;AACnB,WAAKA,SAASI,QAAQJ;IACxB,OAAO;AACL,WAAKA,SAAS;IAChB;AACA,SAAKC,QAAQG,mCAASH;AACtB,SAAKC,QAAQ;EACf;EAEA,MAAMG,QACJC,SAC+C;AAC/C,UAAMC,QAAQD,QAAQE,QAAQC,IAAIC,8BAAAA;AAClC,UAAMC,SAAS,IAAIC,QAAO;MACxBZ,QAAQ,KAAKA;MACba,SAAS;IACX,CAAA;AACA,UAAMC,sBAAsBR,QAAQQ;AAEpC,UAAMC,WAAWT,QAAQS,SAASN,IAAIO,6BAAAA;AAEtC,UAAMC,SAAS,MAAMN,OAAOO,KAAKC,YAAYC,OAAO;MAClDnB,OAAO,KAAKA;MACZc;MACAE,QAAQ;MACR,GAAIV,MAAMc,SAAS,KAAK;QAAEd;MAAM;MAChC,IAAIO,2DAAqBQ,gBAAe;QAAEA,aAAaR,oBAAoBQ;MAAY;IACzF,CAAA;AAEA,QAAIrB,QAAQ;AACZ,QAAIsB;AACJ,QAAIC;AACJlB,YAAQmB,YAAYR,OAAO,OAAOS,iBAAAA;AAvEtC;AAwEM,UAAIC,OAAsC;AAC1C,uBAAiBC,SAASX,QAAQ;AAChC,YAAI,KAAKf,OAAO;AACdD,kBAAQ2B,MAAM3B;AACdsB,6BAAmBM,UAAAA;AACnBH,uBAAaI,qBAAqB;YAAEC,WAAWR;UAAiB,CAAA;AAChEG,uBAAaM,uBAAuB;YAClCD,WAAWR;YACXU,SAAS,eAAehC;;UAC1B,CAAA;AACAyB,uBAAaQ,mBAAmB;YAAEH,WAAWR;UAAiB,CAAA;AAC9D,eAAKrB,QAAQ;QACf;AACA,cAAMiC,YAAWP,WAAMQ,QAAQ,CAAA,EAAGC,MAAMC,eAAvBV,mBAAoC;AACrD,cAAMK,UAAUL,MAAMQ,QAAQ,CAAA,EAAGC,MAAMJ;AAKvC,YAAIN,SAAS,cAAaQ,qCAAUI,KAAI;AACtCZ,iBAAO;AACPD,uBAAaQ,mBAAmB;YAAEH,WAAWR;UAAiB,CAAA;QAChE,WAAWI,SAAS,eAAeQ,aAAaK,WAAaL,qCAAUI,MAAK;AAC1EZ,iBAAO;AACPD,uBAAae,uBAAuB;YAAEC,mBAAmBlB;UAAkB,CAAA;QAC7E;AAGA,YAAIG,SAA
S,MAAM;AACjB,cAAIQ,qCAAUI,IAAI;AAChBZ,mBAAO;AACPH,gCAAoBW,SAAUI;AAC9Bb,yBAAaiB,yBAAyB;cACpCD,mBAAmBlB;cACnBoB,YAAYT,SAAUU,SAAUC;YAClC,CAAA;UACF,WAAWb,SAAS;AAClBN,mBAAO;AACPJ,+BAAmBK,MAAMW;AACzBb,yBAAaI,qBAAqB;cAAEC,WAAWR;YAAiB,CAAA;UAClE;QACF;AAGA,YAAII,SAAS,aAAaM,SAAS;AACjCP,uBAAaM,uBAAuB;YAClCD,WAAWR;YACXU;UACF,CAAA;QACF,WAAWN,SAAS,gBAAcQ,0CAAUU,aAAVV,mBAAoBY,YAAW;AAC/DrB,uBAAasB,wBAAwB;YACnCN,mBAAmBlB;YACnByB,MAAMd,SAASU,SAASE;UAC1B,CAAA;QACF;MACF;AAGA,UAAIpB,SAAS,WAAW;AACtBD,qBAAaQ,mBAAmB;UAAEH,WAAWR;QAAiB,CAAA;MAChE,WAAWI,SAAS,YAAY;AAC9BD,qBAAae,uBAAuB;UAAEC,mBAAmBlB;QAAkB,CAAA;MAC7E;AAEAE,mBAAawB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACLC,UAAU7C,QAAQ6C,YAAYC,YAAAA;IAChC;EACF;AACF;AA9GarD;;;ACjBb,SAASsD,YAAY;AAWrB,SAASC,cAAAA,mBAAkB;AAE3B,IAAMC,iBAAgB;AAwBf,IAAMC,cAAN,MAAMA;EACHC,QAAgBF;EAEhBG,2BAAoC;EACpCC;EACR,IAAWC,OAAa;AACtB,WAAO,KAAKD;EACd;EAEAE,YAAYC,QAA4B;AACtC,SAAKH,SAAQG,iCAAQF,SAAQ,IAAIG,KAAK,CAAC,CAAA;AACvC,QAAID,iCAAQL,OAAO;AACjB,WAAKA,QAAQK,OAAOL;IACtB;AACA,SAAKC,4BAA2BI,iCAAQJ,6BAA4B;EACtE;EAEA,MAAMM,QACJC,SAC+C;AAC/C,UAAM,EACJC,UACAT,QAAQ,KAAKA,OACbU,UACAC,SACAC,aACAC,oBAAmB,IACjBL;AACJ,UAAMM,QAAQH,QAAQI,IAAIC,8BAAAA;AAE1B,QAAIC,iBAAiBP,SAASK,IAAIG,6BAAAA;AAClCD,qBAAiBE,0BAA0BF,gBAAgBH,OAAOd,KAAAA;AAElE,QAAIoB,aAAkBP,2DAAqBO;AAC3C,SAAIP,2DAAqBO,gBAAe,YAAY;AAClDA,mBAAa;QACXC,MAAM;QACNC,UAAU;UAAEC,MAAMV,oBAAoBW;QAAuB;MAC/D;IACF;AACA,UAAMC,SAAS,MAAM,KAAKtB,KAAKuB,KAAKC,YAAYC,OAAO;MACrD5B;MACAyB,QAAQ;MACRf,UAAUO;MACV,GAAIH,MAAMe,SAAS,KAAK;QAAEf;MAAM;MAChC,IAAID,2DAAqBiB,cAAa;QACpCC,YAAYlB,oBAAoBiB;MAClC;MACA,IAAIjB,2DAAqBmB,SAAQ;QAAEA,MAAMnB,oBAAoBmB;MAAK;MAClE,GAAIZ,cAAc;QAAEa,aAAab;MAAW;MAC5C,GAAI,KAAKnB,4BAA4B;QAAEiC,qBAAqB;MAAM;MAClE,IAAIrB,2DAAqBsB,gBAAe;QAAEA,aAAatB,oBAAoBsB;MAAY;IACzF,CAAA;AAEAvB,gBAAYa,OAAO,OAAOW,iBAAAA;AA3G9B;AA4GM,UAAIC,OAAsC;AAC1C,UAAIC;AACJ,UAAIC;AAEJ,uBAAiBC,SAASf,QAAQ;AAChC,cAAMgB,YAAWD,WAAME,QAAQ,CAAA,EAAGC,MAAMC,eAAvBJ,mBAAoC;AACrD,cAAMK,UAAUL,MAAME,QAAQ,CAAA,EAAGC,MAAME;AAKvC,YAAIR,SAAS,cAAaI,qCAAUK,KAAI;AACtCT,iBAAO;AACPD,uBAAaW,mBAAmB;YAAEC,WAAWV;UAAiB,CAAA;QAChE,WAAWD,SAAS,eAAeI,aAAaQ,WAAaR,qCAAUK,MAAK;AAC1ET,iBAAO;AACPD,uBAAac,uBAAuB;YAAEC,mBAAmBZ;UAAkB,CAAA;QAC7E;AAGA,YAAIF,SAAS,MAAM;AACjB,cAAII,qCAAUK,IAAI;AAChBT,mBAAO;AACPE,gCAAoBE,SAAUK;AAC9BV,yBAAagB,yBAAyB;cACpCD,mBAAmBZ;cACnBc,YAAYZ,SAAUnB,SAAUC;cAChC+B,iBAAiBd,MAAMM;YACzB,CAAA;UACF,WAAWD,SAAS;AAClBR,mBAAO;AACPC,+BAAmBE,MAAMM;AACzBV,yBAAamB,qBAAqB;cAAEP,WAAWV;YAAiB,CAAA;UAClE;QACF;AAGA,YAAID,SAAS,aAAaQ,SAAS;AACjCT,uBAAaoB,uBAAuB;YAClCR,WAAWV;YACXO;UACF,CAAA;QACF,WAAWR,SAAS,gBAAcI,0CAAUnB,aAAVmB,mBAAoBgB,YAAW;AAC/DrB,uBAAasB,wBAAwB;YACnCP,mBAAmBZ;YACnBoB,MAAMlB,SAASnB,SAASmC;UAC1B,CAAA;QACF;MACF;AAGA,UAAIpB,SAAS,WAAW;AACtBD,qBAAaW,mBAAmB;UAAEC,WAAWV;QAAiB,CAAA;MAChE,WAAWD,SAAS,YAAY;AAC9BD,qBAAac,uBAAuB;UAAEC,mBAAmBZ;QAAkB,CAAA;MAC7E;AAEAH,mBAAawB,SAAQ;IACvB,CAAA;AAEA,WAAO;MACLnD,UAAUD,QAAQC,YAAYoD,YAAAA;IAChC;EACF;AACF;AAvHa9D;;;ACnCb,OAAO+D,eAAe;;;ACTf,SAASC,2BACdC,UACAC,OACAC,OACAC,WAAkB;AAElBA,4BAAcC;AAEd,QAAMC,SAAgB,CAAA;AACtB,QAAMC,iBAAiBC,kBAAiBL,OAAOD,KAAAA;AAC/C,MAAIK,iBAAiBH,WAAW;AAC9B,UAAM,IAAIK,MAAM,4CAA4CF,oBAAoBH,WAAW;EAC7F;AACAA,eAAaG;AAEb,aAAWG,WAAWT,UAAU;AAC9B,QAAIS,QAAQC,SAAS,UAAU;AAC7B,YAAMC,YAAYC,oBAAmBV,OAAOO,OAAAA;AAC5CN,mBAAaQ;AAEb,UAAIR,YAAY,GAAG;AACjB,cAAM,IAAIK,MAAM,uCAAA;MAClB;IACF;EACF;AAEA,MAAIK,SAAkB;AAEtB,QAAMC,mBAAmB;OAAId;IAAUe,QAAO;AAC9C,aAAWN,WAAWK,kBAAkB;AACtC,QAAIL,QAAQC,SAAS,UAAU;AAC7BL,aAAOW,QAAQP,OAAAA;AACf;IACF,WAAWI,QAAQ;AACjB;IACF;AACA,QAAIF,YAAYC,oBAAmBV,OAAOO,OAAAA;AAC1C,QAAIN,YAAYQ,WAAW;AACzBE,eAAS;AACT;IACF;AACAR,WAAOW,QAAQP,OAAAA;AACfN,iBAAaQ;EAC
f;AAEA,SAAON;AACT;AA9CgBN,OAAAA,4BAAAA;AAgDhB,IAAMK,aAAa;AAEnB,SAASG,kBAAiBL,OAAeD,OAAY;AACnD,MAAIA,MAAMgB,WAAW,GAAG;AACtB,WAAO;EACT;AACA,QAAMC,OAAOC,KAAKC,UAAUnB,KAAAA;AAC5B,SAAOoB,aAAYnB,OAAOgB,IAAAA;AAC5B;AANSX,OAAAA,mBAAAA;AAQT,SAASK,oBAAmBV,OAAeO,SAAY;AACrD,SAAOY,aAAYnB,OAAOiB,KAAKC,UAAUX,QAAQa,OAAO,KAAK,EAAA;AAC/D;AAFSV,OAAAA,qBAAAA;AAIT,SAASS,aAAYnB,OAAeqB,MAAY;AAC9C,SAAOA,KAAKN,SAAS;AACvB;AAFSI,OAAAA,cAAAA;AAIF,SAASG,kCAAkCC,QAAmB;AACnE,SAAO;IACLC,MAAMD,OAAOC;IACbC,aAAaF,OAAOE;IACpBC,cAAcT,KAAKU,MAAMJ,OAAOK,UAAU;EAC5C;AACF;AANgBN;AAQT,SAASO,iCACdtB,SAAgB;AAEhB,MAAIA,QAAQuB,cAAa,GAAI;AAC3B,QAAIvB,QAAQC,SAAS,UAAU;AAC7B,aAAO;QACLA,MAAM;QACNY,SAAS;UACP;YAAEW,MAAM;YAAQV,MAAM,gDAAgDd,QAAQa;UAAQ;;MAE1F;IACF,OAAO;AACL,aAAO;QACLZ,MAAMD,QAAQC,SAAS,SAAS,SAAS;QACzCY,SAAS;UAAC;YAAEW,MAAM;YAAQV,MAAMd,QAAQa;UAAQ;;MAClD;IACF;EACF,WAAWb,QAAQyB,yBAAwB,GAAI;AAC7C,WAAO;MACLxB,MAAM;MACNY,SAAS;QACP;UACEa,IAAI1B,QAAQ0B;UACZF,MAAM;UACNG,OAAO3B,QAAQ4B;UACfX,MAAMjB,QAAQiB;QAChB;;IAEJ;EACF,WAAWjB,QAAQ6B,gBAAe,GAAI;AACpC,WAAO;MACL5B,MAAM;MACNY,SAAS;QACP;UACEW,MAAM;UACNX,SAASb,QAAQJ;UACjBkC,aAAa9B,QAAQ+B;QACvB;;IAEJ;EACF;AACF;AAzCgBT;AA2CT,SAASU,6BACdC,eAAgD;AAEhD,SAAOA,cAAcC,OAAO,CAACC,KAAKnC,YAAAA;AAChC,UAAMoC,YAAYD,IAAIA,IAAI3B,SAAS,CAAA;AAEnC,QAAI4B,aAAaA,UAAUnC,SAASD,QAAQC,MAAM;AAChDmC,gBAAUvB,UAAUuB,UAAUvB,QAAQwB,OAAOrC,QAAQa,OAAO;IAC9D,OAAO;AACLsB,UAAIG,KAAK;QACPrC,MAAMD,QAAQC;QACdY,SAAS;aAAKb,QAAQa;;MACxB,CAAA;IACF;AAEA,WAAOsB;EACT,GAAG,CAAA,CAAE;AACP;AAjBgBH;;;AD/FhB,SAASO,YAAAA,WAAUC,cAAAA,mBAAkB;AAErC,IAAMC,iBAAgB;AAef,IAAMC,mBAAN,MAAMA;EACHC,QAAgBF;EAEhBG;EACR,IAAWC,YAAuB;AAChC,WAAO,KAAKD;EACd;EAEAE,YAAYC,QAAiC;AAC3C,SAAKH,cAAaG,iCAAQF,cAAa,IAAIG,UAAU,CAAC,CAAA;AACtD,QAAID,iCAAQJ,OAAO;AACjB,WAAKA,QAAQI,OAAOJ;IACtB;EACF;EAEA,MAAMM,QACJC,SAC+C;AAC/C,UAAM,EACJC,UACAR,QAAQ,KAAKA,OACbS,UAAUC,aACVC,SACAC,aACAC,oBAAmB,IACjBN;AACJ,UAAMO,QAAQH,QAAQI,IAAIC,iCAAAA;AAE1B,UAAMP,WAAW;SAAIC;;AAGrB,UAAMO,sBAAsBR,SAASS,MAAK;AAC1C,UAAMC,eAAeF,oBAAoBG,cAAa,IAAKH,oBAAoBI,UAAU;AAEzF,QAAIC,oBAAoBb,SAASM,IAAIQ,gCAAAA;AACrCD,wBAAoBE,2BAA0BF,mBAAmBR,OAAOd,KAAAA;AACxEsB,wBAAoBG,6BAA6BH,iBAAAA;AAEjD,QAAII,aAAkBb,2DAAqBa;AAC3C,SAAIb,2DAAqBa,gBAAe,YAAY;AAClDA,mBAAa;QACXC,MAAM;QACNC,MAAMf,oBAAoBgB;MAC5B;IACF;AAEA,UAAMC,SAAS,KAAK5B,UAAUO,SAASsB,OAAO;MAC5CC,QAAQb;MACRnB,OAAO,KAAKA;MACZS,UAAUa;MACVW,aAAYpB,2DAAqBqB,cAAa;MAC9C,IAAIrB,2DAAqBsB,eAAc;QAAEA,aAAatB,oBAAoBsB;MAAY,IAAI,CAAC;MAC3F,GAAIrB,MAAMsB,SAAS,KAAK;QAAEtB;MAAM;MAChC,GAAIY,cAAc;QAAEW,aAAaX;MAAW;MAC5CI,QAAQ;IACV,CAAA;AAEAlB,gBAAYkB,OAAO,OAAOQ,iBAAAA;AACxB,UAAIC,OAAsC;AAC1C,UAAIC,gBAAgB;AACpB,UAAIC,mBAAmBC,UAAAA;AACvB,UAAIC,oBAAoBD,UAAAA;AACxB,UAAIE,2BAA2B,IAAIC,yBAAAA;AAEnC,uBAAiBC,SAAS,MAAMhB,QAAQ;AACtC,YAAIgB,MAAMnB,SAAS,iBAAiB;AAClCc,6BAAmBK,MAAMC,QAAQC;QACnC,WAAWF,MAAMnB,SAAS,uBAAuB;AAC/C,cAAImB,MAAMG,cAActB,SAAS,QAAQ;AACvCa,4BAAgB;AAChBI,qCAAyBM,MAAK;AAC9BX,mBAAO;UACT,WAAWO,MAAMG,cAActB,SAAS,YAAY;AAClDgB,gCAAoBG,MAAMG,cAAcD;AACxCV,yBAAaa,yBAAyB;cACpCC,mBAAmBT;cACnBU,YAAYP,MAAMG,cAAcrB;cAChC0B,iBAAiBb;YACnB,CAAA;AACAF,mBAAO;UACT;QACF,WAAWO,MAAMnB,SAAS,uBAAuB;AAC/C,cAAImB,MAAMS,MAAM5B,SAAS,cAAc;AACrC,kBAAM6B,OAAOZ,yBAAyBa,YAAYX,MAAMS,MAAMC,IAAI;AAClE,gBAAIA,KAAKpB,SAAS,GAAG;AACnB,kBAAI,CAACI,eAAe;AAClBF,6BAAaoB,qBAAqB;kBAAEC,WAAWlB;gBAAiB,CAAA;AAChED,gCAAgB;cAClB;AACAF,2BAAasB,uBAAuB;gBAClCD,WAAWlB;gBACXpB,SAASmC;cACX,CAAA;YACF;UACF,WAAWV,MAAMS,MAAM5B,SAAS,oBAAoB;AAClDW,yBAAauB,wBAAwB;cACnCT,mBAAmBT;cACnBmB,MAAMhB,MAAMS,MAAMQ;YACpB,CAAA;UACF;QACF,WAAWjB,MAAMnB,SAAS,sBAAsB;AAC9C,cAAIY,SAAS,WAAW;AACtB,gBAAIC,eAAe;AACjBF,2BAAa0B,mBAAmB;gBAAEL,WAAWlB;cAAiB,CAAA;YAChE;UAC
F,WAAWF,SAAS,YAAY;AAC9BD,yBAAa2B,uBAAuB;cAAEb,mBAAmBT;YAAkB,CAAA;UAC7E;QACF;MACF;AAEAL,mBAAa4B,SAAQ;IACvB,CAAA;AAEA,WAAO;MACL1D,UAAUA,YAAY2D,YAAAA;IACxB;EACF;AACF;AAtHapE;AAwHb,IAAMqE,eAAe;AACrB,IAAMC,mBAAmB;AAEzB,IAAMxB,2BAAN,6BAAMA,0BAAAA;EACIyB;EACAC,uBAAgC;EAExCpE,cAAc;AACZ,SAAKmE,SAAS;EAChB;EAEAb,YAAYD,MAAsB;AAChC,SAAKc,UAAUd;AACf,QAAI,KAAKe,sBAAsB;AAC7B,aAAOf;IACT;AACA,UAAMgB,eAAe,KAAKF,OAAOG,MAAM,GAAGL,aAAahC,MAAM;AAC7D,QAAIgC,aAAaM,WAAWF,YAAAA,GAAe;AACzC,UAAI,KAAKF,OAAOK,SAASN,gBAAAA,GAAmB;AAC1C,cAAMO,MAAM,KAAKN,OAAOO,QAAQR,gBAAAA;AAChC,cAAMS,eAAe,KAAKR,OAAOG,MAAMG,MAAMP,iBAAiBjC,MAAM;AACpE,aAAKkC,SAASQ;AACd,aAAKP,uBAAuB;AAC5B,eAAOO;MACT,OAAO;AACL,eAAO;MACT;IACF;AACA,WAAOtB;EACT;EAEAN,QAAQ;AACN,SAAKoB,SAAS;AACd,SAAKC,uBAAuB;EAC9B;AACF,GAhCA;;;AElJA,SAASQ,cAAc;AACvB,SAASC,YAAAA,WAAUC,cAAAA,mBAAkB;AAErC,IAAMC,iBAAgB;AAMf,IAAMC,4BAAN,MAAMA;EACHC;EAERC,YAAYC,SAAgC;AAC1C,QAAIA,mCAASF,OAAO;AAClB,WAAKA,QAAQE,QAAQF;IACvB,OAAO;AACL,WAAKA,QAAQF;IACf;EACF;EAEA,MAAMK,QACJC,SAC+C;AAC/C,UAAM,EAAEC,UAAUC,SAASC,YAAW,IAAKH;AAG3C,UAAMI,SAAS,IAAIC,OAAO;MACxBT,OAAO,KAAKA;IACd,CAAA;AACA,UAAMU,WAAYL,SAASM,OAAO,CAACC,MAAMA,EAAEC,cAAa,CAAA,EAAsBC,IAC5E,CAACF,MAAMA,EAAEG,OAAO;AAElB,UAAMC,UAAU,MAAMR,OAAOS,OAAOP,QAAAA;AAEpCH,gBAAYU,OAAO,OAAOC,iBAAAA;AACxB,YAAMC,mBAAmBC,UAAAA;AACzBF,mBAAaG,qBAAqB;QAAEC,WAAWH;MAAiB,CAAA;AAChE,uBAAiBI,aAAaP,SAAS;AACrCE,qBAAaM,uBAAuB;UAClCF,WAAWH;UACXJ,SAASQ;QACX,CAAA;MACF;AACAL,mBAAaO,mBAAmB;QAAEH,WAAWH;MAAiB,CAAA;AAI9DD,mBAAaQ,SAAQ;IACvB,CAAA;AACA,WAAO;MACLC,UAAUvB,QAAQuB,YAAYC,YAAAA;IAChC;EACF;AACF;AA5Ca7B;;;ACZb,SAAS8B,cAAAA,mBAAkB;AAEpB,IAAMC,eAAN,MAAMA;EACX,MAAMC,QACJC,SAC+C;AAC/C,WAAO;MACLC,UAAUD,QAAQC,YAAYC,YAAAA;IAChC;EACF;AACF;AARaJ;AAUN,IAAMK,2BAA2BL;","names":["RemoteRunnable","RemoteChain","name","description","chainUrl","parameters","parameterType","constructor","options","toAction","inferLangServeParameters","handler","args","runnable","RemoteRunnable","url","input","Object","keys","invoke","supportedTypes","schemaUrl","replace","schema","fetch","then","res","json","catch","Error","includes","type","properties","map","key","property","required","OpenAI","limitMessagesToTokenCount","messages","tools","model","maxTokens","maxTokensForOpenAIModel","result","toolsNumTokens","countToolsTokens","Error","message","role","numTokens","countMessageTokens","cutoff","reversedMessages","reverse","unshift","maxTokensByModel","DEFAULT_MAX_TOKENS","length","json","JSON","stringify","countTokens","content","text","convertActionInputToOpenAITool","action","type","function","name","description","parameters","parse","jsonSchema","convertMessageToOpenAIMessage","isTextMessage","isActionExecutionMessage","tool_calls","id","arguments","isResultMessage","tool_call_id","actionExecutionId","convertSystemMessageToAssistantAPI","randomUUID","DEFAULT_MODEL","OpenAIAdapter","model","disableParallelToolCalls","_openai","openai","constructor","params","OpenAI","process","request","threadId","threadIdFromRequest","messages","actions","eventSource","forwardedParameters","tools","map","convertActionInputToOpenAITool","randomUUID","openaiMessages","convertMessageToOpenAIMessage","limitMessagesToTokenCount","toolChoice","type","function","name","toolChoiceFunctionName","stream","beta","chat","completions","length","maxTokens","max_tokens","stop","tool_choice","parallel_tool_calls","temperature","eventStream$","mode","currentMessageId","currentToolCallId","chunk","choices","toolCall","delta","tool_calls","content","id","sendTextMessageEnd","messageId","undefined","sendActionExecutionEnd","actionExecuti
onId","sendActionExecutionStart","parentMessageId","actionName","sendTextMessageStart","sendTextMessageContent","arguments","sendActionExecutionArgs","args","complete","AIMessage","HumanMessage","SystemMessage","ToolMessage","DynamicStructuredTool","randomId","convertJsonSchemaToZodSchema","convertMessageToLangChainMessage","message","isTextMessage","role","HumanMessage","content","AIMessage","SystemMessage","isActionExecutionMessage","tool_calls","id","args","arguments","name","isResultMessage","ToolMessage","result","tool_call_id","actionExecutionId","convertActionInputToLangChainTool","actionInput","DynamicStructuredTool","description","schema","convertJsonSchemaToZodSchema","JSON","parse","jsonSchema","func","isAIMessage","message","Object","prototype","toString","call","isAIMessageChunk","isBaseMessageChunk","maybeSendActionExecutionResultIsMessage","eventStream$","actionExecution","sendActionExecutionResult","actionExecutionId","id","actionName","name","result","streamLangChainResponse","sendTextMessage","randomId","content","toolCall","tool_calls","sendActionExecution","args","JSON","stringify","lc_kwargs","reader","getReader","mode","currentMessageId","toolCallDetails","index","prevIndex","done","value","read","toolCallName","undefined","toolCallId","toolCallArgs","hasToolCall","Array","isArray","text","chunk","tool_call_chunks","additional_kwargs","function","arguments","sendTextMessageEnd","messageId","sendActionExecutionEnd","sendActionExecutionStart","parentMessageId","sendTextMessageStart","sendTextMessageContent","sendActionExecutionArgs","error","console","encodeResult","Error","complete","randomUUID","awaitAllCallbacks","LangChainAdapter","constructor","options","process","request","eventSource","model","actions","messages","runId","threadId","threadIdFromRequest","randomUUID","result","chainFn","map","convertMessageToLangChainMessage","tools","convertActionInputToLangChainTool","stream","eventStream$","streamLangChainResponse","awaitAllCallbacks","ChatGoogle","GoogleGenerativeAIAdapter","LangChainAdapter","constructor","options","chainFn","messages","tools","threadId","model","ChatGoogle","modelName","apiVersion","bindTools","stream","metadata","conversation_id","OpenAI","OpenAIAssistantAdapter","openai","codeInterpreterEnabled","assistantId","fileSearchEnabled","disableParallelToolCalls","constructor","params","OpenAI","process","request","messages","actions","eventSource","runId","forwardedParameters","threadId","extensions","openaiAssistantAPI","beta","threads","create","id","lastMessage","at","nextRunId","undefined","isResultMessage","submitToolOutputs","isTextMessage","submitUserMessage","Error","run","runs","retrieve","required_action","toolCallsIds","submit_tool_outputs","tool_calls","map","toolCall","resultMessages","filter","message","includes","actionExecutionId","length","toolOutputs","tool_call_id","output","result","stream","submitToolOutputsStream","tool_outputs","parallel_tool_calls","streamResponse","instructionsMessage","shift","instructions","content","userMessage","convertMessageToOpenAIMessage","convertSystemMessageToAssistantAPI","role","openaiTools","convertActionInputToOpenAITool","tools","type","assistant_id","maxTokens","max_completion_tokens","getRunIdFromStream","eventStream$","inFunctionCall","currentMessageId","currentToolCallId","chunk","event","sendActionExecutionEnd","data","sendTextMessageStart","messageId","delta","sendTextMessageContent","text","value","sendTextMessageEnd","toolCallId","toolCallName","toolCallArgs","step_details","function
","name","arguments","sendActionExecutionStart","parentMessageId","actionName","sendActionExecutionArgs","args","complete","Promise","resolve","reject","runIdGetter","off","on","OpenAI","randomId","randomUUID","UnifyAdapter","apiKey","model","start","constructor","options","process","request","tools","actions","map","convertActionInputToOpenAITool","openai","OpenAI","baseURL","forwardedParameters","messages","convertMessageToOpenAIMessage","stream","chat","completions","create","length","temperature","currentMessageId","currentToolCallId","eventSource","eventStream$","mode","chunk","randomId","sendTextMessageStart","messageId","sendTextMessageContent","content","sendTextMessageEnd","toolCall","choices","delta","tool_calls","id","undefined","sendActionExecutionEnd","actionExecutionId","sendActionExecutionStart","actionName","function","name","arguments","sendActionExecutionArgs","args","complete","threadId","randomUUID","Groq","randomUUID","DEFAULT_MODEL","GroqAdapter","model","disableParallelToolCalls","_groq","groq","constructor","params","Groq","process","request","threadId","messages","actions","eventSource","forwardedParameters","tools","map","convertActionInputToOpenAITool","openaiMessages","convertMessageToOpenAIMessage","limitMessagesToTokenCount","toolChoice","type","function","name","toolChoiceFunctionName","stream","chat","completions","create","length","maxTokens","max_tokens","stop","tool_choice","parallel_tool_calls","temperature","eventStream$","mode","currentMessageId","currentToolCallId","chunk","toolCall","choices","delta","tool_calls","content","id","sendTextMessageEnd","messageId","undefined","sendActionExecutionEnd","actionExecutionId","sendActionExecutionStart","actionName","parentMessageId","sendTextMessageStart","sendTextMessageContent","arguments","sendActionExecutionArgs","args","complete","randomUUID","Anthropic","limitMessagesToTokenCount","messages","tools","model","maxTokens","MAX_TOKENS","result","toolsNumTokens","countToolsTokens","Error","message","role","numTokens","countMessageTokens","cutoff","reversedMessages","reverse","unshift","length","json","JSON","stringify","countTokens","content","text","convertActionInputToAnthropicTool","action","name","description","input_schema","parse","jsonSchema","convertMessageToAnthropicMessage","isTextMessage","type","isActionExecutionMessage","id","input","arguments","isResultMessage","tool_use_id","actionExecutionId","groupAnthropicMessagesByRole","messageParams","reduce","acc","lastGroup","concat","push","randomId","randomUUID","DEFAULT_MODEL","AnthropicAdapter","model","_anthropic","anthropic","constructor","params","Anthropic","process","request","threadId","messages","rawMessages","actions","eventSource","forwardedParameters","tools","map","convertActionInputToAnthropicTool","instructionsMessage","shift","instructions","isTextMessage","content","anthropicMessages","convertMessageToAnthropicMessage","limitMessagesToTokenCount","groupAnthropicMessagesByRole","toolChoice","type","name","toolChoiceFunctionName","stream","create","system","max_tokens","maxTokens","temperature","length","tool_choice","eventStream$","mode","didOutputText","currentMessageId","randomId","currentToolCallId","filterThinkingTextBuffer","FilterThinkingTextBuffer","chunk","message","id","content_block","reset","sendActionExecutionStart","actionExecutionId","actionName","parentMessageId","delta","text","onTextChunk","sendTextMessageStart","messageId","sendTextMessageContent","sendActionExecutionArgs","args","partial_json","sendTextMessageEnd","se
ndActionExecutionEnd","complete","randomUUID","THINKING_TAG","THINKING_TAG_END","buffer","didFilterThinkingTag","potentialTag","slice","startsWith","includes","end","indexOf","filteredText","Ollama","randomId","randomUUID","DEFAULT_MODEL","ExperimentalOllamaAdapter","model","constructor","options","process","request","messages","actions","eventSource","ollama","Ollama","contents","filter","m","isTextMessage","map","content","_stream","stream","eventStream$","currentMessageId","randomId","sendTextMessageStart","messageId","chunkText","sendTextMessageContent","sendTextMessageEnd","complete","threadId","randomUUID","randomUUID","EmptyAdapter","process","request","threadId","randomUUID","ExperimentalEmptyAdapter"]}
|
|
@@ -2,7 +2,7 @@ import {
|
|
|
2
2
|
getCommonConfig,
|
|
3
3
|
getRuntimeInstanceTelemetryInfo,
|
|
4
4
|
telemetry_client_default
|
|
5
|
-
} from "./chunk-
|
|
5
|
+
} from "./chunk-PFELVFS7.mjs";
|
|
6
6
|
import {
|
|
7
7
|
__name
|
|
8
8
|
} from "./chunk-44O2JGUY.mjs";
|
|
@@ -77,4 +77,4 @@ export {
|
|
|
77
77
|
config,
|
|
78
78
|
copilotRuntimeNextJSPagesRouterEndpoint
|
|
79
79
|
};
|
|
80
|
-
//# sourceMappingURL=chunk-
|
|
80
|
+
//# sourceMappingURL=chunk-W6EE6OTN.mjs.map
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { Parameter, Action } from '@copilotkit/shared';
|
|
2
|
-
import { b as CopilotServiceAdapter, R as RemoteChainParameters, A as ActionInput, F as ForwardedParametersInput, d as RuntimeEventSource } from './langserve-
|
|
3
|
-
import {
|
|
2
|
+
import { b as CopilotServiceAdapter, R as RemoteChainParameters, A as ActionInput, F as ForwardedParametersInput, E as ExtensionsInput, d as RuntimeEventSource, e as ExtensionsResponse } from './langserve-e308c437.js';
|
|
3
|
+
import { M as MessageInput, a as Message } from './index-a7f37670.js';
|
|
4
4
|
import * as graphql from 'graphql';
|
|
5
5
|
import * as pino from 'pino';
|
|
6
6
|
import { YogaInitialContext, createYoga } from 'graphql-yoga';
|
|
@@ -81,6 +81,13 @@ declare class Agent {
|
|
|
81
81
|
description?: string;
|
|
82
82
|
}
|
|
83
83
|
|
|
84
|
+
declare class LoadAgentStateResponse {
|
|
85
|
+
threadId: string;
|
|
86
|
+
threadExists: boolean;
|
|
87
|
+
state: string;
|
|
88
|
+
messages: string;
|
|
89
|
+
}
|
|
90
|
+
|
|
84
91
|
/**
|
|
85
92
|
* <Callout type="info">
|
|
86
93
|
* This is the reference for the `CopilotRuntime` class. For more information and example code snippets, please see [Concept: Copilot Runtime](/concepts/copilot-runtime).
|
|
@@ -108,6 +115,7 @@ interface CopilotRuntimeRequest {
|
|
|
108
115
|
graphqlContext: GraphQLContext;
|
|
109
116
|
forwardedParameters?: ForwardedParametersInput;
|
|
110
117
|
url?: string;
|
|
118
|
+
extensions?: ExtensionsInput;
|
|
111
119
|
}
|
|
112
120
|
interface CopilotRuntimeResponse {
|
|
113
121
|
threadId: string;
|
|
@@ -115,6 +123,7 @@ interface CopilotRuntimeResponse {
|
|
|
115
123
|
eventSource: RuntimeEventSource;
|
|
116
124
|
serverSideActions: Action<any>[];
|
|
117
125
|
actionInputsWithoutAgents: ActionInput[];
|
|
126
|
+
extensions?: ExtensionsResponse;
|
|
118
127
|
}
|
|
119
128
|
type ActionsConfiguration<T extends Parameter[] | [] = []> = Action<T>[] | ((ctx: {
|
|
120
129
|
properties: any;
|
|
@@ -147,6 +156,9 @@ interface Middleware {
|
|
|
147
156
|
*/
|
|
148
157
|
onAfterRequest?: OnAfterRequestHandler;
|
|
149
158
|
}
|
|
159
|
+
type AgentWithEndpoint = Agent & {
|
|
160
|
+
endpoint: EndpointDefinition;
|
|
161
|
+
};
|
|
150
162
|
interface CopilotRuntimeConstructorParams<T extends Parameter[] | [] = []> {
|
|
151
163
|
/**
|
|
152
164
|
* Middleware to be used by the runtime.
|
|
@@ -184,9 +196,8 @@ declare class CopilotRuntime<const T extends Parameter[] | [] = []> {
|
|
|
184
196
|
private onAfterRequest?;
|
|
185
197
|
constructor(params?: CopilotRuntimeConstructorParams<T>);
|
|
186
198
|
processRuntimeRequest(request: CopilotRuntimeRequest): Promise<CopilotRuntimeResponse>;
|
|
187
|
-
discoverAgentsFromEndpoints(graphqlContext: GraphQLContext): Promise<
|
|
188
|
-
|
|
189
|
-
})[]>;
|
|
199
|
+
discoverAgentsFromEndpoints(graphqlContext: GraphQLContext): Promise<AgentWithEndpoint[]>;
|
|
200
|
+
loadAgentState(graphqlContext: GraphQLContext, threadId: string, agentName: string): Promise<LoadAgentStateResponse>;
|
|
190
201
|
private processAgentRequest;
|
|
191
202
|
private getServerSideActions;
|
|
192
203
|
}
|
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
export { c as ActionExecutionMessage, d as AgentStateMessage,
|
|
1
|
+
export { c as ActionExecutionMessage, d as AgentStateMessage, a as Message, b as MessageType, R as ResultMessage, T as TextMessage } from '../../../index-a7f37670.js';
|
|
2
2
|
import '../base/index.js';
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import OpenAI from 'openai';
|
|
2
|
-
import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from './langserve-
|
|
2
|
+
import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from './langserve-e308c437.js';
|
|
3
3
|
import { BaseMessageChunk, AIMessage, AIMessageChunk, BaseMessage } from '@langchain/core/messages';
|
|
4
4
|
import { DynamicStructuredTool } from '@langchain/core/tools';
|
|
5
5
|
import { IterableReadableStream, IterableReadableStreamInterface } from '@langchain/core/utils/stream';
|
|
@@ -221,6 +221,26 @@ declare class OpenAIAssistantAdapter implements CopilotServiceAdapter {
|
|
|
221
221
|
private streamResponse;
|
|
222
222
|
}
|
|
223
223
|
|
|
224
|
+
/**
|
|
225
|
+
* CopilotKit Adapter for Unify
|
|
226
|
+
*
|
|
227
|
+
* <RequestExample>
|
|
228
|
+
* ```jsx CopilotRuntime Example
|
|
229
|
+
* const copilotKit = new CopilotRuntime();
|
|
230
|
+
* return copilotKit.response(req, new UnifyAdapter());
|
|
231
|
+
* ```
|
|
232
|
+
* </RequestExample>
|
|
233
|
+
*
|
|
234
|
+
* You can easily set the model to use by passing it to the constructor.
|
|
235
|
+
* ```jsx
|
|
236
|
+
* const copilotKit = new CopilotRuntime();
|
|
237
|
+
* return copilotKit.response(
|
|
238
|
+
* req,
|
|
239
|
+
* new UnifyAdapter({ model: "llama-3-8b-chat@fireworks-ai" }),
|
|
240
|
+
* );
|
|
241
|
+
* ```
|
|
242
|
+
*/
|
|
243
|
+
|
|
224
244
|
interface UnifyAdapterParams {
|
|
225
245
|
apiKey?: string;
|
|
226
246
|
model: string;
|
|
@@ -100,4 +100,4 @@ declare class AgentStateMessage extends Message implements Omit<AgentStateMessag
|
|
|
100
100
|
running: boolean;
|
|
101
101
|
}
|
|
102
102
|
|
|
103
|
-
export { ActionInputAvailability as A,
|
|
103
|
+
export { ActionInputAvailability as A, MessageInput as M, ResultMessage as R, TextMessage as T, Message as a, MessageType as b, ActionExecutionMessage as c, AgentStateMessage as d };
|