@copilotkit/runtime 1.6.0-next.3 → 1.6.0-next.5
This diff compares the contents of the two package versions as published to their public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the registry.
- package/CHANGELOG.md +15 -0
- package/dist/{chunk-25Z2ZUVM.mjs → chunk-3C73PW47.mjs} +2 -2
- package/dist/{chunk-OS5YD32G.mjs → chunk-7EXH7PVD.mjs} +3 -2
- package/dist/chunk-7EXH7PVD.mjs.map +1 -0
- package/dist/{chunk-J6E3ZTJ3.mjs → chunk-F6WKKTYT.mjs} +29 -31
- package/dist/chunk-F6WKKTYT.mjs.map +1 -0
- package/dist/{chunk-7HPGWUP7.mjs → chunk-I6W6FUR5.mjs} +2 -2
- package/dist/{chunk-ZM4JOETB.mjs → chunk-RG3UJM7Q.mjs} +2 -2
- package/dist/index.js +99 -100
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +5 -5
- package/dist/lib/index.js +92 -93
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/index.mjs +5 -5
- package/dist/lib/integrations/index.js +2 -2
- package/dist/lib/integrations/index.js.map +1 -1
- package/dist/lib/integrations/index.mjs +5 -5
- package/dist/lib/integrations/nest/index.js +2 -2
- package/dist/lib/integrations/nest/index.js.map +1 -1
- package/dist/lib/integrations/nest/index.mjs +3 -3
- package/dist/lib/integrations/node-express/index.js +2 -2
- package/dist/lib/integrations/node-express/index.js.map +1 -1
- package/dist/lib/integrations/node-express/index.mjs +3 -3
- package/dist/lib/integrations/node-http/index.js +2 -2
- package/dist/lib/integrations/node-http/index.js.map +1 -1
- package/dist/lib/integrations/node-http/index.mjs +2 -2
- package/dist/service-adapters/index.js +28 -27
- package/dist/service-adapters/index.js.map +1 -1
- package/dist/service-adapters/index.mjs +1 -1
- package/package.json +3 -3
- package/src/lib/runtime/__tests__/remote-action-constructors.test.ts +236 -0
- package/src/lib/runtime/remote-action-constructors.ts +9 -7
- package/src/lib/runtime/remote-lg-action.ts +2 -6
- package/src/service-adapters/conversion.ts +39 -46
- package/src/service-adapters/openai/utils.ts +2 -1
- package/tsconfig.json +3 -2
- package/dist/chunk-J6E3ZTJ3.mjs.map +0 -1
- package/dist/chunk-OS5YD32G.mjs.map +0 -1
- /package/dist/{chunk-25Z2ZUVM.mjs.map → chunk-3C73PW47.mjs.map} +0 -0
- /package/dist/{chunk-7HPGWUP7.mjs.map → chunk-I6W6FUR5.mjs.map} +0 -0
- /package/dist/{chunk-ZM4JOETB.mjs.map → chunk-RG3UJM7Q.mjs.map} +0 -0

@@ -1,7 +1,7 @@
 import {
   copilotRuntimeNodeHttpEndpoint
-} from "../../../chunk-J6E3ZTJ3.mjs";
-import "../../../chunk-OS5YD32G.mjs";
+} from "../../../chunk-F6WKKTYT.mjs";
+import "../../../chunk-7EXH7PVD.mjs";
 import "../../../chunk-5BIEM2UU.mjs";
 import "../../../chunk-RTFJTJMA.mjs";
 import "../../../chunk-2OZAGFV3.mjs";
@@ -126,6 +126,7 @@ __name(RemoteChain, "RemoteChain");
 var import_openai = __toESM(require("openai"));
 
 // src/service-adapters/openai/utils.ts
+var import_shared = require("@copilotkit/shared");
 function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
   maxTokens || (maxTokens = maxTokensForOpenAIModel(model));
   const result = [];
@@ -219,7 +220,7 @@ function convertActionInputToOpenAITool(action) {
     function: {
       name: action.name,
       description: action.description,
-      parameters:
+      parameters: (0, import_shared.parseJson)(action.jsonSchema, {})
     }
   };
 }
@@ -265,7 +266,7 @@ function convertSystemMessageToAssistantAPI(message) {
 __name(convertSystemMessageToAssistantAPI, "convertSystemMessageToAssistantAPI");
 
 // src/service-adapters/openai/openai-adapter.ts
-var
+var import_shared2 = require("@copilotkit/shared");
 var DEFAULT_MODEL = "gpt-4o";
 var OpenAIAdapter = class {
   model = DEFAULT_MODEL;
@@ -284,7 +285,7 @@ var OpenAIAdapter = class {
   async process(request) {
     const { threadId: threadIdFromRequest, model = this.model, messages, actions, eventSource, forwardedParameters } = request;
     const tools = actions.map(convertActionInputToOpenAITool);
-    const threadId = threadIdFromRequest ?? (0,
+    const threadId = threadIdFromRequest ?? (0, import_shared2.randomUUID)();
     let openaiMessages = messages.map(convertMessageToOpenAIMessage);
     openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);
     let toolChoice = forwardedParameters == null ? void 0 : forwardedParameters.toolChoice;
@@ -391,7 +392,7 @@ __name(OpenAIAdapter, "OpenAIAdapter");
 // src/service-adapters/langchain/utils.ts
 var import_messages = require("@langchain/core/messages");
 var import_tools = require("@langchain/core/tools");
-var
+var import_shared3 = require("@copilotkit/shared");
 function convertMessageToLangChainMessage(message) {
   if (message.isTextMessage()) {
     if (message.role == "user") {
@@ -424,7 +425,7 @@ function convertActionInputToLangChainTool(actionInput) {
   return new import_tools.DynamicStructuredTool({
     name: actionInput.name,
     description: actionInput.description,
-    schema: (0,
+    schema: (0, import_shared3.convertJsonSchemaToZodSchema)(JSON.parse(actionInput.jsonSchema), true),
     func: async () => {
       return "";
     }
@@ -457,7 +458,7 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
   var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
   if (typeof result === "string") {
     if (!actionExecution) {
-      eventStream$.sendTextMessage((0,
+      eventStream$.sendTextMessage((0, import_shared3.randomId)(), result);
     } else {
       eventStream$.sendActionExecutionResult({
         actionExecutionId: actionExecution.id,
@@ -468,11 +469,11 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
   } else if (isAIMessage(result)) {
     maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
     if (result.content) {
-      eventStream$.sendTextMessage((0,
+      eventStream$.sendTextMessage((0, import_shared3.randomId)(), result.content);
     }
     for (const toolCall of result.tool_calls) {
       eventStream$.sendActionExecution({
-        actionExecutionId: toolCall.id || (0,
+        actionExecutionId: toolCall.id || (0, import_shared3.randomId)(),
         actionName: toolCall.name,
         args: JSON.stringify(toolCall.args)
       });
@@ -480,12 +481,12 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
   } else if (isBaseMessageChunk(result)) {
     maybeSendActionExecutionResultIsMessage(eventStream$, actionExecution);
     if ((_a = result.lc_kwargs) == null ? void 0 : _a.content) {
-      eventStream$.sendTextMessage((0,
+      eventStream$.sendTextMessage((0, import_shared3.randomId)(), result.content);
     }
     if ((_b = result.lc_kwargs) == null ? void 0 : _b.tool_calls) {
       for (const toolCall of (_c = result.lc_kwargs) == null ? void 0 : _c.tool_calls) {
         eventStream$.sendActionExecution({
-          actionExecutionId: toolCall.id || (0,
+          actionExecutionId: toolCall.id || (0, import_shared3.randomId)(),
           actionName: toolCall.name,
           args: JSON.stringify(toolCall.args)
         });
@@ -559,7 +560,7 @@ async function streamLangChainResponse({ result, eventStream$, actionExecution }
         });
       } else if (content) {
         mode = "message";
-        currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || (0,
+        currentMessageId = ((_k = value.lc_kwargs) == null ? void 0 : _k.id) || (0, import_shared3.randomId)();
         eventStream$.sendTextMessageStart({
           messageId: currentMessageId
         });
@@ -616,7 +617,7 @@ function encodeResult(result) {
 __name(encodeResult, "encodeResult");
 
 // src/service-adapters/langchain/langchain-adapter.ts
-var
+var import_shared4 = require("@copilotkit/shared");
 var import_promises = require("@langchain/core/callbacks/promises");
 var LangChainAdapter = class {
   options;
@@ -629,7 +630,7 @@ var LangChainAdapter = class {
   async process(request) {
     try {
       const { eventSource, model, actions, messages, runId, threadId: threadIdFromRequest } = request;
-      const threadId = threadIdFromRequest ?? (0,
+      const threadId = threadIdFromRequest ?? (0, import_shared4.randomUUID)();
       const result = await this.options.chainFn({
         messages: messages.map(convertMessageToLangChainMessage),
         tools: actions.map(convertActionInputToLangChainTool),
@@ -873,7 +874,7 @@ __name(getRunIdFromStream, "getRunIdFromStream");
 
 // src/service-adapters/unify/unify-adapter.ts
 var import_openai3 = __toESM(require("openai"));
-var
+var import_shared5 = require("@copilotkit/shared");
 var UnifyAdapter = class {
   apiKey;
   model;
@@ -915,7 +916,7 @@ var UnifyAdapter = class {
     for await (const chunk of stream) {
       if (this.start) {
         model = chunk.model;
-        currentMessageId = (0,
+        currentMessageId = (0, import_shared5.randomId)();
         eventStream$.sendTextMessageStart({
           messageId: currentMessageId
         });
@@ -982,7 +983,7 @@ var UnifyAdapter = class {
       eventStream$.complete();
     });
     return {
-      threadId: request.threadId || (0,
+      threadId: request.threadId || (0, import_shared5.randomUUID)()
     };
   }
 };
@@ -990,7 +991,7 @@ __name(UnifyAdapter, "UnifyAdapter");
 
 // src/service-adapters/groq/groq-adapter.ts
 var import_groq_sdk = require("groq-sdk");
-var
+var import_shared6 = require("@copilotkit/shared");
 var DEFAULT_MODEL2 = "llama3-groq-70b-8192-tool-use-preview";
 var GroqAdapter = class {
   model = DEFAULT_MODEL2;
@@ -1103,7 +1104,7 @@ var GroqAdapter = class {
      eventStream$.complete();
    });
    return {
-      threadId: request.threadId || (0,
+      threadId: request.threadId || (0, import_shared6.randomUUID)()
    };
  }
 };
@@ -1245,7 +1246,7 @@ function groupAnthropicMessagesByRole(messageParams) {
 __name(groupAnthropicMessagesByRole, "groupAnthropicMessagesByRole");
 
 // src/service-adapters/anthropic/anthropic-adapter.ts
-var
+var import_shared7 = require("@copilotkit/shared");
 var DEFAULT_MODEL3 = "claude-3-sonnet-20240229";
 var AnthropicAdapter = class {
   model = DEFAULT_MODEL3;
@@ -1296,8 +1297,8 @@ var AnthropicAdapter = class {
     eventSource.stream(async (eventStream$) => {
       let mode = null;
       let didOutputText = false;
-      let currentMessageId = (0,
-      let currentToolCallId = (0,
+      let currentMessageId = (0, import_shared7.randomId)();
+      let currentToolCallId = (0, import_shared7.randomId)();
       let filterThinkingTextBuffer = new FilterThinkingTextBuffer();
       for await (const chunk of await stream) {
         if (chunk.type === "message_start") {
@@ -1354,7 +1355,7 @@ var AnthropicAdapter = class {
       eventStream$.complete();
     });
     return {
-      threadId: threadId || (0,
+      threadId: threadId || (0, import_shared7.randomUUID)()
     };
   }
 };
@@ -1394,7 +1395,7 @@ var FilterThinkingTextBuffer = /* @__PURE__ */ __name(class FilterThinkingTextBu
 
 // src/service-adapters/experimental/ollama/ollama-adapter.ts
 var import_ollama = require("@langchain/community/llms/ollama");
-var
+var import_shared8 = require("@copilotkit/shared");
 var DEFAULT_MODEL4 = "llama3:latest";
 var ExperimentalOllamaAdapter = class {
   model;
@@ -1413,7 +1414,7 @@ var ExperimentalOllamaAdapter = class {
     const contents = messages.filter((m) => m.isTextMessage()).map((m) => m.content);
     const _stream = await ollama.stream(contents);
     eventSource.stream(async (eventStream$) => {
-      const currentMessageId = (0,
+      const currentMessageId = (0, import_shared8.randomId)();
      eventStream$.sendTextMessageStart({
        messageId: currentMessageId
      });
@@ -1429,18 +1430,18 @@ var ExperimentalOllamaAdapter = class {
       eventStream$.complete();
     });
     return {
-      threadId: request.threadId || (0,
+      threadId: request.threadId || (0, import_shared8.randomUUID)()
     };
   }
 };
 __name(ExperimentalOllamaAdapter, "ExperimentalOllamaAdapter");
 
 // src/service-adapters/empty/empty-adapter.ts
-var
+var import_shared9 = require("@copilotkit/shared");
 var EmptyAdapter = class {
   async process(request) {
     return {
-      threadId: request.threadId || (0,
+      threadId: request.threadId || (0, import_shared9.randomUUID)()
     };
   }
 };
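
The common thread in these hunks is that the compiled adapters now pull their utility calls (parseJson, randomUUID, randomId, convertJsonSchemaToZodSchema) from @copilotkit/shared. Below is a minimal sketch, not the package source, of the two patterns the diff shows: tolerant parsing of an action's JSON Schema string and falling back to a generated UUID when no thread ID is supplied. The ActionInput and ProcessRequest shapes are simplified stand-ins invented for illustration.

// sketch.ts -- illustrative only, assumes the @copilotkit/shared exports seen in the diff
import { parseJson, randomUUID } from "@copilotkit/shared";

interface ActionInput {
  name: string;
  description: string;
  jsonSchema: string; // JSON Schema serialized as a string
}

interface ProcessRequest {
  threadId?: string;
}

// Mirrors the convertActionInputToOpenAITool change: the schema string is
// parsed with parseJson and falls back to {} instead of throwing on bad input.
function toOpenAITool(action: ActionInput) {
  return {
    type: "function" as const,
    function: {
      name: action.name,
      description: action.description,
      parameters: parseJson(action.jsonSchema, {}),
    },
  };
}

// Mirrors the adapters' process() methods: a missing threadId is replaced
// with a freshly generated UUID from the shared package.
function resolveThreadId(request: ProcessRequest): string {
  return request.threadId ?? randomUUID();
}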