@copilotkit/runtime 1.3.6 → 1.3.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/dist/{chunk-7AFOEQJG.mjs → chunk-3BSPBSR7.mjs} +31 -214
- package/dist/chunk-3BSPBSR7.mjs.map +1 -0
- package/dist/{chunk-RMZWGQ46.mjs → chunk-D2WLFQS6.mjs} +8 -8
- package/dist/chunk-D2WLFQS6.mjs.map +1 -0
- package/dist/{chunk-I5XVY6LS.mjs → chunk-DC7DLTU6.mjs} +4 -4
- package/dist/{chunk-UB4LPMMV.mjs → chunk-HKURA7H3.mjs} +2 -2
- package/dist/{chunk-TY5C4PL4.mjs → chunk-IL7POK7R.mjs} +2 -2
- package/dist/{chunk-LYLGFMKH.mjs → chunk-JUNGFQE7.mjs} +2 -2
- package/dist/chunk-MYZHUCL6.mjs +65 -0
- package/dist/chunk-MYZHUCL6.mjs.map +1 -0
- package/dist/{chunk-VRFBTIGK.mjs → chunk-TXVG65YE.mjs} +10 -10
- package/dist/chunk-TXVG65YE.mjs.map +1 -0
- package/dist/{chunk-73NMP3DI.mjs → chunk-V6JSDIHG.mjs} +5 -10
- package/dist/chunk-V6JSDIHG.mjs.map +1 -0
- package/dist/{copilot-runtime-a1b5f1ce.d.ts → copilot-runtime-df3527ad.d.ts} +2 -2
- package/dist/graphql/types/base/index.d.ts +2 -2
- package/dist/graphql/types/base/index.js +8 -8
- package/dist/graphql/types/base/index.js.map +1 -1
- package/dist/graphql/types/base/index.mjs +3 -3
- package/dist/graphql/types/converted/index.d.ts +1 -1
- package/dist/graphql/types/converted/index.js +32 -10
- package/dist/graphql/types/converted/index.js.map +1 -1
- package/dist/graphql/types/converted/index.mjs +4 -2
- package/dist/{groq-adapter-a07f59f2.d.ts → groq-adapter-b122e71f.d.ts} +3 -29
- package/dist/{index-0476e4f7.d.ts → index-cff31380.d.ts} +18 -7
- package/dist/index.d.ts +4 -5
- package/dist/index.js +139 -297
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +9 -9
- package/dist/{langserve-15a1286b.d.ts → langserve-a14a6849.d.ts} +1 -1
- package/dist/lib/index.d.ts +4 -5
- package/dist/lib/index.js +131 -289
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/index.mjs +9 -9
- package/dist/lib/integrations/index.d.ts +4 -4
- package/dist/lib/integrations/index.js +61 -41
- package/dist/lib/integrations/index.js.map +1 -1
- package/dist/lib/integrations/index.mjs +7 -7
- package/dist/lib/integrations/nest/index.d.ts +3 -3
- package/dist/lib/integrations/nest/index.js +61 -41
- package/dist/lib/integrations/nest/index.js.map +1 -1
- package/dist/lib/integrations/nest/index.mjs +5 -5
- package/dist/lib/integrations/node-express/index.d.ts +3 -3
- package/dist/lib/integrations/node-express/index.js +61 -41
- package/dist/lib/integrations/node-express/index.js.map +1 -1
- package/dist/lib/integrations/node-express/index.mjs +5 -5
- package/dist/lib/integrations/node-http/index.d.ts +3 -3
- package/dist/lib/integrations/node-http/index.js +61 -41
- package/dist/lib/integrations/node-http/index.js.map +1 -1
- package/dist/lib/integrations/node-http/index.mjs +4 -4
- package/dist/service-adapters/index.d.ts +4 -5
- package/dist/service-adapters/index.js +33 -265
- package/dist/service-adapters/index.js.map +1 -1
- package/dist/service-adapters/index.mjs +2 -4
- package/package.json +7 -7
- package/src/graphql/inputs/message.input.ts +2 -2
- package/src/graphql/types/base/index.ts +1 -1
- package/src/graphql/types/converted/index.ts +31 -10
- package/src/service-adapters/anthropic/anthropic-adapter.ts +1 -2
- package/src/service-adapters/anthropic/utils.ts +3 -3
- package/src/service-adapters/experimental/ollama/ollama-adapter.ts +1 -1
- package/src/service-adapters/google/google-genai-adapter.ts +13 -117
- package/src/service-adapters/langchain/utils.ts +4 -4
- package/src/service-adapters/openai/openai-assistant-adapter.ts +4 -6
- package/src/service-adapters/openai/utils.ts +3 -3
- package/dist/chunk-73NMP3DI.mjs.map +0 -1
- package/dist/chunk-7AFOEQJG.mjs.map +0 -1
- package/dist/chunk-RMZWGQ46.mjs.map +0 -1
- package/dist/chunk-TBZGOJJX.mjs +0 -44
- package/dist/chunk-TBZGOJJX.mjs.map +0 -1
- package/dist/chunk-VRFBTIGK.mjs.map +0 -1
- package/src/service-adapters/google/utils.ts +0 -94
- /package/dist/{chunk-I5XVY6LS.mjs.map → chunk-DC7DLTU6.mjs.map} +0 -0
- /package/dist/{chunk-UB4LPMMV.mjs.map → chunk-HKURA7H3.mjs.map} +0 -0
- /package/dist/{chunk-TY5C4PL4.mjs.map → chunk-IL7POK7R.mjs.map} +0 -0
- /package/dist/{chunk-LYLGFMKH.mjs.map → chunk-JUNGFQE7.mjs.map} +0 -0
```diff
@@ -1,10 +1,10 @@
 import {
   copilotRuntimeNodeHttpEndpoint
-} from "../../../chunk-
-import "../../../chunk-
+} from "../../../chunk-TXVG65YE.mjs";
+import "../../../chunk-V6JSDIHG.mjs";
 import "../../../chunk-U3V2BCGI.mjs";
-import "../../../chunk-
-import "../../../chunk-
+import "../../../chunk-MYZHUCL6.mjs";
+import "../../../chunk-D2WLFQS6.mjs";
 import "../../../chunk-44O2JGUY.mjs";
 export {
   copilotRuntimeNodeHttpEndpoint
@@ -1,8 +1,8 @@
-import { C as CopilotServiceAdapter, c as CopilotRuntimeChatCompletionRequest, d as CopilotRuntimeChatCompletionResponse } from '../langserve-
-export { a as RemoteChain, R as RemoteChainParameters } from '../langserve-
-export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-
+import { C as CopilotServiceAdapter, c as CopilotRuntimeChatCompletionRequest, d as CopilotRuntimeChatCompletionResponse } from '../langserve-a14a6849.js';
+export { a as RemoteChain, R as RemoteChainParameters } from '../langserve-a14a6849.js';
+export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, L as LangChainAdapter, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-b122e71f.js';
 import Anthropic from '@anthropic-ai/sdk';
-import '../index-
+import '../index-cff31380.js';
 import '../graphql/types/base/index.js';
 import 'rxjs';
 import '@copilotkit/shared';
@@ -10,7 +10,6 @@ import 'openai';
 import '@langchain/core/messages';
 import '@langchain/core/tools';
 import '@langchain/core/utils/stream';
-import '@google/generative-ai';
 import 'groq-sdk';
 
 /**
@@ -122,60 +122,6 @@ __name(RemoteChain, "RemoteChain");
 // src/service-adapters/openai/openai-adapter.ts
 var import_openai = __toESM(require("openai"));
 
-// src/graphql/types/base/index.ts
-var import_type_graphql = require("type-graphql");
-function _ts_decorate(decorators, target, key, desc) {
-  var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
-  if (typeof Reflect === "object" && typeof Reflect.decorate === "function")
-    r = Reflect.decorate(decorators, target, key, desc);
-  else
-    for (var i = decorators.length - 1; i >= 0; i--)
-      if (d = decorators[i])
-        r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
-  return c > 3 && r && Object.defineProperty(target, key, r), r;
-}
-__name(_ts_decorate, "_ts_decorate");
-function _ts_metadata(k, v) {
-  if (typeof Reflect === "object" && typeof Reflect.metadata === "function")
-    return Reflect.metadata(k, v);
-}
-__name(_ts_metadata, "_ts_metadata");
-var BaseMessage = class {
-  id;
-  createdAt;
-};
-__name(BaseMessage, "BaseMessage");
-_ts_decorate([
-  (0, import_type_graphql.Field)(() => String),
-  _ts_metadata("design:type", String)
-], BaseMessage.prototype, "id", void 0);
-_ts_decorate([
-  (0, import_type_graphql.Field)(() => Date),
-  _ts_metadata("design:type", typeof Date === "undefined" ? Object : Date)
-], BaseMessage.prototype, "createdAt", void 0);
-BaseMessage = _ts_decorate([
-  (0, import_type_graphql.InputType)()
-], BaseMessage);
-
-// src/graphql/types/converted/index.ts
-var TextMessage = class extends BaseMessage {
-  content;
-  role;
-};
-__name(TextMessage, "TextMessage");
-var ActionExecutionMessage = class extends BaseMessage {
-  name;
-  arguments;
-  scope;
-};
-__name(ActionExecutionMessage, "ActionExecutionMessage");
-var ResultMessage = class extends BaseMessage {
-  actionExecutionId;
-  actionName;
-  result;
-};
-__name(ResultMessage, "ResultMessage");
-
 // src/service-adapters/openai/utils.ts
 function limitMessagesToTokenCount(messages, tools, model, maxTokens) {
   maxTokens || (maxTokens = maxTokensForOpenAIModel(model));
@@ -276,12 +222,12 @@ function convertActionInputToOpenAITool(action) {
 }
 __name(convertActionInputToOpenAITool, "convertActionInputToOpenAITool");
 function convertMessageToOpenAIMessage(message) {
-  if (message
+  if (message.isTextMessage()) {
     return {
       role: message.role,
       content: message.content
     };
-  } else if (message
+  } else if (message.isActionExecutionMessage()) {
     return {
       role: "assistant",
       tool_calls: [
@@ -295,7 +241,7 @@ function convertMessageToOpenAIMessage(message) {
        }
      ]
    };
-  } else if (message
+  } else if (message.isResultMessage()) {
    return {
      role: "tool",
      content: message.result,
```
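The pattern running through every adapter bundle in this release is visible here for the first time: `instanceof TextMessage` / `ActionExecutionMessage` / `ResultMessage` checks are replaced by `isTextMessage()` / `isActionExecutionMessage()` / `isResultMessage()` guards on the message objects, which is also why the duplicated `BaseMessage` / `TextMessage` / `ActionExecutionMessage` / `ResultMessage` class definitions could be dropped from the bundle above. A minimal sketch of what such guards could look like on the converted message classes follows; the class and field names are taken from the removed bundle code, but the `this is …` implementations are an assumption, not the package's verbatim source.

```ts
// Hypothetical sketch — not the package's verbatim source. Field types are taken
// from the compiled classes removed above; GraphQL decorators and initializers are omitted.
export class BaseMessage {
  id!: string;
  createdAt!: Date;

  // Assumed implementation of the new type guards used throughout the adapters.
  isTextMessage(): this is TextMessage {
    return this instanceof TextMessage;
  }
  isActionExecutionMessage(): this is ActionExecutionMessage {
    return this instanceof ActionExecutionMessage;
  }
  isResultMessage(): this is ResultMessage {
    return this instanceof ResultMessage;
  }
}

export class TextMessage extends BaseMessage {
  content!: string;
  role!: string;
}

export class ActionExecutionMessage extends BaseMessage {
  name!: string;
  arguments!: Record<string, unknown>;
  scope!: string;
}

export class ResultMessage extends BaseMessage {
  actionExecutionId!: string;
  actionName!: string;
  result!: string;
}
```

With guards like these, call sites such as `convertMessageToOpenAIMessage` can narrow a `BaseMessage` without importing the concrete subclasses, which matches the call pattern in the hunks that follow.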
```diff
@@ -414,7 +360,7 @@ var import_tools = require("@langchain/core/tools");
 var import_zod = require("zod");
 var import_shared2 = require("@copilotkit/shared");
 function convertMessageToLangChainMessage(message) {
-  if (message
+  if (message.isTextMessage()) {
     if (message.role == "user") {
       return new import_messages.HumanMessage(message.content);
     } else if (message.role == "assistant") {
@@ -422,7 +368,7 @@ function convertMessageToLangChainMessage(message) {
     } else if (message.role === "system") {
       return new import_messages.SystemMessage(message.content);
     }
-  } else if (message
+  } else if (message.isActionExecutionMessage()) {
     return new import_messages.AIMessage({
       content: "",
       tool_calls: [
@@ -433,7 +379,7 @@ function convertMessageToLangChainMessage(message) {
        }
      ]
    });
-  } else if (message
+  } else if (message.isResultMessage()) {
    return new import_messages.ToolMessage({
      content: message.result,
      tool_call_id: message.actionExecutionId
@@ -459,7 +405,7 @@ function convertJsonSchemaToZodSchema(jsonSchema, required) {
     let schema = import_zod.z.boolean().describe(jsonSchema.description);
     return !required ? schema.optional() : schema;
   } else if (jsonSchema.type === "array") {
-    let itemSchema = convertJsonSchemaToZodSchema(jsonSchema.items,
+    let itemSchema = convertJsonSchemaToZodSchema(jsonSchema.items, true);
     let schema = import_zod.z.array(itemSchema);
     return !required ? schema.optional() : schema;
   }
@@ -633,199 +579,21 @@ var LangChainAdapter = class {
 __name(LangChainAdapter, "LangChainAdapter");
 
 // src/service-adapters/google/google-genai-adapter.ts
-var
-
-// src/service-adapters/google/utils.ts
-function convertMessageToGoogleGenAIMessage(message) {
-  if (message instanceof TextMessage) {
-    const role = {
-      user: "user",
-      assistant: "model",
-      system: "user"
-    }[message.role];
-    const text = message.role === "system" ? "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content : message.content;
-    return {
-      role,
-      parts: [
-        {
-          text
-        }
-      ]
-    };
-  } else if (message instanceof ActionExecutionMessage) {
-    return {
-      role: "model",
-      parts: [
-        {
-          functionCall: {
-            name: message.name,
-            args: message.arguments
-          }
-        }
-      ]
-    };
-  } else if (message instanceof ResultMessage) {
-    return {
-      role: "function",
-      parts: [
-        {
-          functionResponse: {
-            name: message.actionName,
-            response: {
-              name: message.actionName,
-              content: tryParseJson(message.result)
-            }
-          }
-        }
-      ]
-    };
-  }
-}
-__name(convertMessageToGoogleGenAIMessage, "convertMessageToGoogleGenAIMessage");
-function transformActionToGoogleGenAITool(action) {
-  const name = action.name;
-  const description = action.description;
-  const parameters = JSON.parse(action.jsonSchema);
-  const transformProperties = /* @__PURE__ */ __name((props) => {
-    for (const key in props) {
-      if (props[key].type) {
-        props[key].type = props[key].type.toUpperCase();
-      }
-      if (props[key].properties) {
-        transformProperties(props[key].properties);
-      }
-    }
-  }, "transformProperties");
-  transformProperties(parameters);
-  return {
-    functionDeclarations: [
-      {
-        name,
-        description,
-        parameters
-      }
-    ]
-  };
-}
-__name(transformActionToGoogleGenAITool, "transformActionToGoogleGenAITool");
-function tryParseJson(str) {
-  if (!str) {
-    return "";
-  }
-  try {
-    return JSON.parse(str);
-  } catch (e) {
-    return str;
-  }
-}
-__name(tryParseJson, "tryParseJson");
-
-// src/service-adapters/google/google-genai-adapter.ts
-var import_shared4 = require("@copilotkit/shared");
-var GoogleGenerativeAIAdapter = class {
-  model;
+var import_google_gauth = require("@langchain/google-gauth");
+var GoogleGenerativeAIAdapter = class extends LangChainAdapter {
   constructor(options) {
-
-
-
-
-
-
-
-    }
-  }
-  async process(request) {
-    const { messages, actions, eventSource } = request;
-    const history = messages.slice(1, -1).map(convertMessageToGoogleGenAIMessage);
-    const currentMessage = convertMessageToGoogleGenAIMessage(messages.at(-1));
-    if (!currentMessage) {
-      throw new Error("No current message");
-    }
-    let systemMessage;
-    const firstMessage = messages.at(0);
-    if (firstMessage instanceof TextMessage && firstMessage.role === "system") {
-      systemMessage = firstMessage.content.trim();
-    } else {
-      throw new Error("First message is not a system message");
-    }
-    const tools = actions.map(transformActionToGoogleGenAITool);
-    const isFirstGenGeminiPro = this.model.model === "gemini-pro" || this.model.model === "models/gemini-pro";
-    const chat = this.model.startChat({
-      history: [
-        ...history,
-        // gemini-pro does not support system instructions, so we need to add them to the history
-        ...isFirstGenGeminiPro ? [
-          {
-            role: "user",
-            parts: [
-              {
-                text: systemMessage
-              }
-            ]
-          }
-        ] : []
-      ],
-      // only gemini-1.5-pro-latest and later supports setting system instructions
-      ...isFirstGenGeminiPro ? {} : {
-        systemInstruction: {
-          role: "user",
-          parts: [
-            {
-              text: systemMessage
-            }
-          ]
-        }
-      },
-      tools
-    });
-    const result = await chat.sendMessageStream(currentMessage.parts);
-    eventSource.stream(async (eventStream$) => {
-      let isTextMessage = false;
-      for await (const chunk of result.stream) {
-        const chunkText = chunk.text();
-        if (chunkText === "") {
-          continue;
-        }
-        if (!isTextMessage) {
-          isTextMessage = true;
-          eventStream$.sendTextMessageStart((0, import_shared4.randomId)());
-        }
-        eventStream$.sendTextMessageContent(chunkText);
+    super({
+      chainFn: async ({ messages, tools }) => {
+        const model = new import_google_gauth.ChatGoogle({
+          modelName: (options == null ? void 0 : options.model) ?? "gemini-1.5-pro",
+          apiVersion: "v1beta"
+        }).bindTools(tools);
+        return model.stream(messages);
       }
-      if (isTextMessage) {
-        eventStream$.sendTextMessageEnd();
-      }
-      let calls = (await result.response).functionCalls();
-      if (calls) {
-        for (let call of calls) {
-          eventStream$.sendActionExecution((0, import_shared4.randomId)(), call.name, JSON.stringify(replaceNewlinesInObject(call.args)));
-        }
-      }
-      eventStream$.complete();
     });
-    return {
-      threadId: request.threadId || (0, import_shared4.randomId)()
-    };
   }
 };
 __name(GoogleGenerativeAIAdapter, "GoogleGenerativeAIAdapter");
-function replaceNewlinesInObject(obj) {
-  if (typeof obj === "string") {
-    return obj.replace(/\\\\n/g, "\n");
-  } else if (Array.isArray(obj)) {
-    return obj.map(replaceNewlinesInObject);
-  } else if (typeof obj === "object" && obj !== null) {
-    const newObj = {};
-    for (const key in obj) {
-      if (obj.hasOwnProperty(key)) {
-        newObj[key] = replaceNewlinesInObject(obj[key]);
-      }
-    }
-    return newObj;
-  }
-  return obj;
-}
-__name(replaceNewlinesInObject, "replaceNewlinesInObject");
 
 // src/service-adapters/openai/openai-assistant-adapter.ts
 var import_openai2 = __toESM(require("openai"));
```
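This hunk carries the substance of the release: the hand-rolled Google GenAI integration (message/part conversion, tool transformation, and the manual streaming loop) is deleted, and `GoogleGenerativeAIAdapter` now extends `LangChainAdapter`, delegating to `ChatGoogle` from `@langchain/google-gauth`. Translated back from the compiled output, the new `src/service-adapters/google/google-genai-adapter.ts` plausibly reads roughly like the sketch below; the relative import path and the options interface name are assumptions, everything else mirrors the bundle above.

```ts
import { ChatGoogle } from "@langchain/google-gauth";
// Assumed relative path to the LangChain adapter inside the package source.
import { LangChainAdapter } from "../langchain/langchain-adapter";

// Assumed shape of the constructor options; only `model` is visible in the bundle.
interface GoogleGenerativeAIAdapterOptions {
  model?: string;
}

export class GoogleGenerativeAIAdapter extends LangChainAdapter {
  constructor(options?: GoogleGenerativeAIAdapterOptions) {
    super({
      chainFn: async ({ messages, tools }) => {
        // Defaults to gemini-1.5-pro and binds the CopilotKit actions as tools,
        // exactly as the compiled chunk does.
        const model = new ChatGoogle({
          modelName: options?.model ?? "gemini-1.5-pro",
          apiVersion: "v1beta",
        }).bindTools(tools);
        return model.stream(messages);
      },
    });
  }
}
```

Delegating to `LangChainAdapter` also explains why `@google/generative-ai` disappears from the dependency imports and why `src/service-adapters/google/utils.ts` is removed entirely.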
```diff
@@ -847,9 +615,9 @@ var OpenAIAssistantAdapter = class {
     let threadId = request.threadId || (await this.openai.beta.threads.create()).id;
     const lastMessage = messages.at(-1);
     let nextRunId = void 0;
-    if (lastMessage
+    if (lastMessage.isResultMessage() && runId) {
       nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);
-    } else if (lastMessage
+    } else if (lastMessage.isTextMessage()) {
       nextRunId = await this.submitUserMessage(threadId, messages, actions, eventSource, forwardedParameters);
     } else {
       throw new Error("No actionable message found in the messages");
@@ -865,7 +633,7 @@ var OpenAIAssistantAdapter = class {
       throw new Error("No tool outputs required");
     }
     const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map((toolCall) => toolCall.id);
-    const resultMessages = messages.filter((message) => message
+    const resultMessages = messages.filter((message) => message.isResultMessage() && toolCallsIds.includes(message.actionExecutionId));
     if (toolCallsIds.length != resultMessages.length) {
       throw new Error("Number of function results does not match the number of tool calls");
     }
@@ -889,7 +657,7 @@ var OpenAIAssistantAdapter = class {
       ...messages
     ];
     const instructionsMessage = messages.shift();
-    const instructions = instructionsMessage
+    const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
     const userMessage = messages.map(convertMessageToOpenAIMessage).map(convertSystemMessageToAssistantAPI).at(-1);
     if (userMessage.role !== "user") {
       throw new Error("No user message found");
@@ -991,7 +759,7 @@ __name(getRunIdFromStream, "getRunIdFromStream");
 
 // src/service-adapters/unify/unify-adapter.ts
 var import_openai3 = __toESM(require("openai"));
-var
+var import_shared4 = require("@copilotkit/shared");
 var UnifyAdapter = class {
   apiKey;
   model;
@@ -1027,7 +795,7 @@ var UnifyAdapter = class {
       for await (const chunk of stream) {
         if (this.start) {
           model = chunk.model;
-          eventStream$.sendTextMessageStart((0,
+          eventStream$.sendTextMessageStart((0, import_shared4.randomId)());
           eventStream$.sendTextMessageContent(`Model used: ${model}
 `);
           eventStream$.sendTextMessageEnd();
@@ -1065,7 +833,7 @@ var UnifyAdapter = class {
       eventStream$.complete();
     });
     return {
-      threadId: request.threadId || (0,
+      threadId: request.threadId || (0, import_shared4.randomId)()
     };
   }
 };
@@ -1073,7 +841,7 @@ __name(UnifyAdapter, "UnifyAdapter");
 
 // src/service-adapters/groq/groq-adapter.ts
 var import_groq_sdk = require("groq-sdk");
-var
+var import_shared5 = require("@copilotkit/shared");
 var DEFAULT_MODEL2 = "llama3-groq-70b-8192-tool-use-preview";
 var GroqAdapter = class {
   model = DEFAULT_MODEL2;
@@ -1159,7 +927,7 @@ var GroqAdapter = class {
       eventStream$.complete();
     });
     return {
-      threadId: threadId || (0,
+      threadId: threadId || (0, import_shared5.randomId)()
     };
   }
 };
@@ -1234,7 +1002,7 @@ function convertActionInputToAnthropicTool(action) {
 }
 __name(convertActionInputToAnthropicTool, "convertActionInputToAnthropicTool");
 function convertMessageToAnthropicMessage(message) {
-  if (message
+  if (message.isTextMessage()) {
     if (message.role === "system") {
       return {
         role: "assistant",
@@ -1256,7 +1024,7 @@ function convertMessageToAnthropicMessage(message) {
         ]
       };
     }
-  } else if (message
+  } else if (message.isActionExecutionMessage()) {
     return {
       role: "assistant",
       content: [
@@ -1268,7 +1036,7 @@ function convertMessageToAnthropicMessage(message) {
        }
      ]
    };
-  } else if (message
+  } else if (message.isResultMessage()) {
    return {
      role: "user",
      content: [
@@ -1301,7 +1069,7 @@ function groupAnthropicMessagesByRole(messageParams) {
 __name(groupAnthropicMessagesByRole, "groupAnthropicMessagesByRole");
 
 // src/service-adapters/anthropic/anthropic-adapter.ts
-var
+var import_shared6 = require("@copilotkit/shared");
 var DEFAULT_MODEL3 = "claude-3-opus-20240229";
 var AnthropicAdapter = class {
   model = DEFAULT_MODEL3;
@@ -1322,7 +1090,7 @@ var AnthropicAdapter = class {
       ...rawMessages
     ];
     const instructionsMessage = messages.shift();
-    const instructions = instructionsMessage
+    const instructions = instructionsMessage.isTextMessage() ? instructionsMessage.content : "";
     let anthropicMessages = messages.map(convertMessageToAnthropicMessage);
     anthropicMessages = limitMessagesToTokenCount2(anthropicMessages, tools, model);
     anthropicMessages = groupAnthropicMessagesByRole(anthropicMessages);
@@ -1349,8 +1117,8 @@ var AnthropicAdapter = class {
     eventSource.stream(async (eventStream$) => {
       let mode = null;
       let didOutputText = false;
-      let currentMessageId = (0,
-      let currentToolCallId = (0,
+      let currentMessageId = (0, import_shared6.randomId)();
+      let currentToolCallId = (0, import_shared6.randomId)();
       let filterThinkingTextBuffer = new FilterThinkingTextBuffer();
       for await (const chunk of await stream) {
         if (chunk.type === "message_start") {
@@ -1391,7 +1159,7 @@ var AnthropicAdapter = class {
       eventStream$.complete();
     });
     return {
-      threadId: threadId || (0,
+      threadId: threadId || (0, import_shared6.randomId)()
     };
   }
 };
```