@copilotkit/runtime 0.37.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.js +7 -0
- package/.turbo/turbo-build.log +70 -0
- package/CHANGELOG.md +1 -0
- package/__snapshots__/schema/schema.graphql +178 -0
- package/dist/chunk-2CCVVJDU.mjs +56 -0
- package/dist/chunk-2CCVVJDU.mjs.map +1 -0
- package/dist/chunk-4UA4RB4C.mjs +185 -0
- package/dist/chunk-4UA4RB4C.mjs.map +1 -0
- package/dist/chunk-5HGYI6EG.mjs +678 -0
- package/dist/chunk-5HGYI6EG.mjs.map +1 -0
- package/dist/chunk-7IFP53C6.mjs +169 -0
- package/dist/chunk-7IFP53C6.mjs.map +1 -0
- package/dist/chunk-BLTAUVRP.mjs +30 -0
- package/dist/chunk-BLTAUVRP.mjs.map +1 -0
- package/dist/chunk-NFCPM5AM.mjs +43 -0
- package/dist/chunk-NFCPM5AM.mjs.map +1 -0
- package/dist/chunk-XPAUPJMW.mjs +1051 -0
- package/dist/chunk-XPAUPJMW.mjs.map +1 -0
- package/dist/graphql/types/base/index.d.ts +6 -0
- package/dist/graphql/types/base/index.js +63 -0
- package/dist/graphql/types/base/index.js.map +1 -0
- package/dist/graphql/types/base/index.mjs +7 -0
- package/dist/graphql/types/base/index.mjs.map +1 -0
- package/dist/graphql/types/converted/index.d.ts +2 -0
- package/dist/graphql/types/converted/index.js +88 -0
- package/dist/graphql/types/converted/index.js.map +1 -0
- package/dist/graphql/types/converted/index.mjs +12 -0
- package/dist/graphql/types/converted/index.mjs.map +1 -0
- package/dist/index-aa091e3c.d.ts +49 -0
- package/dist/index-f0875df3.d.ts +197 -0
- package/dist/index.d.ts +15 -0
- package/dist/index.js +2171 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +49 -0
- package/dist/index.mjs.map +1 -0
- package/dist/langchain-adapter-9ce103f3.d.ts +200 -0
- package/dist/langserve-fd5066ee.d.ts +96 -0
- package/dist/lib/index.d.ts +15 -0
- package/dist/lib/index.js +2170 -0
- package/dist/lib/index.js.map +1 -0
- package/dist/lib/index.mjs +46 -0
- package/dist/lib/index.mjs.map +1 -0
- package/dist/lib/integrations/index.d.ts +9 -0
- package/dist/lib/integrations/index.js +1024 -0
- package/dist/lib/integrations/index.js.map +1 -0
- package/dist/lib/integrations/index.mjs +24 -0
- package/dist/lib/integrations/index.mjs.map +1 -0
- package/dist/lib/integrations/node-http/index.d.ts +8 -0
- package/dist/lib/integrations/node-http/index.js +969 -0
- package/dist/lib/integrations/node-http/index.js.map +1 -0
- package/dist/lib/integrations/node-http/index.mjs +10 -0
- package/dist/lib/integrations/node-http/index.mjs.map +1 -0
- package/dist/pages-router-b6bc6c60.d.ts +30 -0
- package/dist/service-adapters/index.d.ts +11 -0
- package/dist/service-adapters/index.js +912 -0
- package/dist/service-adapters/index.js.map +1 -0
- package/dist/service-adapters/index.mjs +18 -0
- package/dist/service-adapters/index.mjs.map +1 -0
- package/jest.config.js +5 -0
- package/package.json +63 -0
- package/scripts/generate-gql-schema.ts +13 -0
- package/src/graphql/inputs/action.input.ts +13 -0
- package/src/graphql/inputs/cloud-guardrails.input.ts +19 -0
- package/src/graphql/inputs/cloud.input.ts +8 -0
- package/src/graphql/inputs/context-property.input.ts +10 -0
- package/src/graphql/inputs/custom-property.input.ts +15 -0
- package/src/graphql/inputs/frontend.input.ts +11 -0
- package/src/graphql/inputs/generate-copilot-response.input.ts +22 -0
- package/src/graphql/inputs/message.input.ts +50 -0
- package/src/graphql/resolvers/copilot.resolver.ts +147 -0
- package/src/graphql/types/base/index.ts +10 -0
- package/src/graphql/types/converted/index.ts +29 -0
- package/src/graphql/types/copilot-response.type.ts +75 -0
- package/src/graphql/types/enums.ts +22 -0
- package/src/graphql/types/guardrails-result.type.ts +20 -0
- package/src/graphql/types/message-status.type.ts +40 -0
- package/src/graphql/types/response-status.type.ts +52 -0
- package/src/index.ts +2 -0
- package/src/lib/copilot-cloud.ts +63 -0
- package/src/lib/copilot-runtime.ts +261 -0
- package/src/lib/guardrails.ts +3 -0
- package/src/lib/index.ts +7 -0
- package/src/lib/integrations/index.ts +4 -0
- package/src/lib/integrations/nextjs/app-router.ts +29 -0
- package/src/lib/integrations/nextjs/pages-router.ts +36 -0
- package/src/lib/integrations/node-http/index.ts +23 -0
- package/src/lib/integrations/shared.ts +68 -0
- package/src/service-adapters/conversion.ts +47 -0
- package/src/service-adapters/events.ts +197 -0
- package/src/service-adapters/experimental/groq/groq-adapter.ts +124 -0
- package/src/service-adapters/experimental/ollama/ollama-adapter.ts +75 -0
- package/src/service-adapters/google/google-genai-adapter.ts +149 -0
- package/src/service-adapters/google/utils.ts +94 -0
- package/src/service-adapters/index.ts +6 -0
- package/src/service-adapters/langchain/langchain-adapter.ts +82 -0
- package/src/service-adapters/langchain/langserve.ts +81 -0
- package/src/service-adapters/langchain/types.ts +14 -0
- package/src/service-adapters/langchain/utils.ts +235 -0
- package/src/service-adapters/openai/openai-adapter.ts +142 -0
- package/src/service-adapters/openai/openai-assistant-adapter.ts +260 -0
- package/src/service-adapters/openai/utils.ts +164 -0
- package/src/service-adapters/service-adapter.ts +29 -0
- package/tsconfig.json +11 -0
- package/tsup.config.ts +17 -0
- package/typedoc.json +4 -0
|
@@ -0,0 +1,260 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* CopilotKit Adapter for the OpenAI Assistant API.
|
|
3
|
+
*
|
|
4
|
+
* Use this adapter to get responses from the OpenAI Assistant API.
|
|
5
|
+
*
|
|
6
|
+
* <RequestExample>
|
|
7
|
+
* ```typescript
|
|
8
|
+
* const copilotKit = new CopilotRuntime();
|
|
9
|
+
* return copilotKit.response(
|
|
10
|
+
* req,
|
|
11
|
+
* new OpenAIAssistantAdapter({
|
|
12
|
+
* assistantId: "your-assistant-id"
|
|
13
|
+
* })
|
|
14
|
+
* );
|
|
15
|
+
* ```
|
|
16
|
+
* </RequestExample>
|
|
17
|
+
*/
|
|
18
|
+
import OpenAI from "openai";
|
|
19
|
+
import {
|
|
20
|
+
CopilotServiceAdapter,
|
|
21
|
+
CopilotRuntimeChatCompletionRequest,
|
|
22
|
+
CopilotRuntimeChatCompletionResponse,
|
|
23
|
+
} from "../service-adapter";
|
|
24
|
+
import { Message, ResultMessage, TextMessage } from "../../graphql/types/converted";
|
|
25
|
+
import {
|
|
26
|
+
convertActionInputToOpenAITool,
|
|
27
|
+
convertMessageToOpenAIMessage,
|
|
28
|
+
convertSystemMessageToAssistantAPI,
|
|
29
|
+
} from "./utils";
|
|
30
|
+
import { RunSubmitToolOutputsStreamParams } from "openai/resources/beta/threads/runs/runs";
|
|
31
|
+
import { AssistantStream } from "openai/lib/AssistantStream";
|
|
32
|
+
import { RuntimeEventSource } from "../events";
|
|
33
|
+
import { ActionInput } from "../../graphql/inputs/action.input";
|
|
34
|
+
import { AssistantStreamEvent, AssistantTool } from "openai/resources/beta/assistants";
|
|
35
|
+
|
|
36
|
+
/**
 * Constructor parameters for {@link OpenAIAssistantAdapter}.
 */
export interface OpenAIAssistantAdapterParams {
  /**
   * The ID of the assistant to use.
   */
  assistantId: string;

  /**
   * An instance of `OpenAI` to use for the request. If not provided, a new instance will be created.
   */
  openai?: OpenAI;

  /**
   * Whether to enable the code interpreter tool on runs. Defaults to `true`.
   */
  codeInterpreterEnabled?: boolean;

  /**
   * Whether to enable the file search (retrieval) tool on runs. Defaults to `true`.
   */
  fileSearchEnabled?: boolean;
}
|
|
57
|
+
|
|
58
|
+
export class OpenAIAssistantAdapter implements CopilotServiceAdapter {
|
|
59
|
+
private openai: OpenAI;
|
|
60
|
+
private codeInterpreterEnabled: boolean;
|
|
61
|
+
private assistantId: string;
|
|
62
|
+
private fileSearchEnabled: boolean;
|
|
63
|
+
|
|
64
|
+
constructor(params: OpenAIAssistantAdapterParams) {
|
|
65
|
+
this.openai = params.openai || new OpenAI({});
|
|
66
|
+
this.codeInterpreterEnabled = params.codeInterpreterEnabled === false || true;
|
|
67
|
+
this.fileSearchEnabled = params.fileSearchEnabled === false || true;
|
|
68
|
+
this.assistantId = params.assistantId;
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
async process({
|
|
72
|
+
messages,
|
|
73
|
+
actions,
|
|
74
|
+
eventSource,
|
|
75
|
+
threadId,
|
|
76
|
+
runId,
|
|
77
|
+
}: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse> {
|
|
78
|
+
// if we don't have a threadId, create a new thread
|
|
79
|
+
threadId ||= (await this.openai.beta.threads.create()).id;
|
|
80
|
+
const lastMessage = messages.at(-1);
|
|
81
|
+
|
|
82
|
+
let nextRunId: string | undefined = undefined;
|
|
83
|
+
|
|
84
|
+
// submit function outputs
|
|
85
|
+
if (lastMessage instanceof ResultMessage && runId) {
|
|
86
|
+
nextRunId = await this.submitToolOutputs(threadId, runId, messages, eventSource);
|
|
87
|
+
}
|
|
88
|
+
// submit user message
|
|
89
|
+
else if (lastMessage instanceof TextMessage) {
|
|
90
|
+
nextRunId = await this.submitUserMessage(threadId, messages, actions, eventSource);
|
|
91
|
+
}
|
|
92
|
+
// unsupported message
|
|
93
|
+
else {
|
|
94
|
+
throw new Error("No actionable message found in the messages");
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
return {
|
|
98
|
+
threadId,
|
|
99
|
+
runId: nextRunId,
|
|
100
|
+
};
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
private async submitToolOutputs(
|
|
104
|
+
threadId: string,
|
|
105
|
+
runId: string,
|
|
106
|
+
messages: Message[],
|
|
107
|
+
eventSource: RuntimeEventSource,
|
|
108
|
+
) {
|
|
109
|
+
let run = await this.openai.beta.threads.runs.retrieve(threadId, runId);
|
|
110
|
+
if (!run.required_action) {
|
|
111
|
+
throw new Error("No tool outputs required");
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
// get the required tool call ids
|
|
115
|
+
const toolCallsIds = run.required_action.submit_tool_outputs.tool_calls.map(
|
|
116
|
+
(toolCall) => toolCall.id,
|
|
117
|
+
);
|
|
118
|
+
|
|
119
|
+
// search for these tool calls
|
|
120
|
+
const resultMessages = messages.filter(
|
|
121
|
+
(message) =>
|
|
122
|
+
message instanceof ResultMessage && toolCallsIds.includes(message.actionExecutionId),
|
|
123
|
+
) as ResultMessage[];
|
|
124
|
+
|
|
125
|
+
if (toolCallsIds.length != resultMessages.length) {
|
|
126
|
+
throw new Error("Number of function results does not match the number of tool calls");
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
// submit the tool outputs
|
|
130
|
+
const toolOutputs: RunSubmitToolOutputsStreamParams.ToolOutput[] = resultMessages.map(
|
|
131
|
+
(message) => {
|
|
132
|
+
return {
|
|
133
|
+
tool_call_id: message.actionExecutionId,
|
|
134
|
+
output: message.result,
|
|
135
|
+
};
|
|
136
|
+
},
|
|
137
|
+
);
|
|
138
|
+
|
|
139
|
+
const stream = this.openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
|
|
140
|
+
tool_outputs: toolOutputs,
|
|
141
|
+
});
|
|
142
|
+
|
|
143
|
+
await this.streamResponse(stream, eventSource);
|
|
144
|
+
return runId;
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
private async submitUserMessage(
|
|
148
|
+
threadId: string,
|
|
149
|
+
messages: Message[],
|
|
150
|
+
actions: ActionInput[],
|
|
151
|
+
eventSource: RuntimeEventSource,
|
|
152
|
+
) {
|
|
153
|
+
messages = [...messages];
|
|
154
|
+
|
|
155
|
+
// get the instruction message
|
|
156
|
+
const instructionsMessage = messages.shift();
|
|
157
|
+
const instructions =
|
|
158
|
+
instructionsMessage instanceof TextMessage ? instructionsMessage.content : "";
|
|
159
|
+
|
|
160
|
+
// get the latest user message
|
|
161
|
+
const userMessage = messages
|
|
162
|
+
.map(convertMessageToOpenAIMessage)
|
|
163
|
+
.map(convertSystemMessageToAssistantAPI)
|
|
164
|
+
.at(-1);
|
|
165
|
+
|
|
166
|
+
if (userMessage.role !== "user") {
|
|
167
|
+
throw new Error("No user message found");
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
// create a new message on the thread
|
|
171
|
+
await this.openai.beta.threads.messages.create(threadId, {
|
|
172
|
+
role: "user",
|
|
173
|
+
content: userMessage.content,
|
|
174
|
+
});
|
|
175
|
+
|
|
176
|
+
const openaiTools = actions.map(convertActionInputToOpenAITool);
|
|
177
|
+
|
|
178
|
+
const tools = [
|
|
179
|
+
...openaiTools,
|
|
180
|
+
...(this.codeInterpreterEnabled ? [{ type: "code_interpreter" } as AssistantTool] : []),
|
|
181
|
+
...(this.fileSearchEnabled ? [{ type: "file_search" } as AssistantTool] : []),
|
|
182
|
+
];
|
|
183
|
+
|
|
184
|
+
// run the thread
|
|
185
|
+
let stream = this.openai.beta.threads.runs.stream(threadId, {
|
|
186
|
+
assistant_id: this.assistantId,
|
|
187
|
+
instructions,
|
|
188
|
+
tools: tools,
|
|
189
|
+
});
|
|
190
|
+
|
|
191
|
+
await this.streamResponse(stream, eventSource);
|
|
192
|
+
|
|
193
|
+
return getRunIdFromStream(stream);
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
private async streamResponse(stream: AssistantStream, eventSource: RuntimeEventSource) {
|
|
197
|
+
eventSource.stream(async (eventStream$) => {
|
|
198
|
+
let inFunctionCall = false;
|
|
199
|
+
|
|
200
|
+
for await (const chunk of stream) {
|
|
201
|
+
switch (chunk.event) {
|
|
202
|
+
case "thread.message.created":
|
|
203
|
+
if (inFunctionCall) {
|
|
204
|
+
eventStream$.sendActionExecutionEnd();
|
|
205
|
+
}
|
|
206
|
+
eventStream$.sendTextMessageStart(chunk.data.id);
|
|
207
|
+
break;
|
|
208
|
+
case "thread.message.delta":
|
|
209
|
+
if (chunk.data.delta.content?.[0].type === "text") {
|
|
210
|
+
eventStream$.sendTextMessageContent(chunk.data.delta.content?.[0].text.value);
|
|
211
|
+
}
|
|
212
|
+
break;
|
|
213
|
+
case "thread.message.completed":
|
|
214
|
+
eventStream$.sendTextMessageEnd();
|
|
215
|
+
break;
|
|
216
|
+
case "thread.run.step.delta":
|
|
217
|
+
let toolCallId: string | undefined;
|
|
218
|
+
let toolCallName: string | undefined;
|
|
219
|
+
let toolCallArgs: string | undefined;
|
|
220
|
+
if (
|
|
221
|
+
chunk.data.delta.step_details.type === "tool_calls" &&
|
|
222
|
+
chunk.data.delta.step_details.tool_calls?.[0].type === "function"
|
|
223
|
+
) {
|
|
224
|
+
toolCallId = chunk.data.delta.step_details.tool_calls?.[0].id;
|
|
225
|
+
toolCallName = chunk.data.delta.step_details.tool_calls?.[0].function.name;
|
|
226
|
+
toolCallArgs = chunk.data.delta.step_details.tool_calls?.[0].function.arguments;
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
if (toolCallName && toolCallId) {
|
|
230
|
+
if (inFunctionCall) {
|
|
231
|
+
eventStream$.sendActionExecutionEnd();
|
|
232
|
+
}
|
|
233
|
+
inFunctionCall = true;
|
|
234
|
+
eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
|
|
235
|
+
} else if (toolCallArgs) {
|
|
236
|
+
eventStream$.sendActionExecutionArgs(toolCallArgs);
|
|
237
|
+
}
|
|
238
|
+
break;
|
|
239
|
+
}
|
|
240
|
+
}
|
|
241
|
+
if (inFunctionCall) {
|
|
242
|
+
eventStream$.sendActionExecutionEnd();
|
|
243
|
+
}
|
|
244
|
+
eventStream$.complete();
|
|
245
|
+
});
|
|
246
|
+
}
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
function getRunIdFromStream(stream: AssistantStream): Promise<string> {
|
|
250
|
+
return new Promise<string>((resolve, reject) => {
|
|
251
|
+
let runIdGetter = (event: AssistantStreamEvent) => {
|
|
252
|
+
if (event.event === "thread.run.created") {
|
|
253
|
+
const runId = event.data.id;
|
|
254
|
+
stream.off("event", runIdGetter);
|
|
255
|
+
resolve(runId);
|
|
256
|
+
}
|
|
257
|
+
};
|
|
258
|
+
stream.on("event", runIdGetter);
|
|
259
|
+
});
|
|
260
|
+
}
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ActionExecutionMessage,
|
|
3
|
+
Message,
|
|
4
|
+
ResultMessage,
|
|
5
|
+
TextMessage,
|
|
6
|
+
} from "../../graphql/types/converted";
|
|
7
|
+
import { Tiktoken, TiktokenModel, encodingForModel } from "js-tiktoken";
|
|
8
|
+
import { ActionInput } from "../../graphql/inputs/action.input";
|
|
9
|
+
import { ChatCompletionMessageParam, ChatCompletionTool } from "openai/resources";
|
|
10
|
+
|
|
11
|
+
export function limitMessagesToTokenCount(
|
|
12
|
+
messages: any[],
|
|
13
|
+
tools: any[],
|
|
14
|
+
model: string,
|
|
15
|
+
maxTokens?: number,
|
|
16
|
+
): any[] {
|
|
17
|
+
maxTokens ||= maxTokensForOpenAIModel(model);
|
|
18
|
+
|
|
19
|
+
const result: any[] = [];
|
|
20
|
+
const toolsNumTokens = countToolsTokens(model, tools);
|
|
21
|
+
if (toolsNumTokens > maxTokens) {
|
|
22
|
+
throw new Error(`Too many tokens in function definitions: ${toolsNumTokens} > ${maxTokens}`);
|
|
23
|
+
}
|
|
24
|
+
maxTokens -= toolsNumTokens;
|
|
25
|
+
|
|
26
|
+
for (const message of messages) {
|
|
27
|
+
if (message.role === "system") {
|
|
28
|
+
const numTokens = countMessageTokens(model, message);
|
|
29
|
+
maxTokens -= numTokens;
|
|
30
|
+
|
|
31
|
+
if (maxTokens < 0) {
|
|
32
|
+
throw new Error("Not enough tokens for system message.");
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
let cutoff: boolean = false;
|
|
38
|
+
|
|
39
|
+
const reversedMessages = [...messages].reverse();
|
|
40
|
+
for (const message of reversedMessages) {
|
|
41
|
+
if (message.role === "system") {
|
|
42
|
+
result.unshift(message);
|
|
43
|
+
continue;
|
|
44
|
+
} else if (cutoff) {
|
|
45
|
+
continue;
|
|
46
|
+
}
|
|
47
|
+
let numTokens = countMessageTokens(model, message);
|
|
48
|
+
if (maxTokens < numTokens) {
|
|
49
|
+
cutoff = true;
|
|
50
|
+
continue;
|
|
51
|
+
}
|
|
52
|
+
result.unshift(message);
|
|
53
|
+
maxTokens -= numTokens;
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
return result;
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
export function maxTokensForOpenAIModel(model: string): number {
|
|
60
|
+
return maxTokensByModel[model] || DEFAULT_MAX_TOKENS;
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
const DEFAULT_MAX_TOKENS = 128000;
|
|
64
|
+
|
|
65
|
+
const maxTokensByModel: { [key: string]: number } = {
|
|
66
|
+
// GPT-4
|
|
67
|
+
"gpt-4o": 128000,
|
|
68
|
+
"gpt-4o-2024-05-13": 128000,
|
|
69
|
+
"gpt-4-turbo": 128000,
|
|
70
|
+
"gpt-4-turbo-2024-04-09": 128000,
|
|
71
|
+
"gpt-4-0125-preview": 128000,
|
|
72
|
+
"gpt-4-turbo-preview": 128000,
|
|
73
|
+
"gpt-4-1106-preview": 128000,
|
|
74
|
+
"gpt-4-vision-preview": 128000,
|
|
75
|
+
"gpt-4-1106-vision-preview": 128000,
|
|
76
|
+
"gpt-4-32k": 32768,
|
|
77
|
+
"gpt-4-32k-0613": 32768,
|
|
78
|
+
"gpt-4-32k-0314": 32768,
|
|
79
|
+
"gpt-4": 8192,
|
|
80
|
+
"gpt-4-0613": 8192,
|
|
81
|
+
"gpt-4-0314": 8192,
|
|
82
|
+
|
|
83
|
+
// GPT-3.5
|
|
84
|
+
"gpt-3.5-turbo-0125": 16385,
|
|
85
|
+
"gpt-3.5-turbo": 16385,
|
|
86
|
+
"gpt-3.5-turbo-1106": 16385,
|
|
87
|
+
"gpt-3.5-turbo-instruct": 4096,
|
|
88
|
+
"gpt-3.5-turbo-16k": 16385,
|
|
89
|
+
"gpt-3.5-turbo-0613": 4096,
|
|
90
|
+
"gpt-3.5-turbo-16k-0613": 16385,
|
|
91
|
+
"gpt-3.5-turbo-0301": 4097,
|
|
92
|
+
};
|
|
93
|
+
|
|
94
|
+
function countToolsTokens(model: string, tools: any[]): number {
|
|
95
|
+
if (tools.length === 0) {
|
|
96
|
+
return 0;
|
|
97
|
+
}
|
|
98
|
+
const json = JSON.stringify(tools);
|
|
99
|
+
return countTokens(model, json);
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
function countMessageTokens(model: string, message: any): number {
|
|
103
|
+
return countTokens(model, message.content || "");
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
function countTokens(model: string, text: string): number {
|
|
107
|
+
let enc: Tiktoken;
|
|
108
|
+
try {
|
|
109
|
+
enc = encodingForModel(model as TiktokenModel);
|
|
110
|
+
} catch (e) {
|
|
111
|
+
enc = encodingForModel("gpt-4");
|
|
112
|
+
}
|
|
113
|
+
return enc.encode(text).length;
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
export function convertActionInputToOpenAITool(action: ActionInput): ChatCompletionTool {
|
|
117
|
+
return {
|
|
118
|
+
type: "function",
|
|
119
|
+
function: {
|
|
120
|
+
name: action.name,
|
|
121
|
+
description: action.description,
|
|
122
|
+
parameters: JSON.parse(action.jsonSchema),
|
|
123
|
+
},
|
|
124
|
+
};
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
export function convertMessageToOpenAIMessage(message: Message): ChatCompletionMessageParam {
|
|
128
|
+
if (message instanceof TextMessage) {
|
|
129
|
+
return {
|
|
130
|
+
role: message.role,
|
|
131
|
+
content: message.content,
|
|
132
|
+
};
|
|
133
|
+
} else if (message instanceof ActionExecutionMessage) {
|
|
134
|
+
return {
|
|
135
|
+
role: "assistant",
|
|
136
|
+
tool_calls: [
|
|
137
|
+
{
|
|
138
|
+
id: message.id,
|
|
139
|
+
type: "function",
|
|
140
|
+
function: {
|
|
141
|
+
name: message.name,
|
|
142
|
+
arguments: JSON.stringify(message.arguments),
|
|
143
|
+
},
|
|
144
|
+
},
|
|
145
|
+
],
|
|
146
|
+
};
|
|
147
|
+
} else if (message instanceof ResultMessage) {
|
|
148
|
+
return {
|
|
149
|
+
role: "tool",
|
|
150
|
+
content: message.result,
|
|
151
|
+
tool_call_id: message.actionExecutionId,
|
|
152
|
+
};
|
|
153
|
+
}
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
export function convertSystemMessageToAssistantAPI(message: ChatCompletionMessageParam) {
|
|
157
|
+
return {
|
|
158
|
+
...message,
|
|
159
|
+
...(message.role === "system" && {
|
|
160
|
+
role: "assistant",
|
|
161
|
+
content: "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content,
|
|
162
|
+
}),
|
|
163
|
+
};
|
|
164
|
+
}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import { Message } from "../graphql/types/converted";
|
|
2
|
+
import { RuntimeEventSource } from "./events";
|
|
3
|
+
import { ActionInput } from "../graphql/inputs/action.input";
|
|
4
|
+
|
|
5
|
+
/**
 * A low-level response: a raw body stream plus optional HTTP headers.
 */
export interface CopilotKitResponse {
  /** The body to stream back to the client. */
  stream: ReadableStream;
  /** Optional HTTP headers to send with the response. */
  headers?: Record<string, string>;
}

/**
 * The request handed to a service adapter's `process` method.
 */
export interface CopilotRuntimeChatCompletionRequest {
  /** Sink the adapter pushes runtime events (text/action deltas) into. */
  eventSource: RuntimeEventSource;
  /** The conversation history, in chronological order. */
  messages: Message[];
  /** Actions the model may invoke as tools. */
  actions: ActionInput[];
  /** Optional model name override; semantics are adapter-specific. */
  model?: string;
  /** Existing conversation thread id, if any (adapter-specific semantics). */
  threadId?: string;
  /** Id of an in-flight run to continue, if any (adapter-specific). */
  runId?: string;
}

/**
 * The result of a `process` call.
 */
export interface CopilotRuntimeChatCompletionResponse {
  /** Optional raw response stream, when the adapter produces one. */
  stream?: ReadableStream;
  /** Thread id to report back to the client (may be newly created). */
  threadId?: string;
  /** Run id to report back to the client, when applicable. */
  runId?: string;
}

/**
 * Contract every CopilotKit service adapter implements: take one chat
 * completion request and deliver the provider's response through the
 * request's event source and/or the returned stream.
 */
export interface CopilotServiceAdapter {
  process(
    request: CopilotRuntimeChatCompletionRequest,
  ): Promise<CopilotRuntimeChatCompletionResponse>;
}
|
package/tsconfig.json
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
{
|
|
2
|
+
"extends": "tsconfig/base.json",
|
|
3
|
+
"compilerOptions": {
|
|
4
|
+
"lib": ["es2017", "dom"],
|
|
5
|
+
"emitDecoratorMetadata": true,
|
|
6
|
+
"experimentalDecorators": true,
|
|
7
|
+
"strict": false,
|
|
8
|
+
},
|
|
9
|
+
"include": ["./src/**/*.ts"],
|
|
10
|
+
"exclude": ["dist", "build", "node_modules", "**/*.test.ts", "**/*.test.tsx", "**/__tests__/*"]
|
|
11
|
+
}
|
package/tsup.config.ts
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { defineConfig, Options } from "tsup";

// Build configuration: bundle every src/**/index.ts entry point to both ESM
// and CJS, with type declarations and sourcemaps, cleaning dist first.
export default defineConfig((options: Options) => ({
  ...options,
  clean: true,
  entry: ["src/**/index.ts"],
  format: ["esm", "cjs"],
  dts: true,
  minify: false,
  external: [],
  sourcemap: true,
  // NOTE(review): `exclude` is not a documented tsup option — test files are
  // likely skipped only because the entry glob matches index.ts files.
  // Confirm against tsup's Options type.
  exclude: [
    "**/*.test.ts", // Exclude TypeScript test files
    "**/*.test.tsx", // Exclude TypeScript React test files
    "**/__tests__/*", // Exclude any files inside a __tests__ directory
  ],
}));
|
package/typedoc.json
ADDED