@copilotkit/runtime 1.50.0-beta.1 → 1.50.0-beta.3
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.d.ts +76 -286
- package/dist/index.js +306 -281
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +299 -270
- package/dist/index.mjs.map +1 -1
- package/dist/langgraph.d.ts +284 -0
- package/dist/langgraph.js +211 -0
- package/dist/langgraph.js.map +1 -0
- package/dist/langgraph.mjs +206 -0
- package/dist/langgraph.mjs.map +1 -0
- package/dist/v2/index.d.ts +1 -0
- package/dist/v2/index.js +7 -0
- package/dist/v2/index.js.map +1 -1
- package/dist/v2/index.mjs +1 -0
- package/dist/v2/index.mjs.map +1 -1
- package/package.json +55 -17
- package/src/graphql/message-conversion/agui-to-gql.test.ts +2 -2
- package/src/graphql/message-conversion/gql-to-agui.test.ts +30 -28
- package/src/graphql/message-conversion/roundtrip-conversion.test.ts +8 -8
- package/src/langgraph.ts +1 -0
- package/src/lib/index.ts +41 -1
- package/src/lib/integrations/nextjs/app-router.ts +3 -1
- package/src/lib/integrations/node-http/index.ts +132 -11
- package/src/lib/integrations/shared.ts +2 -2
- package/src/lib/runtime/agent-integrations/{langgraph.agent.ts → langgraph/agent.ts} +5 -30
- package/src/lib/runtime/agent-integrations/langgraph/consts.ts +34 -0
- package/src/lib/runtime/agent-integrations/langgraph/index.ts +2 -0
- package/src/lib/runtime/copilot-runtime.ts +25 -46
- package/src/service-adapters/anthropic/anthropic-adapter.ts +16 -3
- package/src/service-adapters/bedrock/bedrock-adapter.ts +4 -1
- package/src/service-adapters/experimental/ollama/ollama-adapter.ts +2 -1
- package/src/service-adapters/google/google-genai-adapter.ts +9 -4
- package/src/service-adapters/groq/groq-adapter.ts +16 -3
- package/src/service-adapters/langchain/langchain-adapter.ts +5 -3
- package/src/service-adapters/langchain/langserve.ts +2 -1
- package/src/service-adapters/openai/openai-adapter.ts +17 -3
- package/src/service-adapters/openai/openai-assistant-adapter.ts +26 -11
- package/src/service-adapters/unify/unify-adapter.ts +3 -1
- package/src/v2/index.ts +1 -0
- package/tsup.config.ts +5 -2
package/src/service-adapters/groq/groq-adapter.ts CHANGED

@@ -14,7 +14,7 @@
  * return new GroqAdapter({ groq, model: "<model-name>" });
  * ```
  */
-import { Groq } from "groq-sdk";
+import type { Groq } from "groq-sdk";
 import type { ChatCompletionMessageParam } from "groq-sdk/resources/chat";
 import {
   CopilotServiceAdapter,

@@ -67,13 +67,25 @@ export class GroqAdapter implements CopilotServiceAdapter {
   }

   constructor(params?: GroqAdapterParams) {
-
+    if (params?.groq) {
+      this._groq = params.groq;
+    }
+    // If no instance provided, we'll lazy-load in ensureGroq()
     if (params?.model) {
       this.model = params.model;
     }
     this.disableParallelToolCalls = params?.disableParallelToolCalls || false;
   }

+  private ensureGroq(): Groq {
+    if (!this._groq) {
+      // eslint-disable-next-line @typescript-eslint/no-var-requires
+      const { Groq } = require("groq-sdk");
+      this._groq = new Groq({});
+    }
+    return this._groq;
+  }
+
   async process(
     request: CopilotRuntimeChatCompletionRequest,
   ): Promise<CopilotRuntimeChatCompletionResponse> {

@@ -101,7 +113,8 @@ export class GroqAdapter implements CopilotServiceAdapter {
     }
     let stream;
     try {
-
+      const groq = this.ensureGroq();
+      stream = await groq.chat.completions.create({
         model: model,
         stream: true,
         messages: openaiMessages as unknown as ChatCompletionMessageParam[],
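The net effect for GroqAdapter is that `groq-sdk` becomes a lazily loaded dependency: the import is type-only, and a concrete client is only `require()`d inside `ensureGroq()` when no instance was injected. A minimal usage sketch, assuming `GroqAdapter` is imported from `@copilotkit/runtime` and that the zero-argument `new Groq()` picks up `GROQ_API_KEY` from the environment (the SDK's own default, not something added by this diff):

```ts
import { Groq } from "groq-sdk";
import { GroqAdapter } from "@copilotkit/runtime";

// Inject a configured client (mirrors the JSDoc example shown in the diff).
const withClient = new GroqAdapter({ groq: new Groq(), model: "<model-name>" });

// Omit the client: groq-sdk is only require()d inside ensureGroq() on the
// first process() call, so the SDK stays optional until actually used.
const lazy = new GroqAdapter({ model: "<model-name>" });
```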
package/src/service-adapters/langchain/langchain-adapter.ts CHANGED

@@ -31,7 +31,7 @@
  * - A LangChain `AIMessage` object
  */

-import { BaseMessage } from "@langchain/core/messages";
+import type { BaseMessage } from "@langchain/core/messages";
 import { CopilotServiceAdapter } from "../service-adapter";
 import {
   CopilotRuntimeChatCompletionRequest,

@@ -42,10 +42,9 @@ import {
   convertMessageToLangChainMessage,
   streamLangChainResponse,
 } from "./utils";
-import { DynamicStructuredTool } from "@langchain/core/tools";
+import type { DynamicStructuredTool } from "@langchain/core/tools";
 import { LangChainReturnType } from "./types";
 import { randomUUID } from "@copilotkit/shared";
-import { awaitAllCallbacks } from "@langchain/core/callbacks/promises";

 interface ChainFnParameters {
   model: string;

@@ -103,6 +102,9 @@ export class LangChainAdapter implements CopilotServiceAdapter {
         threadId,
       };
     } finally {
+      // Lazy require for optional peer dependency
+      // eslint-disable-next-line @typescript-eslint/no-var-requires
+      const { awaitAllCallbacks } = require("@langchain/core/callbacks/promises");
       await awaitAllCallbacks();
     }
   }
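LangChainAdapter applies the same treatment to `@langchain/core`: type-only imports plus a `require()` inside the `finally` block, so the peer dependency is only resolved once a request has actually run through the adapter. A generic sketch of the pattern; the `lazyRequire` and `flushLangChainCallbacks` helpers are illustrative and not part of the package:

```ts
// Illustrative helper showing the lazy-require pattern used across these adapters.
function lazyRequire<T = unknown>(moduleId: string): T {
  // eslint-disable-next-line @typescript-eslint/no-var-requires
  return require(moduleId) as T;
}

// Typical call site: resolve the optional peer dependency only on the code
// path that needs it, e.g. flushing LangChain callbacks after a completion.
async function flushLangChainCallbacks(): Promise<void> {
  const { awaitAllCallbacks } =
    lazyRequire<typeof import("@langchain/core/callbacks/promises")>(
      "@langchain/core/callbacks/promises",
    );
  await awaitAllCallbacks();
}
```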
package/src/service-adapters/langchain/langserve.ts CHANGED

@@ -1,5 +1,4 @@
 import { Parameter, Action } from "@copilotkit/shared";
-import { RemoteRunnable } from "langchain/runnables/remote";

 export interface RemoteChainParameters {
   name: string;

@@ -34,6 +33,8 @@ export class RemoteChain {
       description: this.description,
       parameters: this.parameters!,
       handler: async (args: any) => {
+        // eslint-disable-next-line @typescript-eslint/no-var-requires
+        const { RemoteRunnable } = require("langchain/runnables/remote");
         const runnable = new RemoteRunnable({ url: this.chainUrl });
         let input: any;
         if (this.parameterType === "single") {
package/src/service-adapters/openai/openai-adapter.ts CHANGED

@@ -48,7 +48,7 @@
  * return new OpenAIAdapter({ openai });
  * ```
  */
-import OpenAI from "openai";
+import type OpenAI from "openai";
 import {
   CopilotServiceAdapter,
   CopilotRuntimeChatCompletionRequest,

@@ -111,7 +111,11 @@ export class OpenAIAdapter implements CopilotServiceAdapter {
   }

   constructor(params?: OpenAIAdapterParams) {
-
+    if (params?.openai) {
+      this._openai = params.openai;
+    }
+    // If no instance provided, we'll lazy-load in ensureOpenAI()
+
     if (params?.model) {
       this.model = params.model;
     }

@@ -119,6 +123,15 @@ export class OpenAIAdapter implements CopilotServiceAdapter {
     this.keepSystemRole = params?.keepSystemRole ?? false;
   }

+  private ensureOpenAI(): OpenAI {
+    if (!this._openai) {
+      // eslint-disable-next-line @typescript-eslint/no-var-requires
+      const OpenAI = require("openai").default;
+      this._openai = new OpenAI();
+    }
+    return this._openai;
+  }
+
   async process(
     request: CopilotRuntimeChatCompletionRequest,
   ): Promise<CopilotRuntimeChatCompletionResponse> {

@@ -174,7 +187,8 @@ export class OpenAIAdapter implements CopilotServiceAdapter {
     }

     try {
-      const
+      const openai = this.ensureOpenAI();
+      const stream = openai.beta.chat.completions.stream({
         model: model,
         stream: true,
         messages: openaiMessages,
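OpenAIAdapter follows the same shape: the `openai` import is type-only, the injected client lives in `_openai`, and `ensureOpenAI()` requires the SDK and takes its default export only when nothing was injected. A usage sketch, assuming `OpenAIAdapter` is imported from `@copilotkit/runtime`; the zero-argument `new OpenAIAdapter()` relying on the SDK's environment-based defaults is an inference from the `ensureOpenAI()` fallback above:

```ts
import OpenAI from "openai";
import { OpenAIAdapter } from "@copilotkit/runtime";

// Inject a pre-configured client...
const withClient = new OpenAIAdapter({
  openai: new OpenAI({ apiKey: process.env.OPENAI_API_KEY }),
});

// ...or construct with no client at all: "openai" is only require()d when
// ensureOpenAI() runs inside process(), keeping it an optional dependency.
const lazy = new OpenAIAdapter();
```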
package/src/service-adapters/openai/openai-assistant-adapter.ts CHANGED

@@ -22,7 +22,10 @@
  * });
  * ```
  */
-import OpenAI from "openai";
+import type OpenAI from "openai";
+import type { RunSubmitToolOutputsStreamParams } from "openai/resources/beta/threads/runs/runs";
+import type { AssistantStream } from "openai/lib/AssistantStream";
+import type { AssistantStreamEvent, AssistantTool } from "openai/resources/beta/assistants";
 import {
   CopilotServiceAdapter,
   CopilotRuntimeChatCompletionRequest,

@@ -34,11 +37,8 @@ import {
   convertMessageToOpenAIMessage,
   convertSystemMessageToAssistantAPI,
 } from "./utils";
-import { RunSubmitToolOutputsStreamParams } from "openai/resources/beta/threads/runs/runs";
-import { AssistantStream } from "openai/lib/AssistantStream";
 import { RuntimeEventSource } from "../events";
 import { ActionInput } from "../../graphql/inputs/action.input";
-import { AssistantStreamEvent, AssistantTool } from "openai/resources/beta/assistants";
 import { ForwardedParametersInput } from "../../graphql/inputs/forwarded-parameters.input";

 export interface OpenAIAssistantAdapterParams {

@@ -84,7 +84,7 @@ export interface OpenAIAssistantAdapterParams {
 }

 export class OpenAIAssistantAdapter implements CopilotServiceAdapter {
-  private
+  private _openai: OpenAI;
   private codeInterpreterEnabled: boolean;
   private assistantId: string;
   private fileSearchEnabled: boolean;

@@ -96,7 +96,10 @@ export class OpenAIAssistantAdapter implements CopilotServiceAdapter {
   }

   constructor(params: OpenAIAssistantAdapterParams) {
-
+    if (params.openai) {
+      this._openai = params.openai;
+    }
+    // If no instance provided, we'll lazy-load in ensureOpenAI()
     this.codeInterpreterEnabled = params.codeInterpreterEnabled === false || true;
     this.fileSearchEnabled = params.fileSearchEnabled === false || true;
     this.assistantId = params.assistantId;

@@ -104,6 +107,15 @@ export class OpenAIAssistantAdapter implements CopilotServiceAdapter {
     this.keepSystemRole = params?.keepSystemRole ?? false;
   }

+  private ensureOpenAI(): OpenAI {
+    if (!this._openai) {
+      // eslint-disable-next-line @typescript-eslint/no-var-requires
+      const OpenAI = require("openai").default;
+      this._openai = new OpenAI({});
+    }
+    return this._openai;
+  }
+
   async process(
     request: CopilotRuntimeChatCompletionRequest,
   ): Promise<CopilotRuntimeChatCompletionResponse> {

@@ -111,9 +123,10 @@ export class OpenAIAssistantAdapter implements CopilotServiceAdapter {

     // if we don't have a threadId, create a new thread
     let threadId = request.extensions?.openaiAssistantAPI?.threadId;
+    const openai = this.ensureOpenAI();

     if (!threadId) {
-      threadId = (await
+      threadId = (await openai.beta.threads.create()).id;
     }

     const lastMessage = messages.at(-1);

@@ -158,7 +171,8 @@ export class OpenAIAssistantAdapter implements CopilotServiceAdapter {
     messages: Message[],
     eventSource: RuntimeEventSource,
   ) {
-
+    const openai = this.ensureOpenAI();
+    let run = await openai.beta.threads.runs.retrieve(threadId, runId);

     if (!run.required_action) {
       throw new Error("No tool outputs required");

@@ -188,7 +202,7 @@ export class OpenAIAssistantAdapter implements CopilotServiceAdapter {
       },
     );

-    const stream =
+    const stream = openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, {
       tool_outputs: toolOutputs,
       ...(this.disableParallelToolCalls && { parallel_tool_calls: false }),
     });

@@ -204,6 +218,7 @@ export class OpenAIAssistantAdapter implements CopilotServiceAdapter {
     eventSource: RuntimeEventSource,
     forwardedParameters: ForwardedParametersInput,
   ) {
+    const openai = this.ensureOpenAI();
     messages = [...messages];

     // get the instruction message

@@ -220,7 +235,7 @@ export class OpenAIAssistantAdapter implements CopilotServiceAdapter {
       throw new Error("No user message found");
     }

-    await
+    await openai.beta.threads.messages.create(threadId, {
       role: "user",
       content: userMessage.content,
     });

@@ -233,7 +248,7 @@ export class OpenAIAssistantAdapter implements CopilotServiceAdapter {
       ...(this.fileSearchEnabled ? [{ type: "file_search" } as AssistantTool] : []),
     ];

-    let stream =
+    let stream = openai.beta.threads.runs.stream(threadId, {
       assistant_id: this.assistantId,
       instructions,
       tools: tools,
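For the Assistants adapter, the same `ensureOpenAI()` is threaded through every method that touches the API (thread creation, run retrieval, tool-output submission, message creation, run streaming). A construction sketch based only on the parameters visible in this diff, assuming `OpenAIAssistantAdapter` is imported from `@copilotkit/runtime`; the assistant id is a placeholder:

```ts
import { OpenAIAssistantAdapter } from "@copilotkit/runtime";

const adapter = new OpenAIAssistantAdapter({
  assistantId: "asst_xxx", // placeholder for your Assistant's id
  // openai is optional: when omitted, the client is created lazily via ensureOpenAI()
});
```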
package/src/service-adapters/unify/unify-adapter.ts CHANGED

@@ -22,7 +22,6 @@ import {
   CopilotRuntimeChatCompletionResponse,
   CopilotServiceAdapter,
 } from "../service-adapter";
-import OpenAI from "openai";
 import { randomId, randomUUID } from "@copilotkit/shared";
 import { convertActionInputToOpenAITool, convertMessageToOpenAIMessage } from "../openai/utils";

@@ -55,6 +54,9 @@ export class UnifyAdapter implements CopilotServiceAdapter {
     request: CopilotRuntimeChatCompletionRequest,
   ): Promise<CopilotRuntimeChatCompletionResponse> {
     const tools = request.actions.map(convertActionInputToOpenAITool);
+    // Lazy require for optional peer dependency
+    // eslint-disable-next-line @typescript-eslint/no-var-requires
+    const OpenAI = require("openai").default;
     const openai = new OpenAI({
       apiKey: this.apiKey,
       baseURL: "https://api.unify.ai/v0/",
package/src/v2/index.ts CHANGED

package/tsup.config.ts CHANGED
@@ -2,11 +2,11 @@ import { defineConfig, Options } from "tsup";

 export default defineConfig((options: Options) => ({
   ...options,
-  entry: ["src/index.ts", "src/v2/index.ts"],
+  entry: ["src/index.ts", "src/v2/index.ts", "src/langgraph.ts"],
   format: ["esm", "cjs"],
   dts: true,
   minify: false,
-  external: [],
+  external: ["@ag-ui/langgraph"],
   sourcemap: true,
   exclude: [
     "**/*.test.ts", // Exclude TypeScript test files

@@ -14,4 +14,7 @@ export default defineConfig((options: Options) => ({
     "**/__tests__/*", // Exclude any files inside a __tests__ directory
   ],
   treeshake: true,
+  // Disable code splitting so each entry point is fully independent
+  // This prevents @ag-ui/langgraph from being pulled into index.mjs via shared chunks
+  splitting: false,
 }));
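The tsup change is what keeps the new `src/langgraph.ts` entry from dragging `@ag-ui/langgraph` into the main bundle: the dependency is declared external, and `splitting: false` stops tsup from hoisting shared code into chunks that `index.mjs` would then import. A minimal standalone sketch of the same setup for a hypothetical package; the entry names and the peer dependency are placeholders, not values from this release:

```ts
import { defineConfig } from "tsup";

export default defineConfig({
  // Hypothetical entries: the optional integration gets its own entry point.
  entry: ["src/index.ts", "src/heavy-integration.ts"],
  format: ["esm", "cjs"],
  dts: true,
  // Never bundle the optional peer dependency; leave its import in the output.
  external: ["some-optional-peer"],
  // No shared chunks: importing the main entry never touches the integration code.
  splitting: false,
});
```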