@copilotkit/runtime 0.37.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.js +7 -0
- package/.turbo/turbo-build.log +70 -0
- package/CHANGELOG.md +1 -0
- package/__snapshots__/schema/schema.graphql +178 -0
- package/dist/chunk-2CCVVJDU.mjs +56 -0
- package/dist/chunk-2CCVVJDU.mjs.map +1 -0
- package/dist/chunk-4UA4RB4C.mjs +185 -0
- package/dist/chunk-4UA4RB4C.mjs.map +1 -0
- package/dist/chunk-5HGYI6EG.mjs +678 -0
- package/dist/chunk-5HGYI6EG.mjs.map +1 -0
- package/dist/chunk-7IFP53C6.mjs +169 -0
- package/dist/chunk-7IFP53C6.mjs.map +1 -0
- package/dist/chunk-BLTAUVRP.mjs +30 -0
- package/dist/chunk-BLTAUVRP.mjs.map +1 -0
- package/dist/chunk-NFCPM5AM.mjs +43 -0
- package/dist/chunk-NFCPM5AM.mjs.map +1 -0
- package/dist/chunk-XPAUPJMW.mjs +1051 -0
- package/dist/chunk-XPAUPJMW.mjs.map +1 -0
- package/dist/graphql/types/base/index.d.ts +6 -0
- package/dist/graphql/types/base/index.js +63 -0
- package/dist/graphql/types/base/index.js.map +1 -0
- package/dist/graphql/types/base/index.mjs +7 -0
- package/dist/graphql/types/base/index.mjs.map +1 -0
- package/dist/graphql/types/converted/index.d.ts +2 -0
- package/dist/graphql/types/converted/index.js +88 -0
- package/dist/graphql/types/converted/index.js.map +1 -0
- package/dist/graphql/types/converted/index.mjs +12 -0
- package/dist/graphql/types/converted/index.mjs.map +1 -0
- package/dist/index-aa091e3c.d.ts +49 -0
- package/dist/index-f0875df3.d.ts +197 -0
- package/dist/index.d.ts +15 -0
- package/dist/index.js +2171 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +49 -0
- package/dist/index.mjs.map +1 -0
- package/dist/langchain-adapter-9ce103f3.d.ts +200 -0
- package/dist/langserve-fd5066ee.d.ts +96 -0
- package/dist/lib/index.d.ts +15 -0
- package/dist/lib/index.js +2170 -0
- package/dist/lib/index.js.map +1 -0
- package/dist/lib/index.mjs +46 -0
- package/dist/lib/index.mjs.map +1 -0
- package/dist/lib/integrations/index.d.ts +9 -0
- package/dist/lib/integrations/index.js +1024 -0
- package/dist/lib/integrations/index.js.map +1 -0
- package/dist/lib/integrations/index.mjs +24 -0
- package/dist/lib/integrations/index.mjs.map +1 -0
- package/dist/lib/integrations/node-http/index.d.ts +8 -0
- package/dist/lib/integrations/node-http/index.js +969 -0
- package/dist/lib/integrations/node-http/index.js.map +1 -0
- package/dist/lib/integrations/node-http/index.mjs +10 -0
- package/dist/lib/integrations/node-http/index.mjs.map +1 -0
- package/dist/pages-router-b6bc6c60.d.ts +30 -0
- package/dist/service-adapters/index.d.ts +11 -0
- package/dist/service-adapters/index.js +912 -0
- package/dist/service-adapters/index.js.map +1 -0
- package/dist/service-adapters/index.mjs +18 -0
- package/dist/service-adapters/index.mjs.map +1 -0
- package/jest.config.js +5 -0
- package/package.json +63 -0
- package/scripts/generate-gql-schema.ts +13 -0
- package/src/graphql/inputs/action.input.ts +13 -0
- package/src/graphql/inputs/cloud-guardrails.input.ts +19 -0
- package/src/graphql/inputs/cloud.input.ts +8 -0
- package/src/graphql/inputs/context-property.input.ts +10 -0
- package/src/graphql/inputs/custom-property.input.ts +15 -0
- package/src/graphql/inputs/frontend.input.ts +11 -0
- package/src/graphql/inputs/generate-copilot-response.input.ts +22 -0
- package/src/graphql/inputs/message.input.ts +50 -0
- package/src/graphql/resolvers/copilot.resolver.ts +147 -0
- package/src/graphql/types/base/index.ts +10 -0
- package/src/graphql/types/converted/index.ts +29 -0
- package/src/graphql/types/copilot-response.type.ts +75 -0
- package/src/graphql/types/enums.ts +22 -0
- package/src/graphql/types/guardrails-result.type.ts +20 -0
- package/src/graphql/types/message-status.type.ts +40 -0
- package/src/graphql/types/response-status.type.ts +52 -0
- package/src/index.ts +2 -0
- package/src/lib/copilot-cloud.ts +63 -0
- package/src/lib/copilot-runtime.ts +261 -0
- package/src/lib/guardrails.ts +3 -0
- package/src/lib/index.ts +7 -0
- package/src/lib/integrations/index.ts +4 -0
- package/src/lib/integrations/nextjs/app-router.ts +29 -0
- package/src/lib/integrations/nextjs/pages-router.ts +36 -0
- package/src/lib/integrations/node-http/index.ts +23 -0
- package/src/lib/integrations/shared.ts +68 -0
- package/src/service-adapters/conversion.ts +47 -0
- package/src/service-adapters/events.ts +197 -0
- package/src/service-adapters/experimental/groq/groq-adapter.ts +124 -0
- package/src/service-adapters/experimental/ollama/ollama-adapter.ts +75 -0
- package/src/service-adapters/google/google-genai-adapter.ts +149 -0
- package/src/service-adapters/google/utils.ts +94 -0
- package/src/service-adapters/index.ts +6 -0
- package/src/service-adapters/langchain/langchain-adapter.ts +82 -0
- package/src/service-adapters/langchain/langserve.ts +81 -0
- package/src/service-adapters/langchain/types.ts +14 -0
- package/src/service-adapters/langchain/utils.ts +235 -0
- package/src/service-adapters/openai/openai-adapter.ts +142 -0
- package/src/service-adapters/openai/openai-assistant-adapter.ts +260 -0
- package/src/service-adapters/openai/utils.ts +164 -0
- package/src/service-adapters/service-adapter.ts +29 -0
- package/tsconfig.json +11 -0
- package/tsup.config.ts +17 -0
- package/typedoc.json +4 -0
package/src/service-adapters/google/utils.ts
@@ -0,0 +1,94 @@
import {
  ActionExecutionMessage,
  Message,
  ResultMessage,
  TextMessage,
} from "../../graphql/types/converted";
import { Tool } from "@google/generative-ai";
import { ActionInput } from "../../graphql/inputs/action.input";

export function convertMessageToGoogleGenAIMessage(message: Message) {
  if (message instanceof TextMessage) {
    const role = {
      user: "user",
      assistant: "model",
      system: "user",
    }[message.role];

    const text =
      message.role === "system"
        ? "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: " + message.content
        : message.content;

    return {
      role,
      parts: [{ text }],
    };
  } else if (message instanceof ActionExecutionMessage) {
    return {
      role: "model",
      parts: [
        {
          functionCall: {
            name: message.name,
            args: message.arguments,
          },
        },
      ],
    };
  } else if (message instanceof ResultMessage) {
    return {
      role: "model",
      parts: [
        {
          functionResponse: {
            name: message.actionName,
            response: {
              name: message.actionName,
              content: tryParseJson(message.result),
            },
          },
        },
      ],
    };
  }
}

export function transformActionToGoogleGenAITool(action: ActionInput): Tool {
  const name = action.name;
  const description = action.description;
  const parameters = JSON.parse(action.jsonSchema);

  const transformProperties = (props: any) => {
    for (const key in props) {
      if (props[key].type) {
        props[key].type = props[key].type.toUpperCase();
      }
      if (props[key].properties) {
        transformProperties(props[key].properties);
      }
    }
  };
  transformProperties(parameters);

  return {
    functionDeclarations: [
      {
        name,
        description,
        parameters,
      },
    ],
  };
}

function tryParseJson(str?: string) {
  if (!str) {
    return "";
  }
  try {
    return JSON.parse(str);
  } catch (e) {
    return str;
  }
}
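Editor's note, not part of the package diff: a minimal TypeScript sketch of the shapes convertMessageToGoogleGenAIMessage produces, derived from its branches above; the message values below are hypothetical.

// Hypothetical inputs; only the output shapes come from the converter above.
// TextMessage { role: "assistant", content: "Hi!" } becomes:
const textAsGenAI = { role: "model", parts: [{ text: "Hi!" }] };

// TextMessage { role: "system", content: "Answer briefly." } becomes a user message
// with an explicit prefix, since only "user" and "model" roles are sent to Gemini:
const systemAsGenAI = {
  role: "user",
  parts: [{ text: "THE FOLLOWING MESSAGE IS A SYSTEM MESSAGE: Answer briefly." }],
};

// ActionExecutionMessage { name: "getWeather", arguments: { city: "Berlin" } } becomes:
const actionAsGenAI = {
  role: "model",
  parts: [{ functionCall: { name: "getWeather", args: { city: "Berlin" } } }],
};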
package/src/service-adapters/index.ts
@@ -0,0 +1,6 @@
export type { CopilotServiceAdapter } from "./service-adapter";
export { OpenAIAdapter } from "./openai/openai-adapter";
export { OpenAIAssistantAdapter } from "./openai/openai-assistant-adapter";
export { GoogleGenerativeAIAdapter } from "./google/google-genai-adapter";
export { LangChainAdapter } from "./langchain/langchain-adapter";
export { RemoteChain } from "./langchain/langserve";
package/src/service-adapters/langchain/langchain-adapter.ts
@@ -0,0 +1,82 @@
/**
 * CopilotKit Adapter for LangChain
 *
 * Use this adapter to use LangChain as a backend.
 *
 * ```typescript
 * return copilotKit.response(
 *   req,
 *   new LangChainAdapter(async (forwardedProps) => {
 *     const model = new ChatOpenAI({ modelName: "gpt-4o" });
 *     return model.stream(forwardedProps.messages, {
 *       tools: forwardedProps.tools,
 *     });
 *   })
 * );
 * ```
 * The async handler function can return:
 *
 * - a simple `string` response
 * - a LangChain stream `IterableReadableStream`
 * - a LangChain `BaseMessageChunk` object
 * - a LangChain `AIMessage` object
 */

import { BaseMessage } from "@langchain/core/messages";
import { CopilotServiceAdapter } from "../service-adapter";
import {
  CopilotRuntimeChatCompletionRequest,
  CopilotRuntimeChatCompletionResponse,
} from "../service-adapter";
import {
  convertActionInputToLangChainTool,
  convertMessageToLangChainMessage,
  streamLangChainResponse,
} from "./utils";
import { DynamicStructuredTool } from "@langchain/core/tools";
import { LangChainReturnType } from "./types";

interface ChainFnParameters {
  model: string;
  messages: BaseMessage[];
  tools: DynamicStructuredTool[];
  threadId?: string;
  runId?: string;
}

interface LangChainAdapterOptions {
  chainFn: (parameters: ChainFnParameters) => Promise<LangChainReturnType>;
}

export class LangChainAdapter implements CopilotServiceAdapter {
  /**
   * To use LangChain as a backend, provide a handler function to the adapter with your custom LangChain logic.
   */
  constructor(private options: LangChainAdapterOptions) {}

  async process({
    eventSource,
    model,
    actions,
    messages,
    threadId,
    runId,
  }: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse> {
    const result = await this.options.chainFn({
      messages: messages.map(convertMessageToLangChainMessage),
      tools: actions.map(convertActionInputToLangChainTool),
      model,
      threadId,
      runId,
    });

    eventSource.stream(async (eventStream$) => {
      await streamLangChainResponse({
        result,
        eventStream$,
      });
    });

    return {};
  }
}
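Editor's note, not part of the package diff: the constructor above takes an options object with a `chainFn` property, whereas the docstring example passes the handler function directly. A hedged usage sketch matching the constructor signature as defined in this file; `ChatOpenAI` and the import path of `LangChainAdapter` are assumptions, not shown in the diff.

import { ChatOpenAI } from "@langchain/openai";
// Assumed import path; in this package the class lives at
// src/service-adapters/langchain/langchain-adapter.ts.
import { LangChainAdapter } from "@copilotkit/runtime";

const adapter = new LangChainAdapter({
  // chainFn receives the converted LangChain messages and tools plus model/threadId/runId.
  chainFn: async ({ messages, tools }) => {
    const model = new ChatOpenAI({ modelName: "gpt-4o" });
    // Returning the stream lets streamLangChainResponse emit incremental events.
    return model.stream(messages, { tools });
  },
});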
package/src/service-adapters/langchain/langserve.ts
@@ -0,0 +1,81 @@
import { Parameter, Action } from "@copilotkit/shared";
import { RemoteRunnable } from "langchain/runnables/remote";

export interface RemoteChain {
  name: string;
  description: string;
  chainUrl: string;
  parameters?: Parameter[];
  parameterType: "single" | "multi";
}

export class RemoteChain implements RemoteChain {
  constructor(options: RemoteChain) {
    this.name = options.name;
    this.description = options.description;
    this.chainUrl = options.chainUrl;
    this.parameters = options.parameters;
    this.parameterType = options.parameterType || "multi";
  }

  async toAction(): Promise<Action<any>> {
    if (!this.parameters) {
      await this.inferLangServeParameters();
    }

    return {
      name: this.name,
      description: this.description,
      parameters: this.parameters!,
      handler: async (args: any) => {
        const runnable = new RemoteRunnable({ url: this.chainUrl });
        let input: any;
        if (this.parameterType === "single") {
          input = args[Object.keys(args)[0]];
        } else {
          input = args;
        }
        return await runnable.invoke(input);
      },
    };
  }

  async inferLangServeParameters() {
    const supportedTypes = ["string", "number", "boolean"];

    let schemaUrl = this.chainUrl.replace(/\/+$/, "") + "/input_schema";
    let schema = await fetch(schemaUrl)
      .then((res) => res.json())
      .catch(() => {
        throw new Error("Failed to fetch langserve schema at " + schemaUrl);
      });
    // for now, don't use json schema, just do a simple conversion

    if (supportedTypes.includes(schema.type)) {
      this.parameterType = "single";
      this.parameters = [
        {
          name: "input",
          type: schema.type,
          description: "The input to the chain",
        },
      ];
    } else if (schema.type === "object") {
      this.parameterType = "multi";
      this.parameters = Object.keys(schema.properties).map((key) => {
        let property = schema.properties[key];
        if (!supportedTypes.includes(property.type)) {
          throw new Error("Unsupported schema type");
        }
        return {
          name: key,
          type: property.type,
          description: property.description || "",
          required: schema.required?.includes(key) || false,
        };
      });
    } else {
      throw new Error("Unsupported schema type");
    }
  }
}
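Editor's note, not part of the package diff: a hedged sketch of how RemoteChain is intended to be used, based only on the class above; the chain name, URL, and input text are placeholders.

const chain = new RemoteChain({
  name: "summarize",
  description: "Summarizes a document via a LangServe endpoint",
  chainUrl: "http://localhost:8000/summarize",
  parameterType: "single",
});

// With no explicit parameters, toAction() fetches <chainUrl>/input_schema to infer them,
// then returns an Action whose handler wraps RemoteRunnable.invoke().
const action = await chain.toAction();
await action.handler({ input: "Long document text..." });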
package/src/service-adapters/langchain/types.ts
@@ -0,0 +1,14 @@
import { AIMessage, AIMessageChunk, BaseMessageChunk } from "@langchain/core/messages";
import {
  IterableReadableStream,
  IterableReadableStreamInterface,
} from "@langchain/core/utils/stream";

export type LangChainBaseMessageChunkStream = IterableReadableStream<BaseMessageChunk>;
export type LangChainAIMessageChunkStream = IterableReadableStreamInterface<AIMessageChunk>;
export type LangChainReturnType =
  | LangChainBaseMessageChunkStream
  | LangChainAIMessageChunkStream
  | BaseMessageChunk
  | string
  | AIMessage;
package/src/service-adapters/langchain/utils.ts
@@ -0,0 +1,235 @@
import {
  ActionExecutionMessage,
  Message,
  ResultMessage,
  TextMessage,
} from "../../graphql/types/converted";
import {
  AIMessage,
  AIMessageChunk,
  BaseMessage,
  BaseMessageChunk,
  HumanMessage,
  SystemMessage,
  ToolMessage,
} from "@langchain/core/messages";
import { DynamicStructuredTool } from "@langchain/core/tools";
import { z } from "zod";
import { ActionInput } from "../../graphql/inputs/action.input";
import { LangChainReturnType } from "./types";
import { RuntimeEventSubject } from "../events";
import { nanoid } from "nanoid";

export function convertMessageToLangChainMessage(message: Message): BaseMessage {
  if (message instanceof TextMessage) {
    if (message.role == "user") {
      return new HumanMessage(message.content);
    } else if (message.role == "assistant") {
      return new AIMessage(message.content);
    } else if (message.role === "system") {
      return new SystemMessage(message.content);
    }
  } else if (message instanceof ActionExecutionMessage) {
    return new AIMessage({
      content: "",
      tool_calls: [
        {
          id: message.id,
          args: message.arguments,
          name: message.name,
        },
      ],
    });
  } else if (message instanceof ResultMessage) {
    return new ToolMessage({
      content: message.result,
      tool_call_id: message.actionExecutionId,
    });
  }
}

export function convertJsonSchemaToZodSchema(jsonSchema: any, required: boolean): z.ZodSchema {
  if (jsonSchema.type === "object") {
    const spec: { [key: string]: z.ZodSchema } = {};
    for (const [key, value] of Object.entries(jsonSchema.properties)) {
      spec[key] = convertJsonSchemaToZodSchema(
        value,
        jsonSchema.required ? jsonSchema.required.includes(key) : false,
      );
    }
    let schema = z.object(spec);
    return !required ? schema.optional() : schema;
  } else if (jsonSchema.type === "string") {
    let schema = z.string().describe(jsonSchema.description);
    return !required ? schema.optional() : schema;
  } else if (jsonSchema.type === "number") {
    let schema = z.number().describe(jsonSchema.description);
    return !required ? schema.optional() : schema;
  } else if (jsonSchema.type === "boolean") {
    let schema = z.boolean().describe(jsonSchema.description);
    return !required ? schema.optional() : schema;
  } else if (jsonSchema.type === "array") {
    let itemSchema = convertJsonSchemaToZodSchema(jsonSchema.items, false);
    let schema = z.array(itemSchema);
    return !required ? schema.optional() : schema;
  }
}

export function convertActionInputToLangChainTool(actionInput: ActionInput): DynamicStructuredTool {
  return new DynamicStructuredTool({
    name: actionInput.name,
    description: actionInput.description,
    schema: convertJsonSchemaToZodSchema(
      JSON.parse(actionInput.jsonSchema),
      true,
    ) as z.ZodObject<any>,
    func: async () => {
      return "";
    },
  });
}

interface StreamLangChainResponseParams {
  result: LangChainReturnType;
  eventStream$: RuntimeEventSubject;
  actionExecution?: {
    id: string;
    name: string;
  };
}

export async function streamLangChainResponse({
  result,
  eventStream$,
  actionExecution,
}: StreamLangChainResponseParams) {
  // We support several types of return values from LangChain functions:

  // 1. string

  if (typeof result === "string") {
    if (!actionExecution) {
      // Just send one chunk with the string as the content.
      eventStream$.sendTextMessage(nanoid(), result);
    } else {
      // Send as a result
      eventStream$.sendActionExecutionResult(actionExecution.id, actionExecution.name, result);
    }
  }

  // 2. AIMessage
  // Send the content and function call of the AIMessage as the content of the chunk.
  // else if ("content" in result && typeof result.content === "string") {
  else if (result instanceof AIMessage) {
    if (result.content) {
      eventStream$.sendTextMessage(nanoid(), result.content as string);
    }
    for (const toolCall of result.tool_calls) {
      eventStream$.sendActionExecution(
        toolCall.id || nanoid(),
        toolCall.name,
        JSON.stringify(toolCall.args),
      );
    }
  }

  // 3. BaseMessageChunk
  // Send the content and function call of the AIMessage as the content of the chunk.
  else if (result instanceof BaseMessageChunk) {
    if (result.lc_kwargs?.content) {
      eventStream$.sendTextMessage(nanoid(), result.content as string);
    }
    if (result.lc_kwargs?.tool_calls) {
      for (const toolCall of result.lc_kwargs?.tool_calls) {
        eventStream$.sendActionExecution(
          toolCall.id || nanoid(),
          toolCall.name,
          JSON.stringify(toolCall.args),
        );
      }
    }
  }

  // 4. IterableReadableStream
  // Stream the result of the LangChain function.
  else if ("getReader" in result) {
    let reader = result.getReader();

    let mode: "function" | "message" | null = null;

    while (true) {
      try {
        const { done, value } = await reader.read();

        let toolCallName: string | undefined = undefined;
        let toolCallId: string | undefined = undefined;
        let toolCallArgs: string | undefined = undefined;
        let hasToolCall: boolean = false;
        let content = value.content as string;

        if (value instanceof AIMessageChunk) {
          let chunk = value.tool_call_chunks?.[0];
          toolCallName = chunk?.name;
          toolCallId = chunk?.id;
          toolCallArgs = chunk?.args;
          hasToolCall = chunk != undefined;
        } else if (value instanceof BaseMessageChunk) {
          let chunk = value.additional_kwargs?.tool_calls?.[0];
          toolCallName = chunk?.function?.name;
          toolCallId = chunk?.id;
          toolCallArgs = chunk?.function?.arguments;
          hasToolCall = chunk?.function != undefined;
        }

        // When switching from message to function or vice versa,
        // send the respective end event.
        // If toolCallName is defined, it means a new tool call starts.
        if (mode === "message" && (toolCallId || done)) {
          mode = null;
          eventStream$.sendTextMessageEnd();
        } else if (mode === "function" && (!hasToolCall || done)) {
          mode = null;
          eventStream$.sendActionExecutionEnd();
        }

        if (done) {
          break;
        }

        // If we send a new message type, send the appropriate start event.
        if (mode === null) {
          if (hasToolCall) {
            mode = "function";
            eventStream$.sendActionExecutionStart(toolCallId, toolCallName);
          } else if (content) {
            mode = "message";
            eventStream$.sendTextMessageStart(nanoid());
          }
        }

        // send the content events
        if (mode === "message" && content) {
          eventStream$.sendTextMessageContent(content);
        } else if (mode === "function" && toolCallArgs) {
          eventStream$.sendActionExecutionArgs(toolCallArgs);
        }
      } catch (error) {
        console.error("Error reading from stream", error);
        break;
      }
    }
  } else if (actionExecution) {
    eventStream$.sendActionExecutionResult(
      actionExecution.id,
      actionExecution.name,
      JSON.stringify(result),
    );
  }

  // unsupported type
  else {
    throw new Error("Invalid return type from LangChain function.");
  }

  eventStream$.complete();
}
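Editor's note, not part of the package diff: a small standalone sketch of the JSON Schema to zod conversion performed by convertJsonSchemaToZodSchema above; the weather schema below is hypothetical.

import { z } from "zod";

// A hypothetical action schema, as it would arrive in ActionInput.jsonSchema:
const jsonSchema = {
  type: "object",
  properties: {
    city: { type: "string", description: "City name" },
    days: { type: "number", description: "Forecast horizon in days" },
  },
  required: ["city"],
};

// convertJsonSchemaToZodSchema(jsonSchema, true) builds roughly this zod shape:
// required properties stay as-is, optional ones get .optional().
const equivalent = z.object({
  city: z.string().describe("City name"),
  days: z.number().describe("Forecast horizon in days").optional(),
});

equivalent.parse({ city: "Berlin" }); // passes; "days" may be omitted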
package/src/service-adapters/openai/openai-adapter.ts
@@ -0,0 +1,142 @@
/**
 * CopilotRuntime Adapter for OpenAI.
 *
 * <RequestExample>
 * ```jsx CopilotRuntime Example
 * const copilotKit = new CopilotRuntime();
 * return copilotKit.response(req, new OpenAIAdapter());
 * ```
 * </RequestExample>
 *
 * You can easily set the model to use by passing it to the constructor.
 * ```jsx
 * const copilotKit = new CopilotRuntime();
 * return copilotKit.response(
 *   req,
 *   new OpenAIAdapter({ model: "gpt-4o" }),
 * );
 * ```
 *
 * To use your custom OpenAI instance, pass the `openai` property.
 * ```jsx
 * const openai = new OpenAI({
 *   organization: "your-organization-id",
 *   apiKey: "your-api-key"
 * });
 *
 * const copilotKit = new CopilotRuntime();
 * return copilotKit.response(
 *   req,
 *   new OpenAIAdapter({ openai }),
 * );
 * ```
 *
 */
import OpenAI from "openai";
import {
  CopilotServiceAdapter,
  CopilotRuntimeChatCompletionRequest,
  CopilotRuntimeChatCompletionResponse,
} from "../service-adapter";
import {
  convertActionInputToOpenAITool,
  convertMessageToOpenAIMessage,
  limitMessagesToTokenCount,
} from "./utils";

const DEFAULT_MODEL = "gpt-4o";

export interface OpenAIAdapterParams {
  /**
   * An optional OpenAI instance to use.
   */
  openai?: OpenAI;

  /**
   * The model to use.
   */
  model?: string;
}

export class OpenAIAdapter implements CopilotServiceAdapter {
  private model: string = DEFAULT_MODEL;

  private _openai: OpenAI;
  public get openai(): OpenAI {
    return this._openai;
  }

  constructor(params?: OpenAIAdapterParams) {
    this._openai = params?.openai || new OpenAI({});
    if (params?.model) {
      this.model = params.model;
    }
  }

  async process({
    model = this.model,
    messages,
    actions,
    eventSource,
  }: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse> {
    const tools = actions.map(convertActionInputToOpenAITool);

    let openaiMessages = messages.map(convertMessageToOpenAIMessage);
    openaiMessages = limitMessagesToTokenCount(openaiMessages, tools, model);

    const stream = this.openai.beta.chat.completions.stream({
      model: model,
      stream: true,
      messages: openaiMessages,
      ...(tools.length > 0 && { tools }),
    });

    eventSource.stream(async (eventStream$) => {
      let mode: "function" | "message" | null = null;
      for await (const chunk of stream) {
        const toolCall = chunk.choices[0].delta.tool_calls?.[0];
        const content = chunk.choices[0].delta.content;

        // When switching from message to function or vice versa,
        // send the respective end event.
        // If toolCall?.id is defined, it means a new tool call starts.
        if (mode === "message" && toolCall?.id) {
          mode = null;
          eventStream$.sendTextMessageEnd();
        } else if (mode === "function" && (toolCall === undefined || toolCall?.id)) {
          mode = null;
          eventStream$.sendActionExecutionEnd();
        }

        // If we send a new message type, send the appropriate start event.
        if (mode === null) {
          if (toolCall?.id) {
            mode = "function";
            eventStream$.sendActionExecutionStart(toolCall!.id, toolCall!.function!.name);
          } else if (content) {
            mode = "message";
            eventStream$.sendTextMessageStart(chunk.id);
          }
        }

        // send the content events
        if (mode === "message" && content) {
          eventStream$.sendTextMessageContent(content);
        } else if (mode === "function" && toolCall?.function?.arguments) {
          eventStream$.sendActionExecutionArgs(toolCall.function.arguments);
        }
      }

      // send the end events
      if (mode === "message") {
        eventStream$.sendTextMessageEnd();
      } else if (mode === "function") {
        eventStream$.sendActionExecutionEnd();
      }

      eventStream$.complete();
    });

    return {};
  }
}
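Editor's note, not part of the package diff: a hedged construction sketch based on OpenAIAdapterParams above; the import path of OpenAIAdapter is an assumption, not shown in the diff.

import OpenAI from "openai";
// Assumed import path for the adapter exported by this package.
import { OpenAIAdapter } from "@copilotkit/runtime";

// Both parameters are optional: with no arguments the adapter creates `new OpenAI({})`
// (which reads OPENAI_API_KEY from the environment) and falls back to DEFAULT_MODEL.
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
const adapter = new OpenAIAdapter({ openai, model: "gpt-4o" });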