@copilotkit/runtime 0.37.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.js +7 -0
- package/.turbo/turbo-build.log +70 -0
- package/CHANGELOG.md +1 -0
- package/__snapshots__/schema/schema.graphql +178 -0
- package/dist/chunk-2CCVVJDU.mjs +56 -0
- package/dist/chunk-2CCVVJDU.mjs.map +1 -0
- package/dist/chunk-4UA4RB4C.mjs +185 -0
- package/dist/chunk-4UA4RB4C.mjs.map +1 -0
- package/dist/chunk-5HGYI6EG.mjs +678 -0
- package/dist/chunk-5HGYI6EG.mjs.map +1 -0
- package/dist/chunk-7IFP53C6.mjs +169 -0
- package/dist/chunk-7IFP53C6.mjs.map +1 -0
- package/dist/chunk-BLTAUVRP.mjs +30 -0
- package/dist/chunk-BLTAUVRP.mjs.map +1 -0
- package/dist/chunk-NFCPM5AM.mjs +43 -0
- package/dist/chunk-NFCPM5AM.mjs.map +1 -0
- package/dist/chunk-XPAUPJMW.mjs +1051 -0
- package/dist/chunk-XPAUPJMW.mjs.map +1 -0
- package/dist/graphql/types/base/index.d.ts +6 -0
- package/dist/graphql/types/base/index.js +63 -0
- package/dist/graphql/types/base/index.js.map +1 -0
- package/dist/graphql/types/base/index.mjs +7 -0
- package/dist/graphql/types/base/index.mjs.map +1 -0
- package/dist/graphql/types/converted/index.d.ts +2 -0
- package/dist/graphql/types/converted/index.js +88 -0
- package/dist/graphql/types/converted/index.js.map +1 -0
- package/dist/graphql/types/converted/index.mjs +12 -0
- package/dist/graphql/types/converted/index.mjs.map +1 -0
- package/dist/index-aa091e3c.d.ts +49 -0
- package/dist/index-f0875df3.d.ts +197 -0
- package/dist/index.d.ts +15 -0
- package/dist/index.js +2171 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +49 -0
- package/dist/index.mjs.map +1 -0
- package/dist/langchain-adapter-9ce103f3.d.ts +200 -0
- package/dist/langserve-fd5066ee.d.ts +96 -0
- package/dist/lib/index.d.ts +15 -0
- package/dist/lib/index.js +2170 -0
- package/dist/lib/index.js.map +1 -0
- package/dist/lib/index.mjs +46 -0
- package/dist/lib/index.mjs.map +1 -0
- package/dist/lib/integrations/index.d.ts +9 -0
- package/dist/lib/integrations/index.js +1024 -0
- package/dist/lib/integrations/index.js.map +1 -0
- package/dist/lib/integrations/index.mjs +24 -0
- package/dist/lib/integrations/index.mjs.map +1 -0
- package/dist/lib/integrations/node-http/index.d.ts +8 -0
- package/dist/lib/integrations/node-http/index.js +969 -0
- package/dist/lib/integrations/node-http/index.js.map +1 -0
- package/dist/lib/integrations/node-http/index.mjs +10 -0
- package/dist/lib/integrations/node-http/index.mjs.map +1 -0
- package/dist/pages-router-b6bc6c60.d.ts +30 -0
- package/dist/service-adapters/index.d.ts +11 -0
- package/dist/service-adapters/index.js +912 -0
- package/dist/service-adapters/index.js.map +1 -0
- package/dist/service-adapters/index.mjs +18 -0
- package/dist/service-adapters/index.mjs.map +1 -0
- package/jest.config.js +5 -0
- package/package.json +63 -0
- package/scripts/generate-gql-schema.ts +13 -0
- package/src/graphql/inputs/action.input.ts +13 -0
- package/src/graphql/inputs/cloud-guardrails.input.ts +19 -0
- package/src/graphql/inputs/cloud.input.ts +8 -0
- package/src/graphql/inputs/context-property.input.ts +10 -0
- package/src/graphql/inputs/custom-property.input.ts +15 -0
- package/src/graphql/inputs/frontend.input.ts +11 -0
- package/src/graphql/inputs/generate-copilot-response.input.ts +22 -0
- package/src/graphql/inputs/message.input.ts +50 -0
- package/src/graphql/resolvers/copilot.resolver.ts +147 -0
- package/src/graphql/types/base/index.ts +10 -0
- package/src/graphql/types/converted/index.ts +29 -0
- package/src/graphql/types/copilot-response.type.ts +75 -0
- package/src/graphql/types/enums.ts +22 -0
- package/src/graphql/types/guardrails-result.type.ts +20 -0
- package/src/graphql/types/message-status.type.ts +40 -0
- package/src/graphql/types/response-status.type.ts +52 -0
- package/src/index.ts +2 -0
- package/src/lib/copilot-cloud.ts +63 -0
- package/src/lib/copilot-runtime.ts +261 -0
- package/src/lib/guardrails.ts +3 -0
- package/src/lib/index.ts +7 -0
- package/src/lib/integrations/index.ts +4 -0
- package/src/lib/integrations/nextjs/app-router.ts +29 -0
- package/src/lib/integrations/nextjs/pages-router.ts +36 -0
- package/src/lib/integrations/node-http/index.ts +23 -0
- package/src/lib/integrations/shared.ts +68 -0
- package/src/service-adapters/conversion.ts +47 -0
- package/src/service-adapters/events.ts +197 -0
- package/src/service-adapters/experimental/groq/groq-adapter.ts +124 -0
- package/src/service-adapters/experimental/ollama/ollama-adapter.ts +75 -0
- package/src/service-adapters/google/google-genai-adapter.ts +149 -0
- package/src/service-adapters/google/utils.ts +94 -0
- package/src/service-adapters/index.ts +6 -0
- package/src/service-adapters/langchain/langchain-adapter.ts +82 -0
- package/src/service-adapters/langchain/langserve.ts +81 -0
- package/src/service-adapters/langchain/types.ts +14 -0
- package/src/service-adapters/langchain/utils.ts +235 -0
- package/src/service-adapters/openai/openai-adapter.ts +142 -0
- package/src/service-adapters/openai/openai-assistant-adapter.ts +260 -0
- package/src/service-adapters/openai/utils.ts +164 -0
- package/src/service-adapters/service-adapter.ts +29 -0
- package/tsconfig.json +11 -0
- package/tsup.config.ts +17 -0
- package/typedoc.json +4 -0
@@ -0,0 +1,68 @@
+import { YogaInitialContext } from "graphql-yoga";
+import { GuardrailsOptions } from "../guardrails";
+import { buildSchemaSync } from "type-graphql";
+import { CopilotResolver } from "../../graphql/resolvers/copilot.resolver";
+import { useDeferStream } from "@graphql-yoga/plugin-defer-stream";
+import { CopilotRuntime } from "../copilot-runtime";
+import { CopilotServiceAdapter } from "../../service-adapters";
+
+type AnyPrimitive = string | boolean | number | null;
+export type CopilotRequestContextProperties = Record<
+  string,
+  AnyPrimitive | Record<string, AnyPrimitive>
+>;
+
+type CopilotKitContext = {
+  runtime: CopilotRuntime;
+  serviceAdapter: CopilotServiceAdapter;
+  properties: CopilotRequestContextProperties;
+};
+
+export type GraphQLContext = YogaInitialContext & {
+  _copilotkit: CopilotKitContext;
+};
+
+export interface CreateCopilotRuntimeServerOptions {
+  runtime: CopilotRuntime;
+  serviceAdapter: CopilotServiceAdapter;
+  guardrails?: GuardrailsOptions;
+}
+
+export async function createContext(
+  initialContext: YogaInitialContext,
+  copilotKitContext: CopilotKitContext,
+): Promise<Partial<GraphQLContext>> {
+  const ctx: GraphQLContext = {
+    ...initialContext,
+    _copilotkit: {
+      ...copilotKitContext,
+    },
+  };
+
+  return ctx;
+}
+
+export function buildSchema(
+  options: {
+    emitSchemaFile?: string;
+  } = {},
+) {
+  const schema = buildSchemaSync({
+    resolvers: [CopilotResolver],
+    emitSchemaFile: options.emitSchemaFile,
+  });
+  return schema;
+}
+
+export function getCommonConfig(options?: CreateCopilotRuntimeServerOptions) {
+  return {
+    schema: buildSchema(),
+    plugins: [useDeferStream()],
+    context: (ctx: YogaInitialContext): Promise<Partial<GraphQLContext>> =>
+      createContext(ctx, {
+        runtime: options.runtime,
+        serviceAdapter: options.serviceAdapter,
+        properties: {},
+      }),
+  };
+}
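The hunk above matches package/src/lib/integrations/shared.ts (+68 lines) in the file list. A minimal sketch of how `getCommonConfig` could be spread into a graphql-yoga server follows; the `createYoga` wiring, the `OpenAIAdapter` import, and the endpoint path are assumptions for illustration, not code from this diff.

```typescript
// Hypothetical wiring sketch, not part of this diff: spread the common
// config (schema, defer/stream plugin, context factory) into graphql-yoga.
import { createYoga } from "graphql-yoga";
import { CopilotRuntime, OpenAIAdapter } from "@copilotkit/runtime"; // assumed exports
import { getCommonConfig } from "./shared"; // the module shown above

const yoga = createYoga({
  ...getCommonConfig({
    runtime: new CopilotRuntime(),
    serviceAdapter: new OpenAIAdapter(),
  }),
  graphqlEndpoint: "/api/copilotkit", // assumed endpoint path
});
```

Note that `getCommonConfig` declares `options` as optional but dereferences `options.runtime` unconditionally, so callers should always pass the options object.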
@@ -0,0 +1,47 @@
+import {
+  ActionExecutionMessage,
+  Message,
+  ResultMessage,
+  TextMessage,
+} from "../graphql/types/converted";
+import { MessageInput } from "../graphql/inputs/message.input";
+import { plainToInstance } from "class-transformer";
+
+export function convertGqlInputToMessages(inputMessages: MessageInput[]): Message[] {
+  const messages: Message[] = [];
+
+  for (const message of inputMessages) {
+    if (message.textMessage) {
+      messages.push(
+        plainToInstance(TextMessage, {
+          id: message.id,
+          createdAt: message.createdAt,
+          role: message.textMessage.role,
+          content: message.textMessage.content,
+        }),
+      );
+    } else if (message.actionExecutionMessage) {
+      messages.push(
+        plainToInstance(ActionExecutionMessage, {
+          id: message.id,
+          createdAt: message.createdAt,
+          name: message.actionExecutionMessage.name,
+          arguments: JSON.parse(message.actionExecutionMessage.arguments),
+          scope: message.actionExecutionMessage.scope,
+        }),
+      );
+    } else if (message.resultMessage) {
+      messages.push(
+        plainToInstance(ResultMessage, {
+          id: message.id,
+          createdAt: message.createdAt,
+          actionExecutionId: message.resultMessage.actionExecutionId,
+          actionName: message.resultMessage.actionName,
+          result: message.resultMessage.result,
+        }),
+      );
+    }
+  }
+
+  return messages;
+}
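The hunk above matches package/src/service-adapters/conversion.ts (+47 lines). A short sketch of the conversion, with a hand-written object standing in for the `MessageInput` GraphQL class (its exact field types are not shown in this diff, hence the cast):

```typescript
// Illustrative only: one TextMessage-shaped input converted into a class instance.
import { convertGqlInputToMessages } from "./conversion"; // the module shown above
import { TextMessage } from "../graphql/types/converted";

const [converted] = convertGqlInputToMessages([
  {
    id: "msg-1",
    createdAt: new Date(),
    textMessage: { role: "user", content: "Hello!" },
  } as any, // MessageInput is a class; a plain object is used here for brevity
]);

console.log(converted instanceof TextMessage); // true: plainToInstance builds the class
```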
@@ -0,0 +1,197 @@
+import { Action } from "@copilotkit/shared";
+import { of, concat, map, scan, concatMap, ReplaySubject } from "rxjs";
+import { streamLangChainResponse } from "./langchain/utils";
+
+export enum RuntimeEventTypes {
+  TextMessageStart = "TextMessageStart",
+  TextMessageContent = "TextMessageContent",
+  TextMessageEnd = "TextMessageEnd",
+  ActionExecutionStart = "ActionExecutionStart",
+  ActionExecutionArgs = "ActionExecutionArgs",
+  ActionExecutionEnd = "ActionExecutionEnd",
+  ActionExecutionResult = "ActionExecutionResult",
+}
+
+type FunctionCallScope = "client" | "server";
+
+export type RuntimeEvent =
+  | { type: RuntimeEventTypes.TextMessageStart; messageId: string }
+  | {
+      type: RuntimeEventTypes.TextMessageContent;
+      content: string;
+    }
+  | { type: RuntimeEventTypes.TextMessageEnd }
+  | {
+      type: RuntimeEventTypes.ActionExecutionStart;
+      actionExecutionId: string;
+      actionName: string;
+      scope?: FunctionCallScope;
+    }
+  | { type: RuntimeEventTypes.ActionExecutionArgs; args: string }
+  | { type: RuntimeEventTypes.ActionExecutionEnd }
+  | {
+      type: RuntimeEventTypes.ActionExecutionResult;
+      actionName: string;
+      actionExecutionId: string;
+      result: string;
+    };
+
+interface RuntimeEventWithState {
+  event: RuntimeEvent | null;
+  callActionServerSide: boolean;
+  action: Action<any> | null;
+  actionExecutionId: string | null;
+  args: string;
+}
+
+type EventSourceCallback = (eventStream$: RuntimeEventSubject) => Promise<void>;
+
+export class RuntimeEventSubject extends ReplaySubject<RuntimeEvent> {
+  constructor() {
+    super();
+  }
+
+  sendTextMessageStart(messageId: string) {
+    this.next({ type: RuntimeEventTypes.TextMessageStart, messageId });
+  }
+
+  sendTextMessageContent(content: string) {
+    this.next({ type: RuntimeEventTypes.TextMessageContent, content });
+  }
+
+  sendTextMessageEnd() {
+    this.next({ type: RuntimeEventTypes.TextMessageEnd });
+  }
+
+  sendTextMessage(messageId: string, content: string) {
+    this.sendTextMessageStart(messageId);
+    this.sendTextMessageContent(content);
+    this.sendTextMessageEnd();
+  }
+
+  sendActionExecutionStart(actionExecutionId: string, actionName: string) {
+    this.next({
+      type: RuntimeEventTypes.ActionExecutionStart,
+      actionExecutionId,
+      actionName,
+    });
+  }
+
+  sendActionExecutionArgs(args: string) {
+    this.next({ type: RuntimeEventTypes.ActionExecutionArgs, args });
+  }
+
+  sendActionExecutionEnd() {
+    this.next({ type: RuntimeEventTypes.ActionExecutionEnd });
+  }
+
+  sendActionExecution(actionExecutionId: string, toolName: string, args: string) {
+    this.sendActionExecutionStart(actionExecutionId, toolName);
+    this.sendActionExecutionArgs(args);
+    this.sendActionExecutionEnd();
+  }
+
+  sendActionExecutionResult(actionExecutionId: string, actionName: string, result: string) {
+    this.next({
+      type: RuntimeEventTypes.ActionExecutionResult,
+      actionName,
+      actionExecutionId,
+      result,
+    });
+  }
+}
+
+export class RuntimeEventSource {
+  private eventStream$ = new RuntimeEventSubject();
+  private callback!: EventSourceCallback;
+
+  async stream(callback: EventSourceCallback): Promise<void> {
+    this.callback = callback;
+  }
+
+  process(serversideActions: Action<any>[]) {
+    this.callback(this.eventStream$).catch((error) => {
+      console.error("Error in event source callback", error);
+    });
+    return this.eventStream$.pipe(
+      // mark tools for server side execution
+      map((event) => {
+        if (event.type === RuntimeEventTypes.ActionExecutionStart) {
+          event.scope = serversideActions.find((action) => action.name === event.actionName)
+            ? "server"
+            : "client";
+        }
+        return event;
+      }),
+      // track state
+      scan(
+        (acc, event) => {
+          if (event.type === RuntimeEventTypes.ActionExecutionStart) {
+            acc.callActionServerSide = event.scope === "server";
+            acc.args = "";
+            acc.actionExecutionId = event.actionExecutionId;
+            if (acc.callActionServerSide) {
+              acc.action = serversideActions.find((action) => action.name === event.actionName);
+            }
+          } else if (event.type === RuntimeEventTypes.ActionExecutionArgs) {
+            acc.args += event.args;
+          }
+
+          acc.event = event;
+          return acc;
+        },
+        {
+          event: null,
+          callActionServerSide: false,
+          args: "",
+          actionExecutionId: null,
+          action: null,
+        } as RuntimeEventWithState,
+      ),
+      concatMap((eventWithState) => {
+        if (
+          eventWithState.event!.type === RuntimeEventTypes.ActionExecutionEnd &&
+          eventWithState.callActionServerSide
+        ) {
+          const toolCallEventStream$ = new RuntimeEventSubject();
+          executeAction(
+            toolCallEventStream$,
+            eventWithState.action!,
+            eventWithState.args,
+            eventWithState.actionExecutionId,
+          ).catch((error) => {
+            console.error(error);
+          });
+          return concat(of(eventWithState.event!), toolCallEventStream$);
+        } else {
+          return of(eventWithState.event!);
+        }
+      }),
+    );
+  }
+}
+
+async function executeAction(
+  eventStream$: RuntimeEventSubject,
+  action: Action<any>,
+  actionArguments: string,
+  actionExecutionId: string,
+) {
+  // Prepare arguments for function calling
+  let args: Record<string, any>[] = [];
+  if (actionArguments) {
+    args = JSON.parse(actionArguments);
+  }
+
+  // call the function
+  const result = await action.handler(args);
+
+  await streamLangChainResponse({
+    result,
+    eventStream$,
+    actionExecution: {
+      name: action.name,
+      id: actionExecutionId,
+    },
+  });
+}
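The hunk above matches package/src/service-adapters/events.ts (+197 lines). A sketch of the producer/consumer flow: an adapter registers a callback via `stream()`, and the runtime consumes the piped observable returned by `process()`. The rxjs helpers used to collect the events, and the local import path, are assumptions made so the snippet is self-contained.

```typescript
// Sketch: drive a RuntimeEventSource by hand and collect the processed events.
import { firstValueFrom, toArray } from "rxjs";
import { RuntimeEventSource } from "./events"; // the module shown above

const eventSource = new RuntimeEventSource();

// An adapter would normally do this inside process(): emit events, then complete.
await eventSource.stream(async (eventStream$) => {
  eventStream$.sendTextMessage("message-id", "Hello from the model");
  eventStream$.complete();
});

// The runtime consumes the processed stream, passing the server-side actions
// (none here, so any ActionExecutionStart would be scoped "client").
const events = await firstValueFrom(eventSource.process([]).pipe(toArray()));
console.log(events.map((e) => e.type));
// ["TextMessageStart", "TextMessageContent", "TextMessageEnd"]
```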
@@ -0,0 +1,124 @@
+/**
+ * CopilotKit Adapter for Groq
+ *
+ * <RequestExample>
+ * ```jsx CopilotRuntime Example
+ * const copilotKit = new CopilotRuntime();
+ * return copilotKit.response(req, new GroqAdapter());
+ * ```
+ * </RequestExample>
+ *
+ * You can easily set the model to use by passing it to the constructor.
+ * ```jsx
+ * const copilotKit = new CopilotRuntime();
+ * return copilotKit.response(
+ *   req,
+ *   new GroqAdapter({ model: "llama3-70b-8192" }),
+ * );
+ * ```
+ */
+import { nanoid } from "nanoid";
+import { TextMessage } from "../../../graphql/types/converted";
+import {
+  CopilotServiceAdapter,
+  CopilotRuntimeChatCompletionRequest,
+  CopilotRuntimeChatCompletionResponse,
+} from "../../service-adapter";
+
+import Groq from "groq-sdk";
+
+const DEFAULT_MODEL = "llama3-70b-8192";
+
+interface GroqAdapterOptions {
+  model?: string;
+}
+
+export class ExperimentalGroqAdapter implements CopilotServiceAdapter {
+  private model: string;
+
+  constructor(options?: GroqAdapterOptions) {
+    if (options?.model) {
+      this.model = options.model;
+    } else {
+      this.model = DEFAULT_MODEL;
+    }
+  }
+
+  async process(
+    request: CopilotRuntimeChatCompletionRequest,
+  ): Promise<CopilotRuntimeChatCompletionResponse> {
+    const groq = new Groq();
+
+    const messages = (
+      request.messages.filter((m) => m instanceof TextMessage) as TextMessage[]
+    ).map((m) => ({
+      role: m.role,
+      content: m.content,
+    }));
+
+    const max_tokens = maxTokensForGroqModel(this.model);
+
+    const _stream = await groq.chat.completions.create({
+      //
+      // Required parameters
+      //
+      messages: messages,
+
+      // The language model which will generate the completion.
+      model: this.model,
+
+      //
+      // Optional parameters
+      //
+
+      // Controls randomness: lowering results in less random completions.
+      // As the temperature approaches zero, the model will become deterministic
+      // and repetitive.
+      temperature: 0.5, // [TODO]
+
+      // The maximum number of tokens to generate. Requests can use up to
+      // 2048 tokens shared between prompt and completion.
+      max_tokens: max_tokens,
+
+      // Controls diversity via nucleus sampling: 0.5 means half of all
+      // likelihood-weighted options are considered.
+      top_p: 1, // [TODO]
+
+      // A stop sequence is a predefined or user-specified text string that
+      // signals an AI to stop generating content, ensuring its responses
+      // remain focused and concise. Examples include punctuation marks and
+      // markers like "[end]".
+      stop: null,
+
+      // If set, partial message deltas will be sent.
+      stream: true,
+    });
+
+    request.eventSource.stream(async (eventStream$) => {
+      eventStream$.sendTextMessageStart(nanoid());
+      for await (const chunk of _stream) {
+        if (chunk.choices[0]?.delta?.content) {
+          eventStream$.sendTextMessageContent(chunk.choices[0]?.delta?.content);
+        }
+      }
+      eventStream$.sendTextMessageEnd();
+      // we may need to add this later.. [nc]
+      // let calls = (await result.response).functionCalls();
+
+      eventStream$.complete();
+    });
+    return {};
+  }
+}
+
+export function maxTokensForGroqModel(model: string): number {
+  return maxTokensByModel[model] || DEFAULT_MAX_TOKENS;
+}
+
+const DEFAULT_MAX_TOKENS = 8192;
+
+const maxTokensByModel: { [key: string]: number } = {
+  // llama3
+  "llama3-8b-8192": DEFAULT_MAX_TOKENS,
+  "llama3-70b-8192": DEFAULT_MAX_TOKENS,
+};
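The hunk above matches package/src/service-adapters/experimental/groq/groq-adapter.ts (+124 lines). Note that the doc comment refers to `GroqAdapter` while the exported class is `ExperimentalGroqAdapter`. A construction sketch follows, assuming the class is re-exported from the package root and that `GROQ_API_KEY` is set (the adapter calls `new Groq()` with no arguments, so groq-sdk reads the key from the environment):

```typescript
// Sketch only: constructing the experimental Groq adapter.
// Export names/paths are assumptions based on the file list above.
import { CopilotRuntime, ExperimentalGroqAdapter } from "@copilotkit/runtime";

const serviceAdapter = new ExperimentalGroqAdapter({ model: "llama3-70b-8192" });
const runtime = new CopilotRuntime();
// The runtime invokes serviceAdapter.process(request); it is not called directly here.
```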
@@ -0,0 +1,75 @@
+/**
+ * CopilotKit Adapter for Ollama
+ *
+ * <RequestExample>
+ * ```jsx CopilotRuntime Example
+ * const copilotKit = new CopilotRuntime();
+ * return copilotKit.response(req, new OllamaAdapter());
+ * ```
+ * </RequestExample>
+ *
+ * You can easily set the model to use by passing it to the constructor.
+ * ```jsx
+ * const copilotKit = new CopilotRuntime();
+ * return copilotKit.response(
+ *   req,
+ *   new OllamaAdapter({ model: "llama3-70b-8192" }),
+ * );
+ * ```
+ */
+import { nanoid } from "nanoid";
+import { TextMessage } from "../../../graphql/types/converted";
+import {
+  CopilotServiceAdapter,
+  CopilotRuntimeChatCompletionRequest,
+  CopilotRuntimeChatCompletionResponse,
+} from "../../service-adapter";
+// import { writeChatCompletionChunk, writeChatCompletionEnd } from "../utils";
+// import { ChatCompletionChunk, Message } from "@copilotkit/shared";
+import { Ollama } from "@langchain/community/llms/ollama";
+
+const DEFAULT_MODEL = "llama3:latest";
+
+interface OllamaAdapterOptions {
+  model?: string;
+}
+
+export class ExperimentalOllamaAdapter implements CopilotServiceAdapter {
+  private model: string;
+
+  constructor(options?: OllamaAdapterOptions) {
+    if (options?.model) {
+      this.model = options.model;
+    } else {
+      this.model = DEFAULT_MODEL;
+    }
+  }
+
+  async process(
+    request: CopilotRuntimeChatCompletionRequest,
+  ): Promise<CopilotRuntimeChatCompletionResponse> {
+    const { messages, actions, eventSource } = request;
+    // const messages = this.transformMessages(forwardedProps.messages);
+
+    const ollama = new Ollama({
+      model: this.model,
+    });
+    const contents = (messages.filter((m) => m instanceof TextMessage) as TextMessage[]).map(
+      (m) => m.content,
+    );
+    const _stream = await ollama.stream(contents); // [TODO] role info is dropped...
+
+    eventSource.stream(async (eventStream$) => {
+      eventStream$.sendTextMessageStart(nanoid());
+      for await (const chunkText of _stream) {
+        eventStream$.sendTextMessageContent(chunkText);
+      }
+      eventStream$.sendTextMessageEnd();
+      // we may need to add this later.. [nc]
+      // let calls = (await result.response).functionCalls();
+
+      eventStream$.complete();
+    });
+    return {};
+  }
+}
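The hunk above matches package/src/service-adapters/experimental/ollama/ollama-adapter.ts (+75 lines). As its `[TODO]` notes, the adapter maps messages to plain content strings, so role information is dropped before the prompt reaches Ollama. A construction sketch (export path assumed):

```typescript
// Sketch only: an experimental Ollama adapter pointed at a locally pulled model.
import { ExperimentalOllamaAdapter } from "@copilotkit/runtime"; // assumed export path

const serviceAdapter = new ExperimentalOllamaAdapter({ model: "llama3:latest" });
```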
@@ -0,0 +1,149 @@
+/**
+ * CopilotKit Adapter for Google Gemini
+ *
+ * Use this adapter for a Google Gemini backend.
+ *
+ * <RequestExample>
+ * ```typescript
+ * const copilotKit = new CopilotRuntime();
+ * return copilotKit.response(
+ *   req,
+ *   new GoogleGenerativeAIAdapter()
+ * );
+ * ```
+ * </RequestExample>
+ *
+ * To set up a different model, pass the model prop:
+ *
+ * ```typescript
+ * const copilotKit = new CopilotRuntime();
+ * const genAI = new GoogleGenerativeAI(
+ *   process.env["GOOGLE_API_KEY"]!
+ * );
+ * const model = genAI.getGenerativeModel(
+ *   { model: "gemini-pro" }
+ * );
+ * return copilotKit.response(
+ *   req,
+ *   new GoogleGenerativeAIAdapter()
+ * );
+ * ```
+ */
+import { CopilotServiceAdapter } from "../service-adapter";
+import {
+  CopilotRuntimeChatCompletionRequest,
+  CopilotRuntimeChatCompletionResponse,
+} from "../service-adapter";
+import { GenerativeModel, GoogleGenerativeAI } from "@google/generative-ai";
+import { TextMessage } from "../../graphql/types/converted";
+import { convertMessageToGoogleGenAIMessage, transformActionToGoogleGenAITool } from "./utils";
+import { nanoid } from "nanoid";
+
+interface GoogleGenerativeAIAdapterOptions {
+  /**
+   * A custom `GenerativeModel` to use for the request.
+   */
+  model?: GenerativeModel;
+}
+
+export class GoogleGenerativeAIAdapter implements CopilotServiceAdapter {
+  private model: GenerativeModel;
+
+  constructor(options?: GoogleGenerativeAIAdapterOptions) {
+    if (options?.model) {
+      this.model = options.model;
+    } else {
+      const genAI = new GoogleGenerativeAI(process.env["GOOGLE_API_KEY"]!);
+      this.model = genAI.getGenerativeModel({ model: "gemini-pro" });
+    }
+  }
+
+  async process(
+    request: CopilotRuntimeChatCompletionRequest,
+  ): Promise<CopilotRuntimeChatCompletionResponse> {
+    const { messages, actions, eventSource } = request;
+
+    // get the history (everything except the first and last message)
+    const history = messages.slice(1, -1).map(convertMessageToGoogleGenAIMessage);
+
+    // get the current message (the last message)
+    const currentMessage = convertMessageToGoogleGenAIMessage(messages.at(-1));
+    if (!currentMessage) {
+      throw new Error("No current message");
+    }
+
+    let systemMessage: string;
+    const firstMessage = messages.at(0);
+    if (firstMessage instanceof TextMessage && firstMessage.role === "system") {
+      systemMessage = firstMessage.content.trim();
+    } else {
+      throw new Error("First message is not a system message");
+    }
+
+    const tools = actions.map(transformActionToGoogleGenAITool);
+
+    const isFirstGenGeminiPro =
+      this.model.model === "gemini-pro" || this.model.model === "models/gemini-pro";
+
+    const chat = this.model.startChat({
+      history: [
+        ...history,
+        // gemini-pro does not support system instructions, so we need to add them to the history
+        ...(isFirstGenGeminiPro ? [{ role: "user", parts: [{ text: systemMessage }] }] : []),
+      ],
+      // only gemini-1.5-pro-latest and later supports setting system instructions
+      ...(isFirstGenGeminiPro
+        ? {}
+        : { systemInstruction: { role: "user", parts: [{ text: systemMessage }] } }),
+      tools,
+    });
+
+    const result = await chat.sendMessageStream(currentMessage.parts);
+
+    eventSource.stream(async (eventStream$) => {
+      let isTextMessage = false;
+      for await (const chunk of result.stream) {
+        const chunkText = chunk.text();
+        if (!isTextMessage) {
+          isTextMessage = true;
+          eventStream$.sendTextMessageStart(nanoid());
+        }
+        eventStream$.sendTextMessageContent(chunkText);
+      }
+      if (isTextMessage) {
+        eventStream$.sendTextMessageEnd();
+      }
+
+      let calls = (await result.response).functionCalls();
+      if (calls) {
+        for (let call of calls) {
+          eventStream$.sendActionExecution(
+            nanoid(),
+            call.name,
+            JSON.stringify(replaceNewlinesInObject(call.args)),
+          );
+        }
+      }
+      eventStream$.complete();
+    });
+
+    return {};
+  }
+}
+
+function replaceNewlinesInObject(obj: any): any {
+  if (typeof obj === "string") {
+    return obj.replace(/\\\\n/g, "\n");
+  } else if (Array.isArray(obj)) {
+    return obj.map(replaceNewlinesInObject);
+  } else if (typeof obj === "object" && obj !== null) {
+    const newObj: any = {};
+    for (const key in obj) {
+      if (obj.hasOwnProperty(key)) {
+        newObj[key] = replaceNewlinesInObject(obj[key]);
+      }
+    }
+    return newObj;
+  }
+  return obj;
+}
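The hunk above matches package/src/service-adapters/google/google-genai-adapter.ts (+149 lines). The second example in its doc comment builds a `GenerativeModel` but never hands it to the adapter; given `GoogleGenerativeAIAdapterOptions.model`, the intended usage is presumably to pass the model to the constructor, roughly as sketched below (the export path and model id are assumptions):

```typescript
// Sketch only: supplying a custom GenerativeModel so system instructions are
// sent via systemInstruction (supported from gemini-1.5-pro-latest onward,
// per the comment in the adapter above). GOOGLE_API_KEY is assumed to be set.
import { GoogleGenerativeAI } from "@google/generative-ai";
import { GoogleGenerativeAIAdapter } from "@copilotkit/runtime"; // assumed export path

const genAI = new GoogleGenerativeAI(process.env["GOOGLE_API_KEY"]!);
const model = genAI.getGenerativeModel({ model: "gemini-1.5-pro-latest" });
const serviceAdapter = new GoogleGenerativeAIAdapter({ model });
```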