@copilotkit/runtime 1.50.0-beta.8 → 1.50.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +168 -0
- package/dist/chunk-27JKTS6P.mjs +1704 -0
- package/dist/chunk-27JKTS6P.mjs.map +1 -0
- package/dist/chunk-2GPTVDTO.mjs +25 -0
- package/dist/chunk-2GPTVDTO.mjs.map +1 -0
- package/dist/chunk-2OZAGFV3.mjs +43 -0
- package/dist/chunk-2OZAGFV3.mjs.map +1 -0
- package/dist/chunk-3AJVKDZX.mjs +3097 -0
- package/dist/chunk-3AJVKDZX.mjs.map +1 -0
- package/dist/chunk-45RCC3ZS.mjs +25 -0
- package/dist/chunk-45RCC3ZS.mjs.map +1 -0
- package/dist/chunk-4EHJ4XFJ.mjs +25 -0
- package/dist/chunk-4EHJ4XFJ.mjs.map +1 -0
- package/dist/chunk-4IANB4TC.mjs +25 -0
- package/dist/chunk-4IANB4TC.mjs.map +1 -0
- package/dist/chunk-4KES76K3.mjs +74 -0
- package/dist/chunk-4KES76K3.mjs.map +1 -0
- package/dist/chunk-4OGE3SLW.mjs +3100 -0
- package/dist/chunk-4OGE3SLW.mjs.map +1 -0
- package/dist/chunk-54YJBMCP.mjs +3097 -0
- package/dist/chunk-54YJBMCP.mjs.map +1 -0
- package/dist/chunk-62NE5S6M.mjs +226 -0
- package/dist/chunk-62NE5S6M.mjs.map +1 -0
- package/dist/chunk-6ER4SZYH.mjs +74 -0
- package/dist/chunk-6ER4SZYH.mjs.map +1 -0
- package/dist/chunk-6TNSLHVR.mjs +74 -0
- package/dist/chunk-6TNSLHVR.mjs.map +1 -0
- package/dist/chunk-6XRUR5UK.mjs +1 -0
- package/dist/chunk-6XRUR5UK.mjs.map +1 -0
- package/dist/chunk-7V4BK7TZ.mjs +25 -0
- package/dist/chunk-7V4BK7TZ.mjs.map +1 -0
- package/dist/chunk-7YZIEXD2.mjs +74 -0
- package/dist/chunk-7YZIEXD2.mjs.map +1 -0
- package/dist/chunk-A4XHOAFU.mjs +25 -0
- package/dist/chunk-A4XHOAFU.mjs.map +1 -0
- package/dist/chunk-A555KEAD.mjs +6020 -0
- package/dist/chunk-A555KEAD.mjs.map +1 -0
- package/dist/chunk-AF73TFTX.mjs +74 -0
- package/dist/chunk-AF73TFTX.mjs.map +1 -0
- package/dist/chunk-AMUJQ6IR.mjs +50 -0
- package/dist/chunk-AMUJQ6IR.mjs.map +1 -0
- package/dist/chunk-AQG2SVCA.mjs +25 -0
- package/dist/chunk-AQG2SVCA.mjs.map +1 -0
- package/dist/chunk-BJZHMXND.mjs +74 -0
- package/dist/chunk-BJZHMXND.mjs.map +1 -0
- package/dist/chunk-CB2OJXF6.mjs +25 -0
- package/dist/chunk-CB2OJXF6.mjs.map +1 -0
- package/dist/chunk-CEOMFPJU.mjs +6020 -0
- package/dist/chunk-CEOMFPJU.mjs.map +1 -0
- package/dist/chunk-CZVLR7CC.mjs +175 -0
- package/dist/chunk-CZVLR7CC.mjs.map +1 -0
- package/dist/chunk-DCEEHMLJ.mjs +1127 -0
- package/dist/chunk-DCEEHMLJ.mjs.map +1 -0
- package/dist/chunk-DE3CLKUG.mjs +25 -0
- package/dist/chunk-DE3CLKUG.mjs.map +1 -0
- package/dist/chunk-DTPRUTNV.mjs +25 -0
- package/dist/chunk-DTPRUTNV.mjs.map +1 -0
- package/dist/chunk-ERUOA47O.mjs +626 -0
- package/dist/chunk-ERUOA47O.mjs.map +1 -0
- package/dist/chunk-ESSRC64W.mjs +74 -0
- package/dist/chunk-ESSRC64W.mjs.map +1 -0
- package/dist/chunk-FHD4JECV.mjs +33 -0
- package/dist/chunk-FHD4JECV.mjs.map +1 -0
- package/dist/chunk-GRAN6K6N.mjs +25 -0
- package/dist/chunk-GRAN6K6N.mjs.map +1 -0
- package/dist/chunk-I27F2UPA.mjs +175 -0
- package/dist/chunk-I27F2UPA.mjs.map +1 -0
- package/dist/chunk-IAZKTOQW.mjs +25 -0
- package/dist/chunk-IAZKTOQW.mjs.map +1 -0
- package/dist/chunk-J6XZ5MFB.mjs +25 -0
- package/dist/chunk-J6XZ5MFB.mjs.map +1 -0
- package/dist/chunk-JJ32MA4C.mjs +73 -0
- package/dist/chunk-JJ32MA4C.mjs.map +1 -0
- package/dist/chunk-JJY4ZTHQ.mjs +25 -0
- package/dist/chunk-JJY4ZTHQ.mjs.map +1 -0
- package/dist/chunk-KEYLBFU2.mjs +3117 -0
- package/dist/chunk-KEYLBFU2.mjs.map +1 -0
- package/dist/chunk-KQ53L4WZ.mjs +3094 -0
- package/dist/chunk-KQ53L4WZ.mjs.map +1 -0
- package/dist/chunk-KTELVQ67.mjs +3098 -0
- package/dist/chunk-KTELVQ67.mjs.map +1 -0
- package/dist/chunk-LPEPX6NH.mjs +25 -0
- package/dist/chunk-LPEPX6NH.mjs.map +1 -0
- package/dist/chunk-MDXE55DK.mjs +3117 -0
- package/dist/chunk-MDXE55DK.mjs.map +1 -0
- package/dist/chunk-MMFUVOXH.mjs +73 -0
- package/dist/chunk-MMFUVOXH.mjs.map +1 -0
- package/dist/chunk-N3Y4U66N.mjs +253 -0
- package/dist/chunk-N3Y4U66N.mjs.map +1 -0
- package/dist/chunk-O7UYB4MH.mjs +25 -0
- package/dist/chunk-O7UYB4MH.mjs.map +1 -0
- package/dist/chunk-OFNVQHNM.mjs +3089 -0
- package/dist/chunk-OFNVQHNM.mjs.map +1 -0
- package/dist/chunk-OFSV5GET.mjs +3074 -0
- package/dist/chunk-OFSV5GET.mjs.map +1 -0
- package/dist/chunk-OMRST67R.mjs +25 -0
- package/dist/chunk-OMRST67R.mjs.map +1 -0
- package/dist/chunk-OWIGJONH.mjs +275 -0
- package/dist/chunk-OWIGJONH.mjs.map +1 -0
- package/dist/chunk-PRZHE74A.mjs +25 -0
- package/dist/chunk-PRZHE74A.mjs.map +1 -0
- package/dist/chunk-PTYRVXXP.mjs +80 -0
- package/dist/chunk-PTYRVXXP.mjs.map +1 -0
- package/dist/chunk-R22B5CCO.mjs +25 -0
- package/dist/chunk-R22B5CCO.mjs.map +1 -0
- package/dist/chunk-SHBDMA63.mjs +141 -0
- package/dist/chunk-SHBDMA63.mjs.map +1 -0
- package/dist/chunk-SPVXBPRA.mjs +74 -0
- package/dist/chunk-SPVXBPRA.mjs.map +1 -0
- package/dist/chunk-T72G46ME.mjs +25 -0
- package/dist/chunk-T72G46ME.mjs.map +1 -0
- package/dist/chunk-TGELROPU.mjs +25 -0
- package/dist/chunk-TGELROPU.mjs.map +1 -0
- package/dist/chunk-UNX4IAAD.mjs +25 -0
- package/dist/chunk-UNX4IAAD.mjs.map +1 -0
- package/dist/chunk-V4DHVC7M.mjs +3085 -0
- package/dist/chunk-V4DHVC7M.mjs.map +1 -0
- package/dist/chunk-VVRFOB66.mjs +25 -0
- package/dist/chunk-VVRFOB66.mjs.map +1 -0
- package/dist/chunk-W6NVBYM6.mjs +80 -0
- package/dist/chunk-W6NVBYM6.mjs.map +1 -0
- package/dist/chunk-W7MBACGC.mjs +74 -0
- package/dist/chunk-W7MBACGC.mjs.map +1 -0
- package/dist/chunk-WMD4XZZS.mjs +25 -0
- package/dist/chunk-WMD4XZZS.mjs.map +1 -0
- package/dist/chunk-WX2ZNCRT.mjs +74 -0
- package/dist/chunk-WX2ZNCRT.mjs.map +1 -0
- package/dist/chunk-XWBDEXDA.mjs +153 -0
- package/dist/chunk-XWBDEXDA.mjs.map +1 -0
- package/dist/chunk-Y2Z62E2T.mjs +74 -0
- package/dist/chunk-Y2Z62E2T.mjs.map +1 -0
- package/dist/chunk-YO4I6RVI.mjs +25 -0
- package/dist/chunk-YO4I6RVI.mjs.map +1 -0
- package/dist/chunk-Z6Q5IW6I.mjs +3098 -0
- package/dist/chunk-Z6Q5IW6I.mjs.map +1 -0
- package/dist/chunk-Z726O3G2.mjs +25 -0
- package/dist/chunk-Z726O3G2.mjs.map +1 -0
- package/dist/chunk-ZE4SMZZR.mjs +3097 -0
- package/dist/chunk-ZE4SMZZR.mjs.map +1 -0
- package/dist/chunk-ZULZB33C.mjs +73 -0
- package/dist/chunk-ZULZB33C.mjs.map +1 -0
- package/dist/chunk-ZVRGXMY7.mjs +25 -0
- package/dist/chunk-ZVRGXMY7.mjs.map +1 -0
- package/dist/chunk-ZZ35WBYQ.mjs +25 -0
- package/dist/chunk-ZZ35WBYQ.mjs.map +1 -0
- package/dist/graphql/message-conversion/index.d.ts +18 -0
- package/dist/graphql/message-conversion/index.js +725 -0
- package/dist/graphql/message-conversion/index.js.map +1 -0
- package/dist/graphql/message-conversion/index.mjs +245 -0
- package/dist/graphql/message-conversion/index.mjs.map +1 -0
- package/dist/graphql/types/base/index.d.ts +6 -0
- package/dist/graphql/types/base/index.js +63 -0
- package/dist/graphql/types/base/index.js.map +1 -0
- package/dist/graphql/types/base/index.mjs +8 -0
- package/dist/graphql/types/base/index.mjs.map +1 -0
- package/dist/graphql/types/converted/index.d.ts +2 -0
- package/dist/graphql/types/converted/index.js +200 -0
- package/dist/graphql/types/converted/index.js.map +1 -0
- package/dist/graphql/types/converted/index.mjs +19 -0
- package/dist/graphql/types/converted/index.mjs.map +1 -0
- package/dist/groq-adapter-540da9c3.d.ts +331 -0
- package/dist/groq-adapter-a6f5e9d2.d.ts +331 -0
- package/dist/groq-adapter-c8aec5c5.d.ts +321 -0
- package/dist/index-96b330da.d.ts +119 -0
- package/dist/index-adbd78f1.d.ts +154 -0
- package/dist/index.d.ts +67 -8
- package/dist/index.js +118 -28
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +120 -31
- package/dist/index.mjs.map +1 -1
- package/dist/langserve-0c6100e3.d.ts +257 -0
- package/dist/langserve-978d5790.d.ts +243 -0
- package/dist/langserve-9fc76ce5.d.ts +243 -0
- package/dist/lib/cloud/index.d.ts +6 -0
- package/dist/lib/cloud/index.js +18 -0
- package/dist/lib/cloud/index.js.map +1 -0
- package/dist/lib/cloud/index.mjs +1 -0
- package/dist/lib/cloud/index.mjs.map +1 -0
- package/dist/lib/index.d.ts +212 -0
- package/dist/lib/index.js +7843 -0
- package/dist/lib/index.js.map +1 -0
- package/dist/lib/index.mjs +76 -0
- package/dist/lib/index.mjs.map +1 -0
- package/dist/lib/integrations/index.d.ts +34 -0
- package/dist/lib/integrations/index.js +3052 -0
- package/dist/lib/integrations/index.js.map +1 -0
- package/dist/lib/integrations/index.mjs +37 -0
- package/dist/lib/integrations/index.mjs.map +1 -0
- package/dist/lib/integrations/nest/index.d.ts +15 -0
- package/dist/lib/integrations/nest/index.js +2959 -0
- package/dist/lib/integrations/nest/index.js.map +1 -0
- package/dist/lib/integrations/nest/index.mjs +14 -0
- package/dist/lib/integrations/nest/index.mjs.map +1 -0
- package/dist/lib/integrations/node-express/index.d.ts +15 -0
- package/dist/lib/integrations/node-express/index.js +2959 -0
- package/dist/lib/integrations/node-express/index.js.map +1 -0
- package/dist/lib/integrations/node-express/index.mjs +14 -0
- package/dist/lib/integrations/node-express/index.mjs.map +1 -0
- package/dist/lib/integrations/node-http/index.d.ts +15 -0
- package/dist/lib/integrations/node-http/index.js +2945 -0
- package/dist/lib/integrations/node-http/index.js.map +1 -0
- package/dist/lib/integrations/node-http/index.mjs +13 -0
- package/dist/lib/integrations/node-http/index.mjs.map +1 -0
- package/dist/service-adapters/index.d.ts +162 -0
- package/dist/service-adapters/index.js +1787 -0
- package/dist/service-adapters/index.js.map +1 -0
- package/dist/service-adapters/index.mjs +34 -0
- package/dist/service-adapters/index.mjs.map +1 -0
- package/dist/service-adapters/shared/index.d.ts +9 -0
- package/dist/service-adapters/shared/index.js +72 -0
- package/dist/service-adapters/shared/index.js.map +1 -0
- package/dist/service-adapters/shared/index.mjs +8 -0
- package/dist/service-adapters/shared/index.mjs.map +1 -0
- package/dist/shared-0a7346ce.d.ts +466 -0
- package/dist/shared-35c6eb04.d.ts +448 -0
- package/dist/shared-9ed1dc31.d.ts +414 -0
- package/dist/shared-da5708fe.d.ts +449 -0
- package/dist/utils/index.d.ts +65 -0
- package/dist/utils/index.js +175 -0
- package/dist/utils/index.js.map +1 -0
- package/dist/utils/index.mjs +12 -0
- package/dist/utils/index.mjs.map +1 -0
- package/package.json +14 -20
- package/src/lib/index.ts +7 -6
- package/src/lib/runtime/copilot-runtime.ts +62 -26
- package/src/lib/runtime/telemetry-agent-runner.ts +139 -0
@@ -0,0 +1,331 @@
+import OpenAI from 'openai';
+import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from './langserve-978d5790.js';
+import { BaseMessageChunk, AIMessage, AIMessageChunk, BaseMessage } from '@langchain/core/messages';
+import { DynamicStructuredTool } from '@langchain/core/tools';
+import { IterableReadableStream, IterableReadableStreamInterface } from '@langchain/core/utils/stream';
+import { Groq } from 'groq-sdk';
+
+/**
+ * Copilot Runtime adapter for OpenAI.
+ *
+ * ## Example
+ *
+ * ```ts
+ * import { CopilotRuntime, OpenAIAdapter } from "@copilotkit/runtime";
+ * import OpenAI from "openai";
+ *
+ * const copilotKit = new CopilotRuntime();
+ *
+ * const openai = new OpenAI({
+ *   organization: "<your-organization-id>", // optional
+ *   apiKey: "<your-api-key>",
+ * });
+ *
+ * return new OpenAIAdapter({ openai });
+ * ```
+ *
+ * ## Example with Azure OpenAI
+ *
+ * ```ts
+ * import { CopilotRuntime, OpenAIAdapter } from "@copilotkit/runtime";
+ * import OpenAI from "openai";
+ *
+ * // The name of your Azure OpenAI Instance.
+ * // https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal#create-a-resource
+ * const instance = "<your instance name>";
+ *
+ * // Corresponds to your Model deployment within your OpenAI resource, e.g. my-gpt35-16k-deployment
+ * // Navigate to the Azure OpenAI Studio to deploy a model.
+ * const model = "<your model>";
+ *
+ * const apiKey = process.env["AZURE_OPENAI_API_KEY"];
+ * if (!apiKey) {
+ *   throw new Error("The AZURE_OPENAI_API_KEY environment variable is missing or empty.");
+ * }
+ *
+ * const copilotKit = new CopilotRuntime();
+ *
+ * const openai = new OpenAI({
+ *   apiKey,
+ *   baseURL: `https://${instance}.openai.azure.com/openai/deployments/${model}`,
+ *   defaultQuery: { "api-version": "2024-04-01-preview" },
+ *   defaultHeaders: { "api-key": apiKey },
+ * });
+ *
+ * return new OpenAIAdapter({ openai });
+ * ```
+ */
+
+interface OpenAIAdapterParams {
+    /**
+     * An optional OpenAI instance to use. If not provided, a new instance will be
+     * created.
+     */
+    openai?: OpenAI;
+    /**
+     * The model to use.
+     */
+    model?: string;
+    /**
+     * Whether to disable parallel tool calls.
+     * You can disable parallel tool calls to force the model to execute tool calls sequentially.
+     * This is useful if you want to execute tool calls in a specific order so that the state changes
+     * introduced by one tool call are visible to the next tool call. (i.e. new actions or readables)
+     *
+     * @default false
+     */
+    disableParallelToolCalls?: boolean;
+    /**
+     * Whether to keep the role in system messages as "System".
+     * By default, it is converted to "developer", which is used by newer OpenAI models
+     *
+     * @default false
+     */
+    keepSystemRole?: boolean;
+}
+declare class OpenAIAdapter implements CopilotServiceAdapter {
+    model: string;
+    provider: string;
+    private disableParallelToolCalls;
+    private _openai;
+    private keepSystemRole;
+    get openai(): OpenAI;
+    get name(): string;
+    constructor(params?: OpenAIAdapterParams);
+    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+}
+
+type LangChainBaseMessageChunkStream = IterableReadableStream<BaseMessageChunk>;
+type LangChainAIMessageChunkStream = IterableReadableStreamInterface<AIMessageChunk>;
+type LangChainReturnType = LangChainBaseMessageChunkStream | LangChainAIMessageChunkStream | BaseMessageChunk | string | AIMessage;
+
+/**
+ * Copilot Runtime adapter for LangChain.
+ *
+ * ## Example
+ *
+ * ```ts
+ * import { CopilotRuntime, LangChainAdapter } from "@copilotkit/runtime";
+ * import { ChatOpenAI } from "@langchain/openai";
+ *
+ * const copilotKit = new CopilotRuntime();
+ *
+ * const model = new ChatOpenAI({
+ *   model: "gpt-4o",
+ *   apiKey: "<your-api-key>",
+ * });
+ *
+ * return new LangChainAdapter({
+ *   chainFn: async ({ messages, tools }) => {
+ *     return model.bindTools(tools).stream(messages);
+ *     // or optionally enable strict mode
+ *     // return model.bindTools(tools, { strict: true }).stream(messages);
+ *   }
+ * });
+ * ```
+ *
+ * The asynchronous handler function (`chainFn`) can return any of the following:
+ *
+ * - A simple `string` response
+ * - A LangChain stream (`IterableReadableStream`)
+ * - A LangChain `BaseMessageChunk` object
+ * - A LangChain `AIMessage` object
+ */
+
+interface ChainFnParameters {
+    model: string;
+    messages: BaseMessage[];
+    tools: DynamicStructuredTool[];
+    threadId?: string;
+    runId?: string;
+}
+interface LangChainAdapterOptions {
+    /**
+     * A function that uses the LangChain API to generate a response.
+     */
+    chainFn: (parameters: ChainFnParameters) => Promise<LangChainReturnType>;
+}
+declare class LangChainAdapter implements CopilotServiceAdapter {
+    private options;
+    /**
+     * To use LangChain as a backend, provide a handler function to the adapter with your custom LangChain logic.
+     */
+    get name(): string;
+    constructor(options: LangChainAdapterOptions);
+    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+}
+
+interface GoogleGenerativeAIAdapterOptions {
+    /**
+     * A custom Google Generative AI model to use.
+     */
+    model?: string;
+    /**
+     * The API key to use.
+     */
+    apiKey?: string;
+}
+declare class GoogleGenerativeAIAdapter extends LangChainAdapter {
+    provider: string;
+    model: string;
+    constructor(options?: GoogleGenerativeAIAdapterOptions);
+}
+
+/**
+ * Copilot Runtime adapter for the OpenAI Assistant API.
+ *
+ * ## Example
+ *
+ * ```ts
+ * import { CopilotRuntime, OpenAIAssistantAdapter } from "@copilotkit/runtime";
+ * import OpenAI from "openai";
+ *
+ * const copilotKit = new CopilotRuntime();
+ *
+ * const openai = new OpenAI({
+ *   organization: "<your-organization-id>",
+ *   apiKey: "<your-api-key>",
+ * });
+ *
+ * return new OpenAIAssistantAdapter({
+ *   openai,
+ *   assistantId: "<your-assistant-id>",
+ *   codeInterpreterEnabled: true,
+ *   fileSearchEnabled: true,
+ * });
+ * ```
+ */
+
+interface OpenAIAssistantAdapterParams {
+    /**
+     * The ID of the assistant to use.
+     */
+    assistantId: string;
+    /**
+     * An optional OpenAI instance to use. If not provided, a new instance will be created.
+     */
+    openai?: OpenAI;
+    /**
+     * Whether to enable code interpretation.
+     * @default true
+     */
+    codeInterpreterEnabled?: boolean;
+    /**
+     * Whether to enable file search.
+     * @default true
+     */
+    fileSearchEnabled?: boolean;
+    /**
+     * Whether to disable parallel tool calls.
+     * You can disable parallel tool calls to force the model to execute tool calls sequentially.
+     * This is useful if you want to execute tool calls in a specific order so that the state changes
+     * introduced by one tool call are visible to the next tool call. (i.e. new actions or readables)
+     *
+     * @default false
+     */
+    disableParallelToolCalls?: boolean;
+    /**
+     * Whether to keep the role in system messages as "System".
+     * By default, it is converted to "developer", which is used by newer OpenAI models
+     *
+     * @default false
+     */
+    keepSystemRole?: boolean;
+}
+declare class OpenAIAssistantAdapter implements CopilotServiceAdapter {
+    private openai;
+    private codeInterpreterEnabled;
+    private assistantId;
+    private fileSearchEnabled;
+    private disableParallelToolCalls;
+    private keepSystemRole;
+    get name(): string;
+    constructor(params: OpenAIAssistantAdapterParams);
+    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+    private submitToolOutputs;
+    private submitUserMessage;
+    private streamResponse;
+}
+
+/**
+ * CopilotKit Adapter for Unify
+ *
+ * <RequestExample>
+ * ```jsx CopilotRuntime Example
+ * const copilotKit = new CopilotRuntime();
+ * return copilotKit.response(req, new UnifyAdapter());
+ * ```
+ * </RequestExample>
+ *
+ * You can easily set the model to use by passing it to the constructor.
+ * ```jsx
+ * const copilotKit = new CopilotRuntime();
+ * return copilotKit.response(
+ *   req,
+ *   new UnifyAdapter({ model: "llama-3-8b-chat@fireworks-ai" }),
+ * );
+ * ```
+ */
+
+interface UnifyAdapterParams {
+    apiKey?: string;
+    model: string;
+}
+declare class UnifyAdapter implements CopilotServiceAdapter {
+    private apiKey;
+    model: string;
+    private start;
+    provider: string;
+    get name(): string;
+    constructor(options?: UnifyAdapterParams);
+    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+}
+
+/**
+ * Copilot Runtime adapter for Groq.
+ *
+ * ## Example
+ *
+ * ```ts
+ * import { CopilotRuntime, GroqAdapter } from "@copilotkit/runtime";
+ * import { Groq } from "groq-sdk";
+ *
+ * const groq = new Groq({ apiKey: process.env["GROQ_API_KEY"] });
+ *
+ * const copilotKit = new CopilotRuntime();
+ *
+ * return new GroqAdapter({ groq, model: "<model-name>" });
+ * ```
+ */
+
+interface GroqAdapterParams {
+    /**
+     * An optional Groq instance to use.
+     */
+    groq?: Groq;
+    /**
+     * The model to use.
+     */
+    model?: string;
+    /**
+     * Whether to disable parallel tool calls.
+     * You can disable parallel tool calls to force the model to execute tool calls sequentially.
+     * This is useful if you want to execute tool calls in a specific order so that the state changes
+     * introduced by one tool call are visible to the next tool call. (i.e. new actions or readables)
+     *
+     * @default false
+     */
+    disableParallelToolCalls?: boolean;
+}
+declare class GroqAdapter implements CopilotServiceAdapter {
+    model: string;
+    provider: string;
+    private disableParallelToolCalls;
+    private _groq;
+    get groq(): Groq;
+    get name(): string;
+    constructor(params?: GroqAdapterParams);
+    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+}
+
+export { GoogleGenerativeAIAdapter as G, LangChainAdapter as L, OpenAIAdapterParams as O, UnifyAdapterParams as U, OpenAIAdapter as a, OpenAIAssistantAdapterParams as b, OpenAIAssistantAdapter as c, UnifyAdapter as d, GroqAdapterParams as e, GroqAdapter as f };
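Editor's note: the declarations above surface two adapter options that are easy to miss in the raw diff, `disableParallelToolCalls` (forces sequential tool execution) and `keepSystemRole` (keeps system messages as "system" rather than converting them to "developer"). The following is a minimal, illustrative sketch of passing them to `OpenAIAdapter`, adapted from the adapter's own JSDoc example; the model placeholder and environment-variable name are assumptions, not part of the package.

```ts
import { OpenAIAdapter } from "@copilotkit/runtime";
import OpenAI from "openai";

// Sketch only: option values below are illustrative.
const openai = new OpenAI({ apiKey: process.env["OPENAI_API_KEY"] });

const serviceAdapter = new OpenAIAdapter({
  openai,
  model: "<your-model>",            // optional; the adapter's default model applies when omitted
  disableParallelToolCalls: true,   // run tool calls one at a time so each sees prior state changes
  keepSystemRole: true,             // keep "system" instead of the default "developer" role
});
```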
@@ -0,0 +1,321 @@
+import OpenAI from 'openai';
+import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from './langserve-0c6100e3.js';
+import { BaseMessageChunk, AIMessage, AIMessageChunk, BaseMessage } from '@langchain/core/messages';
+import { DynamicStructuredTool } from '@langchain/core/tools';
+import { IterableReadableStream, IterableReadableStreamInterface } from '@langchain/core/utils/stream';
+import { Groq } from 'groq-sdk';
+
+/**
+ * Copilot Runtime adapter for OpenAI.
+ *
+ * ## Example
+ *
+ * ```ts
+ * import { CopilotRuntime, OpenAIAdapter } from "@copilotkit/runtime";
+ * import OpenAI from "openai";
+ *
+ * const copilotKit = new CopilotRuntime();
+ *
+ * const openai = new OpenAI({
+ *   organization: "<your-organization-id>", // optional
+ *   apiKey: "<your-api-key>",
+ * });
+ *
+ * return new OpenAIAdapter({ openai });
+ * ```
+ *
+ * ## Example with Azure OpenAI
+ *
+ * ```ts
+ * import { CopilotRuntime, OpenAIAdapter } from "@copilotkit/runtime";
+ * import OpenAI from "openai";
+ *
+ * // The name of your Azure OpenAI Instance.
+ * // https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal#create-a-resource
+ * const instance = "<your instance name>";
+ *
+ * // Corresponds to your Model deployment within your OpenAI resource, e.g. my-gpt35-16k-deployment
+ * // Navigate to the Azure OpenAI Studio to deploy a model.
+ * const model = "<your model>";
+ *
+ * const apiKey = process.env["AZURE_OPENAI_API_KEY"];
+ * if (!apiKey) {
+ *   throw new Error("The AZURE_OPENAI_API_KEY environment variable is missing or empty.");
+ * }
+ *
+ * const copilotKit = new CopilotRuntime();
+ *
+ * const openai = new OpenAI({
+ *   apiKey,
+ *   baseURL: `https://${instance}.openai.azure.com/openai/deployments/${model}`,
+ *   defaultQuery: { "api-version": "2024-04-01-preview" },
+ *   defaultHeaders: { "api-key": apiKey },
+ * });
+ *
+ * return new OpenAIAdapter({ openai });
+ * ```
+ */
+
+interface OpenAIAdapterParams {
+    /**
+     * An optional OpenAI instance to use. If not provided, a new instance will be
+     * created.
+     */
+    openai?: OpenAI;
+    /**
+     * The model to use.
+     */
+    model?: string;
+    /**
+     * Whether to disable parallel tool calls.
+     * You can disable parallel tool calls to force the model to execute tool calls sequentially.
+     * This is useful if you want to execute tool calls in a specific order so that the state changes
+     * introduced by one tool call are visible to the next tool call. (i.e. new actions or readables)
+     *
+     * @default false
+     */
+    disableParallelToolCalls?: boolean;
+    /**
+     * Whether to keep the role in system messages as "System".
+     * By default, it is converted to "developer", which is used by newer OpenAI models
+     *
+     * @default false
+     */
+    keepSystemRole?: boolean;
+}
+declare class OpenAIAdapter implements CopilotServiceAdapter {
+    private model;
+    private disableParallelToolCalls;
+    private _openai;
+    private keepSystemRole;
+    get openai(): OpenAI;
+    constructor(params?: OpenAIAdapterParams);
+    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+}
+
+type LangChainBaseMessageChunkStream = IterableReadableStream<BaseMessageChunk>;
+type LangChainAIMessageChunkStream = IterableReadableStreamInterface<AIMessageChunk>;
+type LangChainReturnType = LangChainBaseMessageChunkStream | LangChainAIMessageChunkStream | BaseMessageChunk | string | AIMessage;
+
+/**
+ * Copilot Runtime adapter for LangChain.
+ *
+ * ## Example
+ *
+ * ```ts
+ * import { CopilotRuntime, LangChainAdapter } from "@copilotkit/runtime";
+ * import { ChatOpenAI } from "@langchain/openai";
+ *
+ * const copilotKit = new CopilotRuntime();
+ *
+ * const model = new ChatOpenAI({
+ *   model: "gpt-4o",
+ *   apiKey: "<your-api-key>",
+ * });
+ *
+ * return new LangChainAdapter({
+ *   chainFn: async ({ messages, tools }) => {
+ *     return model.bindTools(tools).stream(messages);
+ *     // or optionally enable strict mode
+ *     // return model.bindTools(tools, { strict: true }).stream(messages);
+ *   }
+ * });
+ * ```
+ *
+ * The asynchronous handler function (`chainFn`) can return any of the following:
+ *
+ * - A simple `string` response
+ * - A LangChain stream (`IterableReadableStream`)
+ * - A LangChain `BaseMessageChunk` object
+ * - A LangChain `AIMessage` object
+ */
+
+interface ChainFnParameters {
+    model: string;
+    messages: BaseMessage[];
+    tools: DynamicStructuredTool[];
+    threadId?: string;
+    runId?: string;
+}
+interface LangChainAdapterOptions {
+    /**
+     * A function that uses the LangChain API to generate a response.
+     */
+    chainFn: (parameters: ChainFnParameters) => Promise<LangChainReturnType>;
+}
+declare class LangChainAdapter implements CopilotServiceAdapter {
+    private options;
+    /**
+     * To use LangChain as a backend, provide a handler function to the adapter with your custom LangChain logic.
+     */
+    constructor(options: LangChainAdapterOptions);
+    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+}
+
+interface GoogleGenerativeAIAdapterOptions {
+    /**
+     * A custom Google Generative AI model to use.
+     */
+    model?: string;
+    /**
+     * The API key to use.
+     */
+    apiKey?: string;
+}
+declare class GoogleGenerativeAIAdapter extends LangChainAdapter {
+    constructor(options?: GoogleGenerativeAIAdapterOptions);
+}
+
+/**
+ * Copilot Runtime adapter for the OpenAI Assistant API.
+ *
+ * ## Example
+ *
+ * ```ts
+ * import { CopilotRuntime, OpenAIAssistantAdapter } from "@copilotkit/runtime";
+ * import OpenAI from "openai";
+ *
+ * const copilotKit = new CopilotRuntime();
+ *
+ * const openai = new OpenAI({
+ *   organization: "<your-organization-id>",
+ *   apiKey: "<your-api-key>",
+ * });
+ *
+ * return new OpenAIAssistantAdapter({
+ *   openai,
+ *   assistantId: "<your-assistant-id>",
+ *   codeInterpreterEnabled: true,
+ *   fileSearchEnabled: true,
+ * });
+ * ```
+ */
+
+interface OpenAIAssistantAdapterParams {
+    /**
+     * The ID of the assistant to use.
+     */
+    assistantId: string;
+    /**
+     * An optional OpenAI instance to use. If not provided, a new instance will be created.
+     */
+    openai?: OpenAI;
+    /**
+     * Whether to enable code interpretation.
+     * @default true
+     */
+    codeInterpreterEnabled?: boolean;
+    /**
+     * Whether to enable file search.
+     * @default true
+     */
+    fileSearchEnabled?: boolean;
+    /**
+     * Whether to disable parallel tool calls.
+     * You can disable parallel tool calls to force the model to execute tool calls sequentially.
+     * This is useful if you want to execute tool calls in a specific order so that the state changes
+     * introduced by one tool call are visible to the next tool call. (i.e. new actions or readables)
+     *
+     * @default false
+     */
+    disableParallelToolCalls?: boolean;
+    /**
+     * Whether to keep the role in system messages as "System".
+     * By default, it is converted to "developer", which is used by newer OpenAI models
+     *
+     * @default false
+     */
+    keepSystemRole?: boolean;
+}
+declare class OpenAIAssistantAdapter implements CopilotServiceAdapter {
+    private openai;
+    private codeInterpreterEnabled;
+    private assistantId;
+    private fileSearchEnabled;
+    private disableParallelToolCalls;
+    private keepSystemRole;
+    constructor(params: OpenAIAssistantAdapterParams);
+    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+    private submitToolOutputs;
+    private submitUserMessage;
+    private streamResponse;
+}
+
+/**
+ * CopilotKit Adapter for Unify
+ *
+ * <RequestExample>
+ * ```jsx CopilotRuntime Example
+ * const copilotKit = new CopilotRuntime();
+ * return copilotKit.response(req, new UnifyAdapter());
+ * ```
+ * </RequestExample>
+ *
+ * You can easily set the model to use by passing it to the constructor.
+ * ```jsx
+ * const copilotKit = new CopilotRuntime();
+ * return copilotKit.response(
+ *   req,
+ *   new UnifyAdapter({ model: "llama-3-8b-chat@fireworks-ai" }),
+ * );
+ * ```
+ */
+
+interface UnifyAdapterParams {
+    apiKey?: string;
+    model: string;
+}
+declare class UnifyAdapter implements CopilotServiceAdapter {
+    private apiKey;
+    private model;
+    private start;
+    constructor(options?: UnifyAdapterParams);
+    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+}
+
+/**
+ * Copilot Runtime adapter for Groq.
+ *
+ * ## Example
+ *
+ * ```ts
+ * import { CopilotRuntime, GroqAdapter } from "@copilotkit/runtime";
+ * import { Groq } from "groq-sdk";
+ *
+ * const groq = new Groq({ apiKey: process.env["GROQ_API_KEY"] });
+ *
+ * const copilotKit = new CopilotRuntime();
+ *
+ * return new GroqAdapter({ groq, model: "<model-name>" });
+ * ```
+ */
+
+interface GroqAdapterParams {
+    /**
+     * An optional Groq instance to use.
+     */
+    groq?: Groq;
+    /**
+     * The model to use.
+     */
+    model?: string;
+    /**
+     * Whether to disable parallel tool calls.
+     * You can disable parallel tool calls to force the model to execute tool calls sequentially.
+     * This is useful if you want to execute tool calls in a specific order so that the state changes
+     * introduced by one tool call are visible to the next tool call. (i.e. new actions or readables)
+     *
+     * @default false
+     */
+    disableParallelToolCalls?: boolean;
+}
+declare class GroqAdapter implements CopilotServiceAdapter {
+    private model;
+    private disableParallelToolCalls;
+    private _groq;
+    get groq(): Groq;
+    constructor(params?: GroqAdapterParams);
+    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
+}
+
+export { GoogleGenerativeAIAdapter as G, LangChainAdapter as L, OpenAIAdapterParams as O, UnifyAdapterParams as U, OpenAIAdapter as a, OpenAIAssistantAdapterParams as b, OpenAIAssistantAdapter as c, UnifyAdapter as d, GroqAdapterParams as e, GroqAdapter as f };
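Editor's note: the same declaration file also covers the Groq and Google Generative AI adapters. Below is an illustrative sketch of constructing them with only the options those interfaces declare (`model`, `apiKey`, `groq`, `disableParallelToolCalls`); the model placeholders and environment-variable names are assumptions, not part of the package.

```ts
import { GoogleGenerativeAIAdapter, GroqAdapter } from "@copilotkit/runtime";
import { Groq } from "groq-sdk";

// Google Generative AI: both options are optional per the declaration above.
const geminiAdapter = new GoogleGenerativeAIAdapter({
  model: "<your-gemini-model>",          // placeholder
  apiKey: process.env["GOOGLE_API_KEY"], // placeholder env var name
});

// Groq: pass an existing client, or omit `groq` and let the adapter create its own.
const groq = new Groq({ apiKey: process.env["GROQ_API_KEY"] });
const groqAdapter = new GroqAdapter({
  groq,
  model: "<model-name>",
  disableParallelToolCalls: true, // execute tool calls one at a time
});
```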