@theia/ai-vercel-ai 1.62.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/browser/vercel-ai-frontend-application-contribution.d.ts +21 -0
- package/lib/browser/vercel-ai-frontend-application-contribution.d.ts.map +1 -0
- package/lib/browser/vercel-ai-frontend-application-contribution.js +167 -0
- package/lib/browser/vercel-ai-frontend-application-contribution.js.map +1 -0
- package/lib/browser/vercel-ai-frontend-module.d.ts +4 -0
- package/lib/browser/vercel-ai-frontend-module.d.ts.map +1 -0
- package/lib/browser/vercel-ai-frontend-module.js +32 -0
- package/lib/browser/vercel-ai-frontend-module.js.map +1 -0
- package/lib/browser/vercel-ai-preferences.d.ts +8 -0
- package/lib/browser/vercel-ai-preferences.d.ts.map +1 -0
- package/lib/browser/vercel-ai-preferences.js +132 -0
- package/lib/browser/vercel-ai-preferences.js.map +1 -0
- package/lib/common/index.d.ts +2 -0
- package/lib/common/index.d.ts.map +1 -0
- package/lib/common/index.js +20 -0
- package/lib/common/index.js.map +1 -0
- package/lib/common/vercel-ai-language-models-manager.d.ts +45 -0
- package/lib/common/vercel-ai-language-models-manager.d.ts.map +1 -0
- package/lib/common/vercel-ai-language-models-manager.js +21 -0
- package/lib/common/vercel-ai-language-models-manager.js.map +1 -0
- package/lib/node/vercel-ai-backend-module.d.ts +4 -0
- package/lib/node/vercel-ai-backend-module.d.ts.map +1 -0
- package/lib/node/vercel-ai-backend-module.js +33 -0
- package/lib/node/vercel-ai-backend-module.js.map +1 -0
- package/lib/node/vercel-ai-language-model-factory.d.ts +14 -0
- package/lib/node/vercel-ai-language-model-factory.d.ts.map +1 -0
- package/lib/node/vercel-ai-language-model-factory.js +73 -0
- package/lib/node/vercel-ai-language-model-factory.js.map +1 -0
- package/lib/node/vercel-ai-language-model.d.ts +51 -0
- package/lib/node/vercel-ai-language-model.d.ts.map +1 -0
- package/lib/node/vercel-ai-language-model.js +305 -0
- package/lib/node/vercel-ai-language-model.js.map +1 -0
- package/lib/node/vercel-ai-language-models-manager-impl.d.ts +18 -0
- package/lib/node/vercel-ai-language-models-manager-impl.d.ts.map +1 -0
- package/lib/node/vercel-ai-language-models-manager-impl.js +96 -0
- package/lib/node/vercel-ai-language-models-manager-impl.js.map +1 -0
- package/lib/package.spec.d.ts +1 -0
- package/lib/package.spec.d.ts.map +1 -0
- package/lib/package.spec.js +26 -0
- package/lib/package.spec.js.map +1 -0
- package/package.json +56 -0
- package/src/browser/vercel-ai-frontend-application-contribution.ts +196 -0
- package/src/browser/vercel-ai-frontend-module.ts +31 -0
- package/src/browser/vercel-ai-preferences.ts +139 -0
- package/src/common/index.ts +16 -0
- package/src/common/vercel-ai-language-models-manager.ts +63 -0
- package/src/node/vercel-ai-backend-module.ts +35 -0
- package/src/node/vercel-ai-language-model-factory.ts +82 -0
- package/src/node/vercel-ai-language-model.ts +408 -0
- package/src/node/vercel-ai-language-models-manager-impl.ts +101 -0
- package/src/package.spec.ts +27 -0
|
@@ -0,0 +1,408 @@
|
|
|
1
|
+
// *****************************************************************************
|
|
2
|
+
// Copyright (C) 2025 EclipseSource GmbH.
|
|
3
|
+
//
|
|
4
|
+
// This program and the accompanying materials are made available under the
|
|
5
|
+
// terms of the Eclipse Public License v. 2.0 which is available at
|
|
6
|
+
// http://www.eclipse.org/legal/epl-2.0.
|
|
7
|
+
//
|
|
8
|
+
// This Source Code may also be made available under the following Secondary
|
|
9
|
+
// Licenses when the conditions for such availability set forth in the Eclipse
|
|
10
|
+
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
|
|
11
|
+
// with the GNU Classpath Exception which is available at
|
|
12
|
+
// https://www.gnu.org/software/classpath/license.html.
|
|
13
|
+
//
|
|
14
|
+
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
|
|
15
|
+
// *****************************************************************************
|
|
16
|
+
|
|
17
|
+
import { LanguageModelV1 } from '@ai-sdk/provider';
|
|
18
|
+
import {
|
|
19
|
+
LanguageModel,
|
|
20
|
+
LanguageModelMessage,
|
|
21
|
+
LanguageModelParsedResponse,
|
|
22
|
+
LanguageModelRequest,
|
|
23
|
+
LanguageModelResponse,
|
|
24
|
+
LanguageModelStreamResponse,
|
|
25
|
+
LanguageModelStreamResponsePart,
|
|
26
|
+
LanguageModelTextResponse,
|
|
27
|
+
TokenUsageService,
|
|
28
|
+
ToolCall,
|
|
29
|
+
UserRequest,
|
|
30
|
+
} from '@theia/ai-core';
|
|
31
|
+
import { CancellationToken, Disposable, ILogger } from '@theia/core';
|
|
32
|
+
import {
|
|
33
|
+
CoreMessage,
|
|
34
|
+
generateObject,
|
|
35
|
+
GenerateObjectResult,
|
|
36
|
+
generateText,
|
|
37
|
+
GenerateTextResult,
|
|
38
|
+
jsonSchema,
|
|
39
|
+
StepResult,
|
|
40
|
+
streamText,
|
|
41
|
+
TextStreamPart,
|
|
42
|
+
tool,
|
|
43
|
+
ToolExecutionOptions,
|
|
44
|
+
ToolResultPart,
|
|
45
|
+
ToolSet
|
|
46
|
+
} from 'ai';
|
|
47
|
+
import { VercelAiLanguageModelFactory, VercelAiProviderConfig } from './vercel-ai-language-model-factory';
|
|
48
|
+
|
|
49
|
+
/**
 * Couples a Theia CancellationToken with a DOM AbortSignal so that both the
 * Vercel AI SDK (signal-based) and Theia (token-based) cancellation styles
 * observe the same cancellation state. Disposing releases the listener that
 * forwards token cancellation to the underlying AbortController.
 */
interface VercelCancellationToken extends Disposable {
    // Signal handed to the Vercel AI SDK calls (generateText/streamText/generateObject).
    signal: AbortSignal;
    // The originating Theia token, or CancellationToken.None when absent.
    cancellationToken: CancellationToken;
    // True when either the token or the abort signal reports cancellation.
    isCancellationRequested: boolean;
}

/**
 * Loosely-typed view of a part emitted by the Vercel AI SDK full stream:
 * either a ToolResultPart, or a generic part carrying only the optional
 * fields the transformer actually inspects (text deltas, tool-call
 * metadata, token usage).
 */
type StreamPart = ToolResultPart | {
    type: string;
    textDelta?: string;
    toolCallId?: string;
    toolName?: string;
    args?: object | string;
    argsTextDelta?: string;
    usage?: { promptTokens: number; completionTokens: number };
    signature?: string;
};

/** The SDK full stream plus the ability to cancel the underlying stream early. */
interface VercelAiStream extends AsyncIterable<TextStreamPart<ToolSet>> {
    cancel: () => void;
}

/** Collaborators needed while transforming a stream: logging and cancellation. */
interface StreamContext {
    logger: ILogger;
    cancellationToken?: VercelCancellationToken;
}
|
|
74
|
+
|
|
75
|
+
export class VercelAiStreamTransformer {
|
|
76
|
+
private toolCallsMap = new Map<string, ToolCall>();
|
|
77
|
+
|
|
78
|
+
constructor(
|
|
79
|
+
protected readonly fullStream: VercelAiStream,
|
|
80
|
+
protected readonly context: StreamContext
|
|
81
|
+
) { }
|
|
82
|
+
|
|
83
|
+
async *transform(): AsyncGenerator<LanguageModelStreamResponsePart> {
|
|
84
|
+
this.toolCallsMap.clear();
|
|
85
|
+
try {
|
|
86
|
+
for await (const part of this.fullStream) {
|
|
87
|
+
this.context.logger.trace('Received stream part:', part);
|
|
88
|
+
if (this.context.cancellationToken?.isCancellationRequested) {
|
|
89
|
+
this.context.logger.debug('Cancellation requested, stopping stream');
|
|
90
|
+
this.fullStream.cancel();
|
|
91
|
+
break;
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
let toolCallUpdated = false;
|
|
95
|
+
|
|
96
|
+
switch (part.type) {
|
|
97
|
+
case 'text-delta':
|
|
98
|
+
if (part.textDelta) {
|
|
99
|
+
yield { content: part.textDelta };
|
|
100
|
+
}
|
|
101
|
+
break;
|
|
102
|
+
|
|
103
|
+
case 'tool-call':
|
|
104
|
+
if (part.toolCallId && part.toolName) {
|
|
105
|
+
const args = typeof part.args === 'object' ? JSON.stringify(part.args) : (part.args || '');
|
|
106
|
+
toolCallUpdated = this.updateToolCall(part.toolCallId, part.toolName, args);
|
|
107
|
+
}
|
|
108
|
+
break;
|
|
109
|
+
|
|
110
|
+
case 'tool-call-streaming-start':
|
|
111
|
+
if (part.toolCallId && part.toolName) {
|
|
112
|
+
toolCallUpdated = this.updateToolCall(part.toolCallId, part.toolName);
|
|
113
|
+
}
|
|
114
|
+
break;
|
|
115
|
+
|
|
116
|
+
case 'tool-call-delta':
|
|
117
|
+
if (part.toolCallId && part.argsTextDelta) {
|
|
118
|
+
toolCallUpdated = this.appendToToolCallArgs(part.toolCallId, part.argsTextDelta);
|
|
119
|
+
}
|
|
120
|
+
break;
|
|
121
|
+
|
|
122
|
+
default:
|
|
123
|
+
if (this.isToolResultPart(part)) {
|
|
124
|
+
toolCallUpdated = this.processToolResult(part);
|
|
125
|
+
}
|
|
126
|
+
break;
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
if (toolCallUpdated && this.toolCallsMap.size > 0) {
|
|
130
|
+
yield { tool_calls: Array.from(this.toolCallsMap.values()) };
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
} catch (error) {
|
|
134
|
+
this.context.logger.error('Error in AI SDK stream:', error);
|
|
135
|
+
}
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
private isToolResultPart(part: StreamPart): part is ToolResultPart {
|
|
139
|
+
return part.type === 'tool-result';
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
private updateToolCall(id: string, name: string, args?: string): boolean {
|
|
143
|
+
const toolCall: ToolCall = {
|
|
144
|
+
id,
|
|
145
|
+
function: { name, arguments: args ? args : '' },
|
|
146
|
+
finished: false
|
|
147
|
+
};
|
|
148
|
+
this.toolCallsMap.set(id, toolCall);
|
|
149
|
+
return true;
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
private appendToToolCallArgs(id: string, argsTextDelta: string): boolean {
|
|
153
|
+
const existingCall = this.toolCallsMap.get(id);
|
|
154
|
+
if (existingCall?.function) {
|
|
155
|
+
existingCall.function.arguments = (existingCall.function.arguments || '') + argsTextDelta;
|
|
156
|
+
return true;
|
|
157
|
+
}
|
|
158
|
+
return false;
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
private processToolResult(part: ToolResultPart): boolean {
|
|
162
|
+
if (!part.toolCallId) {
|
|
163
|
+
return false;
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
const completedCall = this.toolCallsMap.get(part.toolCallId);
|
|
167
|
+
if (!completedCall) {
|
|
168
|
+
return false;
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
completedCall.result = part.result as string;
|
|
172
|
+
completedCall.finished = true;
|
|
173
|
+
return true;
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
export class VercelAiModel implements LanguageModel {
|
|
179
|
+
|
|
180
|
+
constructor(
|
|
181
|
+
public readonly id: string,
|
|
182
|
+
public model: string,
|
|
183
|
+
public enableStreaming: boolean,
|
|
184
|
+
public supportsStructuredOutput: boolean,
|
|
185
|
+
public url: string | undefined,
|
|
186
|
+
protected readonly logger: ILogger,
|
|
187
|
+
protected readonly languageModelFactory: VercelAiLanguageModelFactory,
|
|
188
|
+
protected providerConfig: () => VercelAiProviderConfig,
|
|
189
|
+
protected readonly tokenUsageService?: TokenUsageService
|
|
190
|
+
) { }
|
|
191
|
+
|
|
192
|
+
protected getSettings(request: LanguageModelRequest): Record<string, unknown> {
|
|
193
|
+
return request.settings ?? {};
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
async request(request: UserRequest, cancellationToken?: CancellationToken): Promise<LanguageModelResponse> {
|
|
197
|
+
const settings = this.getSettings(request);
|
|
198
|
+
const model = this.languageModelFactory.createLanguageModel(
|
|
199
|
+
{
|
|
200
|
+
id: this.id,
|
|
201
|
+
model: this.model,
|
|
202
|
+
url: this.url,
|
|
203
|
+
apiKey: true, // We'll use the provider's API key
|
|
204
|
+
enableStreaming: this.enableStreaming,
|
|
205
|
+
supportsStructuredOutput: this.supportsStructuredOutput
|
|
206
|
+
},
|
|
207
|
+
this.providerConfig()
|
|
208
|
+
);
|
|
209
|
+
const cancel = this.createCancellationToken(cancellationToken);
|
|
210
|
+
|
|
211
|
+
try {
|
|
212
|
+
if (request.response_format?.type === 'json_schema' && this.supportsStructuredOutput) {
|
|
213
|
+
return this.handleStructuredOutputRequest(model, request, cancel);
|
|
214
|
+
}
|
|
215
|
+
if (!this.enableStreaming || (typeof settings.stream === 'boolean' && !settings.stream)) {
|
|
216
|
+
return this.handleNonStreamingRequest(model, request, cancel);
|
|
217
|
+
}
|
|
218
|
+
return this.handleStreamingRequest(model, request, cancel);
|
|
219
|
+
} catch (error) {
|
|
220
|
+
this.logger.error('Error in Vercel AI model request:', error);
|
|
221
|
+
throw error;
|
|
222
|
+
} finally {
|
|
223
|
+
cancel.dispose();
|
|
224
|
+
}
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
protected createCancellationToken(cancellationToken?: CancellationToken): VercelCancellationToken {
|
|
228
|
+
const abortController = new AbortController();
|
|
229
|
+
const abortSignal = abortController.signal;
|
|
230
|
+
if (cancellationToken?.isCancellationRequested) {
|
|
231
|
+
abortController.abort();
|
|
232
|
+
}
|
|
233
|
+
const cancellationListener = cancellationToken ?
|
|
234
|
+
cancellationToken.onCancellationRequested(() => {
|
|
235
|
+
abortController.abort();
|
|
236
|
+
}) : undefined;
|
|
237
|
+
return {
|
|
238
|
+
signal: abortSignal,
|
|
239
|
+
cancellationToken: cancellationToken ?? CancellationToken.None,
|
|
240
|
+
get isCancellationRequested(): boolean {
|
|
241
|
+
return cancellationToken?.isCancellationRequested ?? abortSignal.aborted;
|
|
242
|
+
},
|
|
243
|
+
dispose: () => cancellationListener?.dispose()
|
|
244
|
+
};
|
|
245
|
+
}
|
|
246
|
+
|
|
247
|
+
protected async handleNonStreamingRequest(
|
|
248
|
+
model: LanguageModelV1,
|
|
249
|
+
request: UserRequest,
|
|
250
|
+
cancellationToken?: VercelCancellationToken
|
|
251
|
+
): Promise<LanguageModelTextResponse> {
|
|
252
|
+
const settings = this.getSettings(request);
|
|
253
|
+
const messages = this.processMessages(request.messages);
|
|
254
|
+
const tools = this.createTools(request);
|
|
255
|
+
const abortSignal = cancellationToken?.signal;
|
|
256
|
+
|
|
257
|
+
const response = await generateText({
|
|
258
|
+
model,
|
|
259
|
+
messages,
|
|
260
|
+
tools,
|
|
261
|
+
toolChoice: 'auto',
|
|
262
|
+
abortSignal,
|
|
263
|
+
...settings
|
|
264
|
+
});
|
|
265
|
+
|
|
266
|
+
await this.recordTokenUsage(response, request);
|
|
267
|
+
|
|
268
|
+
return { text: response.text };
|
|
269
|
+
}
|
|
270
|
+
|
|
271
|
+
protected createTools(request: UserRequest): ToolSet | undefined {
|
|
272
|
+
if (!request.tools) {
|
|
273
|
+
return undefined;
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
const toolSet: ToolSet = {};
|
|
277
|
+
for (const toolRequest of request.tools) {
|
|
278
|
+
toolSet[toolRequest.name] = tool({
|
|
279
|
+
description: toolRequest.description,
|
|
280
|
+
parameters: jsonSchema(toolRequest.parameters),
|
|
281
|
+
execute: async (args: object, options: ToolExecutionOptions) => {
|
|
282
|
+
try {
|
|
283
|
+
const result = await toolRequest.handler(JSON.stringify(args), options);
|
|
284
|
+
return JSON.stringify(result);
|
|
285
|
+
} catch (error) {
|
|
286
|
+
this.logger.error(`Error executing tool (${toolRequest.name}):`, error);
|
|
287
|
+
return { status: 'error', error: 'Tool execution failed', details: error };
|
|
288
|
+
}
|
|
289
|
+
}
|
|
290
|
+
});
|
|
291
|
+
}
|
|
292
|
+
return toolSet;
|
|
293
|
+
}
|
|
294
|
+
|
|
295
|
+
protected async handleStructuredOutputRequest(
|
|
296
|
+
model: LanguageModelV1,
|
|
297
|
+
request: UserRequest,
|
|
298
|
+
cancellationToken?: VercelCancellationToken
|
|
299
|
+
): Promise<LanguageModelParsedResponse | LanguageModelStreamResponse> {
|
|
300
|
+
if (request.response_format?.type !== 'json_schema' || !request.response_format.json_schema.schema) {
|
|
301
|
+
throw Error('Invalid response format for structured output request');
|
|
302
|
+
}
|
|
303
|
+
|
|
304
|
+
const schema = jsonSchema(request.response_format.json_schema.schema);
|
|
305
|
+
if (!schema) {
|
|
306
|
+
throw new Error('Schema extraction failed.');
|
|
307
|
+
}
|
|
308
|
+
|
|
309
|
+
const settings = this.getSettings(request);
|
|
310
|
+
const messages = this.processMessages(request.messages);
|
|
311
|
+
const abortSignal = cancellationToken?.signal;
|
|
312
|
+
|
|
313
|
+
const response = await generateObject<unknown>({
|
|
314
|
+
model,
|
|
315
|
+
output: 'object',
|
|
316
|
+
messages,
|
|
317
|
+
schema,
|
|
318
|
+
abortSignal,
|
|
319
|
+
...settings
|
|
320
|
+
});
|
|
321
|
+
|
|
322
|
+
await this.recordTokenUsage(response, request);
|
|
323
|
+
|
|
324
|
+
return {
|
|
325
|
+
content: JSON.stringify(response.object),
|
|
326
|
+
parsed: response.object
|
|
327
|
+
};
|
|
328
|
+
}
|
|
329
|
+
|
|
330
|
+
private async recordTokenUsage(
|
|
331
|
+
result: GenerateObjectResult<unknown> | GenerateTextResult<ToolSet, unknown>,
|
|
332
|
+
request: UserRequest
|
|
333
|
+
): Promise<void> {
|
|
334
|
+
if (this.tokenUsageService && !isNaN(result.usage.completionTokens) && !isNaN(result.usage.promptTokens)) {
|
|
335
|
+
await this.tokenUsageService.recordTokenUsage(
|
|
336
|
+
this.id,
|
|
337
|
+
{
|
|
338
|
+
inputTokens: result.usage.promptTokens,
|
|
339
|
+
outputTokens: result.usage.completionTokens,
|
|
340
|
+
requestId: request.requestId
|
|
341
|
+
}
|
|
342
|
+
);
|
|
343
|
+
}
|
|
344
|
+
}
|
|
345
|
+
|
|
346
|
+
protected async handleStreamingRequest(
|
|
347
|
+
model: LanguageModelV1,
|
|
348
|
+
request: UserRequest,
|
|
349
|
+
cancellationToken?: VercelCancellationToken
|
|
350
|
+
): Promise<LanguageModelStreamResponse> {
|
|
351
|
+
const settings = this.getSettings(request);
|
|
352
|
+
const messages = this.processMessages(request.messages);
|
|
353
|
+
const tools = this.createTools(request);
|
|
354
|
+
const abortSignal = cancellationToken?.signal;
|
|
355
|
+
|
|
356
|
+
const { fullStream } = streamText({
|
|
357
|
+
model,
|
|
358
|
+
messages,
|
|
359
|
+
tools,
|
|
360
|
+
toolChoice: 'auto',
|
|
361
|
+
maxSteps: 100,
|
|
362
|
+
toolCallStreaming: true,
|
|
363
|
+
abortSignal,
|
|
364
|
+
onStepFinish: (stepResult: StepResult<ToolSet>) => {
|
|
365
|
+
if (!isNaN(stepResult.usage.completionTokens) && !isNaN(stepResult.usage.promptTokens)) {
|
|
366
|
+
this.tokenUsageService?.recordTokenUsage(this.id, {
|
|
367
|
+
inputTokens: stepResult.usage.promptTokens,
|
|
368
|
+
outputTokens: stepResult.usage.completionTokens,
|
|
369
|
+
requestId: request.requestId
|
|
370
|
+
});
|
|
371
|
+
}
|
|
372
|
+
},
|
|
373
|
+
...settings
|
|
374
|
+
});
|
|
375
|
+
|
|
376
|
+
const transformer = new VercelAiStreamTransformer(
|
|
377
|
+
fullStream, { cancellationToken, logger: this.logger }
|
|
378
|
+
);
|
|
379
|
+
|
|
380
|
+
return {
|
|
381
|
+
stream: transformer.transform()
|
|
382
|
+
};
|
|
383
|
+
}
|
|
384
|
+
|
|
385
|
+
protected processMessages(messages: LanguageModelMessage[]): Array<CoreMessage> {
|
|
386
|
+
return messages.map(message => {
|
|
387
|
+
const content = LanguageModelMessage.isTextMessage(message) ? message.text : '';
|
|
388
|
+
let role: 'user' | 'assistant' | 'system';
|
|
389
|
+
switch (message.actor) {
|
|
390
|
+
case 'user':
|
|
391
|
+
role = 'user';
|
|
392
|
+
break;
|
|
393
|
+
case 'ai':
|
|
394
|
+
role = 'assistant';
|
|
395
|
+
break;
|
|
396
|
+
case 'system':
|
|
397
|
+
role = 'system';
|
|
398
|
+
break;
|
|
399
|
+
default:
|
|
400
|
+
role = 'user';
|
|
401
|
+
}
|
|
402
|
+
return {
|
|
403
|
+
role,
|
|
404
|
+
content,
|
|
405
|
+
};
|
|
406
|
+
});
|
|
407
|
+
}
|
|
408
|
+
}
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
// *****************************************************************************
|
|
2
|
+
// Copyright (C) 2025 EclipseSource GmbH.
|
|
3
|
+
//
|
|
4
|
+
// This program and the accompanying materials are made available under the
|
|
5
|
+
// terms of the Eclipse Public License v. 2.0 which is available at
|
|
6
|
+
// http://www.eclipse.org/legal/epl-2.0.
|
|
7
|
+
//
|
|
8
|
+
// This Source Code may also be made available under the following Secondary
|
|
9
|
+
// Licenses when the conditions for such availability set forth in the Eclipse
|
|
10
|
+
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
|
|
11
|
+
// with the GNU Classpath Exception which is available at
|
|
12
|
+
// https://www.gnu.org/software/classpath/license.html.
|
|
13
|
+
//
|
|
14
|
+
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
|
|
15
|
+
// *****************************************************************************
|
|
16
|
+
|
|
17
|
+
import { LanguageModelRegistry, TokenUsageService } from '@theia/ai-core';
|
|
18
|
+
import { inject, injectable, named } from '@theia/core/shared/inversify';
|
|
19
|
+
import { VercelAiModel } from './vercel-ai-language-model';
|
|
20
|
+
import { VercelAiLanguageModelsManager, VercelAiModelDescription } from '../common';
|
|
21
|
+
import { ILogger } from '@theia/core';
|
|
22
|
+
import { VercelAiLanguageModelFactory, VercelAiProvider, VercelAiProviderConfig } from './vercel-ai-language-model-factory';
|
|
23
|
+
|
|
24
|
+
@injectable()
|
|
25
|
+
export class VercelAiLanguageModelsManagerImpl implements VercelAiLanguageModelsManager {
|
|
26
|
+
|
|
27
|
+
apiKey: string | undefined;
|
|
28
|
+
protected providerConfigs: Map<VercelAiProvider, VercelAiProviderConfig> = new Map();
|
|
29
|
+
|
|
30
|
+
@inject(LanguageModelRegistry)
|
|
31
|
+
protected readonly languageModelRegistry: LanguageModelRegistry;
|
|
32
|
+
|
|
33
|
+
@inject(TokenUsageService)
|
|
34
|
+
protected readonly tokenUsageService: TokenUsageService;
|
|
35
|
+
|
|
36
|
+
@inject(ILogger) @named('vercel-ai')
|
|
37
|
+
protected readonly logger: ILogger;
|
|
38
|
+
|
|
39
|
+
@inject(VercelAiLanguageModelFactory)
|
|
40
|
+
protected readonly languageModelFactory: VercelAiLanguageModelFactory;
|
|
41
|
+
|
|
42
|
+
// Triggered from frontend. In case you want to use the models on the backend
|
|
43
|
+
// without a frontend then call this yourself
|
|
44
|
+
async createOrUpdateLanguageModels(...modelDescriptions: VercelAiModelDescription[]): Promise<void> {
|
|
45
|
+
for (const modelDescription of modelDescriptions) {
|
|
46
|
+
this.logger.info(`Vercel AI: Creating or updating model ${modelDescription.id}`);
|
|
47
|
+
const model = await this.languageModelRegistry.getLanguageModel(modelDescription.id);
|
|
48
|
+
const provider = this.determineProvider(modelDescription);
|
|
49
|
+
const providerConfig = this.getProviderConfig(provider);
|
|
50
|
+
|
|
51
|
+
if (model) {
|
|
52
|
+
if (!(model instanceof VercelAiModel)) {
|
|
53
|
+
this.logger.warn(`Vercel AI: model ${modelDescription.id} is not a Vercel AI model`);
|
|
54
|
+
continue;
|
|
55
|
+
}
|
|
56
|
+
model.model = modelDescription.model;
|
|
57
|
+
model.enableStreaming = modelDescription.enableStreaming;
|
|
58
|
+
model.url = modelDescription.url;
|
|
59
|
+
model.supportsStructuredOutput = modelDescription.supportsStructuredOutput;
|
|
60
|
+
this.providerConfigs.set(provider, providerConfig);
|
|
61
|
+
} else {
|
|
62
|
+
this.languageModelRegistry.addLanguageModels([
|
|
63
|
+
new VercelAiModel(
|
|
64
|
+
modelDescription.id,
|
|
65
|
+
modelDescription.model,
|
|
66
|
+
modelDescription.enableStreaming,
|
|
67
|
+
modelDescription.supportsStructuredOutput,
|
|
68
|
+
modelDescription.url,
|
|
69
|
+
this.logger,
|
|
70
|
+
this.languageModelFactory,
|
|
71
|
+
() => this.getProviderConfig(provider),
|
|
72
|
+
this.tokenUsageService
|
|
73
|
+
)
|
|
74
|
+
]);
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
removeLanguageModels(...modelIds: string[]): void {
|
|
80
|
+
this.languageModelRegistry.removeLanguageModels(modelIds);
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
setProviderConfig(provider: VercelAiProvider, config: Partial<VercelAiProviderConfig>): void {
|
|
84
|
+
const existingConfig = this.providerConfigs.get(provider) || { provider };
|
|
85
|
+
this.providerConfigs.set(provider, { ...existingConfig, ...config });
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
private determineProvider(modelDescription: VercelAiModelDescription): VercelAiProvider {
|
|
89
|
+
// Use the provider from the model description or default to OpenAI
|
|
90
|
+
return modelDescription.provider || 'openai';
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
private getProviderConfig(provider: VercelAiProvider): VercelAiProviderConfig {
|
|
94
|
+
let config = this.providerConfigs.get(provider);
|
|
95
|
+
if (!config) {
|
|
96
|
+
config = { provider, apiKey: this.apiKey };
|
|
97
|
+
this.providerConfigs.set(provider, config);
|
|
98
|
+
}
|
|
99
|
+
return config;
|
|
100
|
+
}
|
|
101
|
+
}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
// *****************************************************************************
|
|
2
|
+
// Copyright (C) 2025 EclipseSource GmbH and others.
|
|
3
|
+
//
|
|
4
|
+
// This program and the accompanying materials are made available under the
|
|
5
|
+
// terms of the Eclipse Public License v. 2.0 which is available at
|
|
6
|
+
// http://www.eclipse.org/legal/epl-2.0.
|
|
7
|
+
//
|
|
8
|
+
// This Source Code may also be made available under the following Secondary
|
|
9
|
+
// Licenses when the conditions for such availability set forth in the Eclipse
|
|
10
|
+
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
|
|
11
|
+
// with the GNU Classpath Exception which is available at
|
|
12
|
+
// https://www.gnu.org/software/classpath/license.html.
|
|
13
|
+
//
|
|
14
|
+
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
|
|
15
|
+
// *****************************************************************************
|
|
16
|
+
|
|
17
|
+
/* note: this bogus test file is required so that
|
|
18
|
+
we are able to run mocha unit tests on this
|
|
19
|
+
package, without having any actual unit tests in it.
|
|
20
|
+
This way a coverage report will be generated,
|
|
21
|
+
showing 0% coverage, instead of no report.
|
|
22
|
+
This file can be removed once we have real unit
|
|
23
|
+
tests in place. */
|
|
24
|
+
|
|
25
|
+
describe('ai-vercel-ai package', () => {
|
|
26
|
+
it('support code coverage statistics', () => true);
|
|
27
|
+
});
|