@theia/ai-openai 1.67.0-next.13 → 1.67.0-next.56
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/package.json +6 -6
- package/src/common/openai-preferences.ts +4 -10
- package/lib/browser/openai-frontend-application-contribution.d.ts +0 -18
- package/lib/browser/openai-frontend-application-contribution.d.ts.map +0 -1
- package/lib/browser/openai-frontend-application-contribution.js +0 -163
- package/lib/browser/openai-frontend-application-contribution.js.map +0 -1
- package/lib/browser/openai-frontend-module.d.ts +0 -4
- package/lib/browser/openai-frontend-module.d.ts.map +0 -1
- package/lib/browser/openai-frontend-module.js +0 -33
- package/lib/browser/openai-frontend-module.js.map +0 -1
- package/lib/common/index.d.ts +0 -2
- package/lib/common/index.d.ts.map +0 -1
- package/lib/common/index.js +0 -20
- package/lib/common/index.js.map +0 -1
- package/lib/common/openai-language-models-manager.d.ts +0 -63
- package/lib/common/openai-language-models-manager.d.ts.map +0 -1
- package/lib/common/openai-language-models-manager.js +0 -22
- package/lib/common/openai-language-models-manager.js.map +0 -1
- package/lib/common/openai-preferences.d.ts +0 -7
- package/lib/common/openai-preferences.d.ts.map +0 -1
- package/lib/common/openai-preferences.js +0 -147
- package/lib/common/openai-preferences.js.map +0 -1
- package/lib/node/openai-backend-module.d.ts +0 -5
- package/lib/node/openai-backend-module.d.ts.map +0 -1
- package/lib/node/openai-backend-module.js +0 -40
- package/lib/node/openai-backend-module.js.map +0 -1
- package/lib/node/openai-language-model.d.ts +0 -80
- package/lib/node/openai-language-model.d.ts.map +0 -1
- package/lib/node/openai-language-model.js +0 -324
- package/lib/node/openai-language-model.js.map +0 -1
- package/lib/node/openai-language-models-manager-impl.d.ts +0 -22
- package/lib/node/openai-language-models-manager-impl.d.ts.map +0 -1
- package/lib/node/openai-language-models-manager-impl.js +0 -162
- package/lib/node/openai-language-models-manager-impl.js.map +0 -1
- package/lib/node/openai-model-utils.spec.d.ts +0 -2
- package/lib/node/openai-model-utils.spec.d.ts.map +0 -1
- package/lib/node/openai-model-utils.spec.js +0 -467
- package/lib/node/openai-model-utils.spec.js.map +0 -1
- package/lib/node/openai-request-api-context.d.ts +0 -4
- package/lib/node/openai-request-api-context.d.ts.map +0 -1
- package/lib/node/openai-request-api-context.js +0 -18
- package/lib/node/openai-request-api-context.js.map +0 -1
- package/lib/node/openai-response-api-utils.d.ts +0 -45
- package/lib/node/openai-response-api-utils.d.ts.map +0 -1
- package/lib/node/openai-response-api-utils.js +0 -724
- package/lib/node/openai-response-api-utils.js.map +0 -1
- package/lib/node/openai-streaming-iterator.d.ts +0 -24
- package/lib/node/openai-streaming-iterator.d.ts.map +0 -1
- package/lib/node/openai-streaming-iterator.js +0 -176
- package/lib/node/openai-streaming-iterator.js.map +0 -1
- package/lib/node/openai-streaming-iterator.spec.d.ts +0 -2
- package/lib/node/openai-streaming-iterator.spec.d.ts.map +0 -1
- package/lib/node/openai-streaming-iterator.spec.js +0 -207
- package/lib/node/openai-streaming-iterator.spec.js.map +0 -1

package/lib/node/openai-backend-module.js
@@ -1,40 +0,0 @@
-"use strict";
-// *****************************************************************************
-// Copyright (C) 2024 EclipseSource GmbH.
-//
-// This program and the accompanying materials are made available under the
-// terms of the Eclipse Public License v. 2.0 which is available at
-// http://www.eclipse.org/legal/epl-2.0.
-//
-// This Source Code may also be made available under the following Secondary
-// Licenses when the conditions for such availability set forth in the Eclipse
-// Public License v. 2.0 are satisfied: GNU General Public License, version 2
-// with the GNU Classpath Exception which is available at
-// https://www.gnu.org/software/classpath/license.html.
-//
-// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
-// *****************************************************************************
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.OpenAiModelFactory = void 0;
-const inversify_1 = require("@theia/core/shared/inversify");
-const openai_language_models_manager_1 = require("../common/openai-language-models-manager");
-const core_1 = require("@theia/core");
-const openai_language_models_manager_impl_1 = require("./openai-language-models-manager-impl");
-const connection_container_module_1 = require("@theia/core/lib/node/messaging/connection-container-module");
-const openai_language_model_1 = require("./openai-language-model");
-const openai_response_api_utils_1 = require("./openai-response-api-utils");
-const openai_preferences_1 = require("../common/openai-preferences");
-exports.OpenAiModelFactory = Symbol('OpenAiModelFactory');
-// We use a connection module to handle AI services separately for each frontend.
-const openAiConnectionModule = connection_container_module_1.ConnectionContainerModule.create(({ bind, bindBackendService, bindFrontendService }) => {
-    bind(openai_language_models_manager_impl_1.OpenAiLanguageModelsManagerImpl).toSelf().inSingletonScope();
-    bind(openai_language_models_manager_1.OpenAiLanguageModelsManager).toService(openai_language_models_manager_impl_1.OpenAiLanguageModelsManagerImpl);
-    bind(core_1.ConnectionHandler).toDynamicValue(ctx => new core_1.RpcConnectionHandler(openai_language_models_manager_1.OPENAI_LANGUAGE_MODELS_MANAGER_PATH, () => ctx.container.get(openai_language_models_manager_1.OpenAiLanguageModelsManager))).inSingletonScope();
-});
-exports.default = new inversify_1.ContainerModule(bind => {
-    bind(core_1.PreferenceContribution).toConstantValue({ schema: openai_preferences_1.OpenAiPreferencesSchema });
-    bind(openai_language_model_1.OpenAiModelUtils).toSelf().inSingletonScope();
-    bind(openai_response_api_utils_1.OpenAiResponseApiUtils).toSelf().inSingletonScope();
-    bind(connection_container_module_1.ConnectionContainerModule).toConstantValue(openAiConnectionModule);
-});
-//# sourceMappingURL=openai-backend-module.js.map

package/lib/node/openai-backend-module.js.map
@@ -1 +0,0 @@
{"version":3,"file":"openai-backend-module.js","sourceRoot":"","sources":["../../src/node/openai-backend-module.ts"],"names":[],"mappings":";AAAA,gFAAgF;AAChF,yCAAyC;AACzC,EAAE;AACF,2EAA2E;AAC3E,mEAAmE;AACnE,wCAAwC;AACxC,EAAE;AACF,4EAA4E;AAC5E,8EAA8E;AAC9E,6EAA6E;AAC7E,yDAAyD;AACzD,uDAAuD;AACvD,EAAE;AACF,gFAAgF;AAChF,gFAAgF;;;AAEhF,4DAA+D;AAC/D,6FAA4H;AAC5H,sCAA8F;AAC9F,+FAAwF;AACxF,4GAAuG;AACvG,mEAA2D;AAC3D,2EAAqE;AACrE,qEAAuE;AAE1D,QAAA,kBAAkB,GAAG,MAAM,CAAC,oBAAoB,CAAC,CAAC;AAE/D,iFAAiF;AACjF,MAAM,sBAAsB,GAAG,uDAAyB,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,kBAAkB,EAAE,mBAAmB,EAAE,EAAE,EAAE;IAClH,IAAI,CAAC,qEAA+B,CAAC,CAAC,MAAM,EAAE,CAAC,gBAAgB,EAAE,CAAC;IAClE,IAAI,CAAC,4DAA2B,CAAC,CAAC,SAAS,CAAC,qEAA+B,CAAC,CAAC;IAC7E,IAAI,CAAC,wBAAiB,CAAC,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE,CACzC,IAAI,2BAAoB,CAAC,oEAAmC,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,SAAS,CAAC,GAAG,CAAC,4DAA2B,CAAC,CAAC,CACtH,CAAC,gBAAgB,EAAE,CAAC;AACzB,CAAC,CAAC,CAAC;AAEH,kBAAe,IAAI,2BAAe,CAAC,IAAI,CAAC,EAAE;IACtC,IAAI,CAAC,6BAAsB,CAAC,CAAC,eAAe,CAAC,EAAE,MAAM,EAAE,4CAAuB,EAAE,CAAC,CAAC;IAClF,IAAI,CAAC,wCAAgB,CAAC,CAAC,MAAM,EAAE,CAAC,gBAAgB,EAAE,CAAC;IACnD,IAAI,CAAC,kDAAsB,CAAC,CAAC,MAAM,EAAE,CAAC,gBAAgB,EAAE,CAAC;IACzD,IAAI,CAAC,uDAAyB,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC;AAC5E,CAAC,CAAC,CAAC"}

package/lib/node/openai-language-model.d.ts
@@ -1,80 +0,0 @@
-import { LanguageModel, LanguageModelParsedResponse, LanguageModelRequest, LanguageModelMessage, LanguageModelResponse, LanguageModelTextResponse, TokenUsageService, UserRequest, LanguageModelStatus } from '@theia/ai-core';
-import { CancellationToken } from '@theia/core';
-import { OpenAI } from 'openai';
-import { RunnableToolFunctionWithoutParse } from 'openai/lib/RunnableFunction';
-import { ChatCompletionMessageParam } from 'openai/resources';
-import type { FinalRequestOptions } from 'openai/internal/request-options';
-import type { RunnerOptions } from 'openai/lib/AbstractChatCompletionRunner';
-import { OpenAiResponseApiUtils } from './openai-response-api-utils';
-export declare class MistralFixedOpenAI extends OpenAI {
-    protected prepareOptions(options: FinalRequestOptions): Promise<void>;
-}
-export declare const OpenAiModelIdentifier: unique symbol;
-export type DeveloperMessageSettings = 'user' | 'system' | 'developer' | 'mergeWithFollowingUserMessage' | 'skip';
-export declare class OpenAiModel implements LanguageModel {
-    readonly id: string;
-    model: string;
-    status: LanguageModelStatus;
-    enableStreaming: boolean;
-    apiKey: () => string | undefined;
-    apiVersion: () => string | undefined;
-    supportsStructuredOutput: boolean;
-    url: string | undefined;
-    deployment: string | undefined;
-    openAiModelUtils: OpenAiModelUtils;
-    responseApiUtils: OpenAiResponseApiUtils;
-    developerMessageSettings: DeveloperMessageSettings;
-    maxRetries: number;
-    useResponseApi: boolean;
-    protected readonly tokenUsageService?: TokenUsageService | undefined;
-    protected proxy?: string | undefined;
-    /**
-     * The options for the OpenAI runner.
-     */
-    protected runnerOptions: RunnerOptions;
-    /**
-     * @param id the unique id for this language model. It will be used to identify the model in the UI.
-     * @param model the model id as it is used by the OpenAI API
-     * @param enableStreaming whether the streaming API shall be used
-     * @param apiKey a function that returns the API key to use for this model, called on each request
-     * @param apiVersion a function that returns the OpenAPI version to use for this model, called on each request
-     * @param developerMessageSettings how to handle system messages
-     * @param url the OpenAI API compatible endpoint where the model is hosted. If not provided the default OpenAI endpoint will be used.
-     * @param maxRetries the maximum number of retry attempts when a request fails
-     * @param useResponseApi whether to use the newer OpenAI Response API instead of the Chat Completion API
-     */
-    constructor(id: string, model: string, status: LanguageModelStatus, enableStreaming: boolean, apiKey: () => string | undefined, apiVersion: () => string | undefined, supportsStructuredOutput: boolean, url: string | undefined, deployment: string | undefined, openAiModelUtils: OpenAiModelUtils, responseApiUtils: OpenAiResponseApiUtils, developerMessageSettings?: DeveloperMessageSettings, maxRetries?: number, useResponseApi?: boolean, tokenUsageService?: TokenUsageService | undefined, proxy?: string | undefined);
-    protected getSettings(request: LanguageModelRequest): Record<string, unknown>;
-    request(request: UserRequest, cancellationToken?: CancellationToken): Promise<LanguageModelResponse>;
-    protected handleChatCompletionsRequest(openai: OpenAI, request: UserRequest, cancellationToken?: CancellationToken): Promise<LanguageModelResponse>;
-    protected handleNonStreamingRequest(openai: OpenAI, request: UserRequest): Promise<LanguageModelTextResponse>;
-    protected isNonStreamingModel(_model: string): boolean;
-    protected handleStructuredOutputRequest(openai: OpenAI, request: UserRequest): Promise<LanguageModelParsedResponse>;
-    protected createTools(request: LanguageModelRequest): RunnableToolFunctionWithoutParse[] | undefined;
-    protected initializeOpenAi(): OpenAI;
-    protected handleResponseApiRequest(openai: OpenAI, request: UserRequest, cancellationToken?: CancellationToken): Promise<LanguageModelResponse>;
-    protected processMessages(messages: LanguageModelMessage[]): ChatCompletionMessageParam[];
-}
-/**
- * Utility class for processing messages for the OpenAI language model.
- *
- * Adopters can rebind this class to implement custom message processing behavior.
- */
-export declare class OpenAiModelUtils {
-    protected processSystemMessages(messages: LanguageModelMessage[], developerMessageSettings: DeveloperMessageSettings): LanguageModelMessage[];
-    protected toOpenAiRole(message: LanguageModelMessage, developerMessageSettings: DeveloperMessageSettings): 'developer' | 'user' | 'assistant' | 'system';
-    protected toOpenAIMessage(message: LanguageModelMessage, developerMessageSettings: DeveloperMessageSettings): ChatCompletionMessageParam;
-    /**
-     * Processes the provided list of messages by applying system message adjustments and converting
-     * them to the format expected by the OpenAI API.
-     *
-     * Adopters can rebind this processing to implement custom behavior.
-     *
-     * @param messages the list of messages to process.
-     * @param developerMessageSettings how system and developer messages are handled during processing.
-     * @param model the OpenAI model identifier. Currently not used, but allows subclasses to implement model-specific behavior.
-     * @returns an array of messages formatted for the OpenAI API.
-     */
-    processMessages(messages: LanguageModelMessage[], developerMessageSettings: DeveloperMessageSettings, model?: string): ChatCompletionMessageParam[];
-}
-//# sourceMappingURL=openai-language-model.d.ts.map

package/lib/node/openai-language-model.d.ts.map
@@ -1 +0,0 @@
{"version":3,"file":"openai-language-model.d.ts","sourceRoot":"","sources":["../../src/node/openai-language-model.ts"],"names":[],"mappings":"AAgBA,OAAO,EACH,aAAa,EACb,2BAA2B,EAC3B,oBAAoB,EACpB,oBAAoB,EACpB,qBAAqB,EACrB,yBAAyB,EACzB,iBAAiB,EACjB,WAAW,EAEX,mBAAmB,EACtB,MAAM,gBAAgB,CAAC;AACxB,OAAO,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AAEhD,OAAO,EAAE,MAAM,EAAe,MAAM,QAAQ,CAAC;AAE7C,OAAO,EAAE,gCAAgC,EAAE,MAAM,6BAA6B,CAAC;AAC/E,OAAO,EAAE,0BAA0B,EAAE,MAAM,kBAAkB,CAAC;AAG9D,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,iCAAiC,CAAC;AAC3E,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,yCAAyC,CAAC;AAC7E,OAAO,EAAE,sBAAsB,EAAyB,MAAM,6BAA6B,CAAC;AAG5F,qBAAa,kBAAmB,SAAQ,MAAM;cACjB,cAAc,CAAC,OAAO,EAAE,mBAAmB,GAAG,OAAO,CAAC,IAAI,CAAC;CAoBvF;AAED,eAAO,MAAM,qBAAqB,eAAkC,CAAC;AAErE,MAAM,MAAM,wBAAwB,GAAG,MAAM,GAAG,QAAQ,GAAG,WAAW,GAAG,+BAA+B,GAAG,MAAM,CAAC;AAElH,qBAAa,WAAY,YAAW,aAAa;aAwBzB,EAAE,EAAE,MAAM;IACnB,KAAK,EAAE,MAAM;IACb,MAAM,EAAE,mBAAmB;IAC3B,eAAe,EAAE,OAAO;IACxB,MAAM,EAAE,MAAM,MAAM,GAAG,SAAS;IAChC,UAAU,EAAE,MAAM,MAAM,GAAG,SAAS;IACpC,wBAAwB,EAAE,OAAO;IACjC,GAAG,EAAE,MAAM,GAAG,SAAS;IACvB,UAAU,EAAE,MAAM,GAAG,SAAS;IAC9B,gBAAgB,EAAE,gBAAgB;IAClC,gBAAgB,EAAE,sBAAsB;IACxC,wBAAwB,EAAE,wBAAwB;IAClD,UAAU,EAAE,MAAM;IAClB,cAAc,EAAE,OAAO;IAC9B,SAAS,CAAC,QAAQ,CAAC,iBAAiB,CAAC;IACrC,SAAS,CAAC,KAAK,CAAC;IArCpB;;OAEG;IACH,SAAS,CAAC,aAAa,EAAE,aAAa,CAKpC;IAEF;;;;;;;;;;OAUG;gBAEiB,EAAE,EAAE,MAAM,EACnB,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,mBAAmB,EAC3B,eAAe,EAAE,OAAO,EACxB,MAAM,EAAE,MAAM,MAAM,GAAG,SAAS,EAChC,UAAU,EAAE,MAAM,MAAM,GAAG,SAAS,EACpC,wBAAwB,EAAE,OAAO,EACjC,GAAG,EAAE,MAAM,GAAG,SAAS,EACvB,UAAU,EAAE,MAAM,GAAG,SAAS,EAC9B,gBAAgB,EAAE,gBAAgB,EAClC,gBAAgB,EAAE,sBAAsB,EACxC,wBAAwB,GAAE,wBAAsC,EAChE,UAAU,GAAE,MAAU,EACtB,cAAc,GAAE,OAAe,EACnB,iBAAiB,CAAC,+BAAmB,EAC9C,KAAK,CAAC,oBAAQ;IAG5B,SAAS,CAAC,WAAW,CAAC,OAAO,EAAE,oBAAoB,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC;IAIvE,OAAO,CAAC,OAAO,EAAE,WAAW,EAAE,iBAAiB,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,qBAAqB,CAAC;cAQ1F,4BAA4B,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,EAAE,iBAAiB,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,qBAAqB,CAAC;cA4CzI,yBAAyB,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,yBAAyB,CAAC;IA2BnH,SAAS,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO;cAItC,6BAA6B,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,2BAA2B,CAAC;IAgCzH,SAAS,CAAC,WAAW,CAAC,OAAO,EAAE,oBAAoB,GAAG,gCAAgC,EAAE,GAAG,SAAS;IAYpG,SAAS,CAAC,gBAAgB,IAAI,MAAM;cAyBpB,wBAAwB,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,EAAE,iBAAiB,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,qBAAqB,CAAC;IA4BrJ,SAAS,CAAC,eAAe,CAAC,QAAQ,EAAE,oBAAoB,EAAE,GAAG,0BAA0B,EAAE;CAG5F;AAED;;;;GAIG;AACH,qBACa,gBAAgB;IAEzB,SAAS,CAAC,qBAAqB,CAC3B,QAAQ,EAAE,oBAAoB,EAAE,EAChC,wBAAwB,EAAE,wBAAwB,GACnD,oBAAoB,EAAE;IAIzB,SAAS,CAAC,YAAY,CAClB,OAAO,EAAE,oBAAoB,EAC7B,wBAAwB,EAAE,wBAAwB,GACnD,WAAW,GAAG,MAAM,GAAG,WAAW,GAAG,QAAQ;IAahD,SAAS,CAAC,eAAe,CACrB,OAAO,EAAE,oBAAoB,EAC7B,wBAAwB,EAAE,wBAAwB,GACnD,0BAA0B;IAsC7B;;;;;;;;;;OAUG;IACH,eAAe,CACX,QAAQ,EAAE,oBAAoB,EAAE,EAChC,wBAAwB,EAAE,wBAAwB,EAClD,KAAK,CAAC,EAAE,MAAM,GACf,0BAA0B,EAAE;CAKlC"}

package/lib/node/openai-language-model.js
@@ -1,324 +0,0 @@
-"use strict";
-// *****************************************************************************
-// Copyright (C) 2024 EclipseSource GmbH.
-//
-// This program and the accompanying materials are made available under the
-// terms of the Eclipse Public License v. 2.0 which is available at
-// http://www.eclipse.org/legal/epl-2.0.
-//
-// This Source Code may also be made available under the following Secondary
-// Licenses when the conditions for such availability set forth in the Eclipse
-// Public License v. 2.0 are satisfied: GNU General Public License, version 2
-// with the GNU Classpath Exception which is available at
-// https://www.gnu.org/software/classpath/license.html.
-//
-// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
-// *****************************************************************************
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.OpenAiModelUtils = exports.OpenAiModel = exports.OpenAiModelIdentifier = exports.MistralFixedOpenAI = void 0;
-const tslib_1 = require("tslib");
-const ai_core_1 = require("@theia/ai-core");
-const inversify_1 = require("@theia/core/shared/inversify");
-const openai_1 = require("openai");
-const openai_streaming_iterator_1 = require("./openai-streaming-iterator");
-const common_1 = require("../common");
-const openai_response_api_utils_1 = require("./openai-response-api-utils");
-const undici = require("undici");
-class MistralFixedOpenAI extends openai_1.OpenAI {
-    async prepareOptions(options) {
-        const messages = options.body.messages;
-        if (Array.isArray(messages)) {
-            options.body.messages.forEach(m => {
-                if (m.role === 'assistant' && m.tool_calls) {
-                    // Mistral OpenAI Endpoint expects refusal to be undefined and not null for optional properties
-                    // eslint-disable-next-line no-null/no-null
-                    if (m.refusal === null) {
-                        m.refusal = undefined;
-                    }
-                    // Mistral OpenAI Endpoint expects parsed to be undefined and not null for optional properties
-                    // eslint-disable-next-line no-null/no-null
-                    if (m.parsed === null) {
-                        m.parsed = undefined;
-                    }
-                }
-            });
-        }
-        return super.prepareOptions(options);
-    }
-    ;
-}
-exports.MistralFixedOpenAI = MistralFixedOpenAI;
-exports.OpenAiModelIdentifier = Symbol('OpenAiModelIdentifier');
-class OpenAiModel {
-    /**
-     * @param id the unique id for this language model. It will be used to identify the model in the UI.
-     * @param model the model id as it is used by the OpenAI API
-     * @param enableStreaming whether the streaming API shall be used
-     * @param apiKey a function that returns the API key to use for this model, called on each request
-     * @param apiVersion a function that returns the OpenAPI version to use for this model, called on each request
-     * @param developerMessageSettings how to handle system messages
-     * @param url the OpenAI API compatible endpoint where the model is hosted. If not provided the default OpenAI endpoint will be used.
-     * @param maxRetries the maximum number of retry attempts when a request fails
-     * @param useResponseApi whether to use the newer OpenAI Response API instead of the Chat Completion API
-     */
-    constructor(id, model, status, enableStreaming, apiKey, apiVersion, supportsStructuredOutput, url, deployment, openAiModelUtils, responseApiUtils, developerMessageSettings = 'developer', maxRetries = 3, useResponseApi = false, tokenUsageService, proxy) {
-        this.id = id;
-        this.model = model;
-        this.status = status;
-        this.enableStreaming = enableStreaming;
-        this.apiKey = apiKey;
-        this.apiVersion = apiVersion;
-        this.supportsStructuredOutput = supportsStructuredOutput;
-        this.url = url;
-        this.deployment = deployment;
-        this.openAiModelUtils = openAiModelUtils;
-        this.responseApiUtils = responseApiUtils;
-        this.developerMessageSettings = developerMessageSettings;
-        this.maxRetries = maxRetries;
-        this.useResponseApi = useResponseApi;
-        this.tokenUsageService = tokenUsageService;
-        this.proxy = proxy;
-        /**
-         * The options for the OpenAI runner.
-         */
-        this.runnerOptions = {
-            // The maximum number of chat completions to return in a single request.
-            // Each function call counts as a chat completion.
-            // To support use cases with many function calls (e.g. @Coder), we set this to a high value.
-            maxChatCompletions: 100,
-        };
-    }
-    getSettings(request) {
-        var _a;
-        return (_a = request.settings) !== null && _a !== void 0 ? _a : {};
-    }
-    async request(request, cancellationToken) {
-        const openai = this.initializeOpenAi();
-        return this.useResponseApi ?
-            this.handleResponseApiRequest(openai, request, cancellationToken)
-            : this.handleChatCompletionsRequest(openai, request, cancellationToken);
-    }
-    async handleChatCompletionsRequest(openai, request, cancellationToken) {
-        var _a;
-        const settings = this.getSettings(request);
-        if (((_a = request.response_format) === null || _a === void 0 ? void 0 : _a.type) === 'json_schema' && this.supportsStructuredOutput) {
-            return this.handleStructuredOutputRequest(openai, request);
-        }
-        if (this.isNonStreamingModel(this.model) || (typeof settings.stream === 'boolean' && !settings.stream)) {
-            return this.handleNonStreamingRequest(openai, request);
-        }
-        if (this.id.startsWith(`${common_1.OPENAI_PROVIDER_ID}/`)) {
-            settings['stream_options'] = { include_usage: true };
-        }
-        if (cancellationToken === null || cancellationToken === void 0 ? void 0 : cancellationToken.isCancellationRequested) {
-            return { text: '' };
-        }
-        let runner;
-        const tools = this.createTools(request);
-        if (tools) {
-            runner = openai.chat.completions.runTools({
-                model: this.model,
-                messages: this.processMessages(request.messages),
-                stream: true,
-                tools: tools,
-                tool_choice: 'auto',
-                ...settings
-            }, {
-                ...this.runnerOptions, maxRetries: this.maxRetries
-            });
-        }
-        else {
-            runner = openai.chat.completions.stream({
-                model: this.model,
-                messages: this.processMessages(request.messages),
-                stream: true,
-                ...settings
-            });
-        }
-        return { stream: new openai_streaming_iterator_1.StreamingAsyncIterator(runner, request.requestId, cancellationToken, this.tokenUsageService, this.id) };
-    }
-    async handleNonStreamingRequest(openai, request) {
-        var _a;
-        const settings = this.getSettings(request);
-        const response = await openai.chat.completions.create({
-            model: this.model,
-            messages: this.processMessages(request.messages),
-            ...settings
-        });
-        const message = response.choices[0].message;
-        // Record token usage if token usage service is available
-        if (this.tokenUsageService && response.usage) {
-            await this.tokenUsageService.recordTokenUsage(this.id, {
-                inputTokens: response.usage.prompt_tokens,
-                outputTokens: response.usage.completion_tokens,
-                requestId: request.requestId
-            });
-        }
-        return {
-            text: (_a = message.content) !== null && _a !== void 0 ? _a : ''
-        };
-    }
-    isNonStreamingModel(_model) {
-        return !this.enableStreaming;
-    }
-    async handleStructuredOutputRequest(openai, request) {
-        var _a;
-        const settings = this.getSettings(request);
-        // TODO implement tool support for structured output (parse() seems to require different tool format)
-        const result = await openai.chat.completions.parse({
-            model: this.model,
-            messages: this.processMessages(request.messages),
-            response_format: request.response_format,
-            ...settings
-        });
-        const message = result.choices[0].message;
-        if (message.refusal || message.parsed === undefined) {
-            console.error('Error in OpenAI chat completion stream:', JSON.stringify(message));
-        }
-        // Record token usage if token usage service is available
-        if (this.tokenUsageService && result.usage) {
-            await this.tokenUsageService.recordTokenUsage(this.id, {
-                inputTokens: result.usage.prompt_tokens,
-                outputTokens: result.usage.completion_tokens,
-                requestId: request.requestId
-            });
-        }
-        return {
-            content: (_a = message.content) !== null && _a !== void 0 ? _a : '',
-            parsed: message.parsed
-        };
-    }
-    createTools(request) {
-        var _a;
-        return (_a = request.tools) === null || _a === void 0 ? void 0 : _a.map(tool => ({
-            type: 'function',
-            function: {
-                name: tool.name,
-                description: tool.description,
-                parameters: tool.parameters,
-                function: (args_string) => tool.handler(args_string)
-            }
-        }));
-    }
-    initializeOpenAi() {
-        const apiKey = this.apiKey();
-        if (!apiKey && !(this.url)) {
-            throw new Error('Please provide OPENAI_API_KEY in preferences or via environment variable');
-        }
-        const apiVersion = this.apiVersion();
-        // We need to hand over "some" key, even if a custom url is not key protected as otherwise the OpenAI client will throw an error
-        const key = apiKey !== null && apiKey !== void 0 ? apiKey : 'no-key';
-        let fo;
-        if (this.proxy) {
-            const proxyAgent = new undici.ProxyAgent(this.proxy);
-            fo = {
-                dispatcher: proxyAgent,
-            };
-        }
-        if (apiVersion) {
-            return new openai_1.AzureOpenAI({ apiKey: key, baseURL: this.url, apiVersion: apiVersion, deployment: this.deployment, fetchOptions: fo });
-        }
-        else {
-            return new MistralFixedOpenAI({ apiKey: key, baseURL: this.url, fetchOptions: fo });
-        }
-    }
-    async handleResponseApiRequest(openai, request, cancellationToken) {
-        const settings = this.getSettings(request);
-        const isStreamingRequest = this.enableStreaming && !(typeof settings.stream === 'boolean' && !settings.stream);
-        try {
-            return await this.responseApiUtils.handleRequest(openai, request, settings, this.model, this.openAiModelUtils, this.developerMessageSettings, this.runnerOptions, this.id, isStreamingRequest, this.tokenUsageService, cancellationToken);
-        }
-        catch (error) {
-            // If Response API fails, fall back to Chat Completions API
-            if (error instanceof Error) {
-                console.warn(`Response API failed for model ${this.id}, falling back to Chat Completions API:`, error.message);
-                return this.handleChatCompletionsRequest(openai, request, cancellationToken);
-            }
-            throw error;
-        }
-    }
-    processMessages(messages) {
-        return this.openAiModelUtils.processMessages(messages, this.developerMessageSettings, this.model);
-    }
-}
-exports.OpenAiModel = OpenAiModel;
-/**
- * Utility class for processing messages for the OpenAI language model.
- *
- * Adopters can rebind this class to implement custom message processing behavior.
- */
-let OpenAiModelUtils = class OpenAiModelUtils {
-    processSystemMessages(messages, developerMessageSettings) {
-        return (0, openai_response_api_utils_1.processSystemMessages)(messages, developerMessageSettings);
-    }
-    toOpenAiRole(message, developerMessageSettings) {
-        if (message.actor === 'system') {
-            if (developerMessageSettings === 'user' || developerMessageSettings === 'system' || developerMessageSettings === 'developer') {
-                return developerMessageSettings;
-            }
-            else {
-                return 'developer';
-            }
-        }
-        else if (message.actor === 'ai') {
-            return 'assistant';
-        }
-        return 'user';
-    }
-    toOpenAIMessage(message, developerMessageSettings) {
-        if (ai_core_1.LanguageModelMessage.isTextMessage(message)) {
-            return {
-                role: this.toOpenAiRole(message, developerMessageSettings),
-                content: message.text
-            };
-        }
-        if (ai_core_1.LanguageModelMessage.isToolUseMessage(message)) {
-            return {
-                role: 'assistant',
-                tool_calls: [{ id: message.id, function: { name: message.name, arguments: JSON.stringify(message.input) }, type: 'function' }]
-            };
-        }
-        if (ai_core_1.LanguageModelMessage.isToolResultMessage(message)) {
-            return {
-                role: 'tool',
-                tool_call_id: message.tool_use_id,
-                // content only supports text content so we need to stringify any potential data we have, e.g., images
-                content: typeof message.content === 'string' ? message.content : JSON.stringify(message.content)
-            };
-        }
-        if (ai_core_1.LanguageModelMessage.isImageMessage(message) && message.actor === 'user') {
-            return {
-                role: 'user',
-                content: [{
-                        type: 'image_url',
-                        image_url: {
-                            url: ai_core_1.ImageContent.isBase64(message.image) ?
-                                `data:${message.image.mimeType};base64,${message.image.base64data}` :
-                                message.image.url
-                        }
-                    }]
-            };
-        }
-        throw new Error(`Unknown message type:'${JSON.stringify(message)}'`);
-    }
-    /**
-     * Processes the provided list of messages by applying system message adjustments and converting
-     * them to the format expected by the OpenAI API.
-     *
-     * Adopters can rebind this processing to implement custom behavior.
-     *
-     * @param messages the list of messages to process.
-     * @param developerMessageSettings how system and developer messages are handled during processing.
-     * @param model the OpenAI model identifier. Currently not used, but allows subclasses to implement model-specific behavior.
-     * @returns an array of messages formatted for the OpenAI API.
-     */
-    processMessages(messages, developerMessageSettings, model) {
-        const processed = this.processSystemMessages(messages, developerMessageSettings);
-        return processed.filter(m => m.type !== 'thinking').map(m => this.toOpenAIMessage(m, developerMessageSettings));
-    }
-};
-exports.OpenAiModelUtils = OpenAiModelUtils;
-exports.OpenAiModelUtils = OpenAiModelUtils = tslib_1.__decorate([
-    (0, inversify_1.injectable)()
-], OpenAiModelUtils);
-//# sourceMappingURL=openai-language-model.js.map

package/lib/node/openai-language-model.js.map
@@ -1 +0,0 @@
{"version":3,"file":"openai-language-model.js","sourceRoot":"","sources":["../../src/node/openai-language-model.ts"],"names":[],"mappings":";AAAA,gFAAgF;AAChF,yCAAyC;AACzC,EAAE;AACF,2EAA2E;AAC3E,mEAAmE;AACnE,wCAAwC;AACxC,EAAE;AACF,4EAA4E;AAC5E,8EAA8E;AAC9E,6EAA6E;AAC7E,yDAAyD;AACzD,uDAAuD;AACvD,EAAE;AACF,gFAAgF;AAChF,gFAAgF;;;;AAEhF,4CAWwB;AAExB,4DAA0D;AAC1D,mCAA6C;AAI7C,2EAAqE;AACrE,sCAA+C;AAG/C,2EAA4F;AAC5F,iCAAiC;AAEjC,MAAa,kBAAmB,SAAQ,eAAM;IACvB,KAAK,CAAC,cAAc,CAAC,OAA4B;QAChE,MAAM,QAAQ,GAAI,OAAO,CAAC,IAAwD,CAAC,QAAQ,CAAC;QAC5F,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC;YACzB,OAAO,CAAC,IAAwD,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;gBACnF,IAAI,CAAC,CAAC,IAAI,KAAK,WAAW,IAAI,CAAC,CAAC,UAAU,EAAE,CAAC;oBACzC,+FAA+F;oBAC/F,2CAA2C;oBAC3C,IAAI,CAAC,CAAC,OAAO,KAAK,IAAI,EAAE,CAAC;wBACrB,CAAC,CAAC,OAAO,GAAG,SAAS,CAAC;oBAC1B,CAAC;oBACD,8FAA8F;oBAC9F,2CAA2C;oBAC3C,IAAK,CAA6C,CAAC,MAAM,KAAK,IAAI,EAAE,CAAC;wBAChE,CAA6C,CAAC,MAAM,GAAG,SAAS,CAAC;oBACtE,CAAC;gBACL,CAAC;YACL,CAAC,CAAC,CAAC;QACP,CAAC;QACD,OAAO,KAAK,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;IACzC,CAAC;IAAA,CAAC;CACL;AArBD,gDAqBC;AAEY,QAAA,qBAAqB,GAAG,MAAM,CAAC,uBAAuB,CAAC,CAAC;AAIrE,MAAa,WAAW;IAYpB;;;;;;;;;;OAUG;IACH,YACoB,EAAU,EACnB,KAAa,EACb,MAA2B,EAC3B,eAAwB,EACxB,MAAgC,EAChC,UAAoC,EACpC,wBAAiC,EACjC,GAAuB,EACvB,UAA8B,EAC9B,gBAAkC,EAClC,gBAAwC,EACxC,2BAAqD,WAAW,EAChE,aAAqB,CAAC,EACtB,iBAA0B,KAAK,EACnB,iBAAqC,EAC9C,KAAc;QAfR,OAAE,GAAF,EAAE,CAAQ;QACnB,UAAK,GAAL,KAAK,CAAQ;QACb,WAAM,GAAN,MAAM,CAAqB;QAC3B,oBAAe,GAAf,eAAe,CAAS;QACxB,WAAM,GAAN,MAAM,CAA0B;QAChC,eAAU,GAAV,UAAU,CAA0B;QACpC,6BAAwB,GAAxB,wBAAwB,CAAS;QACjC,QAAG,GAAH,GAAG,CAAoB;QACvB,eAAU,GAAV,UAAU,CAAoB;QAC9B,qBAAgB,GAAhB,gBAAgB,CAAkB;QAClC,qBAAgB,GAAhB,gBAAgB,CAAwB;QACxC,6BAAwB,GAAxB,wBAAwB,CAAwC;QAChE,eAAU,GAAV,UAAU,CAAY;QACtB,mBAAc,GAAd,cAAc,CAAiB;QACnB,sBAAiB,GAAjB,iBAAiB,CAAoB;QAC9C,UAAK,GAAL,KAAK,CAAS;QArC5B;;WAEG;QACO,kBAAa,GAAkB;YACrC,wEAAwE;YACxE,kDAAkD;YAClD,4FAA4F;YAC5F,kBAAkB,EAAE,GAAG;SAC1B,CAAC;IA8BE,CAAC;IAEK,WAAW,CAAC,OAA6B;;QAC/C,OAAO,MAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE,CAAC;IAClC,CAAC;IAED,KAAK,CAAC,OAAO,CAAC,OAAoB,EAAE,iBAAqC;QACrE,MAAM,MAAM,GAAG,IAAI,CAAC,gBAAgB,EAAE,CAAC;QAEvC,OAAO,IAAI,CAAC,cAAc,CAAC,CAAC;YACxB,IAAI,CAAC,wBAAwB,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,CAAC;YACjE,CAAC,CAAC,IAAI,CAAC,4BAA4B,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,CAAC,CAAC;IAChF,CAAC;IAES,KAAK,CAAC,4BAA4B,CAAC,MAAc,EAAE,OAAoB,EAAE,iBAAqC;;QACpH,MAAM,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;QAE3C,IAAI,CAAA,MAAA,OAAO,CAAC,eAAe,0CAAE,IAAI,MAAK,aAAa,IAAI,IAAI,CAAC,wBAAwB,EAAE,CAAC;YACnF,OAAO,IAAI,CAAC,6BAA6B,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAC/D,CAAC;QAED,IAAI,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,QAAQ,CAAC,MAAM,KAAK,SAAS,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC;YACrG,OAAO,IAAI,CAAC,yBAAyB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAC3D,CAAC;QAED,IAAI,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,GAAG,2BAAkB,GAAG,CAAC,EAAE,CAAC;YAC/C,QAAQ,CAAC,gBAAgB,CAAC,GAAG,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC;QACzD,CAAC;QAED,IAAI,iBAAiB,aAAjB,iBAAiB,uBAAjB,iBAAiB,CAAE,uBAAuB,EAAE,CAAC;YAC7C,OAAO,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;QACxB,CAAC;QACD,IAAI,MAA4B,CAAC;QACjC,MAAM,KAAK,GAAG,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;QAExC,IAAI,KAAK,EAAE,CAAC;YACR,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;gBACtC,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,QAAQ,EAAE,IAAI,CAAC,eAAe,CAAC,OAAO,CAAC,QAAQ,CAAC;gBAChD,MAAM,EAAE,IAAI;gBACZ,KAAK,EAAE,KAAK;gBACZ,WAAW,EAAE,MAAM;gBACnB,GAAG,QAAQ;aACd,EAAE;gBACC,GAAG,IAAI,CAAC,aAAa,EAAE,UAAU,EAAE,IAAI,CAAC,UAAU;aACrD,CAAC,CAAC;QACP,CAAC;aAAM,CAAC;YACJ,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;gBACpC,KAAK,EAAE,IAAI,CAAC,KAAK;
gBACjB,QAAQ,EAAE,IAAI,CAAC,eAAe,CAAC,OAAO,CAAC,QAAQ,CAAC;gBAChD,MAAM,EAAE,IAAI;gBACZ,GAAG,QAAQ;aACd,CAAC,CAAC;QACP,CAAC;QAED,OAAO,EAAE,MAAM,EAAE,IAAI,kDAAsB,CAAC,MAAM,EAAE,OAAO,CAAC,SAAS,EAAE,iBAAiB,EAAE,IAAI,CAAC,iBAAiB,EAAE,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC;IACjI,CAAC;IAES,KAAK,CAAC,yBAAyB,CAAC,MAAc,EAAE,OAAoB;;QAC1E,MAAM,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;QAC3C,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC;YAClD,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI,CAAC,eAAe,CAAC,OAAO,CAAC,QAAQ,CAAC;YAChD,GAAG,QAAQ;SACd,CAAC,CAAC;QAEH,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;QAE5C,yDAAyD;QACzD,IAAI,IAAI,CAAC,iBAAiB,IAAI,QAAQ,CAAC,KAAK,EAAE,CAAC;YAC3C,MAAM,IAAI,CAAC,iBAAiB,CAAC,gBAAgB,CACzC,IAAI,CAAC,EAAE,EACP;gBACI,WAAW,EAAE,QAAQ,CAAC,KAAK,CAAC,aAAa;gBACzC,YAAY,EAAE,QAAQ,CAAC,KAAK,CAAC,iBAAiB;gBAC9C,SAAS,EAAE,OAAO,CAAC,SAAS;aAC/B,CACJ,CAAC;QACN,CAAC;QAED,OAAO;YACH,IAAI,EAAE,MAAA,OAAO,CAAC,OAAO,mCAAI,EAAE;SAC9B,CAAC;IACN,CAAC;IAES,mBAAmB,CAAC,MAAc;QACxC,OAAO,CAAC,IAAI,CAAC,eAAe,CAAC;IACjC,CAAC;IAES,KAAK,CAAC,6BAA6B,CAAC,MAAc,EAAE,OAAoB;;QAC9E,MAAM,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;QAC3C,qGAAqG;QACrG,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC;YAC/C,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI,CAAC,eAAe,CAAC,OAAO,CAAC,QAAQ,CAAC;YAChD,eAAe,EAAE,OAAO,CAAC,eAAe;YACxC,GAAG,QAAQ;SACd,CAAC,CAAC;QACH,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;QAC1C,IAAI,OAAO,CAAC,OAAO,IAAI,OAAO,CAAC,MAAM,KAAK,SAAS,EAAE,CAAC;YAClD,OAAO,CAAC,KAAK,CAAC,yCAAyC,EAAE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC;QACtF,CAAC;QAED,yDAAyD;QACzD,IAAI,IAAI,CAAC,iBAAiB,IAAI,MAAM,CAAC,KAAK,EAAE,CAAC;YACzC,MAAM,IAAI,CAAC,iBAAiB,CAAC,gBAAgB,CACzC,IAAI,CAAC,EAAE,EACP;gBACI,WAAW,EAAE,MAAM,CAAC,KAAK,CAAC,aAAa;gBACvC,YAAY,EAAE,MAAM,CAAC,KAAK,CAAC,iBAAiB;gBAC5C,SAAS,EAAE,OAAO,CAAC,SAAS;aAC/B,CACJ,CAAC;QACN,CAAC;QAED,OAAO;YACH,OAAO,EAAE,MAAA,OAAO,CAAC,OAAO,mCAAI,EAAE;YAC9B,MAAM,EAAE,OAAO,CAAC,MAAM;SACzB,CAAC;IACN,CAAC;IAES,WAAW,CAAC,OAA6B;;QAC/C,OAAO,MAAA,OAAO,CAAC,KAAK,0CAAE,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;YAC/B,IAAI,EAAE,UAAU;YAChB,QAAQ,EAAE;gBACN,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,WAAW,EAAE,IAAI,CAAC,WAAW;gBAC7B,UAAU,EAAE,IAAI,CAAC,UAAU;gBAC3B,QAAQ,EAAE,CAAC,WAAmB,EAAE,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC;aAC/D;SACiC,CAAA,CAAC,CAAC;IAC5C,CAAC;IAES,gBAAgB;QACtB,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QAC7B,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YACzB,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;QAChG,CAAC;QAED,MAAM,UAAU,GAAG,IAAI,CAAC,UAAU,EAAE,CAAC;QACrC,gIAAgI;QAChI,MAAM,GAAG,GAAG,MAAM,aAAN,MAAM,cAAN,MAAM,GAAI,QAAQ,CAAC;QAE/B,IAAI,EAAE,CAAC;QACP,IAAI,IAAI,CAAC,KAAK,EAAE,CAAC;YACb,MAAM,UAAU,GAAG,IAAI,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;YACrD,EAAE,GAAG;gBACD,UAAU,EAAE,UAAU;aACzB,CAAC;QACN,CAAC;QAED,IAAI,UAAU,EAAE,CAAC;YACb,OAAO,IAAI,oBAAW,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,IAAI,CAAC,GAAG,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,IAAI,CAAC,UAAU,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,CAAC;QACtI,CAAC;aAAM,CAAC;YACJ,OAAO,IAAI,kBAAkB,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,IAAI,CAAC,GAAG,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,CAAC;QACxF,CAAC;IACL,CAAC;IAES,KAAK,CAAC,wBAAwB,CAAC,MAAc,EAAE,OAAoB,EAAE,iBAAqC;QAChH,MAAM,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;QAC3C,MAAM,kBAAkB,GAAG,IAAI,CAAC,eAAe,IAAI,CAAC,CAAC,OAAO,QAAQ,CAAC,MAAM,KAAK,SAAS,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;QAE/G,IAAI,CAAC;YACD,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAC5C,MAAM,EACN,OAAO,EACP,QAAQ,EACR,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,wBAAwB,EAC7B,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,E
AAE,EACP,kBAAkB,EAClB,IAAI,CAAC,iBAAiB,EACtB,iBAAiB,CACpB,CAAC;QACN,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,2DAA2D;YAC3D,IAAI,KAAK,YAAY,KAAK,EAAE,CAAC;gBACzB,OAAO,CAAC,IAAI,CAAC,iCAAiC,IAAI,CAAC,EAAE,yCAAyC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;gBAC/G,OAAO,IAAI,CAAC,4BAA4B,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,CAAC,CAAC;YACjF,CAAC;YACD,MAAM,KAAK,CAAC;QAChB,CAAC;IACL,CAAC;IAES,eAAe,CAAC,QAAgC;QACtD,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC,QAAQ,EAAE,IAAI,CAAC,wBAAwB,EAAE,IAAI,CAAC,KAAK,CAAC,CAAC;IACtG,CAAC;CACJ;AArOD,kCAqOC;AAED;;;;GAIG;AAEI,IAAM,gBAAgB,GAAtB,MAAM,gBAAgB;IAEf,qBAAqB,CAC3B,QAAgC,EAChC,wBAAkD;QAElD,OAAO,IAAA,iDAAqB,EAAC,QAAQ,EAAE,wBAAwB,CAAC,CAAC;IACrE,CAAC;IAES,YAAY,CAClB,OAA6B,EAC7B,wBAAkD;QAElD,IAAI,OAAO,CAAC,KAAK,KAAK,QAAQ,EAAE,CAAC;YAC7B,IAAI,wBAAwB,KAAK,MAAM,IAAI,wBAAwB,KAAK,QAAQ,IAAI,wBAAwB,KAAK,WAAW,EAAE,CAAC;gBAC3H,OAAO,wBAAwB,CAAC;YACpC,CAAC;iBAAM,CAAC;gBACJ,OAAO,WAAW,CAAC;YACvB,CAAC;QACL,CAAC;aAAM,IAAI,OAAO,CAAC,KAAK,KAAK,IAAI,EAAE,CAAC;YAChC,OAAO,WAAW,CAAC;QACvB,CAAC;QACD,OAAO,MAAM,CAAC;IAClB,CAAC;IAES,eAAe,CACrB,OAA6B,EAC7B,wBAAkD;QAElD,IAAI,8BAAoB,CAAC,aAAa,CAAC,OAAO,CAAC,EAAE,CAAC;YAC9C,OAAO;gBACH,IAAI,EAAE,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE,wBAAwB,CAAC;gBAC1D,OAAO,EAAE,OAAO,CAAC,IAAI;aACxB,CAAC;QACN,CAAC;QACD,IAAI,8BAAoB,CAAC,gBAAgB,CAAC,OAAO,CAAC,EAAE,CAAC;YACjD,OAAO;gBACH,IAAI,EAAE,WAAW;gBACjB,UAAU,EAAE,CAAC,EAAE,EAAE,EAAE,OAAO,CAAC,EAAE,EAAE,QAAQ,EAAE,EAAE,IAAI,EAAE,OAAO,CAAC,IAAI,EAAE,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC;aACjI,CAAC;QACN,CAAC;QACD,IAAI,8BAAoB,CAAC,mBAAmB,CAAC,OAAO,CAAC,EAAE,CAAC;YACpD,OAAO;gBACH,IAAI,EAAE,MAAM;gBACZ,YAAY,EAAE,OAAO,CAAC,WAAW;gBACjC,sGAAsG;gBACtG,OAAO,EAAE,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,OAAO,CAAC;aACnG,CAAC;QACN,CAAC;QACD,IAAI,8BAAoB,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,KAAK,KAAK,MAAM,EAAE,CAAC;YAC3E,OAAO;gBACH,IAAI,EAAE,MAAM;gBACZ,OAAO,EAAE,CAAC;wBACN,IAAI,EAAE,WAAW;wBACjB,SAAS,EAAE;4BACP,GAAG,EACC,sBAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC;gCAClC,QAAQ,OAAO,CAAC,KAAK,CAAC,QAAQ,WAAW,OAAO,CAAC,KAAK,CAAC,UAAU,EAAE,CAAC,CAAC;gCACrE,OAAO,CAAC,KAAK,CAAC,GAAG;yBAC5B;qBACJ,CAAC;aACL,CAAC;QACN,CAAC;QACD,MAAM,IAAI,KAAK,CAAC,yBAAyB,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACzE,CAAC;IAED;;;;;;;;;;OAUG;IACH,eAAe,CACX,QAAgC,EAChC,wBAAkD,EAClD,KAAc;QAEd,MAAM,SAAS,GAAG,IAAI,CAAC,qBAAqB,CAAC,QAAQ,EAAE,wBAAwB,CAAC,CAAC;QACjF,OAAO,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC,EAAE,wBAAwB,CAAC,CAAC,CAAC;IACpH,CAAC;CAEJ,CAAA;AAtFY,4CAAgB;2BAAhB,gBAAgB;IAD5B,IAAA,sBAAU,GAAE;GACA,gBAAgB,CAsF5B"}

package/lib/node/openai-language-models-manager-impl.d.ts
@@ -1,22 +0,0 @@
-import { LanguageModelRegistry, LanguageModelStatus, TokenUsageService } from '@theia/ai-core';
-import { OpenAiModelUtils } from './openai-language-model';
-import { OpenAiResponseApiUtils } from './openai-response-api-utils';
-import { OpenAiLanguageModelsManager, OpenAiModelDescription } from '../common';
-export declare class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsManager {
-    protected readonly openAiModelUtils: OpenAiModelUtils;
-    protected readonly responseApiUtils: OpenAiResponseApiUtils;
-    protected _apiKey: string | undefined;
-    protected _apiVersion: string | undefined;
-    protected _proxyUrl: string | undefined;
-    protected readonly languageModelRegistry: LanguageModelRegistry;
-    protected readonly tokenUsageService: TokenUsageService;
-    get apiKey(): string | undefined;
-    get apiVersion(): string | undefined;
-    protected calculateStatus(modelDescription: OpenAiModelDescription, effectiveApiKey: string | undefined): LanguageModelStatus;
-    createOrUpdateLanguageModels(...modelDescriptions: OpenAiModelDescription[]): Promise<void>;
-    removeLanguageModels(...modelIds: string[]): void;
-    setApiKey(apiKey: string | undefined): void;
-    setApiVersion(apiVersion: string | undefined): void;
-    setProxyUrl(proxyUrl: string | undefined): void;
-}
-//# sourceMappingURL=openai-language-models-manager-impl.d.ts.map

package/lib/node/openai-language-models-manager-impl.d.ts.map
@@ -1 +0,0 @@
{"version":3,"file":"openai-language-models-manager-impl.d.ts","sourceRoot":"","sources":["../../src/node/openai-language-models-manager-impl.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,qBAAqB,EAAE,mBAAmB,EAAE,iBAAiB,EAAE,MAAM,gBAAgB,CAAC;AAE/F,OAAO,EAAe,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AACxE,OAAO,EAAE,sBAAsB,EAAE,MAAM,6BAA6B,CAAC;AACrE,OAAO,EAAE,2BAA2B,EAAE,sBAAsB,EAAE,MAAM,WAAW,CAAC;AAEhF,qBACa,+BAAgC,YAAW,2BAA2B;IAG/E,SAAS,CAAC,QAAQ,CAAC,gBAAgB,EAAE,gBAAgB,CAAC;IAGtD,SAAS,CAAC,QAAQ,CAAC,gBAAgB,EAAE,sBAAsB,CAAC;IAE5D,SAAS,CAAC,OAAO,EAAE,MAAM,GAAG,SAAS,CAAC;IACtC,SAAS,CAAC,WAAW,EAAE,MAAM,GAAG,SAAS,CAAC;IAC1C,SAAS,CAAC,SAAS,EAAE,MAAM,GAAG,SAAS,CAAC;IAGxC,SAAS,CAAC,QAAQ,CAAC,qBAAqB,EAAE,qBAAqB,CAAC;IAGhE,SAAS,CAAC,QAAQ,CAAC,iBAAiB,EAAE,iBAAiB,CAAC;IAExD,IAAI,MAAM,IAAI,MAAM,GAAG,SAAS,CAE/B;IAED,IAAI,UAAU,IAAI,MAAM,GAAG,SAAS,CAEnC;IAED,SAAS,CAAC,eAAe,CAAC,gBAAgB,EAAE,sBAAsB,EAAE,eAAe,EAAE,MAAM,GAAG,SAAS,GAAG,mBAAmB;IAYvH,4BAA4B,CAAC,GAAG,iBAAiB,EAAE,sBAAsB,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IA0FjG,oBAAoB,CAAC,GAAG,QAAQ,EAAE,MAAM,EAAE,GAAG,IAAI;IAIjD,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQ3C,aAAa,CAAC,UAAU,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQnD,WAAW,CAAC,QAAQ,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;CAOlD"}