@theia/ai-openai 1.56.0 → 1.57.0-next.112
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +56 -1
- package/lib/browser/openai-frontend-application-contribution.d.ts.map +1 -1
- package/lib/browser/openai-frontend-application-contribution.js +14 -3
- package/lib/browser/openai-frontend-application-contribution.js.map +1 -1
- package/lib/browser/openai-preferences.d.ts.map +1 -1
- package/lib/browser/openai-preferences.js +19 -1
- package/lib/browser/openai-preferences.js.map +1 -1
- package/lib/common/openai-language-models-manager.d.ts +13 -0
- package/lib/common/openai-language-models-manager.d.ts.map +1 -1
- package/lib/node/openai-backend-module.d.ts.map +1 -1
- package/lib/node/openai-backend-module.js +6 -1
- package/lib/node/openai-backend-module.js.map +1 -1
- package/lib/node/openai-language-model.d.ts +11 -4
- package/lib/node/openai-language-model.d.ts.map +1 -1
- package/lib/node/openai-language-model.js +62 -40
- package/lib/node/openai-language-model.js.map +1 -1
- package/lib/node/openai-language-models-manager-impl.d.ts +3 -0
- package/lib/node/openai-language-models-manager-impl.d.ts.map +1 -1
- package/lib/node/openai-language-models-manager-impl.js +25 -1
- package/lib/node/openai-language-models-manager-impl.js.map +1 -1
- package/package.json +8 -8
- package/src/browser/openai-frontend-application-contribution.ts +12 -1
- package/src/browser/openai-preferences.ts +19 -1
- package/src/common/openai-language-models-manager.ts +13 -0
- package/src/node/openai-backend-module.ts +7 -1
- package/src/node/openai-language-model.ts +66 -44
- package/src/node/openai-language-models-manager-impl.ts +28 -0
package/README.md
CHANGED
@@ -27,12 +27,66 @@ You can configure the end points via the `ai-features.openAiCustom.customOpenAiM
     url: string
     id?: string
     apiKey?: string | true
+    apiVersion?: string | true
+    supportsDeveloperMessage?: boolean
+    enableStreaming?: boolean
 }
 ```

 - `model` and `url` are mandatory attributes, indicating the end point and model to use
 - `id` is an optional attribute which is used in the UI to refer to this configuration
 - `apiKey` is either the key to access the API served at the given URL or `true` to use the global OpenAI API key. If not given 'no-key' will be used.
+- `apiVersion` is either the api version to access the API served at the given URL in Azure or `true` to use the global OpenAI API version.
+- `supportsDeveloperMessage` is a flag that indicates whether the model supports the `developer` role or not. `true` by default.
+- `enableStreaming` is a flag that indicates whether the streaming API shall be used or not. `true` by default.
+
+### Azure OpenAI
+
+To use a custom OpenAI model hosted on Azure, the `AzureOpenAI` class needs to be used, as described in the
+[openai-node docs](https://github.com/openai/openai-node?tab=readme-ov-file#microsoft-azure-openai).
+
+Requests to an OpenAI model hosted on Azure need an `apiVersion`. To configure a custom OpenAI model in Theia you therefore need to configure the `apiVersion` with the end point.
+Note that if you don't configure an `apiVersion`, the default `OpenAI` object is used for initialization and a connection to an Azure hosted OpenAI model will fail.
+
+An OpenAI model version deployed on Azure might not support the `developer` role. In that case it is possible to configure whether the `developer` role is supported or not via the
+`supportsDeveloperMessage` option, which defaults to `true`.
+
+The following snippet shows a possible configuration to access an OpenAI model hosted on Azure. The `AZURE_OPENAI_API_BASE_URL` needs to be given without the `/chat/completions`
+path and without the `api-version` parameter, e.g. _`https://<my_prefix>.openai.azure.com/openai/deployments/<my_deployment>`_
+
+```json
+{
+    "ai-features.AiEnable.enableAI": true,
+    "ai-features.openAiCustom.customOpenAiModels": [
+        {
+            "model": "gpt4o",
+            "url": "<AZURE_OPENAI_API_BASE_URL>",
+            "id": "azure-deployment",
+            "apiKey": "<AZURE_OPENAI_API_KEY>",
+            "apiVersion": "<AZURE_OPENAI_API_VERSION>",
+            "supportsDeveloperMessage": false
+        }
+    ],
+    "ai-features.agentSettings": {
+        "Universal": {
+            "languageModelRequirements": [
+                {
+                    "purpose": "chat",
+                    "identifier": "azure-deployment"
+                }
+            ]
+        },
+        "Orchestrator": {
+            "languageModelRequirements": [
+                {
+                    "purpose": "agent-selection",
+                    "identifier": "azure-deployment"
+                }
+            ]
+        }
+    }
+}
+```

 ## Additional Information

@@ -45,5 +99,6 @@ You can configure the end points via the `ai-features.openAiCustom.customOpenAiM
 - [一 (Secondary) GNU General Public License, version 2 with the GNU Classpath Exception](https://projects.eclipse.org/license/secondary-gpl-2.0-cp)

 ## Trademark
+
 "Theia" is a trademark of the Eclipse Foundation
-https://www.eclipse.org/theia
+<https://www.eclipse.org/theia>
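The Azure behaviour described in the README addition maps directly onto which client class the backend instantiates. The following TypeScript sketch condenses that selection logic (it ships in `lib/node/openai-language-model.js`, shown further down in this diff); the function name is illustrative, not part of the package API.

```ts
import { OpenAI, AzureOpenAI } from 'openai';

// Condensed sketch: when an apiVersion can be resolved for a model, the AzureOpenAI
// client is created; otherwise the plain OpenAI client is used. A placeholder key
// ('no-key') is handed over because the client refuses to initialize without one.
function createOpenAiClient(
    apiKey: string | undefined,
    baseURL: string | undefined,
    apiVersion: string | undefined
): OpenAI {
    const key = apiKey ?? 'no-key';
    return apiVersion
        ? new AzureOpenAI({ apiKey: key, baseURL, apiVersion })
        : new OpenAI({ apiKey: key, baseURL });
}
```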
package/lib/browser/openai-frontend-application-contribution.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated for the updated output; mappings omitted)
package/lib/browser/openai-frontend-application-contribution.js
CHANGED
@@ -77,6 +77,9 @@ let OpenAiFrontendApplicationContribution = class OpenAiFrontendApplicationContr
                 model.model === newModel.model &&
                 model.url === newModel.url &&
                 model.apiKey === newModel.apiKey &&
+                model.apiVersion === newModel.apiVersion &&
+                model.supportsDeveloperMessage === newModel.supportsDeveloperMessage &&
+                model.supportsStructuredOutput === newModel.supportsStructuredOutput &&
                 model.enableStreaming === newModel.enableStreaming));
         this.manager.removeLanguageModels(...modelsToRemove.map(model => model.id));
         this.manager.createOrUpdateLanguageModels(...modelsToAddOrUpdate);
@@ -95,13 +98,16 @@ let OpenAiFrontendApplicationContribution = class OpenAiFrontendApplicationContr
             id: id,
             model: modelId,
             apiKey: true,
+            apiVersion: true,
+            supportsDeveloperMessage: !openAIModelsNotSupportingDeveloperMessages.includes(modelId),
             enableStreaming: !openAIModelsWithDisabledStreaming.includes(modelId),
+            supportsStructuredOutput: !openAIModelsWithoutStructuredOutput.includes(modelId),
             defaultRequestSettings: modelRequestSetting === null || modelRequestSetting === void 0 ? void 0 : modelRequestSetting.requestSettings
         };
     }
     createCustomModelDescriptionsFromPreferences(preferences, requestSettings) {
         return preferences.reduce((acc, pref) => {
-            var _a;
+            var _a, _b, _c;
             if (!pref.model || !pref.url || typeof pref.model !== 'string' || typeof pref.url !== 'string') {
                 return acc;
             }
@@ -113,7 +119,10 @@ let OpenAiFrontendApplicationContribution = class OpenAiFrontendApplicationContr
                     model: pref.model,
                     url: pref.url,
                     apiKey: typeof pref.apiKey === 'string' || pref.apiKey === true ? pref.apiKey : undefined,
-                    enableStreaming: (_a = pref.enableStreaming) !== null && _a !== void 0 ? _a : true,
+                    apiVersion: typeof pref.apiVersion === 'string' || pref.apiVersion === true ? pref.apiVersion : undefined,
+                    supportsDeveloperMessage: (_a = pref.supportsDeveloperMessage) !== null && _a !== void 0 ? _a : true,
+                    supportsStructuredOutput: (_b = pref.supportsStructuredOutput) !== null && _b !== void 0 ? _b : true,
+                    enableStreaming: (_c = pref.enableStreaming) !== null && _c !== void 0 ? _c : true,
                     defaultRequestSettings: modelRequestSetting === null || modelRequestSetting === void 0 ? void 0 : modelRequestSetting.requestSettings
                 }
             ];
@@ -139,5 +148,7 @@ tslib_1.__decorate([
 exports.OpenAiFrontendApplicationContribution = OpenAiFrontendApplicationContribution = tslib_1.__decorate([
     (0, inversify_1.injectable)()
 ], OpenAiFrontendApplicationContribution);
-const openAIModelsWithDisabledStreaming = ['o1
+const openAIModelsWithDisabledStreaming = ['o1'];
+const openAIModelsNotSupportingDeveloperMessages = ['o1-preview', 'o1-mini'];
+const openAIModelsWithoutStructuredOutput = ['o1-preview', 'gpt-4-turbo', 'gpt-4', 'gpt-3.5-turbo', 'o1-mini', 'gpt-4o-2024-05-13'];
 //# sourceMappingURL=openai-frontend-application-contribution.js.map
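The three deny-lists added at the end of this file drive the defaults for the official OpenAI models, while custom models fall back to `true` for all three capability flags unless the preference overrides them. A small sketch of the resulting derivation (the helper name is illustrative):

```ts
// Capability defaults derived from the deny-lists above.
const disabledStreaming = ['o1'];
const noDeveloperMessages = ['o1-preview', 'o1-mini'];
const noStructuredOutput = ['o1-preview', 'gpt-4-turbo', 'gpt-4', 'gpt-3.5-turbo', 'o1-mini', 'gpt-4o-2024-05-13'];

function defaultCapabilities(modelId: string) {
    return {
        enableStreaming: !disabledStreaming.includes(modelId),
        supportsDeveloperMessage: !noDeveloperMessages.includes(modelId),
        supportsStructuredOutput: !noStructuredOutput.includes(modelId)
    };
}

// defaultCapabilities('o1-mini')
// → { enableStreaming: true, supportsDeveloperMessage: false, supportsStructuredOutput: false }
```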
package/lib/browser/openai-frontend-application-contribution.js.map
CHANGED
@@ -1 +1 @@
(source map regenerated for the updated output; mappings omitted)
package/lib/browser/openai-preferences.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated for the updated output; mappings omitted)
package/lib/browser/openai-preferences.js
CHANGED
@@ -33,7 +33,7 @@ exports.OpenAiPreferencesSchema = {
             type: 'array',
             description: 'Official OpenAI models to use',
             title: ai_core_preferences_1.AI_CORE_PREFERENCES_TITLE,
-            default: ['gpt-4o', 'gpt-4o-2024-
+            default: ['gpt-4o', 'gpt-4o-2024-11-20', 'gpt-4o-2024-08-06', 'gpt-4o-mini', 'o1', 'o1-mini', 'o3-mini'],
             items: {
                 type: 'string'
             }
@@ -49,6 +49,12 @@ exports.OpenAiPreferencesSchema = {
                 \n\
                 - provide an `apiKey` to access the API served at the given url. Use `true` to indicate the use of the global OpenAI API key.\
                 \n\
+                - provide an `apiVersion` to access the API served at the given url in Azure. Use `true` to indicate the use of the global OpenAI API version.\
+                \n\
+                - specify `supportsDeveloperMessage: false` to indicate that the developer role shall not be used.\
+                \n\
+                - specify `supportsStructuredOutput: false` to indicate that structured output shall not be used.\
+                \n\
                 - specify `enableStreaming: false` to indicate that streaming shall not be used.\
                 \n\
                 Refer to [our documentation](https://theia-ide.org/docs/user_ai/#openai-compatible-models-eg-via-vllm) for more information.',
@@ -72,6 +78,18 @@ exports.OpenAiPreferencesSchema = {
                         type: ['string', 'boolean'],
                         title: 'Either the key to access the API served at the given url or `true` to use the global OpenAI API key',
                    },
+                    apiVersion: {
+                        type: ['string', 'boolean'],
+                        title: 'Either the version to access the API served at the given url in Azure or `true` to use the global OpenAI API version',
+                    },
+                    supportsDeveloperMessage: {
+                        type: 'boolean',
+                        title: 'Indicates whether the model supports the `developer` role. `true` by default.',
+                    },
+                    supportsStructuredOutput: {
+                        type: 'boolean',
+                        title: 'Indicates whether the model supports structured output. `true` by default.',
+                    },
                     enableStreaming: {
                         type: 'boolean',
                         title: 'Indicates whether the streaming API shall be used. `true` by default.',
package/lib/browser/openai-preferences.js.map
CHANGED
@@ -1 +1 @@
(source map regenerated for the updated output; mappings omitted)
package/lib/common/openai-language-models-manager.d.ts
CHANGED
@@ -17,10 +17,22 @@ export interface OpenAiModelDescription {
      * The key for the model. If 'true' is provided the global OpenAI API key will be used.
      */
     apiKey: string | true | undefined;
+    /**
+     * The version for the api. If 'true' is provided the global OpenAI version will be used.
+     */
+    apiVersion: string | true | undefined;
     /**
      * Indicate whether the streaming API shall be used.
      */
     enableStreaming: boolean;
+    /**
+     * Flag to configure whether the OpenAPI model supports the `developer` role. Default is `true`.
+     */
+    supportsDeveloperMessage: boolean;
+    /**
+     * Flag to configure whether the OpenAPI model supports structured output. Default is `true`.
+     */
+    supportsStructuredOutput: boolean;
     /**
      * Default request settings for the OpenAI model.
      */
@@ -31,6 +43,7 @@ export interface OpenAiModelDescription {
 export interface OpenAiLanguageModelsManager {
     apiKey: string | undefined;
     setApiKey(key: string | undefined): void;
+    setApiVersion(version: string | undefined): void;
     createOrUpdateLanguageModels(...models: OpenAiModelDescription[]): Promise<void>;
     removeLanguageModels(...modelIds: string[]): void;
 }
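With the new `apiVersion` field and the two capability flags, a complete model description now looks roughly like this (all values are illustrative, none are taken from the package):

```ts
import { OpenAiModelDescription } from '@theia/ai-openai/lib/common/openai-language-models-manager';

// Illustrative description for an Azure-hosted deployment.
const azureDeployment: OpenAiModelDescription = {
    id: 'azure-deployment',
    model: 'gpt4o',
    url: 'https://<my_prefix>.openai.azure.com/openai/deployments/<my_deployment>',
    apiKey: true,                    // reuse the global OpenAI API key
    apiVersion: '2024-06-01',        // required for Azure endpoints (example value)
    supportsDeveloperMessage: false, // send system prompts with the 'user' role instead
    supportsStructuredOutput: true,
    enableStreaming: true
};
```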
package/lib/common/openai-language-models-manager.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated for the updated output; mappings omitted)
package/lib/node/openai-backend-module.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated for the updated output; mappings omitted)
package/lib/node/openai-backend-module.js
CHANGED
@@ -20,10 +20,15 @@ const inversify_1 = require("@theia/core/shared/inversify");
 const openai_language_models_manager_1 = require("../common/openai-language-models-manager");
 const core_1 = require("@theia/core");
 const openai_language_models_manager_impl_1 = require("./openai-language-models-manager-impl");
+const connection_container_module_1 = require("@theia/core/lib/node/messaging/connection-container-module");
 exports.OpenAiModelFactory = Symbol('OpenAiModelFactory');
-exports.default = new inversify_1.ContainerModule(bind => {
+// We use a connection module to handle AI services separately for each frontend.
+const openAiConnectionModule = connection_container_module_1.ConnectionContainerModule.create(({ bind, bindBackendService, bindFrontendService }) => {
     bind(openai_language_models_manager_impl_1.OpenAiLanguageModelsManagerImpl).toSelf().inSingletonScope();
     bind(openai_language_models_manager_1.OpenAiLanguageModelsManager).toService(openai_language_models_manager_impl_1.OpenAiLanguageModelsManagerImpl);
     bind(core_1.ConnectionHandler).toDynamicValue(ctx => new core_1.RpcConnectionHandler(openai_language_models_manager_1.OPENAI_LANGUAGE_MODELS_MANAGER_PATH, () => ctx.container.get(openai_language_models_manager_1.OpenAiLanguageModelsManager))).inSingletonScope();
 });
+exports.default = new inversify_1.ContainerModule(bind => {
+    bind(connection_container_module_1.ConnectionContainerModule).toConstantValue(openAiConnectionModule);
+});
 //# sourceMappingURL=openai-backend-module.js.map
package/lib/node/openai-backend-module.js.map
CHANGED
@@ -1 +1 @@
(source map regenerated for the updated output; mappings omitted)
package/lib/node/openai-language-model.d.ts
CHANGED
@@ -1,13 +1,17 @@
-import { LanguageModel, LanguageModelParsedResponse, LanguageModelRequest, LanguageModelResponse, LanguageModelTextResponse } from '@theia/ai-core';
+import { LanguageModel, LanguageModelParsedResponse, LanguageModelRequest, LanguageModelRequestMessage, LanguageModelResponse, LanguageModelTextResponse } from '@theia/ai-core';
 import { CancellationToken } from '@theia/core';
-import OpenAI from 'openai';
+import { OpenAI } from 'openai';
 import { RunnableToolFunctionWithoutParse } from 'openai/lib/RunnableFunction';
+import { ChatCompletionMessageParam } from 'openai/resources';
 export declare const OpenAiModelIdentifier: unique symbol;
 export declare class OpenAiModel implements LanguageModel {
     readonly id: string;
     model: string;
     enableStreaming: boolean;
     apiKey: () => string | undefined;
+    apiVersion: () => string | undefined;
+    supportsDeveloperMessage: boolean;
+    supportsStructuredOutput: boolean;
     url: string | undefined;
     defaultRequestSettings?: {
         [key: string]: unknown;
@@ -17,17 +21,20 @@ export declare class OpenAiModel implements LanguageModel {
      * @param model the model id as it is used by the OpenAI API
      * @param enableStreaming whether the streaming API shall be used
      * @param apiKey a function that returns the API key to use for this model, called on each request
+     * @param apiVersion a function that returns the OpenAPI version to use for this model, called on each request
+     * @param supportsDeveloperMessage whether the model supports the `developer` role
     * @param url the OpenAI API compatible endpoint where the model is hosted. If not provided the default OpenAI endpoint will be used.
     * @param defaultRequestSettings optional default settings for requests made using this model.
     */
-    constructor(id: string, model: string, enableStreaming: boolean, apiKey: () => string | undefined, url: string | undefined, defaultRequestSettings?: {
+    constructor(id: string, model: string, enableStreaming: boolean, apiKey: () => string | undefined, apiVersion: () => string | undefined, supportsDeveloperMessage: boolean, supportsStructuredOutput: boolean, url: string | undefined, defaultRequestSettings?: {
         [key: string]: unknown;
     } | undefined);
     protected getSettings(request: LanguageModelRequest): Record<string, unknown>;
     request(request: LanguageModelRequest, cancellationToken?: CancellationToken): Promise<LanguageModelResponse>;
     protected handleNonStreamingRequest(openai: OpenAI, request: LanguageModelRequest): Promise<LanguageModelTextResponse>;
+    protected toOpenAIMessage(message: LanguageModelRequestMessage): ChatCompletionMessageParam;
+    protected toOpenAiRole(message: LanguageModelRequestMessage): 'developer' | 'user' | 'assistant';
     protected isNonStreamingModel(_model: string): boolean;
-    protected supportsStructuredOutput(): boolean;
     protected handleStructuredOutputRequest(openai: OpenAI, request: LanguageModelRequest): Promise<LanguageModelParsedResponse>;
     private getCompletionContent;
     protected createTools(request: LanguageModelRequest): RunnableToolFunctionWithoutParse[] | undefined;
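The constructor now takes the `apiVersion` provider and the two capability flags positionally, between `apiKey` and `url`. A hedged usage sketch (argument values are illustrative; in the package the instantiation is done by `OpenAiLanguageModelsManagerImpl`, shown below):

```ts
import { OpenAiModel } from '@theia/ai-openai/lib/node/openai-language-model';

// Illustrative instantiation of the widened constructor.
const model = new OpenAiModel(
    'openai/gpt-4o',                          // id used to identify the model in the UI
    'gpt-4o',                                 // model id as used by the OpenAI API
    true,                                     // enableStreaming
    () => process.env.OPENAI_API_KEY,         // apiKey provider, called on each request
    () => process.env.OPENAI_API_VERSION,     // apiVersion provider; undefined keeps the plain OpenAI client
    true,                                     // supportsDeveloperMessage
    true,                                     // supportsStructuredOutput
    undefined,                                // url: fall back to the default OpenAI endpoint
    undefined                                 // defaultRequestSettings
);
```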
package/lib/node/openai-language-model.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated for the updated output; mappings omitted)
package/lib/node/openai-language-model.js
CHANGED
@@ -18,36 +18,25 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.OpenAiModel = exports.OpenAiModelIdentifier = void 0;
 const openai_1 = require("openai");
 exports.OpenAiModelIdentifier = Symbol('OpenAiModelIdentifier');
-function toOpenAIMessage(message) {
-    return {
-        role: toOpenAiRole(message),
-        content: message.query || ''
-    };
-}
-function toOpenAiRole(message) {
-    switch (message.actor) {
-        case 'system':
-            return 'system';
-        case 'ai':
-            return 'assistant';
-        default:
-            return 'user';
-    }
-}
 class OpenAiModel {
     /**
      * @param id the unique id for this language model. It will be used to identify the model in the UI.
      * @param model the model id as it is used by the OpenAI API
      * @param enableStreaming whether the streaming API shall be used
      * @param apiKey a function that returns the API key to use for this model, called on each request
+     * @param apiVersion a function that returns the OpenAPI version to use for this model, called on each request
+     * @param supportsDeveloperMessage whether the model supports the `developer` role
      * @param url the OpenAI API compatible endpoint where the model is hosted. If not provided the default OpenAI endpoint will be used.
      * @param defaultRequestSettings optional default settings for requests made using this model.
      */
-    constructor(id, model, enableStreaming, apiKey, url, defaultRequestSettings) {
+    constructor(id, model, enableStreaming, apiKey, apiVersion, supportsDeveloperMessage, supportsStructuredOutput, url, defaultRequestSettings) {
         this.id = id;
         this.model = model;
         this.enableStreaming = enableStreaming;
         this.apiKey = apiKey;
+        this.apiVersion = apiVersion;
+        this.supportsDeveloperMessage = supportsDeveloperMessage;
+        this.supportsStructuredOutput = supportsStructuredOutput;
         this.url = url;
         this.defaultRequestSettings = defaultRequestSettings;
     }
@@ -62,18 +51,21 @@ class OpenAiModel {
         var _a;
         const settings = this.getSettings(request);
         const openai = this.initializeOpenAi();
-        if (
+        if (((_a = request.response_format) === null || _a === void 0 ? void 0 : _a.type) === 'json_schema' && this.supportsStructuredOutput) {
+            return this.handleStructuredOutputRequest(openai, request);
+        }
+        if (this.isNonStreamingModel(this.model) || (typeof settings.stream === 'boolean' && !settings.stream)) {
             return this.handleNonStreamingRequest(openai, request);
         }
-        if (
-            return
+        if (cancellationToken === null || cancellationToken === void 0 ? void 0 : cancellationToken.isCancellationRequested) {
+            return { text: '' };
         }
         let runner;
         const tools = this.createTools(request);
         if (tools) {
             runner = openai.beta.chat.completions.runTools({
                 model: this.model,
-                messages: request.messages.map(toOpenAIMessage),
+                messages: request.messages.map(this.toOpenAIMessage.bind(this)),
                 stream: true,
                 tools: tools,
                 tool_choice: 'auto',
@@ -83,7 +75,7 @@ class OpenAiModel {
         else {
             runner = openai.beta.chat.completions.stream({
                 model: this.model,
-                messages: request.messages.map(toOpenAIMessage),
+                messages: request.messages.map(this.toOpenAIMessage.bind(this)),
                 stream: true,
                 ...settings
             });
@@ -96,39 +88,54 @@ class OpenAiModel {
         runner.on('error', error => {
             console.error('Error in OpenAI chat completion stream:', error);
             runnerEnd = true;
-            resolve({ content: error.message });
+            resolve === null || resolve === void 0 ? void 0 : resolve({ content: error.message });
         });
         // we need to also listen for the emitted errors, as otherwise any error actually thrown by the API will not be caught
         runner.emitted('error').then(error => {
             console.error('Error in OpenAI chat completion stream:', error);
             runnerEnd = true;
-            resolve({ content: error.message });
+            resolve === null || resolve === void 0 ? void 0 : resolve({ content: error.message });
         });
         runner.emitted('abort').then(() => {
-            //
+            // cancel async iterator
+            runnerEnd = true;
         });
         runner.on('message', message => {
             if (message.role === 'tool') {
-                resolve({ tool_calls: [{ id: message.tool_call_id, finished: true, result: this.getCompletionContent(message) }] });
+                resolve === null || resolve === void 0 ? void 0 : resolve({ tool_calls: [{ id: message.tool_call_id, finished: true, result: this.getCompletionContent(message) }] });
             }
             console.debug('Received Open AI message', JSON.stringify(message));
         });
         runner.once('end', () => {
             runnerEnd = true;
             // eslint-disable-next-line @typescript-eslint/no-explicit-any
-            resolve(runner.finalChatCompletion);
+            resolve === null || resolve === void 0 ? void 0 : resolve(runner.finalChatCompletion);
         });
+        if (cancellationToken === null || cancellationToken === void 0 ? void 0 : cancellationToken.isCancellationRequested) {
+            return { text: '' };
+        }
         const asyncIterator = {
             async *[Symbol.asyncIterator]() {
                 runner.on('chunk', chunk => {
                     var _a, _b;
-                    if (
+                    if (cancellationToken === null || cancellationToken === void 0 ? void 0 : cancellationToken.isCancellationRequested) {
+                        resolve = undefined;
+                        return;
+                    }
+                    if (resolve && ((_a = chunk.choices[0]) === null || _a === void 0 ? void 0 : _a.delta)) {
                         resolve({ ...(_b = chunk.choices[0]) === null || _b === void 0 ? void 0 : _b.delta });
                     }
                 });
                 while (!runnerEnd) {
+                    if (cancellationToken === null || cancellationToken === void 0 ? void 0 : cancellationToken.isCancellationRequested) {
+                        throw new Error('Iterator canceled');
+                    }
                     const promise = new Promise((res, rej) => {
                         resolve = res;
+                        cancellationToken === null || cancellationToken === void 0 ? void 0 : cancellationToken.onCancellationRequested(() => {
+                            rej(new Error('Canceled'));
+                            runnerEnd = true; // Stop the iterator
+                        });
                     });
                     yield promise;
                 }
@@ -141,7 +148,7 @@ class OpenAiModel {
         const settings = this.getSettings(request);
         const response = await openai.chat.completions.create({
             model: this.model,
-            messages: request.messages.map(toOpenAIMessage),
+            messages: request.messages.map(this.toOpenAIMessage.bind(this)),
             ...settings
         });
         const message = response.choices[0].message;
@@ -149,24 +156,32 @@ class OpenAiModel {
             text: (_a = message.content) !== null && _a !== void 0 ? _a : ''
         };
     }
+    toOpenAIMessage(message) {
+        return {
+            role: this.toOpenAiRole(message),
+            content: message.query || ''
+        };
+    }
+    toOpenAiRole(message) {
+        switch (message.actor) {
+            case 'system':
+                return this.supportsDeveloperMessage ? 'developer' : 'user';
+            case 'ai':
+                return 'assistant';
+            default:
+                return 'user';
+        }
+    }
     isNonStreamingModel(_model) {
         return !this.enableStreaming;
     }
-    supportsStructuredOutput() {
-        // see https://platform.openai.com/docs/models/gpt-4o
-        return [
-            'gpt-4o',
-            'gpt-4o-2024-08-06',
-            'gpt-4o-mini'
-        ].includes(this.model);
-    }
     async handleStructuredOutputRequest(openai, request) {
         var _a;
         const settings = this.getSettings(request);
         // TODO implement tool support for structured output (parse() seems to require different tool format)
         const result = await openai.beta.chat.completions.parse({
             model: this.model,
-            messages: request.messages.map(toOpenAIMessage),
+            messages: request.messages.map(this.toOpenAIMessage.bind(this)),
             response_format: request.response_format,
             ...settings
         });
@@ -202,8 +217,15 @@ class OpenAiModel {
         if (!apiKey && !(this.url)) {
             throw new Error('Please provide OPENAI_API_KEY in preferences or via environment variable');
         }
-        // We need to hand over "some" key, even if a custom url is not key protected as otherwise the OpenAI client will throw an error
-        return new openai_1.OpenAI({ apiKey: apiKey !== null && apiKey !== void 0 ? apiKey : 'no-key', baseURL: this.url });
+        const apiVersion = this.apiVersion();
+        if (apiVersion) {
+            // We need to hand over "some" key, even if a custom url is not key protected as otherwise the OpenAI client will throw an error
+            return new openai_1.AzureOpenAI({ apiKey: apiKey !== null && apiKey !== void 0 ? apiKey : 'no-key', baseURL: this.url, apiVersion: apiVersion });
+        }
+        else {
+            // We need to hand over "some" key, even if a custom url is not key protected as otherwise the OpenAI client will throw an error
+            return new openai_1.OpenAI({ apiKey: apiKey !== null && apiKey !== void 0 ? apiKey : 'no-key', baseURL: this.url });
+        }
     }
 }
 exports.OpenAiModel = OpenAiModel;
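Two behaviour changes are worth calling out: the former module-level helpers are now overridable class methods, and system messages are no longer sent with the `system` role but with `developer`, or downgraded to `user` when `supportsDeveloperMessage` is `false`. A condensed sketch of the mapping, not the literal class method:

```ts
// Role mapping applied per request message (mirrors toOpenAiRole above).
type Actor = 'system' | 'ai' | 'user';

function mapRole(actor: Actor, supportsDeveloperMessage: boolean): 'developer' | 'assistant' | 'user' {
    switch (actor) {
        case 'system':
            // Models such as o1-preview/o1-mini reject the 'developer' role, so fall back to 'user'.
            return supportsDeveloperMessage ? 'developer' : 'user';
        case 'ai':
            return 'assistant';
        default:
            return 'user';
    }
}

// mapRole('system', true) → 'developer'; mapRole('system', false) → 'user'
```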
package/lib/node/openai-language-model.js.map
CHANGED
@@ -1 +1 @@
(source map regenerated for the updated output; mappings omitted)
package/lib/node/openai-language-models-manager-impl.d.ts
CHANGED
@@ -2,10 +2,13 @@ import { LanguageModelRegistry } from '@theia/ai-core';
 import { OpenAiLanguageModelsManager, OpenAiModelDescription } from '../common';
 export declare class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsManager {
     protected _apiKey: string | undefined;
+    protected _apiVersion: string | undefined;
     protected readonly languageModelRegistry: LanguageModelRegistry;
     get apiKey(): string | undefined;
+    get apiVersion(): string | undefined;
     createOrUpdateLanguageModels(...modelDescriptions: OpenAiModelDescription[]): Promise<void>;
     removeLanguageModels(...modelIds: string[]): void;
     setApiKey(apiKey: string | undefined): void;
+    setApiVersion(apiVersion: string | undefined): void;
 }
 //# sourceMappingURL=openai-language-models-manager-impl.d.ts.map
package/lib/node/openai-language-models-manager-impl.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated for the updated output; mappings omitted)
package/lib/node/openai-language-models-manager-impl.js
CHANGED
@@ -25,6 +25,10 @@ let OpenAiLanguageModelsManagerImpl = class OpenAiLanguageModelsManagerImpl {
         var _a;
         return (_a = this._apiKey) !== null && _a !== void 0 ? _a : process.env.OPENAI_API_KEY;
     }
+    get apiVersion() {
+        var _a;
+        return (_a = this._apiVersion) !== null && _a !== void 0 ? _a : process.env.OPENAI_API_VERSION;
+    }
     // Triggered from frontend. In case you want to use the models on the backend
     // without a frontend then call this yourself
     async createOrUpdateLanguageModels(...modelDescriptions) {
@@ -39,6 +43,15 @@ let OpenAiLanguageModelsManagerImpl = class OpenAiLanguageModelsManagerImpl {
                 }
                 return undefined;
             };
+            const apiVersionProvider = () => {
+                if (modelDescription.apiVersion === true) {
+                    return this.apiVersion;
+                }
+                if (modelDescription.apiVersion) {
+                    return modelDescription.apiVersion;
+                }
+                return undefined;
+            };
             if (model) {
                 if (!(model instanceof openai_language_model_1.OpenAiModel)) {
                     console.warn(`OpenAI: model ${modelDescription.id} is not an OpenAI model`);
@@ -48,11 +61,14 @@ let OpenAiLanguageModelsManagerImpl = class OpenAiLanguageModelsManagerImpl {
                 model.enableStreaming = modelDescription.enableStreaming;
                 model.url = modelDescription.url;
                 model.apiKey = apiKeyProvider;
+                model.apiVersion = apiVersionProvider;
+                model.supportsDeveloperMessage = modelDescription.supportsDeveloperMessage;
+                model.supportsStructuredOutput = modelDescription.supportsStructuredOutput;
                 model.defaultRequestSettings = modelDescription.defaultRequestSettings;
             }
             else {
                 this.languageModelRegistry.addLanguageModels([
-                    new openai_language_model_1.OpenAiModel(modelDescription.id, modelDescription.model, modelDescription.enableStreaming, apiKeyProvider, modelDescription.url, modelDescription.defaultRequestSettings)
+                    new openai_language_model_1.OpenAiModel(modelDescription.id, modelDescription.model, modelDescription.enableStreaming, apiKeyProvider, apiVersionProvider, modelDescription.supportsDeveloperMessage, modelDescription.supportsStructuredOutput, modelDescription.url, modelDescription.defaultRequestSettings)
                 ]);
             }
         }
@@ -68,6 +84,14 @@ let OpenAiLanguageModelsManagerImpl = class OpenAiLanguageModelsManagerImpl {
             this._apiKey = undefined;
         }
     }
+    setApiVersion(apiVersion) {
+        if (apiVersion) {
+            this._apiVersion = apiVersion;
+        }
+        else {
+            this._apiVersion = undefined;
+        }
+    }
 };
 exports.OpenAiLanguageModelsManagerImpl = OpenAiLanguageModelsManagerImpl;
 tslib_1.__decorate([
package/lib/node/openai-language-models-manager-impl.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"openai-language-models-manager-impl.js","sourceRoot":"","sources":["../../src/node/openai-language-models-manager-impl.ts"],"names":[],"mappings":";AAAA,gFAAgF;AAChF,yCAAyC;AACzC,EAAE;AACF,2EAA2E;AAC3E,mEAAmE;AACnE,wCAAwC;AACxC,EAAE;AACF,4EAA4E;AAC5E,8EAA8E;AAC9E,6EAA6E;AAC7E,yDAAyD;AACzD,uDAAuD;AACvD,EAAE;AACF,gFAAgF;AAChF,gFAAgF;;;;AAEhF,4CAAuD;AACvD,4DAAkE;AAClE,mEAAsD;AAI/C,IAAM,+BAA+B,GAArC,MAAM,+BAA+B;
+
{"version":3,"file":"openai-language-models-manager-impl.js","sourceRoot":"","sources":["../../src/node/openai-language-models-manager-impl.ts"],"names":[],"mappings":";AAAA,gFAAgF;AAChF,yCAAyC;AACzC,EAAE;AACF,2EAA2E;AAC3E,mEAAmE;AACnE,wCAAwC;AACxC,EAAE;AACF,4EAA4E;AAC5E,8EAA8E;AAC9E,6EAA6E;AAC7E,yDAAyD;AACzD,uDAAuD;AACvD,EAAE;AACF,gFAAgF;AAChF,gFAAgF;;;;AAEhF,4CAAuD;AACvD,4DAAkE;AAClE,mEAAsD;AAI/C,IAAM,+BAA+B,GAArC,MAAM,+BAA+B;IAQxC,IAAI,MAAM;;QACN,OAAO,MAAA,IAAI,CAAC,OAAO,mCAAI,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC;IACtD,CAAC;IAED,IAAI,UAAU;;QACV,OAAO,MAAA,IAAI,CAAC,WAAW,mCAAI,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC;IAC9D,CAAC;IAED,6EAA6E;IAC7E,6CAA6C;IAC7C,KAAK,CAAC,4BAA4B,CAAC,GAAG,iBAA2C;QAC7E,KAAK,MAAM,gBAAgB,IAAI,iBAAiB,EAAE,CAAC;YAC/C,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,qBAAqB,CAAC,gBAAgB,CAAC,gBAAgB,CAAC,EAAE,CAAC,CAAC;YACrF,MAAM,cAAc,GAAG,GAAG,EAAE;gBACxB,IAAI,gBAAgB,CAAC,MAAM,KAAK,IAAI,EAAE,CAAC;oBACnC,OAAO,IAAI,CAAC,MAAM,CAAC;gBACvB,CAAC;gBACD,IAAI,gBAAgB,CAAC,MAAM,EAAE,CAAC;oBAC1B,OAAO,gBAAgB,CAAC,MAAM,CAAC;gBACnC,CAAC;gBACD,OAAO,SAAS,CAAC;YACrB,CAAC,CAAC;YACF,MAAM,kBAAkB,GAAG,GAAG,EAAE;gBAC5B,IAAI,gBAAgB,CAAC,UAAU,KAAK,IAAI,EAAE,CAAC;oBACvC,OAAO,IAAI,CAAC,UAAU,CAAC;gBAC3B,CAAC;gBACD,IAAI,gBAAgB,CAAC,UAAU,EAAE,CAAC;oBAC9B,OAAO,gBAAgB,CAAC,UAAU,CAAC;gBACvC,CAAC;gBACD,OAAO,SAAS,CAAC;YACrB,CAAC,CAAC;YAEF,IAAI,KAAK,EAAE,CAAC;gBACR,IAAI,CAAC,CAAC,KAAK,YAAY,mCAAW,CAAC,EAAE,CAAC;oBAClC,OAAO,CAAC,IAAI,CAAC,iBAAiB,gBAAgB,CAAC,EAAE,yBAAyB,CAAC,CAAC;oBAC5E,SAAS;gBACb,CAAC;gBACD,KAAK,CAAC,KAAK,GAAG,gBAAgB,CAAC,KAAK,CAAC;gBACrC,KAAK,CAAC,eAAe,GAAG,gBAAgB,CAAC,eAAe,CAAC;gBACzD,KAAK,CAAC,GAAG,GAAG,gBAAgB,CAAC,GAAG,CAAC;gBACjC,KAAK,CAAC,MAAM,GAAG,cAAc,CAAC;gBAC9B,KAAK,CAAC,UAAU,GAAG,kBAAkB,CAAC;gBACtC,KAAK,CAAC,wBAAwB,GAAG,gBAAgB,CAAC,wBAAwB,CAAC;gBAC3E,KAAK,CAAC,wBAAwB,GAAG,gBAAgB,CAAC,wBAAwB,CAAC;gBAC3E,KAAK,CAAC,sBAAsB,GAAG,gBAAgB,CAAC,sBAAsB,CAAC;YAC3E,CAAC;iBAAM,CAAC;gBACJ,IAAI,CAAC,qBAAqB,CAAC,iBAAiB,CAAC;oBACzC,IAAI,mCAAW,CACX,gBAAgB,CAAC,EAAE,EACnB,gBAAgB,CAAC,KAAK,EACtB,gBAAgB,CAAC,eAAe,EAChC,cAAc,EACd,kBAAkB,EAClB,gBAAgB,CAAC,wBAAwB,EACzC,gBAAgB,CAAC,wBAAwB,EACzC,gBAAgB,CAAC,GAAG,EACpB,gBAAgB,CAAC,sBAAsB,CAC1C;iBACJ,CAAC,CAAC;YACP,CAAC;QACL,CAAC;IACL,CAAC;IAED,oBAAoB,CAAC,GAAG,QAAkB;QACtC,IAAI,CAAC,qBAAqB,CAAC,oBAAoB,CAAC,QAAQ,CAAC,CAAC;IAC9D,CAAC;IAED,SAAS,CAAC,MAA0B;QAChC,IAAI,MAAM,EAAE,CAAC;YACT,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;QAC1B,CAAC;aAAM,CAAC;YACJ,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;QAC7B,CAAC;IACL,CAAC;IAED,aAAa,CAAC,UAA8B;QACxC,IAAI,UAAU,EAAE,CAAC;YACb,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;QAClC,CAAC;aAAM,CAAC;YACJ,IAAI,CAAC,WAAW,GAAG,SAAS,CAAC;QACjC,CAAC;IACL,CAAC;CACJ,CAAA;AA1FY,0EAA+B;AAMrB;IADlB,IAAA,kBAAM,EAAC,+BAAqB,CAAC;;8EACkC;0CANvD,+BAA+B;IAD3C,IAAA,sBAAU,GAAE;GACA,+BAA+B,CA0F3C"}
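The compiled getter/setter pair above resolves the API version in two steps: an explicitly set value wins, otherwise `OPENAI_API_VERSION` from the environment is used. A minimal usage sketch (illustration only, assuming a plain Node context without Theia's dependency injection; the version strings are arbitrary examples):

```ts
// Sketch only: exercising the getter/setter pair added above.
// The import path mirrors the compiled layout of this package.
import { OpenAiLanguageModelsManagerImpl } from '@theia/ai-openai/lib/node/openai-language-models-manager-impl';

process.env.OPENAI_API_VERSION = '2024-10-21';          // assumed example value
const manager = new OpenAiLanguageModelsManagerImpl();  // normally created via inversify

console.log(manager.apiVersion);    // '2024-10-21' – falls back to the environment variable
manager.setApiVersion('2024-06-01');
console.log(manager.apiVersion);    // '2024-06-01' – an explicitly set version takes precedence
manager.setApiVersion(undefined);
console.log(manager.apiVersion);    // back to the OPENAI_API_VERSION fallback
```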
package/package.json
CHANGED
@@ -1,14 +1,14 @@
 {
   "name": "@theia/ai-openai",
-  "version": "1.
+  "version": "1.57.0-next.112+f4778c273",
   "description": "Theia - OpenAI Integration",
   "dependencies": {
-    "@theia/ai-core": "1.
-    "@theia/core": "1.
-    "@theia/filesystem": "1.
-    "@theia/workspace": "1.
+    "@theia/ai-core": "1.57.0-next.112+f4778c273",
+    "@theia/core": "1.57.0-next.112+f4778c273",
+    "@theia/filesystem": "1.57.0-next.112+f4778c273",
+    "@theia/workspace": "1.57.0-next.112+f4778c273",
     "minimatch": "^5.1.0",
-    "openai": "^4.
+    "openai": "^4.77.0",
     "tslib": "^2.6.2"
   },
   "publishConfig": {
@@ -45,10 +45,10 @@
     "watch": "theiaext watch"
   },
   "devDependencies": {
-    "@theia/ext-scripts": "1.
+    "@theia/ext-scripts": "1.58.0"
   },
   "nyc": {
     "extends": "../../configs/nyc.json"
   },
-  "gitHead": "
+  "gitHead": "f4778c2737bb75613f0e1f99da8996bad91f6e17"
 }
package/src/browser/openai-frontend-application-contribution.ts
CHANGED
@@ -91,6 +91,9 @@ export class OpenAiFrontendApplicationContribution implements FrontendApplicatio
                 model.model === newModel.model &&
                 model.url === newModel.url &&
                 model.apiKey === newModel.apiKey &&
+                model.apiVersion === newModel.apiVersion &&
+                model.supportsDeveloperMessage === newModel.supportsDeveloperMessage &&
+                model.supportsStructuredOutput === newModel.supportsStructuredOutput &&
                 model.enableStreaming === newModel.enableStreaming));

         this.manager.removeLanguageModels(...modelsToRemove.map(model => model.id));
@@ -113,7 +116,10 @@ export class OpenAiFrontendApplicationContribution implements FrontendApplicatio
             id: id,
             model: modelId,
             apiKey: true,
+            apiVersion: true,
+            supportsDeveloperMessage: !openAIModelsNotSupportingDeveloperMessages.includes(modelId),
             enableStreaming: !openAIModelsWithDisabledStreaming.includes(modelId),
+            supportsStructuredOutput: !openAIModelsWithoutStructuredOutput.includes(modelId),
             defaultRequestSettings: modelRequestSetting?.requestSettings
         };
     }
@@ -136,6 +142,9 @@ export class OpenAiFrontendApplicationContribution implements FrontendApplicatio
             model: pref.model,
             url: pref.url,
             apiKey: typeof pref.apiKey === 'string' || pref.apiKey === true ? pref.apiKey : undefined,
+            apiVersion: typeof pref.apiVersion === 'string' || pref.apiVersion === true ? pref.apiVersion : undefined,
+            supportsDeveloperMessage: pref.supportsDeveloperMessage ?? true,
+            supportsStructuredOutput: pref.supportsStructuredOutput ?? true,
             enableStreaming: pref.enableStreaming ?? true,
             defaultRequestSettings: modelRequestSetting?.requestSettings
         }
@@ -159,4 +168,6 @@ export class OpenAiFrontendApplicationContribution implements FrontendApplicatio
     }
 }

-const openAIModelsWithDisabledStreaming = ['o1
+const openAIModelsWithDisabledStreaming = ['o1'];
+const openAIModelsNotSupportingDeveloperMessages = ['o1-preview', 'o1-mini'];
+const openAIModelsWithoutStructuredOutput = ['o1-preview', 'gpt-4-turbo', 'gpt-4', 'gpt-3.5-turbo', 'o1-mini', 'gpt-4o-2024-05-13'];
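For illustration, the three lists added above drive the default capability flags of the official models. A small sketch (the helper function is hypothetical, not part of the package) shows the resulting flags for a few model ids:

```ts
// Hypothetical helper mirroring the mapping used for official models above.
const openAIModelsWithDisabledStreaming = ['o1'];
const openAIModelsNotSupportingDeveloperMessages = ['o1-preview', 'o1-mini'];
const openAIModelsWithoutStructuredOutput = ['o1-preview', 'gpt-4-turbo', 'gpt-4', 'gpt-3.5-turbo', 'o1-mini', 'gpt-4o-2024-05-13'];

function officialModelFlags(modelId: string): { enableStreaming: boolean; supportsDeveloperMessage: boolean; supportsStructuredOutput: boolean } {
    return {
        enableStreaming: !openAIModelsWithDisabledStreaming.includes(modelId),
        supportsDeveloperMessage: !openAIModelsNotSupportingDeveloperMessages.includes(modelId),
        supportsStructuredOutput: !openAIModelsWithoutStructuredOutput.includes(modelId)
    };
}

officialModelFlags('gpt-4o');   // { enableStreaming: true,  supportsDeveloperMessage: true,  supportsStructuredOutput: true }
officialModelFlags('o1');       // { enableStreaming: false, supportsDeveloperMessage: true,  supportsStructuredOutput: true }
officialModelFlags('o1-mini');  // { enableStreaming: true,  supportsDeveloperMessage: false, supportsStructuredOutput: false }
```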
package/src/browser/openai-preferences.ts
CHANGED
@@ -34,7 +34,7 @@ export const OpenAiPreferencesSchema: PreferenceSchema = {
         type: 'array',
         description: 'Official OpenAI models to use',
         title: AI_CORE_PREFERENCES_TITLE,
-        default: ['gpt-4o', 'gpt-4o-2024-
+        default: ['gpt-4o', 'gpt-4o-2024-11-20', 'gpt-4o-2024-08-06', 'gpt-4o-mini', 'o1', 'o1-mini', 'o3-mini'],
         items: {
             type: 'string'
         }
@@ -50,6 +50,12 @@ export const OpenAiPreferencesSchema: PreferenceSchema = {
         \n\
         - provide an `apiKey` to access the API served at the given url. Use `true` to indicate the use of the global OpenAI API key.\
         \n\
+        - provide an `apiVersion` to access the API served at the given url in Azure. Use `true` to indicate the use of the global OpenAI API version.\
+        \n\
+        - specify `supportsDeveloperMessage: false` to indicate that the developer role shall not be used.\
+        \n\
+        - specify `supportsStructuredOutput: false` to indicate that structured output shall not be used.\
+        \n\
         - specify `enableStreaming: false` to indicate that streaming shall not be used.\
         \n\
         Refer to [our documentation](https://theia-ide.org/docs/user_ai/#openai-compatible-models-eg-via-vllm) for more information.',
@@ -73,6 +79,18 @@ export const OpenAiPreferencesSchema: PreferenceSchema = {
                     type: ['string', 'boolean'],
                     title: 'Either the key to access the API served at the given url or `true` to use the global OpenAI API key',
                 },
+                apiVersion: {
+                    type: ['string', 'boolean'],
+                    title: 'Either the version to access the API served at the given url in Azure or `true` to use the global OpenAI API version',
+                },
+                supportsDeveloperMessage: {
+                    type: 'boolean',
+                    title: 'Indicates whether the model supports the `developer` role. `true` by default.',
+                },
+                supportsStructuredOutput: {
+                    type: 'boolean',
+                    title: 'Indicates whether the model supports structured output. `true` by default.',
+                },
                 enableStreaming: {
                     type: 'boolean',
                     title: 'Indicates whether the streaming API shall be used. `true` by default.',
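A sketch of a custom-model entry that exercises the new schema properties above; endpoint, id, and deployment values are placeholders, not real ones:

```ts
// Example value for the `ai-features.openAiCustom.customOpenAiModels` preference (placeholders only).
const customOpenAiModels = [
    {
        model: 'gpt-4o',                                                        // model deployed at the endpoint
        url: 'https://my-resource.openai.azure.com/openai/deployments/gpt-4o',  // placeholder Azure endpoint
        id: 'azure-gpt-4o',                                                     // optional id shown in the UI
        apiKey: true,                                                           // reuse the global OpenAI API key
        apiVersion: true,                                                       // reuse the global OpenAI API version
        supportsDeveloperMessage: true,
        supportsStructuredOutput: true,
        enableStreaming: true
    }
];
```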
package/src/common/openai-language-models-manager.ts
CHANGED
@@ -32,10 +32,22 @@ export interface OpenAiModelDescription {
     * The key for the model. If 'true' is provided the global OpenAI API key will be used.
     */
    apiKey: string | true | undefined;
+    /**
+     * The version for the api. If 'true' is provided the global OpenAI version will be used.
+     */
+    apiVersion: string | true | undefined;
    /**
     * Indicate whether the streaming API shall be used.
     */
    enableStreaming: boolean;
+    /**
+     * Flag to configure whether the OpenAPI model supports the `developer` role. Default is `true`.
+     */
+    supportsDeveloperMessage: boolean;
+    /**
+     * Flag to configure whether the OpenAPI model supports structured output. Default is `true`.
+     */
+    supportsStructuredOutput: boolean;
    /**
     * Default request settings for the OpenAI model.
     */
@@ -44,6 +56,7 @@ export interface OpenAiModelDescription {
 export interface OpenAiLanguageModelsManager {
     apiKey: string | undefined;
     setApiKey(key: string | undefined): void;
+    setApiVersion(version: string | undefined): void;
     createOrUpdateLanguageModels(...models: OpenAiModelDescription[]): Promise<void>;
     removeLanguageModels(...modelIds: string[]): void
 }
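Since the implementation notes that backends running without a frontend may call the manager directly, here is a sketch of such a call against the interface above (all concrete values are placeholders; the interface may declare more members than the fields visible in this diff):

```ts
// Sketch: registering a model straight from backend code via the interface above.
import { OpenAiLanguageModelsManager, OpenAiModelDescription } from '@theia/ai-openai/lib/common';

async function registerAzureModel(manager: OpenAiLanguageModelsManager): Promise<void> {
    manager.setApiKey('my-azure-key');     // or leave unset and rely on OPENAI_API_KEY
    manager.setApiVersion('2024-06-01');   // or leave unset and rely on OPENAI_API_VERSION

    const description: OpenAiModelDescription = {
        id: 'azure/gpt-4o',                                                      // placeholder id
        model: 'gpt-4o',
        url: 'https://my-resource.openai.azure.com/openai/deployments/gpt-4o',   // placeholder endpoint
        apiKey: true,                     // use the key configured above
        apiVersion: true,                 // use the version configured above
        supportsDeveloperMessage: true,
        supportsStructuredOutput: true,
        enableStreaming: true,
        defaultRequestSettings: { temperature: 0 }
    };
    await manager.createOrUpdateLanguageModels(description);
}
```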
package/src/node/openai-backend-module.ts
CHANGED
@@ -18,13 +18,19 @@ import { ContainerModule } from '@theia/core/shared/inversify';
 import { OPENAI_LANGUAGE_MODELS_MANAGER_PATH, OpenAiLanguageModelsManager } from '../common/openai-language-models-manager';
 import { ConnectionHandler, RpcConnectionHandler } from '@theia/core';
 import { OpenAiLanguageModelsManagerImpl } from './openai-language-models-manager-impl';
+import { ConnectionContainerModule } from '@theia/core/lib/node/messaging/connection-container-module';

 export const OpenAiModelFactory = Symbol('OpenAiModelFactory');

-
+// We use a connection module to handle AI services separately for each frontend.
+const openAiConnectionModule = ConnectionContainerModule.create(({ bind, bindBackendService, bindFrontendService }) => {
     bind(OpenAiLanguageModelsManagerImpl).toSelf().inSingletonScope();
     bind(OpenAiLanguageModelsManager).toService(OpenAiLanguageModelsManagerImpl);
     bind(ConnectionHandler).toDynamicValue(ctx =>
         new RpcConnectionHandler(OPENAI_LANGUAGE_MODELS_MANAGER_PATH, () => ctx.container.get(OpenAiLanguageModelsManager))
     ).inSingletonScope();
 });
+
+export default new ContainerModule(bind => {
+    bind(ConnectionContainerModule).toConstantValue(openAiConnectionModule);
+});
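The registration above follows a general two-step pattern: put per-connection services into a `ConnectionContainerModule`, then register that module from the default `ContainerModule`. A generic sketch of the same pattern (the service and path are made up; only the Theia APIs already visible above are used):

```ts
// Generic sketch of the wiring above: per-connection services live in a
// ConnectionContainerModule, which the default ContainerModule registers as a constant.
import { ContainerModule } from '@theia/core/shared/inversify';
import { ConnectionHandler, RpcConnectionHandler } from '@theia/core';
import { ConnectionContainerModule } from '@theia/core/lib/node/messaging/connection-container-module';

const ExampleService = Symbol('ExampleService');   // hypothetical service
const EXAMPLE_SERVICE_PATH = '/services/example';  // hypothetical RPC path

const exampleConnectionModule = ConnectionContainerModule.create(({ bind }) => {
    // one instance per frontend connection
    bind(ExampleService).toConstantValue({ /* ... */ });
    bind(ConnectionHandler).toDynamicValue(ctx =>
        new RpcConnectionHandler(EXAMPLE_SERVICE_PATH, () => ctx.container.get(ExampleService))
    ).inSingletonScope();
});

export default new ContainerModule(bind => {
    bind(ConnectionContainerModule).toConstantValue(exampleConnectionModule);
});
```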
package/src/node/openai-language-model.ts
CHANGED
@@ -24,31 +24,13 @@ import {
     LanguageModelTextResponse
 } from '@theia/ai-core';
 import { CancellationToken } from '@theia/core';
-import OpenAI from 'openai';
+import { OpenAI, AzureOpenAI } from 'openai';
 import { ChatCompletionStream } from 'openai/lib/ChatCompletionStream';
 import { RunnableToolFunctionWithoutParse } from 'openai/lib/RunnableFunction';
 import { ChatCompletionMessageParam } from 'openai/resources';

 export const OpenAiModelIdentifier = Symbol('OpenAiModelIdentifier');

-function toOpenAIMessage(message: LanguageModelRequestMessage): ChatCompletionMessageParam {
-    return {
-        role: toOpenAiRole(message),
-        content: message.query || ''
-    };
-}
-
-function toOpenAiRole(message: LanguageModelRequestMessage): 'system' | 'user' | 'assistant' {
-    switch (message.actor) {
-        case 'system':
-            return 'system';
-        case 'ai':
-            return 'assistant';
-        default:
-            return 'user';
-    }
-}
-
 export class OpenAiModel implements LanguageModel {

     /**
@@ -56,6 +38,8 @@ export class OpenAiModel implements LanguageModel {
     * @param model the model id as it is used by the OpenAI API
     * @param enableStreaming whether the streaming API shall be used
     * @param apiKey a function that returns the API key to use for this model, called on each request
+     * @param apiVersion a function that returns the OpenAPI version to use for this model, called on each request
+     * @param supportsDeveloperMessage whether the model supports the `developer` role
     * @param url the OpenAI API compatible endpoint where the model is hosted. If not provided the default OpenAI endpoint will be used.
     * @param defaultRequestSettings optional default settings for requests made using this model.
     */
@@ -64,6 +48,9 @@ export class OpenAiModel implements LanguageModel {
         public model: string,
         public enableStreaming: boolean,
         public apiKey: () => string | undefined,
+        public apiVersion: () => string | undefined,
+        public supportsDeveloperMessage: boolean,
+        public supportsStructuredOutput: boolean,
         public url: string | undefined,
         public defaultRequestSettings?: { [key: string]: unknown }
     ) { }
@@ -80,12 +67,16 @@ export class OpenAiModel implements LanguageModel {
         const settings = this.getSettings(request);
         const openai = this.initializeOpenAi();

-        if (
+        if (request.response_format?.type === 'json_schema' && this.supportsStructuredOutput) {
+            return this.handleStructuredOutputRequest(openai, request);
+        }
+
+        if (this.isNonStreamingModel(this.model) || (typeof settings.stream === 'boolean' && !settings.stream)) {
             return this.handleNonStreamingRequest(openai, request);
         }

-        if (
-            return
+        if (cancellationToken?.isCancellationRequested) {
+            return { text: '' };
         }

         let runner: ChatCompletionStream;
@@ -93,7 +84,7 @@ export class OpenAiModel implements LanguageModel {
         if (tools) {
             runner = openai.beta.chat.completions.runTools({
                 model: this.model,
-                messages: request.messages.map(toOpenAIMessage),
+                messages: request.messages.map(this.toOpenAIMessage.bind(this)),
                 stream: true,
                 tools: tools,
                 tool_choice: 'auto',
@@ -102,7 +93,7 @@ export class OpenAiModel implements LanguageModel {
         } else {
             runner = openai.beta.chat.completions.stream({
                 model: this.model,
-                messages: request.messages.map(toOpenAIMessage),
+                messages: request.messages.map(this.toOpenAIMessage.bind(this)),
                 stream: true,
                 ...settings
             });
@@ -113,42 +104,57 @@ export class OpenAiModel implements LanguageModel {

         let runnerEnd = false;

-        let resolve: (part: LanguageModelStreamResponsePart) => void;
+        let resolve: ((part: LanguageModelStreamResponsePart) => void) | undefined;
         runner.on('error', error => {
             console.error('Error in OpenAI chat completion stream:', error);
             runnerEnd = true;
-            resolve({ content: error.message });
+            resolve?.({ content: error.message });
         });
         // we need to also listen for the emitted errors, as otherwise any error actually thrown by the API will not be caught
         runner.emitted('error').then(error => {
             console.error('Error in OpenAI chat completion stream:', error);
             runnerEnd = true;
-            resolve({ content: error.message });
+            resolve?.({ content: error.message });
         });
         runner.emitted('abort').then(() => {
-            //
+            // cancel async iterator
+            runnerEnd = true;
         });
         runner.on('message', message => {
             if (message.role === 'tool') {
-                resolve({ tool_calls: [{ id: message.tool_call_id, finished: true, result: this.getCompletionContent(message) }] });
+                resolve?.({ tool_calls: [{ id: message.tool_call_id, finished: true, result: this.getCompletionContent(message) }] });
             }
             console.debug('Received Open AI message', JSON.stringify(message));
         });
         runner.once('end', () => {
             runnerEnd = true;
             // eslint-disable-next-line @typescript-eslint/no-explicit-any
-            resolve(runner.finalChatCompletion as any);
+            resolve?.(runner.finalChatCompletion as any);
         });
+        if (cancellationToken?.isCancellationRequested) {
+            return { text: '' };
+        }
         const asyncIterator = {
             async *[Symbol.asyncIterator](): AsyncIterator<LanguageModelStreamResponsePart> {
                 runner.on('chunk', chunk => {
-                    if (
+                    if (cancellationToken?.isCancellationRequested) {
+                        resolve = undefined;
+                        return;
+                    }
+                    if (resolve && chunk.choices[0]?.delta) {
                         resolve({ ...chunk.choices[0]?.delta });
                     }
                 });
                 while (!runnerEnd) {
+                    if (cancellationToken?.isCancellationRequested) {
+                        throw new Error('Iterator canceled');
+                    }
                     const promise = new Promise<LanguageModelStreamResponsePart>((res, rej) => {
                         resolve = res;
+                        cancellationToken?.onCancellationRequested(() => {
+                            rej(new Error('Canceled'));
+                            runnerEnd = true; // Stop the iterator
+                        });
                     });
                     yield promise;
                 }
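The streaming changes above follow a resolve-per-chunk pattern: every loop turn creates a promise, event callbacks resolve it, and a cancellation request rejects it and ends the loop. A stand-alone sketch of that pattern with a generic event source (not the package's code, and simplified – a real implementation would also end the loop when the stream completes):

```ts
import { CancellationToken } from '@theia/core';

// Sketch: each loop turn exposes a fresh `resolve`; event callbacks feed it,
// cancellation rejects the pending promise and stops the iteration.
interface ChunkSource { on(event: 'chunk', listener: (chunk: string) => void): void; }

async function* streamChunks(source: ChunkSource, token?: CancellationToken): AsyncGenerator<string> {
    let done = false;
    let resolve: ((chunk: string) => void) | undefined;
    source.on('chunk', chunk => resolve?.(chunk));
    while (!done) {
        yield new Promise<string>((res, rej) => {
            resolve = res;
            token?.onCancellationRequested(() => {
                done = true;                 // stop iterating on the next turn
                rej(new Error('Canceled'));
            });
        });
    }
}
```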
@@ -161,7 +167,7 @@ export class OpenAiModel implements LanguageModel {
         const settings = this.getSettings(request);
         const response = await openai.chat.completions.create({
             model: this.model,
-            messages: request.messages.map(toOpenAIMessage),
+            messages: request.messages.map(this.toOpenAIMessage.bind(this)),
             ...settings
         });

@@ -172,17 +178,26 @@ export class OpenAiModel implements LanguageModel {
         };
     }

-    protected
-        return
+    protected toOpenAIMessage(message: LanguageModelRequestMessage): ChatCompletionMessageParam {
+        return {
+            role: this.toOpenAiRole(message),
+            content: message.query || ''
+        };
+    }
+
+    protected toOpenAiRole(message: LanguageModelRequestMessage): 'developer' | 'user' | 'assistant' {
+        switch (message.actor) {
+            case 'system':
+                return this.supportsDeveloperMessage ? 'developer' : 'user';
+            case 'ai':
+                return 'assistant';
+            default:
+                return 'user';
+        }
     }

-    protected
-
-        return [
-            'gpt-4o',
-            'gpt-4o-2024-08-06',
-            'gpt-4o-mini'
-        ].includes(this.model);
+    protected isNonStreamingModel(_model: string): boolean {
+        return !this.enableStreaming;
     }

     protected async handleStructuredOutputRequest(openai: OpenAI, request: LanguageModelRequest): Promise<LanguageModelParsedResponse> {
@@ -190,7 +205,7 @@ export class OpenAiModel implements LanguageModel {
         // TODO implement tool support for structured output (parse() seems to require different tool format)
         const result = await openai.beta.chat.completions.parse({
             model: this.model,
-            messages: request.messages.map(toOpenAIMessage),
+            messages: request.messages.map(this.toOpenAIMessage.bind(this)),
             response_format: request.response_format,
             ...settings
         });
@@ -228,7 +243,14 @@ export class OpenAiModel implements LanguageModel {
         if (!apiKey && !(this.url)) {
             throw new Error('Please provide OPENAI_API_KEY in preferences or via environment variable');
         }
-
-
+
+        const apiVersion = this.apiVersion();
+        if (apiVersion) {
+            // We need to hand over "some" key, even if a custom url is not key protected as otherwise the OpenAI client will throw an error
+            return new AzureOpenAI({ apiKey: apiKey ?? 'no-key', baseURL: this.url, apiVersion: apiVersion });
+        } else {
+            // We need to hand over "some" key, even if a custom url is not key protected as otherwise the OpenAI client will throw an error
+            return new OpenAI({ apiKey: apiKey ?? 'no-key', baseURL: this.url });
+        }
     }
 }
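To make the extended constructor shape concrete, here is a sketch that instantiates the model class directly with provider functions (placeholder endpoint and settings; in the product this wiring is done by `OpenAiLanguageModelsManagerImpl`):

```ts
// Sketch only: constructing OpenAiModel with the extended parameter list.
import { OpenAiModel } from '@theia/ai-openai/lib/node/openai-language-model';

const azureModel = new OpenAiModel(
    'azure/gpt-4o',                                   // id (placeholder)
    'gpt-4o',                                         // model
    true,                                             // enableStreaming
    () => process.env.OPENAI_API_KEY,                 // apiKey provider, evaluated per request
    () => process.env.OPENAI_API_VERSION,             // apiVersion provider; a defined value selects AzureOpenAI
    true,                                             // supportsDeveloperMessage → 'system' maps to 'developer'
    true,                                             // supportsStructuredOutput → json_schema requests use parse()
    'https://my-resource.openai.azure.com/openai/deployments/gpt-4o', // placeholder endpoint
    { temperature: 0 }                                // defaultRequestSettings (example)
);
```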
package/src/node/openai-language-models-manager-impl.ts
CHANGED
@@ -23,6 +23,7 @@ import { OpenAiLanguageModelsManager, OpenAiModelDescription } from '../common';
 export class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsManager {

     protected _apiKey: string | undefined;
+    protected _apiVersion: string | undefined;

     @inject(LanguageModelRegistry)
     protected readonly languageModelRegistry: LanguageModelRegistry;
@@ -31,6 +32,10 @@ export class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsMana
         return this._apiKey ?? process.env.OPENAI_API_KEY;
     }

+    get apiVersion(): string | undefined {
+        return this._apiVersion ?? process.env.OPENAI_API_VERSION;
+    }
+
     // Triggered from frontend. In case you want to use the models on the backend
     // without a frontend then call this yourself
     async createOrUpdateLanguageModels(...modelDescriptions: OpenAiModelDescription[]): Promise<void> {
@@ -45,6 +50,15 @@ export class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsMana
                 }
                 return undefined;
             };
+            const apiVersionProvider = () => {
+                if (modelDescription.apiVersion === true) {
+                    return this.apiVersion;
+                }
+                if (modelDescription.apiVersion) {
+                    return modelDescription.apiVersion;
+                }
+                return undefined;
+            };

             if (model) {
                 if (!(model instanceof OpenAiModel)) {
@@ -55,6 +69,9 @@ export class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsMana
                 model.enableStreaming = modelDescription.enableStreaming;
                 model.url = modelDescription.url;
                 model.apiKey = apiKeyProvider;
+                model.apiVersion = apiVersionProvider;
+                model.supportsDeveloperMessage = modelDescription.supportsDeveloperMessage;
+                model.supportsStructuredOutput = modelDescription.supportsStructuredOutput;
                 model.defaultRequestSettings = modelDescription.defaultRequestSettings;
             } else {
                 this.languageModelRegistry.addLanguageModels([
@@ -63,6 +80,9 @@ export class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsMana
                         modelDescription.model,
                         modelDescription.enableStreaming,
                         apiKeyProvider,
+                        apiVersionProvider,
+                        modelDescription.supportsDeveloperMessage,
+                        modelDescription.supportsStructuredOutput,
                         modelDescription.url,
                         modelDescription.defaultRequestSettings
                     )
@@ -82,4 +102,12 @@ export class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsMana
             this._apiKey = undefined;
         }
     }
+
+    setApiVersion(apiVersion: string | undefined): void {
+        if (apiVersion) {
+            this._apiVersion = apiVersion;
+        } else {
+            this._apiVersion = undefined;
+        }
+    }
 }
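The provider closures above encode a small precedence rule for both the API key and the API version; restated as a sketch (illustration only, not package code):

```ts
// Sketch of the precedence implemented by apiKeyProvider/apiVersionProvider above.
type PerModelSetting = string | true | undefined;

function resolveSetting(perModel: PerModelSetting, globalValue: string | undefined): string | undefined {
    if (perModel === true) {
        return globalValue;   // `true` defers to the globally configured value or environment variable
    }
    if (perModel) {
        return perModel;      // a concrete string wins for this model only
    }
    return undefined;         // unset: 'no-key' is used for the key, and the regular OpenAI client for the version
}

resolveSetting(true, process.env.OPENAI_API_VERSION);  // global Azure API version
resolveSetting('2024-06-01', undefined);               // model-specific version (placeholder)
resolveSetting(undefined, 'ignored');                  // undefined → regular OpenAI client is used
```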