@theia/ai-openai 1.57.1 → 1.58.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +54 -0
- package/lib/browser/openai-frontend-application-contribution.d.ts.map +1 -1
- package/lib/browser/openai-frontend-application-contribution.js +10 -3
- package/lib/browser/openai-frontend-application-contribution.js.map +1 -1
- package/lib/browser/openai-preferences.d.ts.map +1 -1
- package/lib/browser/openai-preferences.js +13 -1
- package/lib/browser/openai-preferences.js.map +1 -1
- package/lib/common/openai-language-models-manager.d.ts +9 -0
- package/lib/common/openai-language-models-manager.d.ts.map +1 -1
- package/lib/node/openai-backend-module.d.ts.map +1 -1
- package/lib/node/openai-backend-module.js +6 -1
- package/lib/node/openai-backend-module.js.map +1 -1
- package/lib/node/openai-language-model.d.ts +10 -3
- package/lib/node/openai-language-model.d.ts.map +1 -1
- package/lib/node/openai-language-model.js +59 -30
- package/lib/node/openai-language-model.js.map +1 -1
- package/lib/node/openai-language-models-manager-impl.d.ts +3 -0
- package/lib/node/openai-language-models-manager-impl.d.ts.map +1 -1
- package/lib/node/openai-language-models-manager-impl.js +24 -1
- package/lib/node/openai-language-models-manager-impl.js.map +1 -1
- package/package.json +8 -8
- package/src/browser/openai-frontend-application-contribution.ts +8 -1
- package/src/browser/openai-preferences.ts +13 -1
- package/src/common/openai-language-models-manager.ts +9 -0
- package/src/node/openai-backend-module.ts +7 -1
- package/src/node/openai-language-model.ts +62 -33
- package/src/node/openai-language-models-manager-impl.ts +26 -0
package/README.md
CHANGED
@@ -27,12 +27,66 @@ You can configure the end points via the `ai-features.openAiCustom.customOpenAiModels`
     url: string
     id?: string
     apiKey?: string | true
+    apiVersion?: string | true
+    supportsDeveloperMessage?: boolean
+    enableStreaming?: boolean
 }
 ```
 
 - `model` and `url` are mandatory attributes, indicating the end point and model to use
 - `id` is an optional attribute which is used in the UI to refer to this configuration
 - `apiKey` is either the key to access the API served at the given URL or `true` to use the global OpenAI API key. If not given 'no-key' will be used.
+- `apiVersion` is either the api version to access the API served at the given URL in Azure or `true` to use the global OpenAI API version.
+- `supportsDeveloperMessage` is a flag that indicates whether the model supports the `developer` role or not. `true` by default.
+- `enableStreaming` is a flag that indicates whether the streaming API shall be used or not. `true` by default.
+
+### Azure OpenAI
+
+To use a custom OpenAI model hosted on Azure, the `AzureOpenAI` class needs to be used, as described in the
+[openai-node docs](https://github.com/openai/openai-node?tab=readme-ov-file#microsoft-azure-openai).
+
+Requests to an OpenAI model hosted on Azure need an `apiVersion`. To configure a custom OpenAI model in Theia you therefore need to configure the `apiVersion` with the end point.
+Note that if you don't configure an `apiVersion`, the default `OpenAI` object is used for initialization and a connection to an Azure hosted OpenAI model will fail.
+
+An OpenAI model version deployed on Azure might not support the `developer` role. In that case it is possible to configure whether the `developer` role is supported or not via the
+`supportsDeveloperMessage` option, which defaults to `true`.
+
+The following snippet shows a possible configuration to access an OpenAI model hosted on Azure. The `AZURE_OPENAI_API_BASE_URL` needs to be given without the `/chat/completions`
+path and without the `api-version` parameter, e.g. _`https://<my_prefix>.openai.azure.com/openai/deployments/<my_deployment>`_
+
+```json
+{
+    "ai-features.AiEnable.enableAI": true,
+    "ai-features.openAiCustom.customOpenAiModels": [
+        {
+            "model": "gpt4o",
+            "url": "<AZURE_OPENAI_API_BASE_URL>",
+            "id": "azure-deployment",
+            "apiKey": "<AZURE_OPENAI_API_KEY>",
+            "apiVersion": "<AZURE_OPENAI_API_VERSION>",
+            "supportsDeveloperMessage": false
+        }
+    ],
+    "ai-features.agentSettings": {
+        "Universal": {
+            "languageModelRequirements": [
+                {
+                    "purpose": "chat",
+                    "identifier": "azure-deployment"
+                }
+            ]
+        },
+        "Orchestrator": {
+            "languageModelRequirements": [
+                {
+                    "purpose": "agent-selection",
+                    "identifier": "azure-deployment"
+                }
+            ]
+        }
+    }
+}
+```
 
 ## Additional Information
 
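The same attributes also apply to non-Azure, OpenAI-compatible endpoints. Below is a minimal sketch of a single `customOpenAiModels` entry, written as a TypeScript object against the shape documented above; the interface name, endpoint URL, model id and entry id are illustrative placeholders, not part of the package.

```ts
// Sketch only: mirrors the documented attribute shape; all values are placeholders.
interface CustomOpenAiModel {
    model: string;
    url: string;
    id?: string;
    apiKey?: string | true;
    apiVersion?: string | true;          // new: Azure api-version, or `true` for the global one
    supportsDeveloperMessage?: boolean;  // new: defaults to true
    enableStreaming?: boolean;           // defaults to true
}

const exampleEntry: CustomOpenAiModel = {
    model: 'my-model',                      // model id expected by the server
    url: 'https://llm.example.com/v1',      // OpenAI-compatible endpoint
    id: 'example-custom-model',
    apiKey: true,                           // reuse the global OpenAI API key
    supportsDeveloperMessage: false,        // send system prompts with the `user` role instead
    enableStreaming: false                  // use the non-streaming completions API
};
```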
package/lib/browser/openai-frontend-application-contribution.d.ts.map
CHANGED
@@ -1 +1 @@
(generated source map; mappings updated for the changes above, full contents omitted)
package/lib/browser/openai-frontend-application-contribution.js
CHANGED
@@ -77,6 +77,8 @@ let OpenAiFrontendApplicationContribution = class OpenAiFrontendApplicationContr
                 model.model === newModel.model &&
                 model.url === newModel.url &&
                 model.apiKey === newModel.apiKey &&
+                model.apiVersion === newModel.apiVersion &&
+                model.supportsDeveloperMessage === newModel.supportsDeveloperMessage &&
                 model.enableStreaming === newModel.enableStreaming));
         this.manager.removeLanguageModels(...modelsToRemove.map(model => model.id));
         this.manager.createOrUpdateLanguageModels(...modelsToAddOrUpdate);
@@ -95,13 +97,15 @@ let OpenAiFrontendApplicationContribution = class OpenAiFrontendApplicationContr
             id: id,
             model: modelId,
             apiKey: true,
+            apiVersion: true,
+            supportsDeveloperMessage: !openAIModelsSupportingDeveloperMessages.includes(modelId),
             enableStreaming: !openAIModelsWithDisabledStreaming.includes(modelId),
             defaultRequestSettings: modelRequestSetting === null || modelRequestSetting === void 0 ? void 0 : modelRequestSetting.requestSettings
         };
     }
     createCustomModelDescriptionsFromPreferences(preferences, requestSettings) {
         return preferences.reduce((acc, pref) => {
-            var _a;
+            var _a, _b;
             if (!pref.model || !pref.url || typeof pref.model !== 'string' || typeof pref.url !== 'string') {
                 return acc;
             }
@@ -113,7 +117,9 @@ let OpenAiFrontendApplicationContribution = class OpenAiFrontendApplicationContr
                     model: pref.model,
                     url: pref.url,
                     apiKey: typeof pref.apiKey === 'string' || pref.apiKey === true ? pref.apiKey : undefined,
-
+                    apiVersion: typeof pref.apiVersion === 'string' || pref.apiVersion === true ? pref.apiVersion : undefined,
+                    supportsDeveloperMessage: (_a = pref.supportsDeveloperMessage) !== null && _a !== void 0 ? _a : true,
+                    enableStreaming: (_b = pref.enableStreaming) !== null && _b !== void 0 ? _b : true,
                     defaultRequestSettings: modelRequestSetting === null || modelRequestSetting === void 0 ? void 0 : modelRequestSetting.requestSettings
                 }
             ];
@@ -139,5 +145,6 @@ tslib_1.__decorate([
 exports.OpenAiFrontendApplicationContribution = OpenAiFrontendApplicationContribution = tslib_1.__decorate([
     (0, inversify_1.injectable)()
 ], OpenAiFrontendApplicationContribution);
-const openAIModelsWithDisabledStreaming = ['o1-preview'];
+const openAIModelsWithDisabledStreaming = ['o1-preview', 'o1-mini'];
+const openAIModelsSupportingDeveloperMessages = ['o1-preview', 'o1-mini'];
 //# sourceMappingURL=openai-frontend-application-contribution.js.map
package/lib/browser/openai-frontend-application-contribution.js.map
CHANGED
@@ -1 +1 @@
(generated source map; mappings updated for the changes above, full contents omitted)
package/lib/browser/openai-preferences.d.ts.map
CHANGED
@@ -1 +1 @@
(generated source map; mappings updated for the changes above, full contents omitted)
package/lib/browser/openai-preferences.js
CHANGED
@@ -33,7 +33,7 @@ exports.OpenAiPreferencesSchema = {
             type: 'array',
             description: 'Official OpenAI models to use',
             title: ai_core_preferences_1.AI_CORE_PREFERENCES_TITLE,
-            default: ['gpt-4o', 'gpt-4o-2024-08-06', 'gpt-4o-2024-05-13', 'gpt-4o-mini', 'gpt-4-turbo', 'gpt-4', 'gpt-3.5-turbo', 'o1-preview'],
+            default: ['gpt-4o', 'gpt-4o-2024-08-06', 'gpt-4o-2024-05-13', 'gpt-4o-mini', 'gpt-4-turbo', 'gpt-4', 'gpt-3.5-turbo', 'o1-preview', 'o1-mini'],
             items: {
                 type: 'string'
             }
@@ -49,6 +49,10 @@ exports.OpenAiPreferencesSchema = {
 \n\
 - provide an `apiKey` to access the API served at the given url. Use `true` to indicate the use of the global OpenAI API key.\
 \n\
+- provide an `apiVersion` to access the API served at the given url in Azure. Use `true` to indicate the use of the global OpenAI API version.\
+\n\
+- specify `supportsDeveloperMessage: false` to indicate that the developer role shall not be used.\
+\n\
 - specify `enableStreaming: false` to indicate that streaming shall not be used.\
 \n\
 Refer to [our documentation](https://theia-ide.org/docs/user_ai/#openai-compatible-models-eg-via-vllm) for more information.',
@@ -72,6 +76,14 @@ exports.OpenAiPreferencesSchema = {
                     type: ['string', 'boolean'],
                     title: 'Either the key to access the API served at the given url or `true` to use the global OpenAI API key',
                 },
+                apiVersion: {
+                    type: ['string', 'boolean'],
+                    title: 'Either the version to access the API served at the given url in Azure or `true` to use the global OpenAI API version',
+                },
+                supportsDeveloperMessage: {
+                    type: 'boolean',
+                    title: 'Indicates whether the model supports the `developer` role. `true` by default.',
+                },
                 enableStreaming: {
                     type: 'boolean',
                     title: 'Indicates whether the streaming API shall be used. `true` by default.',
package/lib/browser/openai-preferences.js.map
CHANGED
@@ -1 +1 @@
(generated source map; mappings updated for the changes above, full contents omitted)
package/lib/common/openai-language-models-manager.d.ts
CHANGED
@@ -17,10 +17,18 @@ export interface OpenAiModelDescription {
      * The key for the model. If 'true' is provided the global OpenAI API key will be used.
      */
     apiKey: string | true | undefined;
+    /**
+     * The version for the api. If 'true' is provided the global OpenAI version will be used.
+     */
+    apiVersion: string | true | undefined;
     /**
      * Indicate whether the streaming API shall be used.
      */
     enableStreaming: boolean;
+    /**
+     * Flag to configure whether the OpenAPI model supports the `developer` role. Default is `true`.
+     */
+    supportsDeveloperMessage: boolean;
     /**
      * Default request settings for the OpenAI model.
      */
@@ -31,6 +39,7 @@ export interface OpenAiModelDescription {
 export interface OpenAiLanguageModelsManager {
     apiKey: string | undefined;
     setApiKey(key: string | undefined): void;
+    setApiVersion(version: string | undefined): void;
     createOrUpdateLanguageModels(...models: OpenAiModelDescription[]): Promise<void>;
     removeLanguageModels(...modelIds: string[]): void;
 }
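Taken together, a full `OpenAiModelDescription` now carries the API version and the developer-role flag alongside the existing fields. Below is a sketch under the assumption that the interface is importable from the package's `lib/common` entry point; ids, URL and version are placeholders.

```ts
import { OpenAiModelDescription } from '@theia/ai-openai/lib/common';

// Placeholder description as it could be passed to createOrUpdateLanguageModels().
const description: OpenAiModelDescription = {
    id: 'openai/azure-deployment',
    model: 'gpt4o',
    url: 'https://example.openai.azure.com/openai/deployments/gpt4o',
    apiKey: true,                    // `true` resolves against the global OpenAI API key
    apiVersion: '2024-06-01',        // placeholder Azure api-version
    supportsDeveloperMessage: false, // system prompts are sent with the `user` role
    enableStreaming: true
};
```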
package/lib/common/openai-language-models-manager.d.ts.map
CHANGED
@@ -1 +1 @@
(generated source map; mappings updated for the changes above, full contents omitted)
package/lib/node/openai-backend-module.d.ts.map
CHANGED
@@ -1 +1 @@
(generated source map; mappings updated for the changes above, full contents omitted)
package/lib/node/openai-backend-module.js
CHANGED
@@ -20,10 +20,15 @@ const inversify_1 = require("@theia/core/shared/inversify");
 const openai_language_models_manager_1 = require("../common/openai-language-models-manager");
 const core_1 = require("@theia/core");
 const openai_language_models_manager_impl_1 = require("./openai-language-models-manager-impl");
+const connection_container_module_1 = require("@theia/core/lib/node/messaging/connection-container-module");
 exports.OpenAiModelFactory = Symbol('OpenAiModelFactory');
-
+// We use a connection module to handle AI services separately for each frontend.
+const openAiConnectionModule = connection_container_module_1.ConnectionContainerModule.create(({ bind, bindBackendService, bindFrontendService }) => {
     bind(openai_language_models_manager_impl_1.OpenAiLanguageModelsManagerImpl).toSelf().inSingletonScope();
     bind(openai_language_models_manager_1.OpenAiLanguageModelsManager).toService(openai_language_models_manager_impl_1.OpenAiLanguageModelsManagerImpl);
     bind(core_1.ConnectionHandler).toDynamicValue(ctx => new core_1.RpcConnectionHandler(openai_language_models_manager_1.OPENAI_LANGUAGE_MODELS_MANAGER_PATH, () => ctx.container.get(openai_language_models_manager_1.OpenAiLanguageModelsManager))).inSingletonScope();
 });
+exports.default = new inversify_1.ContainerModule(bind => {
+    bind(connection_container_module_1.ConnectionContainerModule).toConstantValue(openAiConnectionModule);
+});
 //# sourceMappingURL=openai-backend-module.js.map
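The compiled output above now registers the bindings through a `ConnectionContainerModule`, so every frontend connection gets its own `OpenAiLanguageModelsManagerImpl` instance instead of one shared backend singleton. A sketch of what the corresponding TypeScript source plausibly looks like (module specifiers are taken from the compiled requires and should be treated as assumptions):

```ts
import { ContainerModule } from '@theia/core/shared/inversify';
import { ConnectionHandler, RpcConnectionHandler } from '@theia/core';
import { ConnectionContainerModule } from '@theia/core/lib/node/messaging/connection-container-module';
import { OpenAiLanguageModelsManager, OPENAI_LANGUAGE_MODELS_MANAGER_PATH } from '../common/openai-language-models-manager';
import { OpenAiLanguageModelsManagerImpl } from './openai-language-models-manager-impl';

// One manager (and RPC handler) per frontend connection.
const openAiConnectionModule = ConnectionContainerModule.create(({ bind }) => {
    bind(OpenAiLanguageModelsManagerImpl).toSelf().inSingletonScope();
    bind(OpenAiLanguageModelsManager).toService(OpenAiLanguageModelsManagerImpl);
    bind(ConnectionHandler).toDynamicValue(ctx =>
        new RpcConnectionHandler(OPENAI_LANGUAGE_MODELS_MANAGER_PATH, () => ctx.container.get(OpenAiLanguageModelsManager))
    ).inSingletonScope();
});

export default new ContainerModule(bind => {
    bind(ConnectionContainerModule).toConstantValue(openAiConnectionModule);
});
```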
package/lib/node/openai-backend-module.js.map
CHANGED
@@ -1 +1 @@
(generated source map; mappings updated for the changes above, full contents omitted)
package/lib/node/openai-language-model.d.ts
CHANGED
@@ -1,13 +1,16 @@
-import { LanguageModel, LanguageModelParsedResponse, LanguageModelRequest, LanguageModelResponse, LanguageModelTextResponse } from '@theia/ai-core';
+import { LanguageModel, LanguageModelParsedResponse, LanguageModelRequest, LanguageModelRequestMessage, LanguageModelResponse, LanguageModelTextResponse } from '@theia/ai-core';
 import { CancellationToken } from '@theia/core';
-import OpenAI from 'openai';
+import { OpenAI } from 'openai';
 import { RunnableToolFunctionWithoutParse } from 'openai/lib/RunnableFunction';
+import { ChatCompletionMessageParam } from 'openai/resources';
 export declare const OpenAiModelIdentifier: unique symbol;
 export declare class OpenAiModel implements LanguageModel {
     readonly id: string;
     model: string;
     enableStreaming: boolean;
     apiKey: () => string | undefined;
+    apiVersion: () => string | undefined;
+    supportsDeveloperMessage: boolean;
     url: string | undefined;
     defaultRequestSettings?: {
         [key: string]: unknown;
@@ -17,15 +20,19 @@ export declare class OpenAiModel implements LanguageModel {
      * @param model the model id as it is used by the OpenAI API
      * @param enableStreaming whether the streaming API shall be used
      * @param apiKey a function that returns the API key to use for this model, called on each request
+     * @param apiVersion a function that returns the OpenAPI version to use for this model, called on each request
+     * @param supportsDeveloperMessage whether the model supports the `developer` role
      * @param url the OpenAI API compatible endpoint where the model is hosted. If not provided the default OpenAI endpoint will be used.
      * @param defaultRequestSettings optional default settings for requests made using this model.
      */
-    constructor(id: string, model: string, enableStreaming: boolean, apiKey: () => string | undefined, url: string | undefined, defaultRequestSettings?: {
+    constructor(id: string, model: string, enableStreaming: boolean, apiKey: () => string | undefined, apiVersion: () => string | undefined, supportsDeveloperMessage: boolean, url: string | undefined, defaultRequestSettings?: {
         [key: string]: unknown;
     } | undefined);
     protected getSettings(request: LanguageModelRequest): Record<string, unknown>;
     request(request: LanguageModelRequest, cancellationToken?: CancellationToken): Promise<LanguageModelResponse>;
     protected handleNonStreamingRequest(openai: OpenAI, request: LanguageModelRequest): Promise<LanguageModelTextResponse>;
+    protected toOpenAIMessage(message: LanguageModelRequestMessage): ChatCompletionMessageParam;
+    protected toOpenAiRole(message: LanguageModelRequestMessage): 'developer' | 'user' | 'assistant';
     protected isNonStreamingModel(_model: string): boolean;
     protected supportsStructuredOutput(): boolean;
     protected handleStructuredOutputRequest(openai: OpenAI, request: LanguageModelRequest): Promise<LanguageModelParsedResponse>;
package/lib/node/openai-language-model.d.ts.map
CHANGED
@@ -1 +1 @@
(generated source map; mappings updated for the changes above, full contents omitted)
package/lib/node/openai-language-model.js
CHANGED
@@ -18,36 +18,24 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.OpenAiModel = exports.OpenAiModelIdentifier = void 0;
 const openai_1 = require("openai");
 exports.OpenAiModelIdentifier = Symbol('OpenAiModelIdentifier');
-function toOpenAIMessage(message) {
-    return {
-        role: toOpenAiRole(message),
-        content: message.query || ''
-    };
-}
-function toOpenAiRole(message) {
-    switch (message.actor) {
-        case 'system':
-            return 'system';
-        case 'ai':
-            return 'assistant';
-        default:
-            return 'user';
-    }
-}
 class OpenAiModel {
     /**
      * @param id the unique id for this language model. It will be used to identify the model in the UI.
      * @param model the model id as it is used by the OpenAI API
      * @param enableStreaming whether the streaming API shall be used
      * @param apiKey a function that returns the API key to use for this model, called on each request
+     * @param apiVersion a function that returns the OpenAPI version to use for this model, called on each request
+     * @param supportsDeveloperMessage whether the model supports the `developer` role
      * @param url the OpenAI API compatible endpoint where the model is hosted. If not provided the default OpenAI endpoint will be used.
      * @param defaultRequestSettings optional default settings for requests made using this model.
      */
-    constructor(id, model, enableStreaming, apiKey, url, defaultRequestSettings) {
+    constructor(id, model, enableStreaming, apiKey, apiVersion, supportsDeveloperMessage, url, defaultRequestSettings) {
         this.id = id;
         this.model = model;
         this.enableStreaming = enableStreaming;
         this.apiKey = apiKey;
+        this.apiVersion = apiVersion;
+        this.supportsDeveloperMessage = supportsDeveloperMessage;
         this.url = url;
         this.defaultRequestSettings = defaultRequestSettings;
     }
@@ -62,18 +50,21 @@ class OpenAiModel {
         var _a;
         const settings = this.getSettings(request);
         const openai = this.initializeOpenAi();
-        if (this.isNonStreamingModel(this.model)) {
+        if (this.isNonStreamingModel(this.model) || (typeof settings.stream === 'boolean' && !settings.stream)) {
             return this.handleNonStreamingRequest(openai, request);
         }
         if (((_a = request.response_format) === null || _a === void 0 ? void 0 : _a.type) === 'json_schema' && this.supportsStructuredOutput()) {
             return this.handleStructuredOutputRequest(openai, request);
         }
+        if (cancellationToken === null || cancellationToken === void 0 ? void 0 : cancellationToken.isCancellationRequested) {
+            return { text: '' };
+        }
         let runner;
         const tools = this.createTools(request);
         if (tools) {
             runner = openai.beta.chat.completions.runTools({
                 model: this.model,
-                messages: request.messages.map(toOpenAIMessage),
+                messages: request.messages.map(this.toOpenAIMessage.bind(this)),
                 stream: true,
                 tools: tools,
                 tool_choice: 'auto',
@@ -83,7 +74,7 @@ class OpenAiModel {
         else {
             runner = openai.beta.chat.completions.stream({
                 model: this.model,
-                messages: request.messages.map(toOpenAIMessage),
+                messages: request.messages.map(this.toOpenAIMessage.bind(this)),
                 stream: true,
                 ...settings
             });
@@ -96,39 +87,54 @@ class OpenAiModel {
         runner.on('error', error => {
             console.error('Error in OpenAI chat completion stream:', error);
             runnerEnd = true;
-            resolve({ content: error.message });
+            resolve === null || resolve === void 0 ? void 0 : resolve({ content: error.message });
         });
         // we need to also listen for the emitted errors, as otherwise any error actually thrown by the API will not be caught
         runner.emitted('error').then(error => {
             console.error('Error in OpenAI chat completion stream:', error);
             runnerEnd = true;
-            resolve({ content: error.message });
+            resolve === null || resolve === void 0 ? void 0 : resolve({ content: error.message });
         });
         runner.emitted('abort').then(() => {
-            //
+            // cancel async iterator
+            runnerEnd = true;
         });
         runner.on('message', message => {
             if (message.role === 'tool') {
-                resolve({ tool_calls: [{ id: message.tool_call_id, finished: true, result: this.getCompletionContent(message) }] });
+                resolve === null || resolve === void 0 ? void 0 : resolve({ tool_calls: [{ id: message.tool_call_id, finished: true, result: this.getCompletionContent(message) }] });
             }
             console.debug('Received Open AI message', JSON.stringify(message));
         });
         runner.once('end', () => {
             runnerEnd = true;
             // eslint-disable-next-line @typescript-eslint/no-explicit-any
-            resolve(runner.finalChatCompletion);
+            resolve === null || resolve === void 0 ? void 0 : resolve(runner.finalChatCompletion);
         });
+        if (cancellationToken === null || cancellationToken === void 0 ? void 0 : cancellationToken.isCancellationRequested) {
+            return { text: '' };
+        }
         const asyncIterator = {
             async *[Symbol.asyncIterator]() {
                 runner.on('chunk', chunk => {
                     var _a, _b;
-                    if (
+                    if (cancellationToken === null || cancellationToken === void 0 ? void 0 : cancellationToken.isCancellationRequested) {
+                        resolve = undefined;
+                        return;
+                    }
+                    if (resolve && ((_a = chunk.choices[0]) === null || _a === void 0 ? void 0 : _a.delta)) {
                         resolve({ ...(_b = chunk.choices[0]) === null || _b === void 0 ? void 0 : _b.delta });
                     }
                 });
                 while (!runnerEnd) {
+                    if (cancellationToken === null || cancellationToken === void 0 ? void 0 : cancellationToken.isCancellationRequested) {
+                        throw new Error('Iterator canceled');
+                    }
                    const promise = new Promise((res, rej) => {
                        resolve = res;
+                        cancellationToken === null || cancellationToken === void 0 ? void 0 : cancellationToken.onCancellationRequested(() => {
+                            rej(new Error('Canceled'));
+                            runnerEnd = true; // Stop the iterator
+                        });
                    });
                    yield promise;
                }
@@ -141,7 +147,7 @@ class OpenAiModel {
         const settings = this.getSettings(request);
         const response = await openai.chat.completions.create({
             model: this.model,
-            messages: request.messages.map(toOpenAIMessage),
+            messages: request.messages.map(this.toOpenAIMessage.bind(this)),
             ...settings
         });
         const message = response.choices[0].message;
@@ -149,6 +155,22 @@ class OpenAiModel {
             text: (_a = message.content) !== null && _a !== void 0 ? _a : ''
         };
     }
+    toOpenAIMessage(message) {
+        return {
+            role: this.toOpenAiRole(message),
+            content: message.query || ''
+        };
+    }
+    toOpenAiRole(message) {
+        switch (message.actor) {
+            case 'system':
+                return this.supportsDeveloperMessage ? 'developer' : 'user';
+            case 'ai':
+                return 'assistant';
+            default:
+                return 'user';
+        }
+    }
     isNonStreamingModel(_model) {
         return !this.enableStreaming;
     }
@@ -166,7 +188,7 @@ class OpenAiModel {
         // TODO implement tool support for structured output (parse() seems to require different tool format)
         const result = await openai.beta.chat.completions.parse({
             model: this.model,
-            messages: request.messages.map(toOpenAIMessage),
+            messages: request.messages.map(this.toOpenAIMessage.bind(this)),
             response_format: request.response_format,
             ...settings
         });
@@ -202,8 +224,15 @@ class OpenAiModel {
         if (!apiKey && !(this.url)) {
             throw new Error('Please provide OPENAI_API_KEY in preferences or via environment variable');
         }
-
-
+        const apiVersion = this.apiVersion();
+        if (apiVersion) {
+            // We need to hand over "some" key, even if a custom url is not key protected as otherwise the OpenAI client will throw an error
+            return new openai_1.AzureOpenAI({ apiKey: apiKey !== null && apiKey !== void 0 ? apiKey : 'no-key', baseURL: this.url, apiVersion: apiVersion });
+        }
+        else {
+            // We need to hand over "some" key, even if a custom url is not key protected as otherwise the OpenAI client will throw an error
+            return new openai_1.OpenAI({ apiKey: apiKey !== null && apiKey !== void 0 ? apiKey : 'no-key', baseURL: this.url });
+        }
     }
 }
 exports.OpenAiModel = OpenAiModel;
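With the widened constructor, the API key and the API version are provider callbacks that are re-evaluated on every request, and `supportsDeveloperMessage` decides how system messages are mapped. A sketch of direct construction follows; the import path and all values are placeholders.

```ts
import { OpenAiModel } from '@theia/ai-openai/lib/node/openai-language-model';

const model = new OpenAiModel(
    'openai/azure-deployment',                   // id shown in the UI
    'gpt4o',                                     // model id used by the OpenAI API
    true,                                        // enableStreaming
    () => process.env.AZURE_OPENAI_API_KEY,      // apiKey, resolved per request
    () => process.env.AZURE_OPENAI_API_VERSION,  // apiVersion, resolved per request
    false,                                       // supportsDeveloperMessage
    'https://example.openai.azure.com/openai/deployments/gpt4o',
    { temperature: 0 }                           // defaultRequestSettings (optional)
);
// When apiVersion() returns a value, initializeOpenAi() builds an AzureOpenAI client;
// otherwise the plain OpenAI client is used.
```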
package/lib/node/openai-language-model.js.map
CHANGED
@@ -1 +1 @@
(generated source map; mappings updated for the changes above, full contents omitted)
package/lib/node/openai-language-models-manager-impl.d.ts
CHANGED
@@ -2,10 +2,13 @@ import { LanguageModelRegistry } from '@theia/ai-core';
 import { OpenAiLanguageModelsManager, OpenAiModelDescription } from '../common';
 export declare class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsManager {
     protected _apiKey: string | undefined;
+    protected _apiVersion: string | undefined;
     protected readonly languageModelRegistry: LanguageModelRegistry;
     get apiKey(): string | undefined;
+    get apiVersion(): string | undefined;
     createOrUpdateLanguageModels(...modelDescriptions: OpenAiModelDescription[]): Promise<void>;
     removeLanguageModels(...modelIds: string[]): void;
     setApiKey(apiKey: string | undefined): void;
+    setApiVersion(apiVersion: string | undefined): void;
 }
 //# sourceMappingURL=openai-language-models-manager-impl.d.ts.map
package/lib/node/openai-language-models-manager-impl.d.ts.map
CHANGED
@@ -1 +1 @@
(generated source map; mappings updated for the changes above, full contents omitted)
package/lib/node/openai-language-models-manager-impl.js
CHANGED
@@ -25,6 +25,10 @@ let OpenAiLanguageModelsManagerImpl = class OpenAiLanguageModelsManagerImpl {
         var _a;
         return (_a = this._apiKey) !== null && _a !== void 0 ? _a : process.env.OPENAI_API_KEY;
     }
+    get apiVersion() {
+        var _a;
+        return (_a = this._apiVersion) !== null && _a !== void 0 ? _a : process.env.OPENAI_API_VERSION;
+    }
     // Triggered from frontend. In case you want to use the models on the backend
     // without a frontend then call this yourself
     async createOrUpdateLanguageModels(...modelDescriptions) {
@@ -39,6 +43,15 @@ let OpenAiLanguageModelsManagerImpl = class OpenAiLanguageModelsManagerImpl {
                 }
                 return undefined;
             };
+            const apiVersionProvider = () => {
+                if (modelDescription.apiVersion === true) {
+                    return this.apiVersion;
+                }
+                if (modelDescription.apiVersion) {
+                    return modelDescription.apiVersion;
+                }
+                return undefined;
+            };
             if (model) {
                 if (!(model instanceof openai_language_model_1.OpenAiModel)) {
                     console.warn(`OpenAI: model ${modelDescription.id} is not an OpenAI model`);
@@ -48,11 +61,13 @@ let OpenAiLanguageModelsManagerImpl = class OpenAiLanguageModelsManagerImpl {
                 model.enableStreaming = modelDescription.enableStreaming;
                 model.url = modelDescription.url;
                 model.apiKey = apiKeyProvider;
+                model.apiVersion = apiVersionProvider;
+                model.supportsDeveloperMessage = modelDescription.supportsDeveloperMessage;
                 model.defaultRequestSettings = modelDescription.defaultRequestSettings;
             }
             else {
                 this.languageModelRegistry.addLanguageModels([
-                    new openai_language_model_1.OpenAiModel(modelDescription.id, modelDescription.model, modelDescription.enableStreaming, apiKeyProvider, modelDescription.url, modelDescription.defaultRequestSettings)
+                    new openai_language_model_1.OpenAiModel(modelDescription.id, modelDescription.model, modelDescription.enableStreaming, apiKeyProvider, apiVersionProvider, modelDescription.supportsDeveloperMessage, modelDescription.url, modelDescription.defaultRequestSettings)
                 ]);
             }
         }
@@ -68,6 +83,14 @@ let OpenAiLanguageModelsManagerImpl = class OpenAiLanguageModelsManagerImpl {
             this._apiKey = undefined;
         }
     }
+    setApiVersion(apiVersion) {
+        if (apiVersion) {
+            this._apiVersion = apiVersion;
+        }
+        else {
+            this._apiVersion = undefined;
+        }
+    }
 };
 exports.OpenAiLanguageModelsManagerImpl = OpenAiLanguageModelsManagerImpl;
 tslib_1.__decorate([
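As the comment in `createOrUpdateLanguageModels` points out, the manager can also be driven from backend code without a frontend. A sketch under the assumption that a manager instance has already been obtained from the backend container; ids, URL and environment variable names are placeholders.

```ts
import { OpenAiLanguageModelsManager } from '@theia/ai-openai/lib/common';

async function registerAzureModel(manager: OpenAiLanguageModelsManager): Promise<void> {
    manager.setApiKey(process.env.MY_AZURE_OPENAI_KEY);
    manager.setApiVersion(process.env.MY_AZURE_OPENAI_API_VERSION); // new in 1.58.0
    await manager.createOrUpdateLanguageModels({
        id: 'openai/azure-deployment',
        model: 'gpt4o',
        url: 'https://example.openai.azure.com/openai/deployments/gpt4o',
        apiKey: true,      // fall back to the key set above or OPENAI_API_KEY
        apiVersion: true,  // fall back to the version set above or OPENAI_API_VERSION
        supportsDeveloperMessage: false,
        enableStreaming: true
    });
}
```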
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"openai-language-models-manager-impl.js","sourceRoot":"","sources":["../../src/node/openai-language-models-manager-impl.ts"],"names":[],"mappings":";AAAA,gFAAgF;AAChF,yCAAyC;AACzC,EAAE;AACF,2EAA2E;AAC3E,mEAAmE;AACnE,wCAAwC;AACxC,EAAE;AACF,4EAA4E;AAC5E,8EAA8E;AAC9E,6EAA6E;AAC7E,yDAAyD;AACzD,uDAAuD;AACvD,EAAE;AACF,gFAAgF;AAChF,gFAAgF;;;;AAEhF,4CAAuD;AACvD,4DAAkE;AAClE,mEAAsD;AAI/C,IAAM,+BAA+B,GAArC,MAAM,+BAA+B;
+
{"version":3,"file":"openai-language-models-manager-impl.js","sourceRoot":"","sources":["../../src/node/openai-language-models-manager-impl.ts"],"names":[],"mappings":";AAAA,gFAAgF;AAChF,yCAAyC;AACzC,EAAE;AACF,2EAA2E;AAC3E,mEAAmE;AACnE,wCAAwC;AACxC,EAAE;AACF,4EAA4E;AAC5E,8EAA8E;AAC9E,6EAA6E;AAC7E,yDAAyD;AACzD,uDAAuD;AACvD,EAAE;AACF,gFAAgF;AAChF,gFAAgF;;;;AAEhF,4CAAuD;AACvD,4DAAkE;AAClE,mEAAsD;AAI/C,IAAM,+BAA+B,GAArC,MAAM,+BAA+B;IAQxC,IAAI,MAAM;;QACN,OAAO,MAAA,IAAI,CAAC,OAAO,mCAAI,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC;IACtD,CAAC;IAED,IAAI,UAAU;;QACV,OAAO,MAAA,IAAI,CAAC,WAAW,mCAAI,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC;IAC9D,CAAC;IAED,6EAA6E;IAC7E,6CAA6C;IAC7C,KAAK,CAAC,4BAA4B,CAAC,GAAG,iBAA2C;QAC7E,KAAK,MAAM,gBAAgB,IAAI,iBAAiB,EAAE,CAAC;YAC/C,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,qBAAqB,CAAC,gBAAgB,CAAC,gBAAgB,CAAC,EAAE,CAAC,CAAC;YACrF,MAAM,cAAc,GAAG,GAAG,EAAE;gBACxB,IAAI,gBAAgB,CAAC,MAAM,KAAK,IAAI,EAAE,CAAC;oBACnC,OAAO,IAAI,CAAC,MAAM,CAAC;gBACvB,CAAC;gBACD,IAAI,gBAAgB,CAAC,MAAM,EAAE,CAAC;oBAC1B,OAAO,gBAAgB,CAAC,MAAM,CAAC;gBACnC,CAAC;gBACD,OAAO,SAAS,CAAC;YACrB,CAAC,CAAC;YACF,MAAM,kBAAkB,GAAG,GAAG,EAAE;gBAC5B,IAAI,gBAAgB,CAAC,UAAU,KAAK,IAAI,EAAE,CAAC;oBACvC,OAAO,IAAI,CAAC,UAAU,CAAC;gBAC3B,CAAC;gBACD,IAAI,gBAAgB,CAAC,UAAU,EAAE,CAAC;oBAC9B,OAAO,gBAAgB,CAAC,UAAU,CAAC;gBACvC,CAAC;gBACD,OAAO,SAAS,CAAC;YACrB,CAAC,CAAC;YAEF,IAAI,KAAK,EAAE,CAAC;gBACR,IAAI,CAAC,CAAC,KAAK,YAAY,mCAAW,CAAC,EAAE,CAAC;oBAClC,OAAO,CAAC,IAAI,CAAC,iBAAiB,gBAAgB,CAAC,EAAE,yBAAyB,CAAC,CAAC;oBAC5E,SAAS;gBACb,CAAC;gBACD,KAAK,CAAC,KAAK,GAAG,gBAAgB,CAAC,KAAK,CAAC;gBACrC,KAAK,CAAC,eAAe,GAAG,gBAAgB,CAAC,eAAe,CAAC;gBACzD,KAAK,CAAC,GAAG,GAAG,gBAAgB,CAAC,GAAG,CAAC;gBACjC,KAAK,CAAC,MAAM,GAAG,cAAc,CAAC;gBAC9B,KAAK,CAAC,UAAU,GAAG,kBAAkB,CAAC;gBACtC,KAAK,CAAC,wBAAwB,GAAG,gBAAgB,CAAC,wBAAwB,CAAC;gBAC3E,KAAK,CAAC,sBAAsB,GAAG,gBAAgB,CAAC,sBAAsB,CAAC;YAC3E,CAAC;iBAAM,CAAC;gBACJ,IAAI,CAAC,qBAAqB,CAAC,iBAAiB,CAAC;oBACzC,IAAI,mCAAW,CACX,gBAAgB,CAAC,EAAE,EACnB,gBAAgB,CAAC,KAAK,EACtB,gBAAgB,CAAC,eAAe,EAChC,cAAc,EACd,kBAAkB,EAClB,gBAAgB,CAAC,wBAAwB,EACzC,gBAAgB,CAAC,GAAG,EACpB,gBAAgB,CAAC,sBAAsB,CAC1C;iBACJ,CAAC,CAAC;YACP,CAAC;QACL,CAAC;IACL,CAAC;IAED,oBAAoB,CAAC,GAAG,QAAkB;QACtC,IAAI,CAAC,qBAAqB,CAAC,oBAAoB,CAAC,QAAQ,CAAC,CAAC;IAC9D,CAAC;IAED,SAAS,CAAC,MAA0B;QAChC,IAAI,MAAM,EAAE,CAAC;YACT,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;QAC1B,CAAC;aAAM,CAAC;YACJ,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;QAC7B,CAAC;IACL,CAAC;IAED,aAAa,CAAC,UAA8B;QACxC,IAAI,UAAU,EAAE,CAAC;YACb,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;QAClC,CAAC;aAAM,CAAC;YACJ,IAAI,CAAC,WAAW,GAAG,SAAS,CAAC;QACjC,CAAC;IACL,CAAC;CACJ,CAAA;AAxFY,0EAA+B;AAMrB;IADlB,IAAA,kBAAM,EAAC,+BAAqB,CAAC;;8EACkC;0CANvD,+BAA+B;IAD3C,IAAA,sBAAU,GAAE;GACA,+BAA+B,CAwF3C"}
package/package.json
CHANGED
@@ -1,14 +1,14 @@
 {
   "name": "@theia/ai-openai",
-  "version": "1.
+  "version": "1.58.0",
   "description": "Theia - OpenAI Integration",
   "dependencies": {
-    "@theia/ai-core": "1.
-    "@theia/core": "1.
-    "@theia/filesystem": "1.
-    "@theia/workspace": "1.
+    "@theia/ai-core": "1.58.0",
+    "@theia/core": "1.58.0",
+    "@theia/filesystem": "1.58.0",
+    "@theia/workspace": "1.58.0",
     "minimatch": "^5.1.0",
-    "openai": "^4.
+    "openai": "^4.77.0",
     "tslib": "^2.6.2"
   },
   "publishConfig": {
@@ -45,10 +45,10 @@
     "watch": "theiaext watch"
   },
   "devDependencies": {
-    "@theia/ext-scripts": "1.
+    "@theia/ext-scripts": "1.58.0"
   },
   "nyc": {
     "extends": "../../configs/nyc.json"
   },
-  "gitHead": "
+  "gitHead": "6594f32b9727aea20d88934bf386dee06d08fa5e"
 }
package/src/browser/openai-frontend-application-contribution.ts
CHANGED
@@ -91,6 +91,8 @@ export class OpenAiFrontendApplicationContribution implements FrontendApplicatio
                 model.model === newModel.model &&
                 model.url === newModel.url &&
                 model.apiKey === newModel.apiKey &&
+                model.apiVersion === newModel.apiVersion &&
+                model.supportsDeveloperMessage === newModel.supportsDeveloperMessage &&
                 model.enableStreaming === newModel.enableStreaming));

         this.manager.removeLanguageModels(...modelsToRemove.map(model => model.id));
@@ -113,6 +115,8 @@ export class OpenAiFrontendApplicationContribution implements FrontendApplicatio
             id: id,
             model: modelId,
             apiKey: true,
+            apiVersion: true,
+            supportsDeveloperMessage: !openAIModelsSupportingDeveloperMessages.includes(modelId),
             enableStreaming: !openAIModelsWithDisabledStreaming.includes(modelId),
             defaultRequestSettings: modelRequestSetting?.requestSettings
         };
@@ -136,6 +140,8 @@ export class OpenAiFrontendApplicationContribution implements FrontendApplicatio
             model: pref.model,
             url: pref.url,
             apiKey: typeof pref.apiKey === 'string' || pref.apiKey === true ? pref.apiKey : undefined,
+            apiVersion: typeof pref.apiVersion === 'string' || pref.apiVersion === true ? pref.apiVersion : undefined,
+            supportsDeveloperMessage: pref.supportsDeveloperMessage ?? true,
             enableStreaming: pref.enableStreaming ?? true,
             defaultRequestSettings: modelRequestSetting?.requestSettings
         }
@@ -159,4 +165,5 @@ export class OpenAiFrontendApplicationContribution implements FrontendApplicatio
     }
 }

-const openAIModelsWithDisabledStreaming = ['o1-preview'];
+const openAIModelsWithDisabledStreaming = ['o1-preview', 'o1-mini'];
+const openAIModelsSupportingDeveloperMessages = ['o1-preview', 'o1-mini'];
package/src/browser/openai-preferences.ts
CHANGED
@@ -34,7 +34,7 @@ export const OpenAiPreferencesSchema: PreferenceSchema = {
         type: 'array',
         description: 'Official OpenAI models to use',
         title: AI_CORE_PREFERENCES_TITLE,
-        default: ['gpt-4o', 'gpt-4o-2024-08-06', 'gpt-4o-2024-05-13', 'gpt-4o-mini', 'gpt-4-turbo', 'gpt-4', 'gpt-3.5-turbo', 'o1-preview'],
+        default: ['gpt-4o', 'gpt-4o-2024-08-06', 'gpt-4o-2024-05-13', 'gpt-4o-mini', 'gpt-4-turbo', 'gpt-4', 'gpt-3.5-turbo', 'o1-preview', 'o1-mini'],
         items: {
             type: 'string'
         }
@@ -50,6 +50,10 @@ export const OpenAiPreferencesSchema: PreferenceSchema = {
             \n\
             - provide an `apiKey` to access the API served at the given url. Use `true` to indicate the use of the global OpenAI API key.\
             \n\
+            - provide an `apiVersion` to access the API served at the given url in Azure. Use `true` to indicate the use of the global OpenAI API version.\
+            \n\
+            - specify `supportsDeveloperMessage: false` to indicate that the developer role shall not be used.\
+            \n\
             - specify `enableStreaming: false` to indicate that streaming shall not be used.\
             \n\
             Refer to [our documentation](https://theia-ide.org/docs/user_ai/#openai-compatible-models-eg-via-vllm) for more information.',
@@ -73,6 +77,14 @@ export const OpenAiPreferencesSchema: PreferenceSchema = {
                     type: ['string', 'boolean'],
                     title: 'Either the key to access the API served at the given url or `true` to use the global OpenAI API key',
                 },
+                apiVersion: {
+                    type: ['string', 'boolean'],
+                    title: 'Either the version to access the API served at the given url in Azure or `true` to use the global OpenAI API version',
+                },
+                supportsDeveloperMessage: {
+                    type: 'boolean',
+                    title: 'Indicates whether the model supports the `developer` role. `true` by default.',
+                },
                 enableStreaming: {
                     type: 'boolean',
                     title: 'Indicates whether the streaming API shall be used. `true` by default.',
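For orientation, a single entry of the `ai-features.openAiCustom.customOpenAiModels` setting that exercises the new schema fields could look roughly like the sketch below. Every concrete value (endpoint, model name, id, api version) is an illustrative placeholder, not something shipped with this package.

```ts
// Minimal sketch of one custom model entry matching the extended preference schema above.
// All literal values are placeholders.
const customAzureEntry = {
    model: 'gpt-4o',                             // deployment / model name
    url: 'https://my-resource.openai.azure.com', // placeholder endpoint
    id: 'azure-gpt-4o',                          // optional identifier for this configuration
    apiKey: true,                                // reuse the global OpenAI API key
    apiVersion: '2024-10-21',                    // explicit Azure api-version; `true` reuses the global one
    supportsDeveloperMessage: true,              // set to false for models without the `developer` role
    enableStreaming: true                        // set to false to force non-streaming requests
};
```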
package/src/common/openai-language-models-manager.ts
CHANGED
@@ -32,10 +32,18 @@ export interface OpenAiModelDescription {
     * The key for the model. If 'true' is provided the global OpenAI API key will be used.
     */
    apiKey: string | true | undefined;
+    /**
+     * The version for the api. If 'true' is provided the global OpenAI version will be used.
+     */
+    apiVersion: string | true | undefined;
    /**
     * Indicate whether the streaming API shall be used.
     */
    enableStreaming: boolean;
+    /**
+     * Flag to configure whether the OpenAPI model supports the `developer` role. Default is `true`.
+     */
+    supportsDeveloperMessage: boolean;
    /**
     * Default request settings for the OpenAI model.
     */
@@ -44,6 +52,7 @@ export interface OpenAiModelDescription {
 export interface OpenAiLanguageModelsManager {
     apiKey: string | undefined;
     setApiKey(key: string | undefined): void;
+    setApiVersion(version: string | undefined): void;
     createOrUpdateLanguageModels(...models: OpenAiModelDescription[]): Promise<void>;
     removeLanguageModels(...modelIds: string[]): void
 }
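To make the semantics of the new `apiVersion` field concrete, here is a small sketch of two `OpenAiModelDescription` values: one pinning an explicit Azure api-version for a single model, one deferring to the global version (preference or `OPENAI_API_VERSION`). Ids, URLs, and version strings are placeholders, and the import path assumes a consumer living inside this package's `src/node` folder.

```ts
import { OpenAiModelDescription } from '../common/openai-language-models-manager';

// Pin an explicit Azure api-version for this model only (placeholder values).
const pinnedVersion: OpenAiModelDescription = {
    id: 'azure-gpt-4o-pinned',
    model: 'gpt-4o',
    url: 'https://example.openai.azure.com',
    apiKey: true,              // resolve through the global OpenAI API key
    apiVersion: '2024-10-21',  // explicit api-version string for this model
    supportsDeveloperMessage: true,
    enableStreaming: true
};

// Defer to the globally configured api version instead.
const globalVersion: OpenAiModelDescription = {
    ...pinnedVersion,
    id: 'azure-gpt-4o-global',
    apiVersion: true           // `true` means: use the global OpenAI API version
};
```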
package/src/node/openai-backend-module.ts
CHANGED
@@ -18,13 +18,19 @@ import { ContainerModule } from '@theia/core/shared/inversify';
 import { OPENAI_LANGUAGE_MODELS_MANAGER_PATH, OpenAiLanguageModelsManager } from '../common/openai-language-models-manager';
 import { ConnectionHandler, RpcConnectionHandler } from '@theia/core';
 import { OpenAiLanguageModelsManagerImpl } from './openai-language-models-manager-impl';
+import { ConnectionContainerModule } from '@theia/core/lib/node/messaging/connection-container-module';

 export const OpenAiModelFactory = Symbol('OpenAiModelFactory');

-export default new ContainerModule(bind => {
+// We use a connection module to handle AI services separately for each frontend.
+const openAiConnectionModule = ConnectionContainerModule.create(({ bind, bindBackendService, bindFrontendService }) => {
     bind(OpenAiLanguageModelsManagerImpl).toSelf().inSingletonScope();
     bind(OpenAiLanguageModelsManager).toService(OpenAiLanguageModelsManagerImpl);
     bind(ConnectionHandler).toDynamicValue(ctx =>
         new RpcConnectionHandler(OPENAI_LANGUAGE_MODELS_MANAGER_PATH, () => ctx.container.get(OpenAiLanguageModelsManager))
     ).inSingletonScope();
 });
+
+export default new ContainerModule(bind => {
+    bind(ConnectionContainerModule).toConstantValue(openAiConnectionModule);
+});
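The switch from a plain `ContainerModule` to a `ConnectionContainerModule` scopes the manager bindings to an individual frontend connection, matching the comment in the diff. The sketch below shows the same wiring pattern applied to a hypothetical service; `MyService`, `MyServiceImpl`, and `MY_SERVICE_PATH` are illustrative names, not part of this package.

```ts
import { ContainerModule } from '@theia/core/shared/inversify';
import { ConnectionHandler, RpcConnectionHandler } from '@theia/core';
import { ConnectionContainerModule } from '@theia/core/lib/node/messaging/connection-container-module';
// Hypothetical service symbols and implementation, for illustration only.
import { MyService, MY_SERVICE_PATH } from '../common/my-service';
import { MyServiceImpl } from './my-service-impl';

// Bindings made here are created per frontend connection rather than once per backend process.
const myConnectionModule = ConnectionContainerModule.create(({ bind }) => {
    bind(MyServiceImpl).toSelf().inSingletonScope();
    bind(MyService).toService(MyServiceImpl);
    bind(ConnectionHandler).toDynamicValue(ctx =>
        new RpcConnectionHandler(MY_SERVICE_PATH, () => ctx.container.get(MyService))
    ).inSingletonScope();
});

// The regular backend module only registers the connection module.
export default new ContainerModule(bind => {
    bind(ConnectionContainerModule).toConstantValue(myConnectionModule);
});
```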
package/src/node/openai-language-model.ts
CHANGED
@@ -24,31 +24,13 @@ import {
     LanguageModelTextResponse
 } from '@theia/ai-core';
 import { CancellationToken } from '@theia/core';
-import OpenAI from 'openai';
+import { OpenAI, AzureOpenAI } from 'openai';
 import { ChatCompletionStream } from 'openai/lib/ChatCompletionStream';
 import { RunnableToolFunctionWithoutParse } from 'openai/lib/RunnableFunction';
 import { ChatCompletionMessageParam } from 'openai/resources';

 export const OpenAiModelIdentifier = Symbol('OpenAiModelIdentifier');

-function toOpenAIMessage(message: LanguageModelRequestMessage): ChatCompletionMessageParam {
-    return {
-        role: toOpenAiRole(message),
-        content: message.query || ''
-    };
-}
-
-function toOpenAiRole(message: LanguageModelRequestMessage): 'system' | 'user' | 'assistant' {
-    switch (message.actor) {
-        case 'system':
-            return 'system';
-        case 'ai':
-            return 'assistant';
-        default:
-            return 'user';
-    }
-}
-
 export class OpenAiModel implements LanguageModel {

     /**
@@ -56,6 +38,8 @@ export class OpenAiModel implements LanguageModel {
     * @param model the model id as it is used by the OpenAI API
     * @param enableStreaming whether the streaming API shall be used
     * @param apiKey a function that returns the API key to use for this model, called on each request
+    * @param apiVersion a function that returns the OpenAPI version to use for this model, called on each request
+    * @param supportsDeveloperMessage whether the model supports the `developer` role
     * @param url the OpenAI API compatible endpoint where the model is hosted. If not provided the default OpenAI endpoint will be used.
     * @param defaultRequestSettings optional default settings for requests made using this model.
     */
@@ -64,6 +48,8 @@ export class OpenAiModel implements LanguageModel {
         public model: string,
         public enableStreaming: boolean,
         public apiKey: () => string | undefined,
+        public apiVersion: () => string | undefined,
+        public supportsDeveloperMessage: boolean,
         public url: string | undefined,
         public defaultRequestSettings?: { [key: string]: unknown }
     ) { }
@@ -80,20 +66,23 @@ export class OpenAiModel implements LanguageModel {
         const settings = this.getSettings(request);
         const openai = this.initializeOpenAi();

-        if (this.isNonStreamingModel(this.model)) {
+        if (this.isNonStreamingModel(this.model) || (typeof settings.stream === 'boolean' && !settings.stream)) {
             return this.handleNonStreamingRequest(openai, request);
         }

         if (request.response_format?.type === 'json_schema' && this.supportsStructuredOutput()) {
             return this.handleStructuredOutputRequest(openai, request);
         }
+        if (cancellationToken?.isCancellationRequested) {
+            return { text: '' };
+        }

         let runner: ChatCompletionStream;
         const tools = this.createTools(request);
         if (tools) {
             runner = openai.beta.chat.completions.runTools({
                 model: this.model,
-                messages: request.messages.map(toOpenAIMessage),
+                messages: request.messages.map(this.toOpenAIMessage.bind(this)),
                 stream: true,
                 tools: tools,
                 tool_choice: 'auto',
@@ -102,7 +91,7 @@ export class OpenAiModel implements LanguageModel {
         } else {
             runner = openai.beta.chat.completions.stream({
                 model: this.model,
-                messages: request.messages.map(toOpenAIMessage),
+                messages: request.messages.map(this.toOpenAIMessage.bind(this)),
                 stream: true,
                 ...settings
             });
@@ -113,42 +102,57 @@ export class OpenAiModel implements LanguageModel {

         let runnerEnd = false;

-        let resolve: (part: LanguageModelStreamResponsePart) => void;
+        let resolve: ((part: LanguageModelStreamResponsePart) => void) | undefined;
         runner.on('error', error => {
             console.error('Error in OpenAI chat completion stream:', error);
             runnerEnd = true;
-            resolve({ content: error.message });
+            resolve?.({ content: error.message });
         });
         // we need to also listen for the emitted errors, as otherwise any error actually thrown by the API will not be caught
         runner.emitted('error').then(error => {
             console.error('Error in OpenAI chat completion stream:', error);
             runnerEnd = true;
-            resolve({ content: error.message });
+            resolve?.({ content: error.message });
         });
         runner.emitted('abort').then(() => {
-            //
+            // cancel async iterator
+            runnerEnd = true;
         });
         runner.on('message', message => {
             if (message.role === 'tool') {
-                resolve({ tool_calls: [{ id: message.tool_call_id, finished: true, result: this.getCompletionContent(message) }] });
+                resolve?.({ tool_calls: [{ id: message.tool_call_id, finished: true, result: this.getCompletionContent(message) }] });
             }
             console.debug('Received Open AI message', JSON.stringify(message));
         });
         runner.once('end', () => {
             runnerEnd = true;
             // eslint-disable-next-line @typescript-eslint/no-explicit-any
-            resolve(runner.finalChatCompletion as any);
+            resolve?.(runner.finalChatCompletion as any);
         });
+        if (cancellationToken?.isCancellationRequested) {
+            return { text: '' };
+        }
         const asyncIterator = {
             async *[Symbol.asyncIterator](): AsyncIterator<LanguageModelStreamResponsePart> {
                 runner.on('chunk', chunk => {
-                    if (
+                    if (cancellationToken?.isCancellationRequested) {
+                        resolve = undefined;
+                        return;
+                    }
+                    if (resolve && chunk.choices[0]?.delta) {
                         resolve({ ...chunk.choices[0]?.delta });
                     }
                 });
                 while (!runnerEnd) {
+                    if (cancellationToken?.isCancellationRequested) {
+                        throw new Error('Iterator canceled');
+                    }
                     const promise = new Promise<LanguageModelStreamResponsePart>((res, rej) => {
                         resolve = res;
+                        cancellationToken?.onCancellationRequested(() => {
+                            rej(new Error('Canceled'));
+                            runnerEnd = true; // Stop the iterator
+                        });
                     });
                     yield promise;
                 }
@@ -161,7 +165,7 @@ export class OpenAiModel implements LanguageModel {
         const settings = this.getSettings(request);
         const response = await openai.chat.completions.create({
             model: this.model,
-            messages: request.messages.map(toOpenAIMessage),
+            messages: request.messages.map(this.toOpenAIMessage.bind(this)),
             ...settings
         });

@@ -172,6 +176,24 @@ export class OpenAiModel implements LanguageModel {
         };
     }

+    protected toOpenAIMessage(message: LanguageModelRequestMessage): ChatCompletionMessageParam {
+        return {
+            role: this.toOpenAiRole(message),
+            content: message.query || ''
+        };
+    }
+
+    protected toOpenAiRole(message: LanguageModelRequestMessage): 'developer' | 'user' | 'assistant' {
+        switch (message.actor) {
+            case 'system':
+                return this.supportsDeveloperMessage ? 'developer' : 'user';
+            case 'ai':
+                return 'assistant';
+            default:
+                return 'user';
+        }
+    }
+
     protected isNonStreamingModel(_model: string): boolean {
         return !this.enableStreaming;
     }
@@ -190,7 +212,7 @@ export class OpenAiModel implements LanguageModel {
         // TODO implement tool support for structured output (parse() seems to require different tool format)
         const result = await openai.beta.chat.completions.parse({
             model: this.model,
-            messages: request.messages.map(toOpenAIMessage),
+            messages: request.messages.map(this.toOpenAIMessage.bind(this)),
             response_format: request.response_format,
             ...settings
         });
@@ -228,7 +250,14 @@ export class OpenAiModel implements LanguageModel {
         if (!apiKey && !(this.url)) {
             throw new Error('Please provide OPENAI_API_KEY in preferences or via environment variable');
         }
-        // We need to hand over "some" key, even if a custom url is not key protected as otherwise the OpenAI client will throw an error
-        return new OpenAI({ apiKey: apiKey ?? 'no-key', baseURL: this.url });
+
+        const apiVersion = this.apiVersion();
+        if (apiVersion) {
+            // We need to hand over "some" key, even if a custom url is not key protected as otherwise the OpenAI client will throw an error
+            return new AzureOpenAI({ apiKey: apiKey ?? 'no-key', baseURL: this.url, apiVersion: apiVersion });
+        } else {
+            // We need to hand over "some" key, even if a custom url is not key protected as otherwise the OpenAI client will throw an error
+            return new OpenAI({ apiKey: apiKey ?? 'no-key', baseURL: this.url });
+        }
     }
 }
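Because `apiKey` and `apiVersion` are both passed as provider functions, they are re-read on every request, and `initializeOpenAi` switches to the `AzureOpenAI` client whenever the version provider returns a value. Below is a rough sketch of constructing the model directly (normally the manager does this); all literal values are placeholders.

```ts
import { OpenAiModel } from './openai-language-model';

// Constructor order as introduced in this version:
// (id, model, enableStreaming, apiKey, apiVersion, supportsDeveloperMessage, url, defaultRequestSettings)
const azureModel = new OpenAiModel(
    'azure-gpt-4o',                       // placeholder id
    'gpt-4o',                             // placeholder deployment / model name
    true,                                 // enableStreaming
    () => process.env.OPENAI_API_KEY,     // apiKey provider, evaluated on each request
    () => process.env.OPENAI_API_VERSION, // apiVersion provider; a defined value selects AzureOpenAI
    true,                                 // supportsDeveloperMessage: map 'system' messages to the 'developer' role
    'https://example.openai.azure.com',   // placeholder endpoint
    { temperature: 0 }                    // optional defaultRequestSettings
);
```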
package/src/node/openai-language-models-manager-impl.ts
CHANGED
@@ -23,6 +23,7 @@ import { OpenAiLanguageModelsManager, OpenAiModelDescription } from '../common';
 export class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsManager {

     protected _apiKey: string | undefined;
+    protected _apiVersion: string | undefined;

     @inject(LanguageModelRegistry)
     protected readonly languageModelRegistry: LanguageModelRegistry;
@@ -31,6 +32,10 @@ export class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsMana
         return this._apiKey ?? process.env.OPENAI_API_KEY;
     }

+    get apiVersion(): string | undefined {
+        return this._apiVersion ?? process.env.OPENAI_API_VERSION;
+    }
+
     // Triggered from frontend. In case you want to use the models on the backend
     // without a frontend then call this yourself
     async createOrUpdateLanguageModels(...modelDescriptions: OpenAiModelDescription[]): Promise<void> {
@@ -45,6 +50,15 @@ export class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsMana
                 }
                 return undefined;
             };
+            const apiVersionProvider = () => {
+                if (modelDescription.apiVersion === true) {
+                    return this.apiVersion;
+                }
+                if (modelDescription.apiVersion) {
+                    return modelDescription.apiVersion;
+                }
+                return undefined;
+            };

             if (model) {
                 if (!(model instanceof OpenAiModel)) {
@@ -55,6 +69,8 @@ export class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsMana
                 model.enableStreaming = modelDescription.enableStreaming;
                 model.url = modelDescription.url;
                 model.apiKey = apiKeyProvider;
+                model.apiVersion = apiVersionProvider;
+                model.supportsDeveloperMessage = modelDescription.supportsDeveloperMessage;
                 model.defaultRequestSettings = modelDescription.defaultRequestSettings;
             } else {
                 this.languageModelRegistry.addLanguageModels([
@@ -63,6 +79,8 @@ export class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsMana
                         modelDescription.model,
                         modelDescription.enableStreaming,
                         apiKeyProvider,
+                        apiVersionProvider,
+                        modelDescription.supportsDeveloperMessage,
                         modelDescription.url,
                         modelDescription.defaultRequestSettings
                     )
@@ -82,4 +100,12 @@ export class OpenAiLanguageModelsManagerImpl implements OpenAiLanguageModelsMana
             this._apiKey = undefined;
         }
     }
+
+    setApiVersion(apiVersion: string | undefined): void {
+        if (apiVersion) {
+            this._apiVersion = apiVersion;
+        } else {
+            this._apiVersion = undefined;
+        }
+    }
 }
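Putting the new manager API together: `setApiVersion` feeds the `apiVersion` getter (which otherwise falls back to `OPENAI_API_VERSION`), and descriptions declared with `apiVersion: true` resolve through it on every request. A rough usage sketch follows; the manager would normally be obtained via dependency injection, and all literal values are placeholders.

```ts
import { OpenAiLanguageModelsManagerImpl } from './openai-language-models-manager-impl';

async function configureAzureModel(manager: OpenAiLanguageModelsManagerImpl): Promise<void> {
    // Overrides the OPENAI_API_VERSION fallback for all models declared with `apiVersion: true`.
    manager.setApiVersion('2024-10-21'); // placeholder version string

    await manager.createOrUpdateLanguageModels({
        id: 'azure-gpt-4o',                      // placeholder id
        model: 'gpt-4o',                         // placeholder deployment name
        url: 'https://example.openai.azure.com', // placeholder endpoint
        apiKey: true,                            // resolved via the manager's apiKey getter per request
        apiVersion: true,                        // resolved via the new apiVersion getter per request
        supportsDeveloperMessage: true,
        enableStreaming: true
    });
}
```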