@aigne/openai 0.11.2 → 0.11.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md
CHANGED
|
@@ -1,5 +1,21 @@
|
|
|
1
1
|
# Changelog
|
|
2
2
|
|
|
3
|
+
## [0.11.3](https://github.com/AIGNE-io/aigne-framework/compare/openai-v0.11.2...openai-v0.11.3) (2025-08-16)
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
### Bug Fixes
|
|
7
|
+
|
|
8
|
+
* **core:** make getCredential async for aigne-hub mount point retrieval ([#372](https://github.com/AIGNE-io/aigne-framework/issues/372)) ([34ce7a6](https://github.com/AIGNE-io/aigne-framework/commit/34ce7a645fa83994d3dfe0f29ca70098cfecac9c))
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
### Dependencies
|
|
12
|
+
|
|
13
|
+
* The following workspace dependencies were updated
|
|
14
|
+
* dependencies
|
|
15
|
+
* @aigne/core bumped to 1.50.1
|
|
16
|
+
* devDependencies
|
|
17
|
+
* @aigne/test-utils bumped to 0.5.29
|
|
18
|
+
|
|
3
19
|
## [0.11.2](https://github.com/AIGNE-io/aigne-framework/compare/openai-v0.11.1...openai-v0.11.2) (2025-08-14)
|
|
4
20
|
|
|
5
21
|
|
|
@@ -131,12 +131,12 @@ export declare class OpenAIChatModel extends ChatModel {
|
|
|
131
131
|
protected supportsToolsEmptyParameters: boolean;
|
|
132
132
|
protected supportsToolStreaming: boolean;
|
|
133
133
|
protected supportsTemperature: boolean;
|
|
134
|
-
|
|
135
|
-
getCredential(): {
|
|
134
|
+
client(): Promise<OpenAI>;
|
|
135
|
+
getCredential(): Promise<{
|
|
136
136
|
url: string | undefined;
|
|
137
137
|
apiKey: string | undefined;
|
|
138
138
|
model: string;
|
|
139
|
-
}
|
|
139
|
+
}>;
|
|
140
140
|
get modelOptions(): ChatModelOptions | undefined;
|
|
141
141
|
/**
|
|
142
142
|
* Process the input and generate a response
|
|
@@ -81,8 +81,8 @@ class OpenAIChatModel extends core_1.ChatModel {
|
|
|
81
81
|
supportsToolsEmptyParameters = true;
|
|
82
82
|
supportsToolStreaming = true;
|
|
83
83
|
supportsTemperature = true;
|
|
84
|
-
|
|
85
|
-
const { apiKey, url } = this.getCredential();
|
|
84
|
+
async client() {
|
|
85
|
+
const { apiKey, url } = await this.getCredential();
|
|
86
86
|
if (!apiKey)
|
|
87
87
|
throw new Error(`${this.name} requires an API key. Please provide it via \`options.apiKey\`, or set the \`${this.apiKeyEnvName}\` environment variable`);
|
|
88
88
|
this._client ??= new CustomOpenAI({
|
|
@@ -92,7 +92,7 @@ class OpenAIChatModel extends core_1.ChatModel {
|
|
|
92
92
|
});
|
|
93
93
|
return this._client;
|
|
94
94
|
}
|
|
95
|
-
getCredential() {
|
|
95
|
+
async getCredential() {
|
|
96
96
|
return {
|
|
97
97
|
url: this.options?.baseURL || process.env.OPENAI_BASE_URL,
|
|
98
98
|
apiKey: this.options?.apiKey || process.env[this.apiKeyEnvName] || this.apiKeyDefault,
|
|
@@ -113,7 +113,7 @@ class OpenAIChatModel extends core_1.ChatModel {
|
|
|
113
113
|
ajv = new ajv_1.Ajv();
|
|
114
114
|
async _process(input) {
|
|
115
115
|
const messages = await this.getRunMessages(input);
|
|
116
|
-
const { model } = this.getCredential();
|
|
116
|
+
const { model } = await this.getCredential();
|
|
117
117
|
const body = {
|
|
118
118
|
model,
|
|
119
119
|
temperature: this.supportsTemperature
|
|
@@ -134,7 +134,8 @@ class OpenAIChatModel extends core_1.ChatModel {
|
|
|
134
134
|
return await this.requestStructuredOutput(body, input.responseFormat);
|
|
135
135
|
}
|
|
136
136
|
const { jsonMode, responseFormat } = await this.getRunResponseFormat(input);
|
|
137
|
-
const stream = (await this.client.chat.completions.create({
|
|
137
|
+
const client = await this.client();
|
|
138
|
+
const stream = (await client.chat.completions.create({
|
|
138
139
|
...body,
|
|
139
140
|
tools: toolsFromInputTools(input.tools, {
|
|
140
141
|
addTypeToEmptyParameters: !this.supportsToolsEmptyParameters,
|
|
@@ -213,7 +214,8 @@ class OpenAIChatModel extends core_1.ChatModel {
|
|
|
213
214
|
const { jsonMode, responseFormat: resolvedResponseFormat } = await this.getRunResponseFormat({
|
|
214
215
|
responseFormat,
|
|
215
216
|
});
|
|
216
|
-
const res = (await this.client.chat.completions.create({
|
|
217
|
+
const client = await this.client();
|
|
218
|
+
const res = (await client.chat.completions.create({
|
|
217
219
|
...body,
|
|
218
220
|
response_format: resolvedResponseFormat,
|
|
219
221
|
}));
|
|
@@ -131,12 +131,12 @@ export declare class OpenAIChatModel extends ChatModel {
|
|
|
131
131
|
protected supportsToolsEmptyParameters: boolean;
|
|
132
132
|
protected supportsToolStreaming: boolean;
|
|
133
133
|
protected supportsTemperature: boolean;
|
|
134
|
-
|
|
135
|
-
getCredential(): {
|
|
134
|
+
client(): Promise<OpenAI>;
|
|
135
|
+
getCredential(): Promise<{
|
|
136
136
|
url: string | undefined;
|
|
137
137
|
apiKey: string | undefined;
|
|
138
138
|
model: string;
|
|
139
|
-
}
|
|
139
|
+
}>;
|
|
140
140
|
get modelOptions(): ChatModelOptions | undefined;
|
|
141
141
|
/**
|
|
142
142
|
* Process the input and generate a response
|
|
@@ -131,12 +131,12 @@ export declare class OpenAIChatModel extends ChatModel {
|
|
|
131
131
|
protected supportsToolsEmptyParameters: boolean;
|
|
132
132
|
protected supportsToolStreaming: boolean;
|
|
133
133
|
protected supportsTemperature: boolean;
|
|
134
|
-
|
|
135
|
-
getCredential(): {
|
|
134
|
+
client(): Promise<OpenAI>;
|
|
135
|
+
getCredential(): Promise<{
|
|
136
136
|
url: string | undefined;
|
|
137
137
|
apiKey: string | undefined;
|
|
138
138
|
model: string;
|
|
139
|
-
}
|
|
139
|
+
}>;
|
|
140
140
|
get modelOptions(): ChatModelOptions | undefined;
|
|
141
141
|
/**
|
|
142
142
|
* Process the input and generate a response
|
|
@@ -72,8 +72,8 @@ export class OpenAIChatModel extends ChatModel {
|
|
|
72
72
|
supportsToolsEmptyParameters = true;
|
|
73
73
|
supportsToolStreaming = true;
|
|
74
74
|
supportsTemperature = true;
|
|
75
|
-
|
|
76
|
-
const { apiKey, url } = this.getCredential();
|
|
75
|
+
async client() {
|
|
76
|
+
const { apiKey, url } = await this.getCredential();
|
|
77
77
|
if (!apiKey)
|
|
78
78
|
throw new Error(`${this.name} requires an API key. Please provide it via \`options.apiKey\`, or set the \`${this.apiKeyEnvName}\` environment variable`);
|
|
79
79
|
this._client ??= new CustomOpenAI({
|
|
@@ -83,7 +83,7 @@ export class OpenAIChatModel extends ChatModel {
|
|
|
83
83
|
});
|
|
84
84
|
return this._client;
|
|
85
85
|
}
|
|
86
|
-
getCredential() {
|
|
86
|
+
async getCredential() {
|
|
87
87
|
return {
|
|
88
88
|
url: this.options?.baseURL || process.env.OPENAI_BASE_URL,
|
|
89
89
|
apiKey: this.options?.apiKey || process.env[this.apiKeyEnvName] || this.apiKeyDefault,
|
|
@@ -104,7 +104,7 @@ export class OpenAIChatModel extends ChatModel {
|
|
|
104
104
|
ajv = new Ajv();
|
|
105
105
|
async _process(input) {
|
|
106
106
|
const messages = await this.getRunMessages(input);
|
|
107
|
-
const { model } = this.getCredential();
|
|
107
|
+
const { model } = await this.getCredential();
|
|
108
108
|
const body = {
|
|
109
109
|
model,
|
|
110
110
|
temperature: this.supportsTemperature
|
|
@@ -125,7 +125,8 @@ export class OpenAIChatModel extends ChatModel {
|
|
|
125
125
|
return await this.requestStructuredOutput(body, input.responseFormat);
|
|
126
126
|
}
|
|
127
127
|
const { jsonMode, responseFormat } = await this.getRunResponseFormat(input);
|
|
128
|
-
const stream = (await this.client.chat.completions.create({
|
|
128
|
+
const client = await this.client();
|
|
129
|
+
const stream = (await client.chat.completions.create({
|
|
129
130
|
...body,
|
|
130
131
|
tools: toolsFromInputTools(input.tools, {
|
|
131
132
|
addTypeToEmptyParameters: !this.supportsToolsEmptyParameters,
|
|
@@ -204,7 +205,8 @@ export class OpenAIChatModel extends ChatModel {
|
|
|
204
205
|
const { jsonMode, responseFormat: resolvedResponseFormat } = await this.getRunResponseFormat({
|
|
205
206
|
responseFormat,
|
|
206
207
|
});
|
|
207
|
-
const res = (await this.client.chat.completions.create({
|
|
208
|
+
const client = await this.client();
|
|
209
|
+
const res = (await client.chat.completions.create({
|
|
208
210
|
...body,
|
|
209
211
|
response_format: resolvedResponseFormat,
|
|
210
212
|
}));
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@aigne/openai",
|
|
3
|
-
"version": "0.11.
|
|
3
|
+
"version": "0.11.3",
|
|
4
4
|
"description": "AIGNE OpenAI SDK for integrating with OpenAI's GPT models and API services",
|
|
5
5
|
"publishConfig": {
|
|
6
6
|
"access": "public"
|
|
@@ -39,7 +39,7 @@
|
|
|
39
39
|
"openai": "^5.8.3",
|
|
40
40
|
"uuid": "^11.1.0",
|
|
41
41
|
"zod": "^3.25.67",
|
|
42
|
-
"@aigne/core": "^1.50.
|
|
42
|
+
"@aigne/core": "^1.50.1"
|
|
43
43
|
},
|
|
44
44
|
"devDependencies": {
|
|
45
45
|
"@types/bun": "^1.2.18",
|
|
@@ -47,7 +47,7 @@
|
|
|
47
47
|
"npm-run-all": "^4.1.5",
|
|
48
48
|
"rimraf": "^6.0.1",
|
|
49
49
|
"typescript": "^5.8.3",
|
|
50
|
-
"@aigne/test-utils": "^0.5.
|
|
50
|
+
"@aigne/test-utils": "^0.5.29"
|
|
51
51
|
},
|
|
52
52
|
"scripts": {
|
|
53
53
|
"lint": "tsc --noEmit",
|