@aigne/openai 0.11.0 → 0.11.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,32 @@
1
1
  # Changelog
2
2
 
3
+ ## [0.11.2](https://github.com/AIGNE-io/aigne-framework/compare/openai-v0.11.1...openai-v0.11.2) (2025-08-14)
4
+
5
+
6
+ ### Bug Fixes
7
+
8
+ * **cli:** log only once in loadAIGNE ([#357](https://github.com/AIGNE-io/aigne-framework/issues/357)) ([6e6d968](https://github.com/AIGNE-io/aigne-framework/commit/6e6d96814fbc87f210522ae16daf94c1f84f311a))
9
+
10
+
11
+ ### Dependencies
12
+
13
+ * The following workspace dependencies were updated
14
+ * dependencies
15
+ * @aigne/core bumped to 1.50.0
16
+ * devDependencies
17
+ * @aigne/test-utils bumped to 0.5.28
18
+
19
+ ## [0.11.1](https://github.com/AIGNE-io/aigne-framework/compare/openai-v0.11.0...openai-v0.11.1) (2025-08-12)
20
+
21
+
22
+ ### Dependencies
23
+
24
+ * The following workspace dependencies were updated
25
+ * dependencies
26
+ * @aigne/core bumped to 1.49.1
27
+ * devDependencies
28
+ * @aigne/test-utils bumped to 0.5.27
29
+
3
30
  ## [0.11.0](https://github.com/AIGNE-io/aigne-framework/compare/openai-v0.10.17...openai-v0.11.0) (2025-08-12)
4
31
 
5
32
 
@@ -132,6 +132,11 @@ export declare class OpenAIChatModel extends ChatModel {
132
132
  protected supportsToolStreaming: boolean;
133
133
  protected supportsTemperature: boolean;
134
134
  get client(): OpenAI;
135
+ getCredential(): {
136
+ url: string | undefined;
137
+ apiKey: string | undefined;
138
+ model: string;
139
+ };
135
140
  get modelOptions(): ChatModelOptions | undefined;
136
141
  /**
137
142
  * Process the input and generate a response
@@ -82,16 +82,23 @@ class OpenAIChatModel extends core_1.ChatModel {
82
82
  supportsToolStreaming = true;
83
83
  supportsTemperature = true;
84
84
  get client() {
85
- const apiKey = this.options?.apiKey || process.env[this.apiKeyEnvName] || this.apiKeyDefault;
85
+ const { apiKey, url } = this.getCredential();
86
86
  if (!apiKey)
87
87
  throw new Error(`${this.name} requires an API key. Please provide it via \`options.apiKey\`, or set the \`${this.apiKeyEnvName}\` environment variable`);
88
88
  this._client ??= new CustomOpenAI({
89
- baseURL: this.options?.baseURL,
89
+ baseURL: url,
90
90
  apiKey,
91
91
  ...this.options?.clientOptions,
92
92
  });
93
93
  return this._client;
94
94
  }
95
+ getCredential() {
96
+ return {
97
+ url: this.options?.baseURL || process.env.OPENAI_BASE_URL,
98
+ apiKey: this.options?.apiKey || process.env[this.apiKeyEnvName] || this.apiKeyDefault,
99
+ model: this.options?.model || CHAT_MODEL_OPENAI_DEFAULT_MODEL,
100
+ };
101
+ }
95
102
  get modelOptions() {
96
103
  return this.options?.modelOptions;
97
104
  }
@@ -106,8 +113,9 @@ class OpenAIChatModel extends core_1.ChatModel {
106
113
  ajv = new ajv_1.Ajv();
107
114
  async _process(input) {
108
115
  const messages = await this.getRunMessages(input);
116
+ const { model } = this.getCredential();
109
117
  const body = {
110
- model: this.options?.model || CHAT_MODEL_OPENAI_DEFAULT_MODEL,
118
+ model,
111
119
  temperature: this.supportsTemperature
112
120
  ? (input.modelOptions?.temperature ?? this.modelOptions?.temperature)
113
121
  : undefined,
@@ -132,6 +132,11 @@ export declare class OpenAIChatModel extends ChatModel {
132
132
  protected supportsToolStreaming: boolean;
133
133
  protected supportsTemperature: boolean;
134
134
  get client(): OpenAI;
135
+ getCredential(): {
136
+ url: string | undefined;
137
+ apiKey: string | undefined;
138
+ model: string;
139
+ };
135
140
  get modelOptions(): ChatModelOptions | undefined;
136
141
  /**
137
142
  * Process the input and generate a response
@@ -132,6 +132,11 @@ export declare class OpenAIChatModel extends ChatModel {
132
132
  protected supportsToolStreaming: boolean;
133
133
  protected supportsTemperature: boolean;
134
134
  get client(): OpenAI;
135
+ getCredential(): {
136
+ url: string | undefined;
137
+ apiKey: string | undefined;
138
+ model: string;
139
+ };
135
140
  get modelOptions(): ChatModelOptions | undefined;
136
141
  /**
137
142
  * Process the input and generate a response
@@ -73,16 +73,23 @@ export class OpenAIChatModel extends ChatModel {
73
73
  supportsToolStreaming = true;
74
74
  supportsTemperature = true;
75
75
  get client() {
76
- const apiKey = this.options?.apiKey || process.env[this.apiKeyEnvName] || this.apiKeyDefault;
76
+ const { apiKey, url } = this.getCredential();
77
77
  if (!apiKey)
78
78
  throw new Error(`${this.name} requires an API key. Please provide it via \`options.apiKey\`, or set the \`${this.apiKeyEnvName}\` environment variable`);
79
79
  this._client ??= new CustomOpenAI({
80
- baseURL: this.options?.baseURL,
80
+ baseURL: url,
81
81
  apiKey,
82
82
  ...this.options?.clientOptions,
83
83
  });
84
84
  return this._client;
85
85
  }
86
+ getCredential() {
87
+ return {
88
+ url: this.options?.baseURL || process.env.OPENAI_BASE_URL,
89
+ apiKey: this.options?.apiKey || process.env[this.apiKeyEnvName] || this.apiKeyDefault,
90
+ model: this.options?.model || CHAT_MODEL_OPENAI_DEFAULT_MODEL,
91
+ };
92
+ }
86
93
  get modelOptions() {
87
94
  return this.options?.modelOptions;
88
95
  }
@@ -97,8 +104,9 @@ export class OpenAIChatModel extends ChatModel {
97
104
  ajv = new Ajv();
98
105
  async _process(input) {
99
106
  const messages = await this.getRunMessages(input);
107
+ const { model } = this.getCredential();
100
108
  const body = {
101
- model: this.options?.model || CHAT_MODEL_OPENAI_DEFAULT_MODEL,
109
+ model,
102
110
  temperature: this.supportsTemperature
103
111
  ? (input.modelOptions?.temperature ?? this.modelOptions?.temperature)
104
112
  : undefined,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aigne/openai",
3
- "version": "0.11.0",
3
+ "version": "0.11.2",
4
4
  "description": "AIGNE OpenAI SDK for integrating with OpenAI's GPT models and API services",
5
5
  "publishConfig": {
6
6
  "access": "public"
@@ -39,7 +39,7 @@
39
39
  "openai": "^5.8.3",
40
40
  "uuid": "^11.1.0",
41
41
  "zod": "^3.25.67",
42
- "@aigne/core": "^1.49.0"
42
+ "@aigne/core": "^1.50.0"
43
43
  },
44
44
  "devDependencies": {
45
45
  "@types/bun": "^1.2.18",
@@ -47,7 +47,7 @@
47
47
  "npm-run-all": "^4.1.5",
48
48
  "rimraf": "^6.0.1",
49
49
  "typescript": "^5.8.3",
50
- "@aigne/test-utils": "^0.5.26"
50
+ "@aigne/test-utils": "^0.5.28"
51
51
  },
52
52
  "scripts": {
53
53
  "lint": "tsc --noEmit",