@core-ai/openai 0.3.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +28 -8
  2. package/package.json +2 -2
package/dist/index.js CHANGED
@@ -169,8 +169,13 @@ function mapGenerateResponse(response) {
169
169
  usage: {
170
170
  inputTokens: 0,
171
171
  outputTokens: 0,
172
- reasoningTokens: 0,
173
- totalTokens: 0
172
+ inputTokenDetails: {
173
+ cacheReadTokens: 0,
174
+ cacheWriteTokens: 0
175
+ },
176
+ outputTokenDetails: {
177
+ reasoningTokens: 0
178
+ }
174
179
  }
175
180
  };
176
181
  }
@@ -182,8 +187,13 @@ function mapGenerateResponse(response) {
182
187
  usage: {
183
188
  inputTokens: response.usage?.prompt_tokens ?? 0,
184
189
  outputTokens: response.usage?.completion_tokens ?? 0,
185
- reasoningTokens,
186
- totalTokens: response.usage?.total_tokens ?? 0
190
+ inputTokenDetails: {
191
+ cacheReadTokens: response.usage?.prompt_tokens_details?.cached_tokens ?? 0,
192
+ cacheWriteTokens: 0
193
+ },
194
+ outputTokenDetails: {
195
+ reasoningTokens
196
+ }
187
197
  }
188
198
  };
189
199
  }
@@ -227,16 +237,26 @@ async function* transformStream(stream) {
227
237
  let usage = {
228
238
  inputTokens: 0,
229
239
  outputTokens: 0,
230
- reasoningTokens: 0,
231
- totalTokens: 0
240
+ inputTokenDetails: {
241
+ cacheReadTokens: 0,
242
+ cacheWriteTokens: 0
243
+ },
244
+ outputTokenDetails: {
245
+ reasoningTokens: 0
246
+ }
232
247
  };
233
248
  for await (const chunk of stream) {
234
249
  if (chunk.usage) {
235
250
  usage = {
236
251
  inputTokens: chunk.usage.prompt_tokens ?? 0,
237
252
  outputTokens: chunk.usage.completion_tokens ?? 0,
238
- reasoningTokens: chunk.usage.completion_tokens_details?.reasoning_tokens ?? 0,
239
- totalTokens: chunk.usage.total_tokens ?? 0
253
+ inputTokenDetails: {
254
+ cacheReadTokens: chunk.usage.prompt_tokens_details?.cached_tokens ?? 0,
255
+ cacheWriteTokens: 0
256
+ },
257
+ outputTokenDetails: {
258
+ reasoningTokens: chunk.usage.completion_tokens_details?.reasoning_tokens ?? 0
259
+ }
240
260
  };
241
261
  }
242
262
  const choice = chunk.choices[0];
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@core-ai/openai",
3
- "version": "0.3.0",
3
+ "version": "0.4.0",
4
4
  "description": "OpenAI provider package for @core-ai/core-ai",
5
5
  "license": "MIT",
6
6
  "author": "Omnifact (https://omnifact.ai)",
@@ -43,7 +43,7 @@
43
43
  "test:watch": "vitest"
44
44
  },
45
45
  "dependencies": {
46
- "@core-ai/core-ai": "^0.3.0",
46
+ "@core-ai/core-ai": "^0.4.0",
47
47
  "openai": "^6.1.0",
48
48
  "zod-to-json-schema": "^3.25.1"
49
49
  },