codex-session-insights 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +199 -0
- package/bin/codex-insights.js +9 -0
- package/lib/cli.js +1002 -0
- package/lib/codex-data.js +640 -0
- package/lib/llm-insights.js +1486 -0
- package/lib/model-provider.js +589 -0
- package/lib/report.js +1383 -0
- package/lib/types.d.ts +87 -0
- package/package.json +47 -0
|
@@ -0,0 +1,589 @@
|
|
|
1
|
+
import os from 'node:os'
|
|
2
|
+
import path from 'node:path'
|
|
3
|
+
import { spawn } from 'node:child_process'
|
|
4
|
+
import { promises as fs } from 'node:fs'
|
|
5
|
+
|
|
6
|
+
// API root used for provider=openai when options.apiBase is not supplied.
const DEFAULT_API_BASE = 'https://api.openai.com/v1'

/**
 * Options shared by all model-call entry points in this module.
 *
 * @typedef {Object} ModelCallOptions
 * @property {string=} apiKey - OpenAI key; falls back to the OPENAI_API_KEY env var.
 * @property {string=} apiBase - OpenAI API root override (defaults to DEFAULT_API_BASE).
 * @property {string=} codexBin - Codex CLI binary path (defaults to "codex").
 * @property {string=} cwd - Working directory for `codex exec` (defaults to process.cwd()).
 * @property {(usage: any) => void=} onUsage - Receives normalized token usage after each call.
 * @property {string=} usageStage - Label attached to usage and progress events.
 * @property {string=} reasoningEffort - Forwarded to codex as `model_reasoning_effort`.
 * @property {string=} facetEffort - Not read anywhere in this module; presumably consumed by callers — verify.
 * @property {string[]=} fallbackModels - Models to try in order when the primary model is unavailable.
 * @property {(event: any) => void=} onProgress - Receives `model:fallback` progress events.
 */
|
|
21
|
+
|
|
22
|
+
/**
 * Calls a model and returns its reply parsed as a JSON object matching `schema`.
 *
 * For provider "codex-cli" the schema is embedded into the prompt text (the
 * `--output-schema` flag is deliberately not used here: `schema: null`) and the
 * reply is parsed leniently via parseFirstJsonObject. For provider "openai" the
 * Responses API enforces the schema server-side (`json_schema`, strict: true),
 * so the output text is parsed with plain JSON.parse. Falls back across
 * `options.fallbackModels` on retryable model-availability errors.
 *
 * @param {{
 *  provider: string,
 *  model: string,
 *  schemaName: string,
 *  schema: any,
 *  systemPrompt: string,
 *  userPrompt: string,
 *  options?: ModelCallOptions
 * }} param0
 */
export async function callStructuredModel({
  provider,
  model,
  schemaName,
  schema,
  systemPrompt,
  userPrompt,
  options = {},
}) {
  return callWithModelFallback({
    provider,
    model,
    options,
    invoke: async actualModel => {
      if (provider === 'codex-cli') {
        const result = await callCodexCli({
          model: actualModel,
          // Schema goes into the prompt, not --output-schema.
          prompt: buildStructuredPrompt(systemPrompt, userPrompt, schema),
          schema: null,
          options,
        })
        emitUsage(options, provider, actualModel, result.usage)
        return parseFirstJsonObject(result.text)
      }

      if (provider === 'openai') {
        const apiKey = options.apiKey || process.env.OPENAI_API_KEY || ''
        if (!apiKey) {
          throw new Error('OPENAI_API_KEY is required for provider=openai.')
        }
        const data = /** @type {any} */ (await callResponsesApi({
          apiKey,
          apiBase: options.apiBase || DEFAULT_API_BASE,
          body: {
            model: actualModel,
            input: [
              { role: 'developer', content: systemPrompt },
              { role: 'user', content: userPrompt },
            ],
            // Structured Outputs: the API guarantees the reply matches `schema`.
            text: {
              format: {
                type: 'json_schema',
                name: schemaName,
                strict: true,
                schema,
              },
            },
          },
        }))
        emitUsage(options, provider, actualModel, normalizeOpenAiUsage(data.usage))
        return JSON.parse(extractOutputText(data))
      }

      throw new Error(`Unsupported provider "${provider}".`)
    },
  })
}
|
|
90
|
+
|
|
91
|
+
/**
 * Calls a model and returns its free-form text reply (no schema enforcement).
 *
 * Mirrors callStructuredModel's provider handling: "codex-cli" shells out to
 * the Codex CLI with the combined prompt on stdin, "openai" posts to the
 * Responses API. Falls back across `options.fallbackModels` on retryable
 * model-availability errors.
 *
 * @param {{
 *  provider: string,
 *  model: string,
 *  systemPrompt: string,
 *  userPrompt: string,
 *  options?: ModelCallOptions
 * }} param0
 */
export async function callTextModel({
  provider,
  model,
  systemPrompt,
  userPrompt,
  options = {},
}) {
  return callWithModelFallback({
    provider,
    model,
    options,
    invoke: async actualModel => {
      if (provider === 'codex-cli') {
        const result = await callCodexCli({
          model: actualModel,
          prompt: buildPrompt(systemPrompt, userPrompt),
          schema: null,
          options,
        })
        emitUsage(options, provider, actualModel, result.usage)
        return result.text
      }

      if (provider === 'openai') {
        const apiKey = options.apiKey || process.env.OPENAI_API_KEY || ''
        if (!apiKey) {
          throw new Error('OPENAI_API_KEY is required for provider=openai.')
        }
        const data = /** @type {any} */ (await callResponsesApi({
          apiKey,
          apiBase: options.apiBase || DEFAULT_API_BASE,
          body: {
            model: actualModel,
            input: [
              { role: 'developer', content: systemPrompt },
              { role: 'user', content: userPrompt },
            ],
          },
        }))
        emitUsage(options, provider, actualModel, normalizeOpenAiUsage(data.usage))
        return extractOutputText(data)
      }

      throw new Error(`Unsupported provider "${provider}".`)
    },
  })
}
|
|
147
|
+
|
|
148
|
+
// Combines the system and user prompts into a single plain-text prompt,
// trimming each part and separating them with a blank line.
function buildPrompt(systemPrompt, userPrompt) {
  const sections = [systemPrompt.trim(), userPrompt.trim()]
  return sections.join('\n\n')
}
|
|
151
|
+
|
|
152
|
+
// Builds a prompt that instructs the model to answer with a bare JSON object
// matching `schema`; the schema itself is appended pretty-printed so the model
// can see the exact expected shape.
function buildStructuredPrompt(systemPrompt, userPrompt, schema) {
  const schemaJson = JSON.stringify(schema, null, 2)
  const instruction = `RESPOND WITH ONLY A VALID JSON OBJECT matching this schema:\n${schemaJson}`
  return [systemPrompt.trim(), userPrompt.trim(), instruction].join('\n\n')
}
|
|
160
|
+
|
|
161
|
+
// Invokes `invoke` with the primary model first, then with each fallback
// candidate in order. Only "model unavailable"-style errors trigger the next
// candidate; any other error — or a failure on the last candidate — propagates.
async function callWithModelFallback({ provider, model, options, invoke }) {
  const candidates = buildModelCandidates(model, options?.fallbackModels)
  let lastError = null

  for (const [index, candidate] of candidates.entries()) {
    try {
      if (candidate !== model) {
        // Surface that we are not using the requested model.
        emitModelFallback(options, provider, model, candidate, index)
      }
      return await invoke(candidate)
    } catch (error) {
      lastError = error
      const isLastCandidate = index === candidates.length - 1
      if (isLastCandidate || !isRetryableModelError(error)) {
        throw error
      }
    }
  }

  // Only reachable when `candidates` is empty (e.g. a falsy primary model).
  throw lastError || new Error(`No model candidate succeeded for ${model}`)
}
|
|
182
|
+
|
|
183
|
+
// Runs `codex exec --json` with the prompt supplied on stdin and returns the
// final agent message plus (when reported) normalized token usage. A schema,
// if given, is written to a temp file and passed via --output-schema; the temp
// directory is always cleaned up.
async function callCodexCli({ model, prompt, schema, options }) {
  const args = ['exec', '--json', '--skip-git-repo-check', '--ephemeral']
  let tempDir = null

  if (model) {
    args.push('--model', model)
  }
  if (options.reasoningEffort) {
    // JSON.stringify quotes the value for the TOML-style -c override.
    args.push('-c', `model_reasoning_effort=${JSON.stringify(options.reasoningEffort)}`)
  }
  if (schema) {
    tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'codex-insights-schema-'))
    const schemaPath = path.join(tempDir, 'schema.json')
    await fs.writeFile(schemaPath, JSON.stringify(schema, null, 2), 'utf8')
    args.push('--output-schema', schemaPath)
  }
  // "-" tells codex to read the prompt from stdin.
  args.push('-')

  try {
    const { stdout, stderr, code } = await runProcess(options.codexBin || 'codex', args, {
      cwd: options.cwd || process.cwd(),
      stdin: prompt,
      maxBuffer: 8 * 1024 * 1024,
    })
    if (code !== 0) {
      throw new Error(`codex exec failed (${code}): ${stderr || stdout}`)
    }
    const result = extractCodexExecResult(stdout)
    if (!result.text) {
      throw new Error('codex exec returned no final agent message.')
    }
    return result
  } finally {
    if (tempDir) {
      await fs.rm(tempDir, { recursive: true, force: true })
    }
  }
}
|
|
224
|
+
|
|
225
|
+
// Spawns `command` with piped stdio, writes `options.stdin` (if any) to the
// child, and resolves with { stdout, stderr, code } once the process closes.
// If either stream accumulates more than `options.maxBuffer` bytes (default
// 1 MiB), the child is SIGTERMed and the promise rejects; the subsequent
// 'close' resolve is then a no-op because the promise has already settled.
async function runProcess(command, args, options) {
  return new Promise((resolve, reject) => {
    const child = spawn(command, args, {
      cwd: options.cwd,
      stdio: 'pipe',
      env: process.env,
    })

    let stdout = ''
    let stderr = ''
    // Byte counters are tracked separately from string length because
    // Buffer.byteLength of a UTF-8 chunk can exceed its character count.
    let stdoutBytes = 0
    let stderrBytes = 0
    const maxBuffer = options.maxBuffer || 1024 * 1024

    child.stdout.setEncoding('utf8')
    child.stderr.setEncoding('utf8')

    child.stdout.on('data', chunk => {
      stdoutBytes += Buffer.byteLength(chunk)
      if (stdoutBytes > maxBuffer) {
        child.kill('SIGTERM')
        reject(new Error(`${command} stdout exceeded buffer limit`))
        return
      }
      stdout += chunk
    })

    child.stderr.on('data', chunk => {
      stderrBytes += Buffer.byteLength(chunk)
      if (stderrBytes > maxBuffer) {
        child.kill('SIGTERM')
        reject(new Error(`${command} stderr exceeded buffer limit`))
        return
      }
      stderr += chunk
    })

    child.on('error', reject)
    // A null exit code (killed by signal) is reported as 1.
    child.on('close', code => resolve({ stdout, stderr, code: code ?? 1 }))

    if (options.stdin) {
      child.stdin.write(options.stdin)
    }
    // Always close stdin so children that read until EOF can finish.
    child.stdin.end()
  })
}
|
|
271
|
+
|
|
272
|
+
// Scans `codex exec --json` output (one JSON event per line) and returns the
// last agent message text (trimmed) together with normalized turn-level token
// usage, when reported. Non-JSON lines and unknown event types are ignored.
function extractCodexExecResult(stdout) {
  let latestText = ''
  let usage = null

  for (const rawLine of String(stdout).split('\n')) {
    const line = rawLine.trim()
    if (!line.startsWith('{')) continue

    /** @type {any} */
    let event
    try {
      event = JSON.parse(line)
    } catch {
      continue
    }

    if (event.type === 'item.completed' && event.item?.type === 'agent_message') {
      latestText = String(event.item.text || '')
    } else if (event.type === 'turn.completed' && event.usage) {
      usage = normalizeCodexUsage(event.usage)
    }
  }

  return { text: latestText.trim(), usage }
}
|
|
301
|
+
|
|
302
|
+
// Best-effort extraction of a JSON object from free-form model output. Tries,
// in order: the whole trimmed text, the first balanced {...} substring, and a
// control-character-escaped repair of each candidate. Throws (with the last
// parse error's message) when nothing parses.
function parseFirstJsonObject(text) {
  const trimmed = String(text).trim()
  const candidates = []
  if (trimmed) {
    candidates.push(trimmed)
  }
  const balanced = extractFirstBalancedJsonObject(trimmed)
  if (balanced) {
    candidates.push(balanced)
  }

  let lastError = null
  for (const candidate of dedupeStrings(candidates)) {
    try {
      return JSON.parse(candidate)
    } catch (error) {
      lastError = error
    }

    // Models sometimes emit raw newlines/tabs inside string values; retry
    // with those escaped.
    const repaired = escapeControlCharsInJsonStrings(candidate)
    if (repaired === candidate) continue
    try {
      return JSON.parse(repaired)
    } catch (error) {
      lastError = error
    }
  }

  const detail = lastError instanceof Error ? `: ${lastError.message}` : ''
  throw new Error(`Model provider returned no parseable JSON object${detail}`)
}
|
|
336
|
+
|
|
337
|
+
// Returns the first balanced top-level {...} substring of `text`, tracking
// JSON string literals and backslash escapes so braces inside strings do not
// affect nesting depth. Returns null when no object opens or none closes.
function extractFirstBalancedJsonObject(text) {
  const source = String(text || '')
  const start = source.indexOf('{')
  if (start < 0) return null

  let depth = 0
  let inString = false
  let escaped = false

  for (let i = start; i < source.length; i += 1) {
    const ch = source[i]

    if (inString) {
      if (escaped) {
        escaped = false
      } else if (ch === '\\') {
        escaped = true
      } else if (ch === '"') {
        inString = false
      }
    } else if (ch === '"') {
      inString = true
    } else if (ch === '{') {
      depth += 1
    } else if (ch === '}') {
      depth -= 1
      if (depth === 0) {
        return source.slice(start, i + 1)
      }
    }
  }

  return null
}
|
|
378
|
+
|
|
379
|
+
// Escapes raw control characters (U+0000–U+001F) that appear inside JSON
// string literals so the text becomes parseable; everything outside string
// literals — including control characters — is copied through unchanged.
function escapeControlCharsInJsonStrings(text) {
  const namedEscapes = { '\n': '\\n', '\r': '\\r', '\t': '\\t', '\b': '\\b', '\f': '\\f' }
  let out = ''
  let inString = false
  let escaped = false

  for (let i = 0; i < text.length; i += 1) {
    const ch = text[i]

    if (!inString) {
      if (ch === '"') inString = true
      out += ch
      continue
    }

    // Inside a string literal: honor existing escape sequences first.
    if (escaped) {
      out += ch
      escaped = false
      continue
    }
    if (ch === '\\') {
      out += ch
      escaped = true
      continue
    }
    if (ch === '"') {
      out += ch
      inString = false
      continue
    }

    const code = ch.charCodeAt(0)
    if (code > 0x1f) {
      out += ch
    } else {
      // Use the short named escape when JSON defines one, \uXXXX otherwise.
      out += namedEscapes[ch] ?? `\\u${code.toString(16).padStart(4, '0')}`
    }
  }

  return out
}
|
|
441
|
+
|
|
442
|
+
// Drops falsy entries and duplicates, keeping first-seen order.
function dedupeStrings(values) {
  const seen = new Set()
  const unique = []
  for (const value of values) {
    if (!value || seen.has(value)) continue
    seen.add(value)
    unique.push(value)
  }
  return unique
}
|
|
445
|
+
|
|
446
|
+
// Returns the ordered list of models to try: primary first, then fallbacks,
// with falsy entries and duplicates dropped (first occurrence wins).
function buildModelCandidates(primaryModel, fallbackModels = []) {
  const seen = new Set()
  const candidates = []
  for (const candidate of [primaryModel, ...fallbackModels]) {
    if (candidate && !seen.has(candidate)) {
      seen.add(candidate)
      candidates.push(candidate)
    }
  }
  return candidates
}
|
|
449
|
+
|
|
450
|
+
// Heuristic: an error is worth retrying with a fallback model only when its
// message mentions "model" together with an availability/permission phrase.
function isRetryableModelError(error) {
  const message = String(error instanceof Error ? error.message : error).toLowerCase()
  if (!message.includes('model')) return false
  const hints = [
    'not found',
    'not available',
    'unsupported',
    'permission',
    'access',
    'entitled',
    'not allowed',
    'unknown',
  ]
  return hints.some(hint => message.includes(hint))
}
|
|
464
|
+
|
|
465
|
+
// Reports to the optional onProgress callback that a fallback model is about
// to be tried instead of the requested one. No-op without a callback.
function emitModelFallback(options, provider, fromModel, toModel, attempt) {
  const onProgress = options?.onProgress
  if (typeof onProgress !== 'function') return
  onProgress({
    kind: 'model:fallback',
    provider,
    fromModel,
    toModel,
    attempt,
    stage: options.usageStage || 'unspecified',
  })
}
|
|
477
|
+
|
|
478
|
+
/**
 * Forwards normalized token usage to the optional onUsage callback, tagging it
 * with provider, model, and the configured stage. A falsy usage object or a
 * missing callback makes this a no-op.
 *
 * @param {ModelCallOptions} options
 * @param {string} provider
 * @param {string} model
 * @param {any} usage
 */
function emitUsage(options, provider, model, usage) {
  if (!usage) return
  if (typeof options.onUsage !== 'function') return

  const inputTokens = Number(usage.inputTokens ?? 0)
  const cachedInputTokens = Number(usage.cachedInputTokens ?? 0)
  const outputTokens = Number(usage.outputTokens ?? 0)
  // A reported total wins; otherwise derive it from the component counts.
  const totalTokens =
    Number(usage.totalTokens ?? 0) || inputTokens + cachedInputTokens + outputTokens

  options.onUsage({
    provider,
    model,
    stage: options.usageStage || 'unspecified',
    inputTokens,
    cachedInputTokens,
    outputTokens,
    totalTokens,
  })
}
|
|
500
|
+
|
|
501
|
+
/**
 * Maps the Codex CLI's snake_case usage counters onto the module's shared
 * camelCase shape, deriving totalTokens from the components when the CLI
 * reports no (or a zero) total.
 * @param {any} usage
 */
function normalizeCodexUsage(usage) {
  const inputTokens = Number(usage.input_tokens ?? 0)
  const cachedInputTokens = Number(usage.cached_input_tokens ?? 0)
  const outputTokens = Number(usage.output_tokens ?? 0)
  const totalTokens =
    Number(usage.total_tokens ?? 0) || inputTokens + cachedInputTokens + outputTokens
  return { inputTokens, cachedInputTokens, outputTokens, totalTokens }
}
|
|
514
|
+
|
|
515
|
+
/**
 * Normalizes an OpenAI usage payload — which may use snake_case, camelCase, or
 * legacy prompt/completion field names — onto the module's shared shape.
 * Returns null when no usage was reported at all.
 * @param {any} usage
 */
function normalizeOpenAiUsage(usage) {
  if (!usage) return null

  // First non-nullish value wins, coerced to a number; 0 when none present.
  const firstNumber = (...values) => {
    for (const value of values) {
      if (value !== undefined && value !== null) return Number(value)
    }
    return 0
  }

  const inputTokens = firstNumber(usage.input_tokens, usage.inputTokens, usage.prompt_tokens)
  const cachedInputTokens = firstNumber(
    usage.input_tokens_details?.cached_tokens,
    usage.cached_input_tokens,
    usage.cachedInputTokens,
  )
  const outputTokens = firstNumber(usage.output_tokens, usage.outputTokens, usage.completion_tokens)
  const totalTokens =
    firstNumber(usage.total_tokens, usage.totalTokens) ||
    inputTokens + cachedInputTokens + outputTokens

  return { inputTokens, cachedInputTokens, outputTokens, totalTokens }
}
|
|
539
|
+
|
|
540
|
+
/**
 * POSTs `body` to the OpenAI Responses API and returns the parsed JSON reply.
 * Non-2xx responses are surfaced as an Error carrying the status code and the
 * raw response body text.
 * @param {{ apiKey: string, apiBase: string, body: any }} param0
 * @returns {Promise<any>}
 */
async function callResponsesApi({ apiKey, apiBase, body }) {
  const response = await fetch(`${apiBase}/responses`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify(body),
  })

  if (response.ok) {
    return response.json()
  }

  const detail = await response.text()
  throw new Error(`OpenAI Responses API failed (${response.status}): ${detail}`)
}
|
|
561
|
+
|
|
562
|
+
/**
 * Pulls the reply text out of a Responses API result: prefers the top-level
 * `output_text` convenience field, then scans `output` items for the first
 * non-empty `text` content part. Throws when neither yields text.
 * @param {any} data
 */
function extractOutputText(data) {
  const direct = data.output_text
  if (typeof direct === 'string' && direct) {
    return direct
  }

  const items = Array.isArray(data.output) ? data.output : []
  for (const item of items) {
    const parts = Array.isArray(item.content) ? item.content : []
    for (const part of parts) {
      if (typeof part.text === 'string' && part.text) {
        return part.text
      }
    }
  }

  throw new Error('No output text returned from model provider')
}
|
|
581
|
+
|
|
582
|
+
// Internal helpers re-exported for unit tests only; not part of the public API.
export const __test = {
  extractCodexExecResult,
  parseFirstJsonObject,
  normalizeCodexUsage,
  normalizeOpenAiUsage,
  buildModelCandidates,
  isRetryableModelError,
}
|