@steipete/oracle 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +85 -0
- package/dist/bin/oracle-cli.js +458 -0
- package/dist/bin/oracle.js +683 -0
- package/dist/scripts/browser-tools.js +536 -0
- package/dist/scripts/check.js +21 -0
- package/dist/scripts/chrome/browser-tools.js +295 -0
- package/dist/scripts/run-cli.js +14 -0
- package/dist/src/browser/actions/assistantResponse.js +471 -0
- package/dist/src/browser/actions/attachments.js +82 -0
- package/dist/src/browser/actions/modelSelection.js +190 -0
- package/dist/src/browser/actions/navigation.js +75 -0
- package/dist/src/browser/actions/promptComposer.js +167 -0
- package/dist/src/browser/chromeLifecycle.js +104 -0
- package/dist/src/browser/config.js +33 -0
- package/dist/src/browser/constants.js +40 -0
- package/dist/src/browser/cookies.js +210 -0
- package/dist/src/browser/domDebug.js +36 -0
- package/dist/src/browser/index.js +319 -0
- package/dist/src/browser/pageActions.js +5 -0
- package/dist/src/browser/prompt.js +56 -0
- package/dist/src/browser/promptSummary.js +20 -0
- package/dist/src/browser/sessionRunner.js +77 -0
- package/dist/src/browser/types.js +1 -0
- package/dist/src/browser/utils.js +62 -0
- package/dist/src/browserMode.js +1 -0
- package/dist/src/cli/browserConfig.js +44 -0
- package/dist/src/cli/dryRun.js +59 -0
- package/dist/src/cli/engine.js +17 -0
- package/dist/src/cli/errorUtils.js +9 -0
- package/dist/src/cli/help.js +68 -0
- package/dist/src/cli/options.js +103 -0
- package/dist/src/cli/promptRequirement.js +14 -0
- package/dist/src/cli/rootAlias.js +16 -0
- package/dist/src/cli/sessionCommand.js +48 -0
- package/dist/src/cli/sessionDisplay.js +222 -0
- package/dist/src/cli/sessionRunner.js +94 -0
- package/dist/src/heartbeat.js +43 -0
- package/dist/src/oracle/client.js +48 -0
- package/dist/src/oracle/config.js +29 -0
- package/dist/src/oracle/errors.js +101 -0
- package/dist/src/oracle/files.js +220 -0
- package/dist/src/oracle/format.js +33 -0
- package/dist/src/oracle/fsAdapter.js +7 -0
- package/dist/src/oracle/request.js +48 -0
- package/dist/src/oracle/run.js +411 -0
- package/dist/src/oracle/tokenStats.js +39 -0
- package/dist/src/oracle/types.js +1 -0
- package/dist/src/oracle.js +9 -0
- package/dist/src/sessionManager.js +205 -0
- package/dist/src/version.js +39 -0
- package/package.json +69 -0
|
@@ -0,0 +1,411 @@
|
|
|
1
|
+
import chalk from 'chalk';
|
|
2
|
+
import kleur from 'kleur';
|
|
3
|
+
import fs from 'node:fs/promises';
|
|
4
|
+
import path from 'node:path';
|
|
5
|
+
import process from 'node:process';
|
|
6
|
+
import { performance } from 'node:perf_hooks';
|
|
7
|
+
import { APIConnectionError, APIConnectionTimeoutError } from 'openai';
|
|
8
|
+
import { DEFAULT_SYSTEM_PROMPT, MODEL_CONFIGS, TOKENIZER_OPTIONS } from './config.js';
|
|
9
|
+
import { readFiles } from './files.js';
|
|
10
|
+
import { buildPrompt, buildRequestBody } from './request.js';
|
|
11
|
+
import { formatElapsed, formatUSD } from './format.js';
|
|
12
|
+
import { getFileTokenStats, printFileTokenStats } from './tokenStats.js';
|
|
13
|
+
import { OracleResponseError, OracleTransportError, PromptValidationError, describeTransportError, toTransportError, } from './errors.js';
|
|
14
|
+
import { createDefaultClientFactory } from './client.js';
|
|
15
|
+
import { startHeartbeat } from '../heartbeat.js';
|
|
16
|
+
import { getCliVersion } from '../version.js';
|
|
17
|
+
import { createFsAdapter } from './fsAdapter.js';
|
|
18
|
+
// Only dim helper output when stdout is an interactive terminal.
const isTty = process.stdout.isTTY;
const dim = (text) => (isTty ? kleur.dim(text) : text);
// Background-run (gpt-5-pro) tuning: give up after 30 minutes overall, poll
// status every 5s, and back retrieval retries off exponentially from 3s up to
// a 15s ceiling.
const BACKGROUND_MAX_WAIT_MS = 30 * 60 * 1000;
const BACKGROUND_POLL_INTERVAL_MS = 5000;
const BACKGROUND_RETRY_BASE_MS = 3000;
const BACKGROUND_RETRY_MAX_MS = 15000;
// Default sleep between polls; injectable via deps.wait so tests can skip it.
const defaultWait = (ms) => new Promise((resolve) => {
    setTimeout(resolve, ms);
});
|
|
27
|
+
/**
 * Run one Oracle consultation end to end: validate options, attach files,
 * budget tokens, then either print a preview, stream a live response, or
 * monitor a background (gpt-5-pro) run. Returns a result object with
 * `mode: 'preview'` or `mode: 'live'`.
 *
 * @param options user-facing CLI options (prompt, model, file, silent, ...)
 * @param deps injectable collaborators (client, log, write, wait, clock) for tests
 * @throws {PromptValidationError} missing API key, unknown model, or oversized input
 * @throws {OracleResponseError} when OpenAI ends the run without completing
 */
export async function runOracle(options, deps = {}) {
    const { apiKey = options.apiKey ?? process.env.OPENAI_API_KEY, cwd = process.cwd(), fs: fsModule = createFsAdapter(fs), log = console.log, write = (text) => process.stdout.write(text), now = () => performance.now(), clientFactory = createDefaultClientFactory(), client, wait = defaultWait, } = deps;
    const logVerbose = (message) => {
        if (options.verbose) {
            log(dim(`[verbose] ${message}`));
        }
    };
    // Preview short-circuits the network call entirely (see the return below).
    const previewMode = resolvePreviewMode(options.previewMode ?? options.preview);
    const isPreview = Boolean(previewMode);
    if (!apiKey) {
        throw new PromptValidationError('Missing OPENAI_API_KEY. Set it via the environment or a .env file.', {
            env: 'OPENAI_API_KEY',
        });
    }
    const modelConfig = MODEL_CONFIGS[options.model];
    if (!modelConfig) {
        throw new PromptValidationError(`Unsupported model "${options.model}". Choose one of: ${Object.keys(MODEL_CONFIGS).join(', ')}`, { model: options.model });
    }
    // gpt-5-pro defaults to the background (stored-response) flow unless overridden.
    const useBackground = options.background ?? (options.model === 'gpt-5-pro');
    const inputTokenBudget = options.maxInput ?? modelConfig.inputLimit;
    const files = await readFiles(options.file ?? [], { cwd, fsModule });
    const searchEnabled = options.search !== false;
    logVerbose(`cwd: ${cwd}`);
    if (files.length > 0) {
        // Show at most 10 relative paths in verbose output.
        const displayPaths = files
            .map((file) => path.relative(cwd, file.path) || file.path)
            .slice(0, 10)
            .join(', ');
        const extra = files.length > 10 ? ` (+${files.length - 10} more)` : '';
        logVerbose(`Attached files (${files.length}): ${displayPaths}${extra}`);
    }
    else {
        logVerbose('No files attached.');
    }
    const fileTokenInfo = getFileTokenStats(files, {
        cwd,
        tokenizer: modelConfig.tokenizer,
        tokenizerOptions: TOKENIZER_OPTIONS,
        inputTokenBudget,
    });
    const totalFileTokens = fileTokenInfo.totalTokens;
    logVerbose(`Attached files use ${totalFileTokens.toLocaleString()} tokens`);
    const systemPrompt = options.system?.trim() || DEFAULT_SYSTEM_PROMPT;
    const promptWithFiles = buildPrompt(options.prompt, files, cwd);
    // Token estimate covers the full system + user payload exactly as sent.
    const tokenizerInput = [
        { role: 'system', content: systemPrompt },
        { role: 'user', content: promptWithFiles },
    ];
    const estimatedInputTokens = modelConfig.tokenizer(tokenizerInput, TOKENIZER_OPTIONS);
    logVerbose(`Estimated tokens (prompt + files): ${estimatedInputTokens.toLocaleString()}`);
    const fileCount = files.length;
    const cliVersion = getCliVersion();
    const headerLine = `Oracle (${cliVersion}) consulting ${modelConfig.model}'s crystal ball with ${estimatedInputTokens.toLocaleString()} tokens and ${fileCount} files...`;
    // Per-file breakdown is shown when requested, or automatically when the
    // attachments alone blow the budget (so the user sees what to drop).
    const shouldReportFiles = (options.filesReport || fileTokenInfo.totalTokens > inputTokenBudget) && fileTokenInfo.stats.length > 0;
    if (!isPreview) {
        log(headerLine);
        if (options.model === 'gpt-5-pro') {
            log(dim('Pro is thinking, this can take up to 30 minutes...'));
        }
        log(dim('Press Ctrl+C to cancel.'));
    }
    if (shouldReportFiles) {
        printFileTokenStats(fileTokenInfo, { inputTokenBudget, log });
    }
    if (estimatedInputTokens > inputTokenBudget) {
        throw new PromptValidationError(`Input too large (${estimatedInputTokens.toLocaleString()} tokens). Limit is ${inputTokenBudget.toLocaleString()} tokens.`, { estimatedInputTokens, inputTokenBudget });
    }
    const requestBody = buildRequestBody({
        modelConfig,
        systemPrompt,
        userPrompt: promptWithFiles,
        searchEnabled,
        maxOutputTokens: options.maxOutput,
        background: useBackground,
        storeResponse: useBackground,
    });
    // Preview path: dump the request and/or prompt, never touch the network.
    if (isPreview && previewMode) {
        if (previewMode === 'json' || previewMode === 'full') {
            log('Request JSON');
            log(JSON.stringify(requestBody, null, 2));
            log('');
        }
        if (previewMode === 'full') {
            log('Assembled Prompt');
            log(promptWithFiles);
            log('');
        }
        log(`Estimated input tokens: ${estimatedInputTokens.toLocaleString()} / ${inputTokenBudget.toLocaleString()} (model: ${modelConfig.model})`);
        return {
            mode: 'preview',
            previewMode,
            requestBody,
            estimatedInputTokens,
            inputTokenBudget,
        };
    }
    const openAiClient = client ?? clientFactory(apiKey);
    logVerbose('Dispatching request to OpenAI Responses API...');
    const runStart = now();
    let response = null;
    let elapsedMs = 0;
    let sawTextDelta = false;
    let answerHeaderPrinted = false;
    // Print the "Answer:" header at most once, and only in non-silent mode.
    const ensureAnswerHeader = () => {
        if (!options.silent && !answerHeaderPrinted) {
            log(chalk.bold('Answer:'));
            answerHeaderPrinted = true;
        }
    };
    if (useBackground) {
        // Background flow: create a stored response and poll until done.
        response = await executeBackgroundResponse({
            client: openAiClient,
            requestBody,
            log,
            wait,
            heartbeatIntervalMs: options.heartbeatIntervalMs,
            now,
        });
        elapsedMs = now() - runStart;
    }
    else {
        // Streaming flow: echo text deltas as they arrive.
        const stream = await openAiClient.responses.stream(requestBody);
        let heartbeatActive = false;
        let stopHeartbeat = null;
        // Idempotent teardown: the heartbeat is stopped at most once.
        const stopHeartbeatNow = () => {
            if (!heartbeatActive) {
                return;
            }
            heartbeatActive = false;
            stopHeartbeat?.();
            stopHeartbeat = null;
        };
        if (options.heartbeatIntervalMs && options.heartbeatIntervalMs > 0) {
            heartbeatActive = true;
            stopHeartbeat = startHeartbeat({
                intervalMs: options.heartbeatIntervalMs,
                log: (message) => log(message),
                isActive: () => heartbeatActive,
                makeMessage: (elapsedMs) => {
                    const elapsedText = formatElapsed(elapsedMs);
                    return `API connection active — ${elapsedText} elapsed. Expect up to ~10 min before GPT-5 responds.`;
                },
            });
        }
        try {
            for await (const event of stream) {
                if (event.type === 'response.output_text.delta') {
                    // First visible output: silence the heartbeat and print the header.
                    stopHeartbeatNow();
                    sawTextDelta = true;
                    ensureAnswerHeader();
                    if (!options.silent && typeof event.delta === 'string') {
                        write(event.delta);
                    }
                }
            }
        }
        catch (streamError) {
            if (typeof stream.abort === 'function') {
                stream.abort();
            }
            stopHeartbeatNow();
            // Normalize to a transport error so callers can match on it.
            const transportError = toTransportError(streamError);
            log(chalk.yellow(describeTransportError(transportError)));
            throw transportError;
        }
        response = await stream.finalResponse();
        stopHeartbeatNow();
        elapsedMs = now() - runStart;
    }
    if (!response) {
        throw new Error('OpenAI did not return a response.');
    }
    logVerbose(`Response status: ${response.status ?? 'completed'}`);
    if (response.status && response.status !== 'completed') {
        const detail = response.error?.message || response.incomplete_details?.reason || response.status;
        log(chalk.yellow(`OpenAI ended the run early (status=${response.status}${response.incomplete_details?.reason ? `, reason=${response.incomplete_details.reason}` : ''}).`));
        throw new OracleResponseError(`Response did not complete: ${detail}`, response);
    }
    const answerText = extractTextOutput(response);
    if (!options.silent) {
        // biome-ignore lint/nursery/noUnnecessaryConditions: flips true when streaming events arrive
        if (sawTextDelta) {
            write('\n\n');
        }
        else {
            // Background (or delta-less) runs print the full answer at once.
            ensureAnswerHeader();
            log(answerText || chalk.dim('(no text output)'));
            log('');
        }
    }
    // Usage accounting: fields missing from the API payload fall back to
    // estimates (flagged with "*" by formatTokenValue below).
    // NOTE(review): the Responses API appears to nest reasoning tokens under
    // usage.output_tokens_details.reasoning_tokens, so usage.reasoning_tokens
    // may always be undefined here — confirm against the SDK version in use.
    const usage = response.usage ?? {};
    const inputTokens = usage.input_tokens ?? estimatedInputTokens;
    const outputTokens = usage.output_tokens ?? 0;
    const reasoningTokens = usage.reasoning_tokens ?? 0;
    const totalTokens = usage.total_tokens ?? inputTokens + outputTokens + reasoningTokens;
    const cost = inputTokens * modelConfig.pricing.inputPerToken + outputTokens * modelConfig.pricing.outputPerToken;
    const elapsedDisplay = formatElapsed(elapsedMs);
    const statsParts = [];
    const modelLabel = modelConfig.model + (modelConfig.reasoning ? '[high]' : '');
    statsParts.push(modelLabel);
    statsParts.push(formatUSD(cost));
    const tokensDisplay = [inputTokens, outputTokens, reasoningTokens, totalTokens]
        .map((value, index) => formatTokenValue(value, usage, index))
        .join('/');
    statsParts.push(`tok(i/o/r/t)=${tokensDisplay}`);
    if (!searchEnabled) {
        statsParts.push('search=off');
    }
    if (files.length > 0) {
        statsParts.push(`files=${files.length}`);
    }
    log(chalk.blue(`Finished in ${elapsedDisplay} (${statsParts.join(' | ')})`));
    return {
        mode: 'live',
        response,
        usage: { inputTokens, outputTokens, reasoningTokens, totalTokens },
        elapsedMs,
    };
}
|
|
246
|
+
/**
 * Format one token count for the tok(i/o/r/t) stats line.
 * Appends "*" when the corresponding field was absent from the API usage
 * payload, marking the value as an estimate rather than a reported count.
 * @param value the token count to render
 * @param usage the raw usage object from the response (may be null)
 * @param index position in display order: 0=input, 1=output, 2=reasoning, 3=total
 */
function formatTokenValue(value, usage, index) {
    // Usage field names in the same order the values are displayed.
    const fieldByIndex = ['input_tokens', 'output_tokens', 'reasoning_tokens', 'total_tokens'];
    const field = fieldByIndex[index];
    const isEstimated = field !== undefined && usage?.[field] == null;
    const rendered = value.toLocaleString();
    if (isEstimated) {
        return `${rendered}*`;
    }
    return rendered;
}
|
|
254
|
+
/**
 * Normalize the user's preview flag into one of 'summary' | 'json' | 'full',
 * or undefined when preview is disabled.
 * Unknown non-empty strings and bare truthy values both fall back to 'summary'.
 */
function resolvePreviewMode(value) {
    if (typeof value === 'string' && value.length > 0) {
        switch (value) {
            case 'summary':
            case 'json':
            case 'full':
                return value;
            default:
                // Unrecognized mode string: degrade to the mildest preview.
                return 'summary';
        }
    }
    // Non-string truthy (e.g. boolean --preview) means "summary"; falsy means off.
    return value ? 'summary' : undefined;
}
|
|
264
|
+
/**
 * Pull the plain-text answer out of a Responses API payload.
 * Prefers the aggregated `output_text` array; otherwise walks `output` items,
 * collecting text chunks (type 'output_text' or 'text') and bare `item.text`
 * strings. Returns '' when no text output is present.
 */
export function extractTextOutput(response) {
    const { output_text: aggregated, output } = response;
    if (Array.isArray(aggregated) && aggregated.length > 0) {
        return aggregated.join('\n');
    }
    if (!Array.isArray(output)) {
        return '';
    }
    const pieces = output.flatMap((item) => {
        if (Array.isArray(item.content)) {
            // Keep only text-bearing chunks with non-empty text.
            return item.content
                .filter((chunk) => chunk && (chunk.type === 'output_text' || chunk.type === 'text') && chunk.text)
                .map((chunk) => chunk.text);
        }
        return typeof item.text === 'string' ? [item.text] : [];
    });
    return pieces.join('\n');
}
|
|
286
|
+
/**
 * Create a background (stored) response and wait for it to finish.
 * Logs the scheduled response id, optionally emits heartbeat lines while
 * waiting, and delegates the wait loop to pollBackgroundResponse.
 * @throws {OracleResponseError} when OpenAI returns no response id
 */
async function executeBackgroundResponse(params) {
    const { client, requestBody, log, wait, heartbeatIntervalMs, now } = params;
    const initialResponse = await client.responses.create(requestBody);
    if (!initialResponse || !initialResponse.id) {
        throw new OracleResponseError('OpenAI did not return a response ID for the background run.', initialResponse);
    }
    const responseId = initialResponse.id;
    log(dim(`OpenAI scheduled background response ${responseId} (status=${initialResponse.status ?? 'unknown'}). Monitoring up to ${Math.round(BACKGROUND_MAX_WAIT_MS / 60000)} minutes for completion...`));
    // Heartbeat is opt-in (interval > 0) and torn down at most once.
    let heartbeatActive = false;
    let stopHeartbeat = null;
    const stopHeartbeatNow = () => {
        if (!heartbeatActive) {
            return;
        }
        heartbeatActive = false;
        stopHeartbeat?.();
        stopHeartbeat = null;
    };
    if (heartbeatIntervalMs && heartbeatIntervalMs > 0) {
        heartbeatActive = true;
        stopHeartbeat = startHeartbeat({
            intervalMs: heartbeatIntervalMs,
            log: (message) => log(message),
            isActive: () => heartbeatActive,
            makeMessage: (elapsedMs) => {
                const elapsedText = formatElapsed(elapsedMs);
                return `OpenAI background run still in progress — ${elapsedText} elapsed (id=${responseId}).`;
            },
        });
    }
    try {
        return await pollBackgroundResponse({
            client,
            responseId,
            initialResponse,
            log,
            wait,
            now,
            maxWaitMs: BACKGROUND_MAX_WAIT_MS,
        });
    }
    finally {
        // Always stop the heartbeat, whether polling returned or threw.
        stopHeartbeatNow();
    }
}
|
|
331
|
+
/**
 * Poll a background response until it completes, fails, or the overall
 * deadline (maxWaitMs from the first poll) expires.
 * Logs status on the first cycle and on every non-completed status change.
 * @throws {OracleResponseError} on a terminal non-completed status
 * @throws {OracleTransportError} 'client-timeout' when the deadline passes
 */
async function pollBackgroundResponse(params) {
    const { client, responseId, initialResponse, log, wait, now, maxWaitMs } = params;
    const startMark = now();
    let response = initialResponse;
    let firstCycle = true;
    let lastStatus = response.status;
    // biome-ignore lint/nursery/noUnnecessaryConditions: intentional polling loop
    while (true) {
        // A missing status is treated as completion.
        const status = response.status ?? 'completed';
        // biome-ignore lint/nursery/noUnnecessaryConditions: guard only for first iteration
        if (firstCycle) {
            firstCycle = false;
            log(dim(`OpenAI background response status=${status}. We'll keep retrying automatically.`));
        }
        else if (status !== lastStatus && status !== 'completed') {
            // Only announce transitions; completion is reported by returning below.
            log(dim(`OpenAI background response status=${status}.`));
        }
        lastStatus = status;
        if (status === 'completed') {
            return response;
        }
        // Anything other than the two "still working" states is terminal.
        if (status !== 'in_progress' && status !== 'queued') {
            const detail = response.error?.message || response.incomplete_details?.reason || status;
            throw new OracleResponseError(`Response did not complete: ${detail}`, response);
        }
        // Deadline is checked both before and after sleeping so the sleep
        // cannot overshoot the budget by a whole poll interval.
        if (now() - startMark >= maxWaitMs) {
            throw new OracleTransportError('client-timeout', 'Timed out waiting for OpenAI background response to finish.');
        }
        await wait(BACKGROUND_POLL_INTERVAL_MS);
        if (now() - startMark >= maxWaitMs) {
            throw new OracleTransportError('client-timeout', 'Timed out waiting for OpenAI background response to finish.');
        }
        const { response: nextResponse, reconnected } = await retrieveBackgroundResponseWithRetry({
            client,
            responseId,
            wait,
            now,
            maxWaitMs,
            startMark,
            log,
        });
        if (reconnected) {
            // Retrieval needed at least one retry; tell the user we are back.
            const nextStatus = nextResponse.status ?? 'in_progress';
            log(dim(`Reconnected to OpenAI background response (status=${nextStatus}). OpenAI is still working...`));
        }
        response = nextResponse;
    }
}
|
|
379
|
+
/**
 * Retrieve a background response by id, retrying transient transport failures
 * with exponential backoff (3s, 6s, 12s, capped at 15s) until the shared
 * deadline expires. Non-transport errors are rethrown immediately.
 * @returns { response, reconnected } — reconnected is true when any retry occurred
 * @throws {OracleTransportError} 'client-timeout' when the deadline passes mid-retry
 */
async function retrieveBackgroundResponseWithRetry(params) {
    const { client, responseId, wait, now, maxWaitMs, startMark, log } = params;
    let retries = 0;
    // biome-ignore lint/nursery/noUnnecessaryConditions: intentional retry loop
    while (true) {
        try {
            const next = await client.responses.retrieve(responseId);
            return { response: next, reconnected: retries > 0 };
        }
        catch (error) {
            // Only connection-level failures are retryable; anything else bubbles up.
            const transportError = asRetryableTransportError(error);
            if (!transportError) {
                throw error;
            }
            retries += 1;
            const delay = Math.min(BACKGROUND_RETRY_BASE_MS * 2 ** (retries - 1), BACKGROUND_RETRY_MAX_MS);
            log(chalk.yellow(`${describeTransportError(transportError)} Retrying in ${formatElapsed(delay)}...`));
            await wait(delay);
            // Re-check the shared deadline after sleeping so retries cannot
            // extend the run past the background wait budget.
            if (now() - startMark >= maxWaitMs) {
                throw new OracleTransportError('client-timeout', 'Timed out waiting for OpenAI background response to finish.');
            }
        }
    }
}
|
|
403
|
+
/**
 * Classify an error as a retryable transport failure.
 * Already-wrapped OracleTransportErrors pass through unchanged; raw OpenAI SDK
 * connection failures are normalized via toTransportError; everything else
 * yields null (caller should rethrow).
 */
function asRetryableTransportError(error) {
    if (error instanceof OracleTransportError) {
        return error;
    }
    const isSdkConnectionFailure = error instanceof APIConnectionError || error instanceof APIConnectionTimeoutError;
    return isSdkConnectionFailure ? toTransportError(error) : null;
}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import chalk from 'chalk';
|
|
2
|
+
import { createFileSections } from './files.js';
|
|
3
|
+
/**
 * Compute per-file token usage for the attached files.
 * Each file section is tokenized exactly as it will appear in the prompt;
 * entries are sorted heaviest-first, and `percent` is the share of the input
 * budget when one was supplied (undefined otherwise).
 * @returns { stats, totalTokens }
 */
export function getFileTokenStats(files, { cwd = process.cwd(), tokenizer, tokenizerOptions, inputTokenBudget, }) {
    if (!files.length) {
        return { stats: [], totalTokens: 0 };
    }
    const stats = createFileSections(files, cwd)
        .map(({ absolutePath, displayPath, sectionText }) => {
            const tokens = tokenizer(sectionText, tokenizerOptions);
            return {
                path: absolutePath,
                displayPath,
                tokens,
                percent: inputTokenBudget ? (tokens / inputTokenBudget) * 100 : undefined,
            };
        })
        .sort((left, right) => right.tokens - left.tokens); // biggest consumers first
    let totalTokens = 0;
    for (const { tokens } of stats) {
        totalTokens += tokens;
    }
    return { stats, totalTokens };
}
|
|
23
|
+
/**
 * Print a per-file token usage table followed by a total line.
 * When an input budget is known, each row and the total also show the
 * percentage of that budget. No-op when there are no stats.
 */
export function printFileTokenStats({ stats, totalTokens }, { inputTokenBudget, log = console.log }) {
    if (stats.length === 0) {
        return;
    }
    log(chalk.bold('File Token Usage'));
    for (const entry of stats) {
        const hasPercent = Boolean(inputTokenBudget) && entry.percent != null;
        const percentLabel = hasPercent ? `${entry.percent.toFixed(2)}%` : 'n/a';
        // Fixed-width columns: tokens right-aligned to 10, percent to 8.
        log(`${entry.tokens.toLocaleString().padStart(10)} ${percentLabel.padStart(8)} ${entry.displayPath}`);
    }
    const totalLabel = `Total: ${totalTokens.toLocaleString()} tokens`;
    if (inputTokenBudget) {
        const totalPercent = (totalTokens / inputTokenBudget) * 100;
        log(`${totalLabel} (${totalPercent.toFixed(2)}% of ${inputTokenBudget.toLocaleString()})`);
    }
    else {
        log(totalLabel);
    }
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Emitted from a type-only TypeScript module; the empty export keeps this file an ES module.
export {};
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
// Barrel module: re-exports the public Oracle API surface so consumers can
// import everything from one path instead of reaching into oracle/*.
export * from './oracle/types.js';
export { MODEL_CONFIGS, DEFAULT_SYSTEM_PROMPT, TOKENIZER_OPTIONS, } from './oracle/config.js';
export { readFiles, createFileSections } from './oracle/files.js';
export { buildPrompt, buildRequestBody, renderPromptMarkdown } from './oracle/request.js';
export { formatUSD, formatNumber, formatElapsed } from './oracle/format.js';
export { getFileTokenStats, printFileTokenStats } from './oracle/tokenStats.js';
export { OracleResponseError, OracleTransportError, OracleUserError, FileValidationError, BrowserAutomationError, PromptValidationError, describeTransportError, extractResponseMetadata, asOracleUserError, toTransportError, } from './oracle/errors.js';
export { createDefaultClientFactory } from './oracle/client.js';
export { runOracle, extractTextOutput } from './oracle/run.js';
|
|
@@ -0,0 +1,205 @@
|
|
|
1
|
+
import os from 'node:os';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import fs from 'node:fs/promises';
|
|
4
|
+
import { createWriteStream } from 'node:fs';
|
|
5
|
+
// Root directory for Oracle state; overridable via ORACLE_HOME_DIR (e.g. in tests).
const ORACLE_HOME = process.env.ORACLE_HOME_DIR ?? path.join(os.homedir(), '.oracle');
// Each session gets its own subdirectory under here.
const SESSIONS_DIR = path.join(ORACLE_HOME, 'sessions');
// Hard ceiling on how many sessions a status listing may return.
const MAX_STATUS_LIMIT = 1000;
// Fallback slug when a prompt yields no usable words.
const DEFAULT_SLUG = 'session';
// Generated slugs keep at most 5 words; custom slugs must have 3–5 (see normalizeCustomSlug).
const MAX_SLUG_WORDS = 5;
const MIN_CUSTOM_SLUG_WORDS = 3;
11
|
+
// Create a directory (and any missing parents); succeeds if it already exists.
async function ensureDir(dirPath) {
    await fs.mkdir(dirPath, { recursive: true });
}
|
|
14
|
+
/** Make sure the on-disk session store (SESSIONS_DIR) exists. */
export async function ensureSessionStorage() {
    await ensureDir(SESSIONS_DIR);
}
|
|
17
|
+
/**
 * Turn free text into a dash-joined slug of at most `maxWords` lowercase
 * alphanumeric words. Null/undefined text or text with no usable words
 * (including maxWords = 0) yields DEFAULT_SLUG.
 */
function slugify(text, maxWords = MAX_SLUG_WORDS) {
    // Runs of [a-z0-9] are words; everything else acts as a separator.
    const tokens = (text?.toLowerCase() ?? '').match(/[a-z0-9]+/g) ?? [];
    const kept = tokens.slice(0, maxWords);
    if (kept.length === 0) {
        return DEFAULT_SLUG;
    }
    return kept.join('-');
}
|
|
23
|
+
/** Count the non-empty dash-separated segments in a slug. */
function countSlugWords(slug) {
    // Ignore empty segments from leading/trailing/doubled dashes.
    let count = 0;
    for (const segment of slug.split('-')) {
        if (segment) {
            count += 1;
        }
    }
    return count;
}
|
|
26
|
+
/**
 * Normalize a caller-provided slug and enforce the 3–5 word policy.
 * @throws {Error} when the normalized slug has too few or too many words
 */
function normalizeCustomSlug(candidate) {
    // Normalize first so the word-count check runs on the canonical form.
    const slug = slugify(candidate, MAX_SLUG_WORDS);
    const words = countSlugWords(slug);
    const withinBounds = words >= MIN_CUSTOM_SLUG_WORDS && words <= MAX_SLUG_WORDS;
    if (!withinBounds) {
        throw new Error(`Custom slug must include between ${MIN_CUSTOM_SLUG_WORDS} and ${MAX_SLUG_WORDS} words.`);
    }
    return slug;
}
|
|
34
|
+
/**
 * Derive a session id: an explicit custom slug wins (after validation),
 * otherwise the id is slugified from the prompt text.
 */
export function createSessionId(prompt, customSlug) {
    return customSlug ? normalizeCustomSlug(customSlug) : slugify(prompt);
}
|
|
40
|
+
// Absolute directory holding all artifacts for one session.
function sessionDir(id) {
    return path.join(SESSIONS_DIR, id);
}
// Session metadata (status, options, timestamps) lives in session.json.
function metaPath(id) {
    return path.join(sessionDir(id), 'session.json');
}
// Captured output is appended to output.log.
function logPath(id) {
    return path.join(sessionDir(id), 'output.log');
}
// The original run options are snapshotted to request.json.
function requestPath(id) {
    return path.join(sessionDir(id), 'request.json');
}
|
|
52
|
+
/** Resolve true when the path exists (per fs.access), false otherwise. */
async function fileExists(targetPath) {
    // fs.access resolves for an existing, visible path and rejects otherwise;
    // map both outcomes onto a boolean instead of letting the rejection escape.
    return fs.access(targetPath).then(() => true, () => false);
}
|
|
61
|
+
/**
 * Find a session id that does not collide with an existing session directory
 * by probing base, base-2, base-3, ... in order.
 */
async function ensureUniqueSessionId(baseSlug) {
    let candidate = baseSlug;
    for (let suffix = 2; await fileExists(sessionDir(candidate)); suffix += 1) {
        candidate = `${baseSlug}-${suffix}`;
    }
    return candidate;
}
|
|
70
|
+
/**
 * Create the on-disk layout for a new session: a unique slug-named directory
 * containing session.json (metadata), request.json (options snapshot), and an
 * empty output.log ready for appending.
 * @param options run options (prompt, model, slug, browser config, ...)
 * @param cwd working directory the session was launched from
 * @returns the freshly written session metadata object
 */
export async function initializeSession(options, cwd) {
    await ensureSessionStorage();
    // Derive a slug from the prompt (or validate a caller-provided one), then
    // de-dupe against existing session directories with -2, -3, ... suffixes.
    const baseSlug = createSessionId(options.prompt || DEFAULT_SLUG, options.slug);
    const sessionId = await ensureUniqueSessionId(baseSlug);
    const dir = sessionDir(sessionId);
    await ensureDir(dir);
    const mode = options.mode ?? 'api';
    const browserConfig = options.browserConfig;
    const metadata = {
        id: sessionId,
        createdAt: new Date().toISOString(),
        status: 'pending',
        // Only a short preview of the prompt is kept in listing metadata.
        promptPreview: (options.prompt || '').slice(0, 160),
        model: options.model,
        cwd,
        mode,
        browser: browserConfig ? { config: browserConfig } : undefined,
        // Full option snapshot so the session can be inspected/replayed later.
        options: {
            prompt: options.prompt,
            file: options.file ?? [],
            model: options.model,
            maxInput: options.maxInput,
            system: options.system,
            maxOutput: options.maxOutput,
            silent: options.silent,
            filesReport: options.filesReport,
            slug: sessionId,
            mode,
            browserConfig,
            verbose: options.verbose,
            heartbeatIntervalMs: options.heartbeatIntervalMs,
            browserInlineFiles: options.browserInlineFiles,
            background: options.background,
        },
    };
    await fs.writeFile(metaPath(sessionId), JSON.stringify(metadata, null, 2), 'utf8');
    await fs.writeFile(requestPath(sessionId), JSON.stringify(metadata.options, null, 2), 'utf8');
    // Touch the log file so readers never race against its creation.
    await fs.writeFile(logPath(sessionId), '', 'utf8');
    return metadata;
}
|
|
110
|
+
/**
 * Read and parse a session's session.json.
 * A missing or unparseable file is reported as null ("no such session").
 */
export async function readSessionMetadata(sessionId) {
    try {
        return JSON.parse(await fs.readFile(metaPath(sessionId), 'utf8'));
    }
    catch {
        return null;
    }
}
|
|
119
|
+
/**
 * Shallow-merge `updates` over the metadata currently on disk and persist the
 * result. A missing/unreadable session starts from just `{ id: sessionId }`.
 * @returns the merged metadata that was written
 */
export async function updateSessionMetadata(sessionId, updates) {
    const current = (await readSessionMetadata(sessionId)) ?? { id: sessionId };
    const merged = Object.assign({}, current, updates);
    await fs.writeFile(metaPath(sessionId), JSON.stringify(merged, null, 2), 'utf8');
    return merged;
}
|
|
125
|
+
/**
 * Open the session's output.log for appending and return small helpers:
 * `logLine` writes a newline-terminated line, `writeChunk` writes raw text
 * (always reporting success), and `logPath` is the absolute log location.
 */
export function createSessionLogWriter(sessionId) {
    const target = logPath(sessionId);
    // Append mode so restarts/reattaches never clobber earlier output.
    const stream = createWriteStream(target, { flags: 'a' });
    return {
        stream,
        logLine(line = '') {
            stream.write(`${line}\n`);
        },
        writeChunk(chunk) {
            stream.write(chunk);
            return true;
        },
        logPath: target,
    };
}
|
|
136
|
+
/**
 * Load metadata for every stored session, newest first.
 * Directories whose session.json is missing or unreadable are skipped.
 * @returns session metadata objects sorted by createdAt descending
 */
export async function listSessionsMetadata() {
    await ensureSessionStorage();
    const entries = await fs.readdir(SESSIONS_DIR).catch(() => []);
    // The per-session reads are independent, so issue them in parallel instead
    // of awaiting one directory at a time.
    const maybeMetas = await Promise.all(entries.map((entry) => readSessionMetadata(entry)));
    const metas = maybeMetas.filter(Boolean);
    // Newest first.
    return metas.sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
}
|
|
148
|
+
/**
 * Filter session metadata to the last `hours` (unless includeAll) and cap the
 * page size at min(limit, MAX_STATUS_LIMIT).
 * @returns { entries, truncated, total } where total counts matches before capping
 */
export function filterSessionsByRange(metas, { hours = 24, includeAll = false, limit = 100 }) {
    // Hard cap the page size regardless of what the caller asked for.
    const effectiveLimit = Math.min(limit, MAX_STATUS_LIMIT);
    let inRange = metas;
    if (!includeAll) {
        const cutoff = Date.now() - hours * 60 * 60 * 1000;
        inRange = metas.filter((meta) => Date.parse(meta.createdAt) >= cutoff);
    }
    return {
        entries: inRange.slice(0, effectiveLimit),
        truncated: inRange.length > effectiveLimit,
        total: inRange.length,
    };
}
|
|
159
|
+
/**
 * Read the session's output.log as UTF-8.
 * A session with no log yet (or a deleted one) reads as the empty string.
 */
export async function readSessionLog(sessionId) {
    return fs.readFile(logPath(sessionId), 'utf8').catch(() => '');
}
|
|
167
|
+
/**
 * Delete session directories created more than `hours` ago (default 24), or
 * every session when `includeAll` is set. Entries whose creation time cannot
 * be determined at all are left alone.
 * @returns counts of deleted and remaining session directories
 */
export async function deleteSessionsOlderThan({ hours = 24, includeAll = false, } = {}) {
    await ensureSessionStorage();
    const entries = await fs.readdir(SESSIONS_DIR).catch(() => []);
    if (!entries.length) {
        return { deleted: 0, remaining: 0 };
    }
    // With includeAll the cutoff value is irrelevant: the `includeAll ||` below
    // deletes unconditionally.
    const cutoff = includeAll ? Number.NEGATIVE_INFINITY : Date.now() - hours * 60 * 60 * 1000;
    let deleted = 0;
    for (const entry of entries) {
        const dir = sessionDir(entry);
        let createdMs;
        // Prefer the recorded createdAt; fall back to filesystem timestamps.
        const meta = await readSessionMetadata(entry);
        if (meta?.createdAt) {
            const parsed = Date.parse(meta.createdAt);
            if (!Number.isNaN(parsed)) {
                createdMs = parsed;
            }
        }
        if (createdMs == null) {
            try {
                const stats = await fs.stat(dir);
                // birthtime can be 0 on some filesystems; mtime is the fallback.
                createdMs = stats.birthtimeMs || stats.mtimeMs;
            }
            catch {
                // Entry vanished or is unreadable: skip it.
                continue;
            }
        }
        if (includeAll || (createdMs != null && createdMs < cutoff)) {
            await fs.rm(dir, { recursive: true, force: true });
            deleted += 1;
        }
    }
    const remaining = Math.max(entries.length - deleted, 0);
    return { deleted, remaining };
}
|
|
202
|
+
/** Resolve (with undefined) after roughly `ms` milliseconds. */
export async function wait(ms) {
    await new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}
|
|
205
|
+
export { ORACLE_HOME, SESSIONS_DIR, MAX_STATUS_LIMIT };
|