@steipete/oracle 1.0.8 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/README.md +3 -0
  2. package/dist/.DS_Store +0 -0
  3. package/dist/bin/oracle-cli.js +9 -3
  4. package/dist/markdansi/types/index.js +4 -0
  5. package/dist/oracle/bin/oracle-cli.js +472 -0
  6. package/dist/oracle/src/browser/actions/assistantResponse.js +471 -0
  7. package/dist/oracle/src/browser/actions/attachments.js +82 -0
  8. package/dist/oracle/src/browser/actions/modelSelection.js +190 -0
  9. package/dist/oracle/src/browser/actions/navigation.js +75 -0
  10. package/dist/oracle/src/browser/actions/promptComposer.js +167 -0
  11. package/dist/oracle/src/browser/chromeLifecycle.js +104 -0
  12. package/dist/oracle/src/browser/config.js +33 -0
  13. package/dist/oracle/src/browser/constants.js +40 -0
  14. package/dist/oracle/src/browser/cookies.js +210 -0
  15. package/dist/oracle/src/browser/domDebug.js +36 -0
  16. package/dist/oracle/src/browser/index.js +331 -0
  17. package/dist/oracle/src/browser/pageActions.js +5 -0
  18. package/dist/oracle/src/browser/prompt.js +88 -0
  19. package/dist/oracle/src/browser/promptSummary.js +20 -0
  20. package/dist/oracle/src/browser/sessionRunner.js +80 -0
  21. package/dist/oracle/src/browser/types.js +1 -0
  22. package/dist/oracle/src/browser/utils.js +62 -0
  23. package/dist/oracle/src/browserMode.js +1 -0
  24. package/dist/oracle/src/cli/browserConfig.js +44 -0
  25. package/dist/oracle/src/cli/dryRun.js +59 -0
  26. package/dist/oracle/src/cli/engine.js +17 -0
  27. package/dist/oracle/src/cli/errorUtils.js +9 -0
  28. package/dist/oracle/src/cli/help.js +70 -0
  29. package/dist/oracle/src/cli/markdownRenderer.js +15 -0
  30. package/dist/oracle/src/cli/options.js +103 -0
  31. package/dist/oracle/src/cli/promptRequirement.js +14 -0
  32. package/dist/oracle/src/cli/rootAlias.js +30 -0
  33. package/dist/oracle/src/cli/sessionCommand.js +77 -0
  34. package/dist/oracle/src/cli/sessionDisplay.js +270 -0
  35. package/dist/oracle/src/cli/sessionRunner.js +94 -0
  36. package/dist/oracle/src/heartbeat.js +43 -0
  37. package/dist/oracle/src/oracle/client.js +48 -0
  38. package/dist/oracle/src/oracle/config.js +29 -0
  39. package/dist/oracle/src/oracle/errors.js +101 -0
  40. package/dist/oracle/src/oracle/files.js +220 -0
  41. package/dist/oracle/src/oracle/format.js +33 -0
  42. package/dist/oracle/src/oracle/fsAdapter.js +7 -0
  43. package/dist/oracle/src/oracle/oscProgress.js +60 -0
  44. package/dist/oracle/src/oracle/request.js +48 -0
  45. package/dist/oracle/src/oracle/run.js +444 -0
  46. package/dist/oracle/src/oracle/tokenStats.js +39 -0
  47. package/dist/oracle/src/oracle/types.js +1 -0
  48. package/dist/oracle/src/oracle.js +9 -0
  49. package/dist/oracle/src/sessionManager.js +205 -0
  50. package/dist/oracle/src/version.js +39 -0
  51. package/dist/src/cli/markdownRenderer.js +18 -0
  52. package/dist/src/cli/rootAlias.js +14 -0
  53. package/dist/src/cli/sessionCommand.js +60 -2
  54. package/dist/src/cli/sessionDisplay.js +129 -4
  55. package/dist/src/oracle/oscProgress.js +60 -0
  56. package/dist/src/oracle/run.js +63 -51
  57. package/dist/src/sessionManager.js +17 -0
  58. package/package.json +14 -22
@@ -0,0 +1,444 @@
1
+ import chalk from 'chalk';
2
+ import kleur from 'kleur';
3
+ import fs from 'node:fs/promises';
4
+ import path from 'node:path';
5
+ import process from 'node:process';
6
+ import { performance } from 'node:perf_hooks';
7
+ import { APIConnectionError, APIConnectionTimeoutError } from 'openai';
8
+ import { DEFAULT_SYSTEM_PROMPT, MODEL_CONFIGS, TOKENIZER_OPTIONS } from './config.js';
9
+ import { readFiles } from './files.js';
10
+ import { buildPrompt, buildRequestBody } from './request.js';
11
+ import { formatElapsed, formatUSD } from './format.js';
12
+ import { getFileTokenStats, printFileTokenStats } from './tokenStats.js';
13
+ import { OracleResponseError, OracleTransportError, PromptValidationError, describeTransportError, toTransportError, } from './errors.js';
14
+ import { createDefaultClientFactory } from './client.js';
15
+ import { startHeartbeat } from '../heartbeat.js';
16
+ import { startOscProgress } from './oscProgress.js';
17
+ import { getCliVersion } from '../version.js';
18
+ import { createFsAdapter } from './fsAdapter.js';
19
+ const isTty = process.stdout.isTTY;
20
+ const dim = (text) => (isTty ? kleur.dim(text) : text);
21
+ const BACKGROUND_MAX_WAIT_MS = 30 * 60 * 1000;
22
+ const BACKGROUND_POLL_INTERVAL_MS = 5000;
23
+ const BACKGROUND_RETRY_BASE_MS = 3000;
24
+ const BACKGROUND_RETRY_MAX_MS = 15000;
25
+ const defaultWait = (ms) => new Promise((resolve) => {
26
+ setTimeout(resolve, ms);
27
+ });
28
+ export async function runOracle(options, deps = {}) {
29
+ const { apiKey = options.apiKey ?? process.env.OPENAI_API_KEY, cwd = process.cwd(), fs: fsModule = createFsAdapter(fs), log = console.log, write = (text) => process.stdout.write(text), now = () => performance.now(), clientFactory = createDefaultClientFactory(), client, wait = defaultWait, } = deps;
30
+ const maskApiKey = (key) => {
31
+ if (!key)
32
+ return null;
33
+ if (key.length <= 8)
34
+ return `${key[0] ?? ''}***${key[key.length - 1] ?? ''}`;
35
+ const prefix = key.slice(0, 4);
36
+ const suffix = key.slice(-4);
37
+ return `${prefix}****${suffix}`;
38
+ };
39
+ const logVerbose = (message) => {
40
+ if (options.verbose) {
41
+ log(dim(`[verbose] ${message}`));
42
+ }
43
+ };
44
+ const previewMode = resolvePreviewMode(options.previewMode ?? options.preview);
45
+ const isPreview = Boolean(previewMode);
46
+ if (!apiKey) {
47
+ throw new PromptValidationError('Missing OPENAI_API_KEY. Set it via the environment or a .env file.', {
48
+ env: 'OPENAI_API_KEY',
49
+ });
50
+ }
51
+ const maskedKey = maskApiKey(apiKey);
52
+ if (maskedKey) {
53
+ log(dim(`Using OPENAI_API_KEY=${maskedKey}`));
54
+ }
55
+ const modelConfig = MODEL_CONFIGS[options.model];
56
+ if (!modelConfig) {
57
+ throw new PromptValidationError(`Unsupported model "${options.model}". Choose one of: ${Object.keys(MODEL_CONFIGS).join(', ')}`, { model: options.model });
58
+ }
59
+ const useBackground = options.background ?? (options.model === 'gpt-5-pro');
60
+ const inputTokenBudget = options.maxInput ?? modelConfig.inputLimit;
61
+ const files = await readFiles(options.file ?? [], { cwd, fsModule });
62
+ const searchEnabled = options.search !== false;
63
+ logVerbose(`cwd: ${cwd}`);
64
+ if (files.length > 0) {
65
+ const displayPaths = files
66
+ .map((file) => path.relative(cwd, file.path) || file.path)
67
+ .slice(0, 10)
68
+ .join(', ');
69
+ const extra = files.length > 10 ? ` (+${files.length - 10} more)` : '';
70
+ logVerbose(`Attached files (${files.length}): ${displayPaths}${extra}`);
71
+ }
72
+ else {
73
+ logVerbose('No files attached.');
74
+ if (!isPreview) {
75
+ log(dim('Tip: no files attached — Oracle works best with project context. Add files via --file path/to/code or docs.'));
76
+ }
77
+ }
78
+ const fileTokenInfo = getFileTokenStats(files, {
79
+ cwd,
80
+ tokenizer: modelConfig.tokenizer,
81
+ tokenizerOptions: TOKENIZER_OPTIONS,
82
+ inputTokenBudget,
83
+ });
84
+ const totalFileTokens = fileTokenInfo.totalTokens;
85
+ logVerbose(`Attached files use ${totalFileTokens.toLocaleString()} tokens`);
86
+ const systemPrompt = options.system?.trim() || DEFAULT_SYSTEM_PROMPT;
87
+ const promptWithFiles = buildPrompt(options.prompt, files, cwd);
88
+ const tokenizerInput = [
89
+ { role: 'system', content: systemPrompt },
90
+ { role: 'user', content: promptWithFiles },
91
+ ];
92
+ const estimatedInputTokens = modelConfig.tokenizer(tokenizerInput, TOKENIZER_OPTIONS);
93
+ logVerbose(`Estimated tokens (prompt + files): ${estimatedInputTokens.toLocaleString()}`);
94
+ const fileCount = files.length;
95
+ const cliVersion = getCliVersion();
96
+ const richTty = process.stdout.isTTY && chalk.level > 0;
97
+ const headerModelLabel = richTty ? chalk.cyan(modelConfig.model) : modelConfig.model;
98
+ const tokenLabel = richTty ? chalk.green(estimatedInputTokens.toLocaleString()) : estimatedInputTokens.toLocaleString();
99
+ const fileLabel = richTty ? chalk.magenta(fileCount.toString()) : fileCount.toString();
100
+ const headerLine = `Oracle (${cliVersion}) consulting ${headerModelLabel}'s crystal ball with ${tokenLabel} tokens and ${fileLabel} files...`;
101
+ const shouldReportFiles = (options.filesReport || fileTokenInfo.totalTokens > inputTokenBudget) && fileTokenInfo.stats.length > 0;
102
+ if (!isPreview) {
103
+ log(headerLine);
104
+ if (options.model === 'gpt-5-pro') {
105
+ log(dim('Pro is thinking, this can take up to 30 minutes...'));
106
+ }
107
+ log(dim('Press Ctrl+C to cancel.'));
108
+ }
109
+ if (shouldReportFiles) {
110
+ printFileTokenStats(fileTokenInfo, { inputTokenBudget, log });
111
+ }
112
+ if (estimatedInputTokens > inputTokenBudget) {
113
+ throw new PromptValidationError(`Input too large (${estimatedInputTokens.toLocaleString()} tokens). Limit is ${inputTokenBudget.toLocaleString()} tokens.`, { estimatedInputTokens, inputTokenBudget });
114
+ }
115
+ const requestBody = buildRequestBody({
116
+ modelConfig,
117
+ systemPrompt,
118
+ userPrompt: promptWithFiles,
119
+ searchEnabled,
120
+ maxOutputTokens: options.maxOutput,
121
+ background: useBackground,
122
+ storeResponse: useBackground,
123
+ });
124
+ if (isPreview && previewMode) {
125
+ if (previewMode === 'json' || previewMode === 'full') {
126
+ log('Request JSON');
127
+ log(JSON.stringify(requestBody, null, 2));
128
+ log('');
129
+ }
130
+ if (previewMode === 'full') {
131
+ log('Assembled Prompt');
132
+ log(promptWithFiles);
133
+ log('');
134
+ }
135
+ log(`Estimated input tokens: ${estimatedInputTokens.toLocaleString()} / ${inputTokenBudget.toLocaleString()} (model: ${modelConfig.model})`);
136
+ return {
137
+ mode: 'preview',
138
+ previewMode,
139
+ requestBody,
140
+ estimatedInputTokens,
141
+ inputTokenBudget,
142
+ };
143
+ }
144
+ const openAiClient = client ?? clientFactory(apiKey);
145
+ logVerbose('Dispatching request to OpenAI Responses API...');
146
+ const stopOscProgress = startOscProgress({
147
+ label: useBackground ? 'Waiting for OpenAI (background)' : 'Waiting for OpenAI',
148
+ targetMs: useBackground ? BACKGROUND_MAX_WAIT_MS : 10 * 60_000,
149
+ write,
150
+ });
151
+ const runStart = now();
152
+ let response = null;
153
+ let elapsedMs = 0;
154
+ let sawTextDelta = false;
155
+ let answerHeaderPrinted = false;
156
+ const ensureAnswerHeader = () => {
157
+ if (!options.silent && !answerHeaderPrinted) {
158
+ log('');
159
+ log(chalk.bold('Answer:'));
160
+ answerHeaderPrinted = true;
161
+ }
162
+ };
163
+ try {
164
+ if (useBackground) {
165
+ response = await executeBackgroundResponse({
166
+ client: openAiClient,
167
+ requestBody,
168
+ log,
169
+ wait,
170
+ heartbeatIntervalMs: options.heartbeatIntervalMs,
171
+ now,
172
+ });
173
+ elapsedMs = now() - runStart;
174
+ }
175
+ else {
176
+ const stream = await openAiClient.responses.stream(requestBody);
177
+ let heartbeatActive = false;
178
+ let stopHeartbeat = null;
179
+ const stopHeartbeatNow = () => {
180
+ if (!heartbeatActive) {
181
+ return;
182
+ }
183
+ heartbeatActive = false;
184
+ stopHeartbeat?.();
185
+ stopHeartbeat = null;
186
+ };
187
+ if (options.heartbeatIntervalMs && options.heartbeatIntervalMs > 0) {
188
+ heartbeatActive = true;
189
+ stopHeartbeat = startHeartbeat({
190
+ intervalMs: options.heartbeatIntervalMs,
191
+ log: (message) => log(message),
192
+ isActive: () => heartbeatActive,
193
+ makeMessage: (elapsedMs) => {
194
+ const elapsedText = formatElapsed(elapsedMs);
195
+ return `API connection active — ${elapsedText} elapsed. Expect up to ~10 min before GPT-5 responds.`;
196
+ },
197
+ });
198
+ }
199
+ try {
200
+ for await (const event of stream) {
201
+ if (event.type === 'response.output_text.delta') {
202
+ stopOscProgress();
203
+ stopHeartbeatNow();
204
+ sawTextDelta = true;
205
+ ensureAnswerHeader();
206
+ if (!options.silent && typeof event.delta === 'string') {
207
+ write(event.delta);
208
+ }
209
+ }
210
+ }
211
+ }
212
+ catch (streamError) {
213
+ if (typeof stream.abort === 'function') {
214
+ stream.abort();
215
+ }
216
+ stopHeartbeatNow();
217
+ const transportError = toTransportError(streamError);
218
+ log(chalk.yellow(describeTransportError(transportError)));
219
+ throw transportError;
220
+ }
221
+ response = await stream.finalResponse();
222
+ stopHeartbeatNow();
223
+ elapsedMs = now() - runStart;
224
+ }
225
+ }
226
+ finally {
227
+ stopOscProgress();
228
+ }
229
+ if (!response) {
230
+ throw new Error('OpenAI did not return a response.');
231
+ }
232
+ logVerbose(`Response status: ${response.status ?? 'completed'}`);
233
+ if (response.status && response.status !== 'completed') {
234
+ const detail = response.error?.message || response.incomplete_details?.reason || response.status;
235
+ log(chalk.yellow(`OpenAI ended the run early (status=${response.status}${response.incomplete_details?.reason ? `, reason=${response.incomplete_details.reason}` : ''}).`));
236
+ throw new OracleResponseError(`Response did not complete: ${detail}`, response);
237
+ }
238
+ const answerText = extractTextOutput(response);
239
+ if (!options.silent) {
240
+ // biome-ignore lint/nursery/noUnnecessaryConditions: flips true when streaming events arrive
241
+ if (sawTextDelta) {
242
+ write('\n\n');
243
+ }
244
+ else {
245
+ ensureAnswerHeader();
246
+ log(answerText || chalk.dim('(no text output)'));
247
+ log('');
248
+ }
249
+ }
250
+ const usage = response.usage ?? {};
251
+ const inputTokens = usage.input_tokens ?? estimatedInputTokens;
252
+ const outputTokens = usage.output_tokens ?? 0;
253
+ const reasoningTokens = usage.reasoning_tokens ?? 0;
254
+ const totalTokens = usage.total_tokens ?? inputTokens + outputTokens + reasoningTokens;
255
+ const cost = inputTokens * modelConfig.pricing.inputPerToken + outputTokens * modelConfig.pricing.outputPerToken;
256
+ const elapsedDisplay = formatElapsed(elapsedMs);
257
+ const statsParts = [];
258
+ const modelLabel = modelConfig.model + (modelConfig.reasoning ? '[high]' : '');
259
+ statsParts.push(modelLabel);
260
+ statsParts.push(formatUSD(cost));
261
+ const tokensDisplay = [inputTokens, outputTokens, reasoningTokens, totalTokens]
262
+ .map((value, index) => formatTokenValue(value, usage, index))
263
+ .join('/');
264
+ statsParts.push(`tok(i/o/r/t)=${tokensDisplay}`);
265
+ if (!searchEnabled) {
266
+ statsParts.push('search=off');
267
+ }
268
+ if (files.length > 0) {
269
+ statsParts.push(`files=${files.length}`);
270
+ }
271
+ log(chalk.blue(`Finished in ${elapsedDisplay} (${statsParts.join(' | ')})`));
272
+ return {
273
+ mode: 'live',
274
+ response,
275
+ usage: { inputTokens, outputTokens, reasoningTokens, totalTokens },
276
+ elapsedMs,
277
+ };
278
+ }
279
+ function formatTokenValue(value, usage, index) {
280
+ const estimatedFlag = (index === 0 && usage?.input_tokens == null) ||
281
+ (index === 1 && usage?.output_tokens == null) ||
282
+ (index === 2 && usage?.reasoning_tokens == null) ||
283
+ (index === 3 && usage?.total_tokens == null);
284
+ const text = value.toLocaleString();
285
+ return estimatedFlag ? `${text}*` : text;
286
+ }
287
+ function resolvePreviewMode(value) {
288
+ const allowed = new Set(['summary', 'json', 'full']);
289
+ if (typeof value === 'string' && value.length > 0) {
290
+ return allowed.has(value) ? value : 'summary';
291
+ }
292
+ if (value) {
293
+ return 'summary';
294
+ }
295
+ return undefined;
296
+ }
297
+ export function extractTextOutput(response) {
298
+ if (Array.isArray(response.output_text) && response.output_text.length > 0) {
299
+ return response.output_text.join('\n');
300
+ }
301
+ if (Array.isArray(response.output)) {
302
+ const segments = [];
303
+ for (const item of response.output) {
304
+ if (Array.isArray(item.content)) {
305
+ for (const chunk of item.content) {
306
+ if (chunk && (chunk.type === 'output_text' || chunk.type === 'text') && chunk.text) {
307
+ segments.push(chunk.text);
308
+ }
309
+ }
310
+ }
311
+ else if (typeof item.text === 'string') {
312
+ segments.push(item.text);
313
+ }
314
+ }
315
+ return segments.join('\n');
316
+ }
317
+ return '';
318
+ }
319
+ async function executeBackgroundResponse(params) {
320
+ const { client, requestBody, log, wait, heartbeatIntervalMs, now } = params;
321
+ const initialResponse = await client.responses.create(requestBody);
322
+ if (!initialResponse || !initialResponse.id) {
323
+ throw new OracleResponseError('OpenAI did not return a response ID for the background run.', initialResponse);
324
+ }
325
+ const responseId = initialResponse.id;
326
+ log(dim(`OpenAI scheduled background response ${responseId} (status=${initialResponse.status ?? 'unknown'}). Monitoring up to ${Math.round(BACKGROUND_MAX_WAIT_MS / 60000)} minutes for completion...`));
327
+ let heartbeatActive = false;
328
+ let stopHeartbeat = null;
329
+ const stopHeartbeatNow = () => {
330
+ if (!heartbeatActive) {
331
+ return;
332
+ }
333
+ heartbeatActive = false;
334
+ stopHeartbeat?.();
335
+ stopHeartbeat = null;
336
+ };
337
+ if (heartbeatIntervalMs && heartbeatIntervalMs > 0) {
338
+ heartbeatActive = true;
339
+ stopHeartbeat = startHeartbeat({
340
+ intervalMs: heartbeatIntervalMs,
341
+ log: (message) => log(message),
342
+ isActive: () => heartbeatActive,
343
+ makeMessage: (elapsedMs) => {
344
+ const elapsedText = formatElapsed(elapsedMs);
345
+ return `OpenAI background run still in progress — ${elapsedText} elapsed.`;
346
+ },
347
+ });
348
+ }
349
+ try {
350
+ return await pollBackgroundResponse({
351
+ client,
352
+ responseId,
353
+ initialResponse,
354
+ log,
355
+ wait,
356
+ now,
357
+ maxWaitMs: BACKGROUND_MAX_WAIT_MS,
358
+ });
359
+ }
360
+ finally {
361
+ stopHeartbeatNow();
362
+ }
363
+ }
364
+ async function pollBackgroundResponse(params) {
365
+ const { client, responseId, initialResponse, log, wait, now, maxWaitMs } = params;
366
+ const startMark = now();
367
+ let response = initialResponse;
368
+ let firstCycle = true;
369
+ let lastStatus = response.status;
370
+ // biome-ignore lint/nursery/noUnnecessaryConditions: intentional polling loop
371
+ while (true) {
372
+ const status = response.status ?? 'completed';
373
+ // biome-ignore lint/nursery/noUnnecessaryConditions: guard only for first iteration
374
+ if (firstCycle) {
375
+ firstCycle = false;
376
+ log(dim(`OpenAI background response status=${status}. We'll keep retrying automatically.`));
377
+ }
378
+ else if (status !== lastStatus && status !== 'completed') {
379
+ log(dim(`OpenAI background response status=${status}.`));
380
+ }
381
+ lastStatus = status;
382
+ if (status === 'completed') {
383
+ return response;
384
+ }
385
+ if (status !== 'in_progress' && status !== 'queued') {
386
+ const detail = response.error?.message || response.incomplete_details?.reason || status;
387
+ throw new OracleResponseError(`Response did not complete: ${detail}`, response);
388
+ }
389
+ if (now() - startMark >= maxWaitMs) {
390
+ throw new OracleTransportError('client-timeout', 'Timed out waiting for OpenAI background response to finish.');
391
+ }
392
+ await wait(BACKGROUND_POLL_INTERVAL_MS);
393
+ if (now() - startMark >= maxWaitMs) {
394
+ throw new OracleTransportError('client-timeout', 'Timed out waiting for OpenAI background response to finish.');
395
+ }
396
+ const { response: nextResponse, reconnected } = await retrieveBackgroundResponseWithRetry({
397
+ client,
398
+ responseId,
399
+ wait,
400
+ now,
401
+ maxWaitMs,
402
+ startMark,
403
+ log,
404
+ });
405
+ if (reconnected) {
406
+ const nextStatus = nextResponse.status ?? 'in_progress';
407
+ log(dim(`Reconnected to OpenAI background response (status=${nextStatus}). OpenAI is still working...`));
408
+ }
409
+ response = nextResponse;
410
+ }
411
+ }
412
+ async function retrieveBackgroundResponseWithRetry(params) {
413
+ const { client, responseId, wait, now, maxWaitMs, startMark, log } = params;
414
+ let retries = 0;
415
+ // biome-ignore lint/nursery/noUnnecessaryConditions: intentional retry loop
416
+ while (true) {
417
+ try {
418
+ const next = await client.responses.retrieve(responseId);
419
+ return { response: next, reconnected: retries > 0 };
420
+ }
421
+ catch (error) {
422
+ const transportError = asRetryableTransportError(error);
423
+ if (!transportError) {
424
+ throw error;
425
+ }
426
+ retries += 1;
427
+ const delay = Math.min(BACKGROUND_RETRY_BASE_MS * 2 ** (retries - 1), BACKGROUND_RETRY_MAX_MS);
428
+ log(chalk.yellow(`${describeTransportError(transportError)} Retrying in ${formatElapsed(delay)}...`));
429
+ await wait(delay);
430
+ if (now() - startMark >= maxWaitMs) {
431
+ throw new OracleTransportError('client-timeout', 'Timed out waiting for OpenAI background response to finish.');
432
+ }
433
+ }
434
+ }
435
+ }
436
+ function asRetryableTransportError(error) {
437
+ if (error instanceof OracleTransportError) {
438
+ return error;
439
+ }
440
+ if (error instanceof APIConnectionError || error instanceof APIConnectionTimeoutError) {
441
+ return toTransportError(error);
442
+ }
443
+ return null;
444
+ }
@@ -0,0 +1,39 @@
1
+ import chalk from 'chalk';
2
+ import { createFileSections } from './files.js';
3
+ export function getFileTokenStats(files, { cwd = process.cwd(), tokenizer, tokenizerOptions, inputTokenBudget, }) {
4
+ if (!files.length) {
5
+ return { stats: [], totalTokens: 0 };
6
+ }
7
+ const sections = createFileSections(files, cwd);
8
+ const stats = sections
9
+ .map((section) => {
10
+ const tokens = tokenizer(section.sectionText, tokenizerOptions);
11
+ const percent = inputTokenBudget ? (tokens / inputTokenBudget) * 100 : undefined;
12
+ return {
13
+ path: section.absolutePath,
14
+ displayPath: section.displayPath,
15
+ tokens,
16
+ percent,
17
+ };
18
+ })
19
+ .sort((a, b) => b.tokens - a.tokens);
20
+ const totalTokens = stats.reduce((sum, entry) => sum + entry.tokens, 0);
21
+ return { stats, totalTokens };
22
+ }
23
+ export function printFileTokenStats({ stats, totalTokens }, { inputTokenBudget, log = console.log }) {
24
+ if (!stats.length) {
25
+ return;
26
+ }
27
+ log(chalk.bold('File Token Usage'));
28
+ for (const entry of stats) {
29
+ const percentLabel = inputTokenBudget && entry.percent != null ? `${entry.percent.toFixed(2)}%` : 'n/a';
30
+ log(`${entry.tokens.toLocaleString().padStart(10)} ${percentLabel.padStart(8)} ${entry.displayPath}`);
31
+ }
32
+ if (inputTokenBudget) {
33
+ const totalPercent = (totalTokens / inputTokenBudget) * 100;
34
+ log(`Total: ${totalTokens.toLocaleString()} tokens (${totalPercent.toFixed(2)}% of ${inputTokenBudget.toLocaleString()})`);
35
+ }
36
+ else {
37
+ log(`Total: ${totalTokens.toLocaleString()} tokens`);
38
+ }
39
+ }
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,9 @@
1
+ export * from './oracle/types.js';
2
+ export { MODEL_CONFIGS, DEFAULT_SYSTEM_PROMPT, TOKENIZER_OPTIONS, } from './oracle/config.js';
3
+ export { readFiles, createFileSections } from './oracle/files.js';
4
+ export { buildPrompt, buildRequestBody, renderPromptMarkdown } from './oracle/request.js';
5
+ export { formatUSD, formatNumber, formatElapsed } from './oracle/format.js';
6
+ export { getFileTokenStats, printFileTokenStats } from './oracle/tokenStats.js';
7
+ export { OracleResponseError, OracleTransportError, OracleUserError, FileValidationError, BrowserAutomationError, PromptValidationError, describeTransportError, extractResponseMetadata, asOracleUserError, toTransportError, } from './oracle/errors.js';
8
+ export { createDefaultClientFactory } from './oracle/client.js';
9
+ export { runOracle, extractTextOutput } from './oracle/run.js';