formagent-sdk 0.3.3 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +247 -79
- package/dist/index.js +216 -76
- package/package.json +1 -1
package/dist/cli/index.js
CHANGED
|
@@ -2116,6 +2116,111 @@ class SessionManagerImpl {
|
|
|
2116
2116
|
}
|
|
2117
2117
|
}
|
|
2118
2118
|
|
|
2119
|
+
// src/utils/retry.ts
|
|
2120
|
+
// src/utils/retry.ts
// Baseline retry configuration merged under caller overrides by withRetry().
var DEFAULT_RETRY_OPTIONS = {
  maxAttempts: 3,       // total tries, including the initial call
  initialDelay: 1000,   // ms before the first retry
  maxDelay: 30000,      // ceiling on any backoff delay, in ms
  backoffMultiplier: 2, // exponential growth factor per attempt
  jitter: true,         // randomize delays to avoid thundering-herd retries
  onRetry: () => {},    // hook invoked as (attemptNumber, lastError) before sleeping
  signal: undefined     // optional AbortSignal checked before each attempt
};
|
|
2129
|
+
/**
 * Resolve after the given number of milliseconds.
 * @param {number} ms - duration of the pause
 * @returns {Promise<void>}
 */
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
|
2132
|
+
/**
 * Compute the backoff delay for a retry attempt.
 * Grows exponentially from initialDelay and is capped at maxDelay; with
 * jitter enabled the result is scaled by a random factor in [0.5, 1.0).
 * @param {number} attempt - zero-based attempt index
 * @param {number} initialDelay - base delay in ms
 * @param {number} maxDelay - upper bound in ms
 * @param {number} backoffMultiplier - exponential growth factor
 * @param {boolean} jitter - whether to randomize the delay
 * @returns {number} delay in milliseconds
 */
function calculateDelay(attempt, initialDelay, maxDelay, backoffMultiplier, jitter) {
  const exponential = initialDelay * Math.pow(backoffMultiplier, attempt);
  const capped = Math.min(exponential, maxDelay);
  return jitter ? capped * (0.5 + Math.random() * 0.5) : capped;
}
|
|
2139
|
+
/**
 * HTTP status codes worth retrying: any 5xx server error, plus 429
 * (rate limited).
 * @param {number} status
 * @returns {boolean}
 */
function isRetryableStatus(status) {
  if (status === 429) {
    return true;
  }
  return status >= 500;
}
|
|
2142
|
+
/**
 * Heuristically decide whether a thrown value represents a transient
 * failure worth retrying.
 *
 * TypeErrors are treated as network-level fetch failures when their
 * message mentions fetch/network or common socket error codes. Other
 * Errors are retried on rate/usage-limit wording, timeouts, or a
 * standalone 5xx status code embedded in the message.
 *
 * @param {unknown} error - value thrown by the wrapped operation
 * @returns {boolean} true when a retry is reasonable
 */
function isRetryableError(error) {
  if (error instanceof TypeError) {
    // fetch() rejects with TypeError on network-level failures.
    return error.message.includes("fetch") || error.message.includes("network") || error.message.includes("ECONNREFUSED") || error.message.includes("ETIMEDOUT") || error.message.includes("ECONNRESET");
  }
  if (error instanceof Error) {
    const message = error.message.toLowerCase();
    if (message.includes("rate_limit") || message.includes("rate limit")) {
      return true;
    }
    if (message.includes("usage_limit") || message.includes("usage limit")) {
      return true;
    }
    if (message.includes("timeout") || message.includes("timed out")) {
      return true;
    }
    // BUG FIX: the previous check used message.includes("5"), which marked
    // any message containing the digit 5 (e.g. "model gpt-5 not found") as
    // retryable. Only a standalone three-digit 5xx code counts.
    if (/\b5\d\d\b/.test(message)) {
      return true;
    }
  }
  return false;
}
|
|
2163
|
+
/**
 * Best-effort extraction of an HTTP status code from an error.
 * Prefers a structured `status` property (attached by fetchWithRetry),
 * falling back to scanning the message for a standalone three-digit
 * number. Word boundaries keep longer digit runs (ids, timestamps)
 * from producing bogus codes.
 * @param {Error} error
 * @returns {number | null} the status code, or null when none is found
 */
function extractStatusCode(error) {
  const status = error.status;
  if (typeof status === "number" && Number.isInteger(status)) {
    return status;
  }
  const match = error.message.match(/\b(\d{3})\b/);
  return match ? Number.parseInt(match[1], 10) : null;
}
|
|
2167
|
+
/**
 * Run `fn`, retrying transient failures with exponential backoff.
 * `options` is merged over DEFAULT_RETRY_OPTIONS. Non-retryable errors
 * and the final failed attempt are rethrown; an already-aborted signal
 * stops further attempts. `onRetry(attemptNumber, error)` fires before
 * each backoff sleep.
 * @param {() => Promise<any>} fn - operation to attempt
 * @param {object} [options] - partial retry options
 * @returns {Promise<any>} the first successful result of `fn`
 */
async function withRetry(fn, options = {}) {
  const opts = { ...DEFAULT_RETRY_OPTIONS, ...options };
  let lastError = null;
  let attempt = 0;
  while (attempt < opts.maxAttempts) {
    if (opts.signal?.aborted) {
      throw new Error("Request aborted");
    }
    try {
      return await fn();
    } catch (error) {
      lastError = error instanceof Error ? error : new Error(String(error));
      const isFinalAttempt = attempt >= opts.maxAttempts - 1;
      if (isFinalAttempt) {
        throw lastError;
      }
      // Prefer a concrete status code when one can be extracted; otherwise
      // fall back to message-based heuristics.
      const statusCode = extractStatusCode(lastError);
      const retryable = statusCode !== null ? isRetryableStatus(statusCode) : isRetryableError(lastError);
      if (!retryable) {
        throw lastError;
      }
      const delay = calculateDelay(attempt, opts.initialDelay, opts.maxDelay, opts.backoffMultiplier, opts.jitter);
      opts.onRetry(attempt + 1, lastError);
      await sleep(delay);
    }
    attempt += 1;
  }
  throw lastError || new Error("Max retries exceeded");
}
|
|
2193
|
+
/**
 * fetch() wrapped in withRetry. Non-ok responses are converted into
 * Errors that carry `status` and `responseText` so the retry logic can
 * classify them; the request's AbortSignal is forwarded to the retry
 * loop (explicit retryOptions may still override it).
 * @param {string | URL} url - request target
 * @param {RequestInit} init - fetch options (must be provided; its
 *   `signal` is read unconditionally)
 * @param {object} [retryOptions] - partial retry options
 * @returns {Promise<Response>} the first ok response
 */
async function fetchWithRetry(url, init, retryOptions) {
  const attemptFetch = async () => {
    const response = await fetch(url, init);
    if (response.ok) {
      return response;
    }
    const errorText = await response.text().catch(() => "");
    let errorMessage = `HTTP ${response.status}`;
    try {
      // Provider APIs usually return { error: { type, message } } JSON.
      const errorJson = JSON.parse(errorText);
      if (errorJson.error?.type) {
        errorMessage += `: ${errorJson.error.type}`;
      }
      if (errorJson.error?.message) {
        errorMessage += ` - ${errorJson.error.message}`;
      }
    } catch {
      // Not JSON: append the raw body, if any.
      if (errorText) {
        errorMessage += ` ${errorText}`;
      }
    }
    const error = new Error(errorMessage);
    error.status = response.status;
    error.responseText = errorText;
    throw error;
  };
  return withRetry(attemptFetch, {
    signal: init.signal,
    ...retryOptions
  });
}
|
|
2223
|
+
|
|
2119
2224
|
// src/llm/anthropic.ts
|
|
2120
2225
|
class AnthropicProvider {
|
|
2121
2226
|
id = "anthropic";
|
|
@@ -2128,6 +2233,7 @@ class AnthropicProvider {
|
|
|
2128
2233
|
/^claude-instant/
|
|
2129
2234
|
];
|
|
2130
2235
|
config;
|
|
2236
|
+
defaultRetryOptions;
|
|
2131
2237
|
constructor(config = {}) {
|
|
2132
2238
|
const apiKey = config.apiKey ?? process.env.ANTHROPIC_API_KEY;
|
|
2133
2239
|
if (!apiKey) {
|
|
@@ -2137,7 +2243,15 @@ class AnthropicProvider {
|
|
|
2137
2243
|
apiKey,
|
|
2138
2244
|
baseUrl: config.baseUrl ?? process.env.ANTHROPIC_BASE_URL ?? "https://api.anthropic.com",
|
|
2139
2245
|
apiVersion: config.apiVersion ?? "2023-06-01",
|
|
2140
|
-
defaultMaxTokens: config.defaultMaxTokens ?? 4096
|
|
2246
|
+
defaultMaxTokens: config.defaultMaxTokens ?? 4096,
|
|
2247
|
+
retry: config.retry
|
|
2248
|
+
};
|
|
2249
|
+
this.defaultRetryOptions = {
|
|
2250
|
+
maxAttempts: 3,
|
|
2251
|
+
initialDelay: 1000,
|
|
2252
|
+
maxDelay: 30000,
|
|
2253
|
+
backoffMultiplier: 2,
|
|
2254
|
+
jitter: true
|
|
2141
2255
|
};
|
|
2142
2256
|
}
|
|
2143
2257
|
supportsModel(model) {
|
|
@@ -2145,31 +2259,25 @@ class AnthropicProvider {
|
|
|
2145
2259
|
}
|
|
2146
2260
|
async complete(request) {
|
|
2147
2261
|
const anthropicRequest = this.buildRequest(request, false);
|
|
2148
|
-
const
|
|
2262
|
+
const retryOptions = this.config.retry ?? this.defaultRetryOptions;
|
|
2263
|
+
const response = await fetchWithRetry(`${this.config.baseUrl}/v1/messages`, {
|
|
2149
2264
|
method: "POST",
|
|
2150
2265
|
headers: this.getHeaders(),
|
|
2151
2266
|
body: JSON.stringify(anthropicRequest),
|
|
2152
2267
|
signal: request.abortSignal
|
|
2153
|
-
});
|
|
2154
|
-
if (!response.ok) {
|
|
2155
|
-
const error = await response.text();
|
|
2156
|
-
throw new Error(`Anthropic API error: ${response.status} ${error}`);
|
|
2157
|
-
}
|
|
2268
|
+
}, retryOptions);
|
|
2158
2269
|
const data = await response.json();
|
|
2159
2270
|
return this.convertResponse(data);
|
|
2160
2271
|
}
|
|
2161
2272
|
async stream(request, options) {
|
|
2162
2273
|
const anthropicRequest = this.buildRequest(request, true);
|
|
2163
|
-
const
|
|
2274
|
+
const retryOptions = this.config.retry ?? this.defaultRetryOptions;
|
|
2275
|
+
const response = await fetchWithRetry(`${this.config.baseUrl}/v1/messages`, {
|
|
2164
2276
|
method: "POST",
|
|
2165
2277
|
headers: this.getHeaders(),
|
|
2166
2278
|
body: JSON.stringify(anthropicRequest),
|
|
2167
2279
|
signal: request.abortSignal
|
|
2168
|
-
});
|
|
2169
|
-
if (!response.ok) {
|
|
2170
|
-
const error = await response.text();
|
|
2171
|
-
throw new Error(`Anthropic API error: ${response.status} ${error}`);
|
|
2172
|
-
}
|
|
2280
|
+
}, retryOptions);
|
|
2173
2281
|
return this.createStreamIterator(response.body, options);
|
|
2174
2282
|
}
|
|
2175
2283
|
buildRequest(request, stream) {
|
|
@@ -2390,6 +2498,7 @@ class OpenAIProvider {
|
|
|
2390
2498
|
/^chatgpt/
|
|
2391
2499
|
];
|
|
2392
2500
|
config;
|
|
2501
|
+
defaultRetryOptions;
|
|
2393
2502
|
constructor(config = {}) {
|
|
2394
2503
|
const apiKey = config.apiKey ?? process.env.OPENAI_API_KEY;
|
|
2395
2504
|
if (!apiKey) {
|
|
@@ -2399,96 +2508,88 @@ class OpenAIProvider {
|
|
|
2399
2508
|
apiKey,
|
|
2400
2509
|
baseUrl: this.normalizeBaseUrl(config.baseUrl ?? process.env.OPENAI_BASE_URL ?? "https://api.openai.com/v1"),
|
|
2401
2510
|
organization: config.organization,
|
|
2402
|
-
defaultMaxTokens: config.defaultMaxTokens ?? 4096
|
|
2511
|
+
defaultMaxTokens: config.defaultMaxTokens ?? 4096,
|
|
2512
|
+
retry: config.retry
|
|
2513
|
+
};
|
|
2514
|
+
this.defaultRetryOptions = {
|
|
2515
|
+
maxAttempts: 3,
|
|
2516
|
+
initialDelay: 1000,
|
|
2517
|
+
maxDelay: 30000,
|
|
2518
|
+
backoffMultiplier: 2,
|
|
2519
|
+
jitter: true
|
|
2403
2520
|
};
|
|
2404
2521
|
}
|
|
2405
2522
|
supportsModel(model) {
|
|
2406
2523
|
return this.supportedModels.some((pattern) => pattern.test(model));
|
|
2407
2524
|
}
|
|
2408
2525
|
async complete(request) {
|
|
2526
|
+
const retryOptions = this.config.retry ?? this.defaultRetryOptions;
|
|
2409
2527
|
if (this.usesResponsesApi(request.config.model)) {
|
|
2410
2528
|
const openaiRequest2 = this.buildResponsesRequest(request, false);
|
|
2411
|
-
const response2 = await
|
|
2529
|
+
const response2 = await fetchWithRetry(`${this.config.baseUrl}/responses`, {
|
|
2412
2530
|
method: "POST",
|
|
2413
2531
|
headers: this.getHeaders(),
|
|
2414
2532
|
body: JSON.stringify(openaiRequest2),
|
|
2415
2533
|
signal: request.abortSignal
|
|
2416
|
-
});
|
|
2417
|
-
if (!response2.ok) {
|
|
2418
|
-
const error = await response2.text();
|
|
2419
|
-
throw new Error(`OpenAI API error: ${response2.status} ${error}`);
|
|
2420
|
-
}
|
|
2534
|
+
}, retryOptions);
|
|
2421
2535
|
const data2 = await response2.json();
|
|
2422
2536
|
return this.convertResponsesResponse(data2);
|
|
2423
2537
|
}
|
|
2424
2538
|
const openaiRequest = this.buildRequest(request, false);
|
|
2425
|
-
|
|
2539
|
+
const response = await fetchWithRetry(`${this.config.baseUrl}/chat/completions`, {
|
|
2426
2540
|
method: "POST",
|
|
2427
2541
|
headers: this.getHeaders(),
|
|
2428
2542
|
body: JSON.stringify(openaiRequest),
|
|
2429
2543
|
signal: request.abortSignal
|
|
2430
|
-
});
|
|
2431
|
-
if (
|
|
2432
|
-
const
|
|
2433
|
-
if (this.shouldFallbackToResponses(
|
|
2544
|
+
}, retryOptions);
|
|
2545
|
+
if (response.status === 404) {
|
|
2546
|
+
const errorText = await response.clone().text();
|
|
2547
|
+
if (this.shouldFallbackToResponses(404, errorText)) {
|
|
2434
2548
|
const fallbackRequest = this.buildResponsesRequest(request, false);
|
|
2435
|
-
|
|
2549
|
+
const fallbackResponse = await fetchWithRetry(`${this.config.baseUrl}/responses`, {
|
|
2436
2550
|
method: "POST",
|
|
2437
2551
|
headers: this.getHeaders(),
|
|
2438
2552
|
body: JSON.stringify(fallbackRequest),
|
|
2439
2553
|
signal: request.abortSignal
|
|
2440
|
-
});
|
|
2441
|
-
|
|
2442
|
-
const fallbackError = await response.text();
|
|
2443
|
-
throw new Error(`OpenAI API error: ${response.status} ${fallbackError}`);
|
|
2444
|
-
}
|
|
2445
|
-
const data2 = await response.json();
|
|
2554
|
+
}, retryOptions);
|
|
2555
|
+
const data2 = await fallbackResponse.json();
|
|
2446
2556
|
return this.convertResponsesResponse(data2);
|
|
2447
2557
|
}
|
|
2448
|
-
throw new Error(`OpenAI API error: ${response.status} ${error}`);
|
|
2449
2558
|
}
|
|
2450
2559
|
const data = await response.json();
|
|
2451
2560
|
return this.convertResponse(data);
|
|
2452
2561
|
}
|
|
2453
2562
|
async stream(request, options) {
|
|
2563
|
+
const retryOptions = this.config.retry ?? this.defaultRetryOptions;
|
|
2454
2564
|
if (this.usesResponsesApi(request.config.model)) {
|
|
2455
2565
|
const openaiRequest2 = this.buildResponsesRequest(request, true);
|
|
2456
|
-
const response2 = await
|
|
2566
|
+
const response2 = await fetchWithRetry(`${this.config.baseUrl}/responses`, {
|
|
2457
2567
|
method: "POST",
|
|
2458
2568
|
headers: this.getHeaders(),
|
|
2459
2569
|
body: JSON.stringify(openaiRequest2),
|
|
2460
2570
|
signal: request.abortSignal
|
|
2461
|
-
});
|
|
2462
|
-
if (!response2.ok) {
|
|
2463
|
-
const error = await response2.text();
|
|
2464
|
-
throw new Error(`OpenAI API error: ${response2.status} ${error}`);
|
|
2465
|
-
}
|
|
2571
|
+
}, retryOptions);
|
|
2466
2572
|
return this.createResponsesStreamIterator(response2.body, options);
|
|
2467
2573
|
}
|
|
2468
2574
|
const openaiRequest = this.buildRequest(request, true);
|
|
2469
|
-
|
|
2575
|
+
const response = await fetchWithRetry(`${this.config.baseUrl}/chat/completions`, {
|
|
2470
2576
|
method: "POST",
|
|
2471
2577
|
headers: this.getHeaders(),
|
|
2472
2578
|
body: JSON.stringify(openaiRequest),
|
|
2473
2579
|
signal: request.abortSignal
|
|
2474
|
-
});
|
|
2475
|
-
if (
|
|
2476
|
-
const
|
|
2477
|
-
if (this.shouldFallbackToResponses(
|
|
2580
|
+
}, retryOptions);
|
|
2581
|
+
if (response.status === 404) {
|
|
2582
|
+
const errorText = await response.clone().text();
|
|
2583
|
+
if (this.shouldFallbackToResponses(404, errorText)) {
|
|
2478
2584
|
const fallbackRequest = this.buildResponsesRequest(request, true);
|
|
2479
|
-
|
|
2585
|
+
const fallbackResponse = await fetchWithRetry(`${this.config.baseUrl}/responses`, {
|
|
2480
2586
|
method: "POST",
|
|
2481
2587
|
headers: this.getHeaders(),
|
|
2482
2588
|
body: JSON.stringify(fallbackRequest),
|
|
2483
2589
|
signal: request.abortSignal
|
|
2484
|
-
});
|
|
2485
|
-
|
|
2486
|
-
const fallbackError = await response.text();
|
|
2487
|
-
throw new Error(`OpenAI API error: ${response.status} ${fallbackError}`);
|
|
2488
|
-
}
|
|
2489
|
-
return this.createResponsesStreamIterator(response.body, options);
|
|
2590
|
+
}, retryOptions);
|
|
2591
|
+
return this.createResponsesStreamIterator(fallbackResponse.body, options);
|
|
2490
2592
|
}
|
|
2491
|
-
throw new Error(`OpenAI API error: ${response.status} ${error}`);
|
|
2492
2593
|
}
|
|
2493
2594
|
return this.createStreamIterator(response.body, options);
|
|
2494
2595
|
}
|
|
@@ -3203,6 +3304,7 @@ class GeminiProvider {
|
|
|
3203
3304
|
name = "Gemini";
|
|
3204
3305
|
supportedModels = [/^gemini-/, /^models\/gemini-/];
|
|
3205
3306
|
config;
|
|
3307
|
+
defaultRetryOptions;
|
|
3206
3308
|
constructor(config = {}) {
|
|
3207
3309
|
const apiKey = config.apiKey ?? process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY;
|
|
3208
3310
|
if (!apiKey) {
|
|
@@ -3211,7 +3313,15 @@ class GeminiProvider {
|
|
|
3211
3313
|
this.config = {
|
|
3212
3314
|
apiKey,
|
|
3213
3315
|
baseUrl: config.baseUrl ?? process.env.GEMINI_BASE_URL ?? "https://generativelanguage.googleapis.com/v1beta",
|
|
3214
|
-
defaultMaxTokens: config.defaultMaxTokens ?? 4096
|
|
3316
|
+
defaultMaxTokens: config.defaultMaxTokens ?? 4096,
|
|
3317
|
+
retry: config.retry
|
|
3318
|
+
};
|
|
3319
|
+
this.defaultRetryOptions = {
|
|
3320
|
+
maxAttempts: 3,
|
|
3321
|
+
initialDelay: 1000,
|
|
3322
|
+
maxDelay: 30000,
|
|
3323
|
+
backoffMultiplier: 2,
|
|
3324
|
+
jitter: true
|
|
3215
3325
|
};
|
|
3216
3326
|
}
|
|
3217
3327
|
supportsModel(model) {
|
|
@@ -3220,16 +3330,13 @@ class GeminiProvider {
|
|
|
3220
3330
|
async complete(request) {
|
|
3221
3331
|
const geminiRequest = this.buildRequest(request);
|
|
3222
3332
|
const url = this.buildUrl(this.getModelPath(request.config.model) + ":generateContent");
|
|
3223
|
-
const
|
|
3333
|
+
const retryOptions = this.config.retry ?? this.defaultRetryOptions;
|
|
3334
|
+
const response = await fetchWithRetry(url, {
|
|
3224
3335
|
method: "POST",
|
|
3225
3336
|
headers: this.getHeaders(),
|
|
3226
3337
|
body: JSON.stringify(geminiRequest),
|
|
3227
3338
|
signal: request.abortSignal
|
|
3228
|
-
});
|
|
3229
|
-
if (!response.ok) {
|
|
3230
|
-
const error = await response.text();
|
|
3231
|
-
throw new Error(`Gemini API error: ${response.status} ${error}`);
|
|
3232
|
-
}
|
|
3339
|
+
}, retryOptions);
|
|
3233
3340
|
const data = await response.json();
|
|
3234
3341
|
return this.convertResponse(data, request.config.model);
|
|
3235
3342
|
}
|
|
@@ -3238,16 +3345,13 @@ class GeminiProvider {
|
|
|
3238
3345
|
const url = this.buildUrl(this.getModelPath(request.config.model) + ":streamGenerateContent", {
|
|
3239
3346
|
alt: "sse"
|
|
3240
3347
|
});
|
|
3241
|
-
const
|
|
3348
|
+
const retryOptions = this.config.retry ?? this.defaultRetryOptions;
|
|
3349
|
+
const response = await fetchWithRetry(url, {
|
|
3242
3350
|
method: "POST",
|
|
3243
3351
|
headers: this.getHeaders(),
|
|
3244
3352
|
body: JSON.stringify(geminiRequest),
|
|
3245
3353
|
signal: request.abortSignal
|
|
3246
|
-
});
|
|
3247
|
-
if (!response.ok) {
|
|
3248
|
-
const error = await response.text();
|
|
3249
|
-
throw new Error(`Gemini API error: ${response.status} ${error}`);
|
|
3250
|
-
}
|
|
3354
|
+
}, retryOptions);
|
|
3251
3355
|
const contentType = response.headers.get("content-type") ?? "";
|
|
3252
3356
|
if (!contentType.includes("text/event-stream")) {
|
|
3253
3357
|
const data = await response.json();
|
|
@@ -6236,14 +6340,17 @@ ${c.bold("Interactive Commands:")}
|
|
|
6236
6340
|
${c.cyan("/exit")} Exit the CLI
|
|
6237
6341
|
|
|
6238
6342
|
${c.bold("Environment:")}
|
|
6239
|
-
${c.cyan("ANTHROPIC_API_KEY")}
|
|
6240
|
-
${c.cyan("ANTHROPIC_MODEL")}
|
|
6241
|
-
${c.cyan("GEMINI_API_KEY")}
|
|
6242
|
-
${c.cyan("GEMINI_MODEL")}
|
|
6243
|
-
${c.cyan("GEMINI_BASE_URL")}
|
|
6244
|
-
${c.cyan("OPENAI_API_KEY")}
|
|
6245
|
-
${c.cyan("OPENAI_MODEL")}
|
|
6246
|
-
${c.cyan("OPENAI_BASE_URL")}
|
|
6343
|
+
${c.cyan("ANTHROPIC_API_KEY")} Anthropic API key (for Claude models)
|
|
6344
|
+
${c.cyan("ANTHROPIC_MODEL")} Optional. Claude model (default: claude-sonnet-4-20250514)
|
|
6345
|
+
${c.cyan("GEMINI_API_KEY")} Gemini API key (for Gemini models)
|
|
6346
|
+
${c.cyan("GEMINI_MODEL")} Optional. Gemini model (default: gemini-1.5-pro)
|
|
6347
|
+
${c.cyan("GEMINI_BASE_URL")} Optional. Custom Gemini API base URL
|
|
6348
|
+
${c.cyan("OPENAI_API_KEY")} OpenAI API key (for GPT models)
|
|
6349
|
+
${c.cyan("OPENAI_MODEL")} Optional. OpenAI model (default: gpt-5.2)
|
|
6350
|
+
${c.cyan("OPENAI_BASE_URL")} Optional. Custom OpenAI-compatible API URL
|
|
6351
|
+
${c.cyan("ANTIGRAVITY_API_KEY")} Antigravity API key (default: sk-antigravity)
|
|
6352
|
+
${c.cyan("ANTIGRAVITY_MODEL")} Optional. Antigravity model (default: gemini-3-flash)
|
|
6353
|
+
${c.cyan("ANTIGRAVITY_BASE_URL")} Optional. Antigravity gateway URL (default: http://127.0.0.1:8045)
|
|
6247
6354
|
|
|
6248
6355
|
${c.bold("Examples:")}
|
|
6249
6356
|
${c.dim("# Start interactive mode")}
|
|
@@ -6379,6 +6486,7 @@ function printModelsHelp() {
|
|
|
6379
6486
|
console.log(` ${c.cyan("/models")} openai gpt-5-mini`);
|
|
6380
6487
|
console.log(` ${c.cyan("/models")} anthropic claude-sonnet-4-20250514`);
|
|
6381
6488
|
console.log(` ${c.cyan("/models")} gemini gemini-1.5-pro`);
|
|
6489
|
+
console.log(` ${c.cyan("/models")} antigravity gemini-3-flash`);
|
|
6382
6490
|
console.log(` ${c.cyan("/models")} gpt-5.2`);
|
|
6383
6491
|
console.log(` ${c.cyan("/models")} reset`);
|
|
6384
6492
|
console.log();
|
|
@@ -6419,7 +6527,7 @@ async function handleModelsCommand(args) {
|
|
|
6419
6527
|
const provider = parseProvider(args[0]);
|
|
6420
6528
|
if (!provider) {
|
|
6421
6529
|
console.log(c.yellow(`
|
|
6422
|
-
Unknown provider: ${args[0]}. Use "openai", "anthropic", or "
|
|
6530
|
+
Unknown provider: ${args[0]}. Use "openai", "anthropic", "gemini", or "antigravity".
|
|
6423
6531
|
`));
|
|
6424
6532
|
return;
|
|
6425
6533
|
}
|
|
@@ -6563,6 +6671,34 @@ async function listGeminiModels() {
|
|
|
6563
6671
|
}
|
|
6564
6672
|
console.log();
|
|
6565
6673
|
}
|
|
6674
|
+
/**
 * Print the models exposed by the local Antigravity gateway.
 * Shows the configured endpoint, then fetches `${baseUrl}/v1/models`
 * (sending the key as both a Bearer token and x-api-key) and lists each
 * model id with its owner when reported. On a non-ok response a hint to
 * start Antigravity Manager is printed instead of throwing.
 */
async function listAntigravityModels() {
  const config = getAntigravityConfig();
  const baseUrl = `${config.baseUrl}/v1`;
  console.log(c.bold("Antigravity Models:"));
  console.log(c.dim(" API Type: openai-compatible (Antigravity Gateway)"));
  console.log(c.dim(` Base URL: ${baseUrl}`));
  console.log(c.dim(` Default Model: ${config.model}`));
  const res = await fetch(`${baseUrl}/models`, {
    headers: {
      Authorization: `Bearer ${config.apiKey}`,
      "x-api-key": config.apiKey
    }
  });
  if (!res.ok) {
    console.log(c.red(` ✗ Failed to fetch models (${res.status})`));
    console.log(c.dim(` URL: ${baseUrl}/models`));
    console.log(c.dim(" Make sure Antigravity Manager is running"));
    console.log();
    return;
  }
  const payload = await res.json();
  const models = payload.data ?? [];
  for (const model of models) {
    const ownerSuffix = model.owned_by ? ` (${model.owned_by})` : "";
    console.log(` ${c.green("●")} ${model.id}${ownerSuffix}`);
  }
  console.log();
}
|
|
6566
6702
|
async function listModelsSummary() {
|
|
6567
6703
|
const provider = getActiveProviderId();
|
|
6568
6704
|
const apiType = provider ?? "auto";
|
|
@@ -6583,6 +6719,12 @@ async function listModelsSummary() {
|
|
|
6583
6719
|
console.log(c.red(` ✗ Gemini: ${error instanceof Error ? error.message : String(error)}`));
|
|
6584
6720
|
console.log();
|
|
6585
6721
|
}
|
|
6722
|
+
try {
|
|
6723
|
+
await listAntigravityModels();
|
|
6724
|
+
} catch (error) {
|
|
6725
|
+
console.log(c.red(` ✗ Antigravity: ${error instanceof Error ? error.message : String(error)}`));
|
|
6726
|
+
console.log();
|
|
6727
|
+
}
|
|
6586
6728
|
await listAnthropicModels();
|
|
6587
6729
|
}
|
|
6588
6730
|
function printDebug() {
|
|
@@ -6602,16 +6744,20 @@ function printDebug() {
|
|
|
6602
6744
|
console.log(` ${c.cyan("ANTHROPIC_MODEL:")} ${process.env.ANTHROPIC_MODEL || c.dim("(not set)")}`);
|
|
6603
6745
|
console.log(` ${c.cyan("GEMINI_MODEL:")} ${process.env.GEMINI_MODEL || c.dim("(not set)")}`);
|
|
6604
6746
|
console.log(` ${c.cyan("GEMINI_BASE_URL:")} ${process.env.GEMINI_BASE_URL || c.dim("(not set)")}`);
|
|
6605
|
-
console.log(` ${c.cyan("OPENAI_MODEL:")}
|
|
6606
|
-
console.log(` ${c.cyan("OPENAI_BASE_URL:")}
|
|
6747
|
+
console.log(` ${c.cyan("OPENAI_MODEL:")} ${process.env.OPENAI_MODEL || c.dim("(not set)")}`);
|
|
6748
|
+
console.log(` ${c.cyan("OPENAI_BASE_URL:")} ${process.env.OPENAI_BASE_URL || c.dim("(not set)")}`);
|
|
6749
|
+
console.log(` ${c.cyan("ANTIGRAVITY_MODEL:")} ${process.env.ANTIGRAVITY_MODEL || c.dim("(not set)")}`);
|
|
6750
|
+
console.log(` ${c.cyan("ANTIGRAVITY_BASE_URL:")} ${process.env.ANTIGRAVITY_BASE_URL || c.dim("(not set)")}`);
|
|
6607
6751
|
console.log();
|
|
6608
6752
|
console.log(c.bold("API Keys:"));
|
|
6609
6753
|
const anthropicKey = process.env.ANTHROPIC_API_KEY;
|
|
6610
6754
|
const geminiKey = process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY;
|
|
6611
6755
|
const openaiKey = process.env.OPENAI_API_KEY;
|
|
6612
|
-
|
|
6613
|
-
console.log(` ${c.cyan("
|
|
6614
|
-
console.log(` ${c.cyan("
|
|
6756
|
+
const antigravityKey = process.env.ANTIGRAVITY_API_KEY;
|
|
6757
|
+
console.log(` ${c.cyan("ANTHROPIC_API_KEY:")} ${anthropicKey ? c.green("✓ set") + c.dim(` (${anthropicKey.slice(0, 8)}...${anthropicKey.slice(-4)})`) : c.red("✗ not set")}`);
|
|
6758
|
+
console.log(` ${c.cyan("GEMINI_API_KEY:")} ${geminiKey ? c.green("✓ set") + c.dim(` (${geminiKey.slice(0, 8)}...${geminiKey.slice(-4)})`) : c.red("✗ not set")}`);
|
|
6759
|
+
console.log(` ${c.cyan("OPENAI_API_KEY:")} ${openaiKey ? c.green("✓ set") + c.dim(` (${openaiKey.slice(0, 8)}...${openaiKey.slice(-4)})`) : c.red("✗ not set")}`);
|
|
6760
|
+
console.log(` ${c.cyan("ANTIGRAVITY_API_KEY:")} ${antigravityKey ? c.green("✓ set") + c.dim(` (${antigravityKey.slice(0, 8)}...${antigravityKey.slice(-4)})`) : c.dim("(default: sk-antigravity)")}`);
|
|
6615
6761
|
console.log();
|
|
6616
6762
|
console.log(c.bold("Environment:"));
|
|
6617
6763
|
console.log(` ${c.cyan("Working dir:")} ${cwd}`);
|
|
@@ -6675,6 +6821,9 @@ function formatToolInput(name, input) {
|
|
|
6675
6821
|
}
|
|
6676
6822
|
}
|
|
6677
6823
|
function getDefaultProviderFromEnv() {
|
|
6824
|
+
if (process.env.ANTIGRAVITY_API_KEY || process.env.ANTIGRAVITY_BASE_URL) {
|
|
6825
|
+
return "antigravity";
|
|
6826
|
+
}
|
|
6678
6827
|
if (process.env.ANTHROPIC_API_KEY) {
|
|
6679
6828
|
return "anthropic";
|
|
6680
6829
|
}
|
|
@@ -6706,6 +6855,9 @@ function getDefaultModelForProvider(providerId) {
|
|
|
6706
6855
|
if (providerId === "gemini") {
|
|
6707
6856
|
return process.env.GEMINI_MODEL || "gemini-1.5-pro";
|
|
6708
6857
|
}
|
|
6858
|
+
if (providerId === "antigravity") {
|
|
6859
|
+
return process.env.ANTIGRAVITY_MODEL || "gemini-3-flash";
|
|
6860
|
+
}
|
|
6709
6861
|
return process.env.OPENAI_MODEL || "gpt-5.2";
|
|
6710
6862
|
}
|
|
6711
6863
|
function getActiveProviderId() {
|
|
@@ -6738,8 +6890,17 @@ function parseProvider(arg) {
|
|
|
6738
6890
|
if (normalized === "gemini" || normalized === "google") {
|
|
6739
6891
|
return "gemini";
|
|
6740
6892
|
}
|
|
6893
|
+
if (normalized === "antigravity" || normalized === "ag") {
|
|
6894
|
+
return "antigravity";
|
|
6895
|
+
}
|
|
6741
6896
|
return null;
|
|
6742
6897
|
}
|
|
6898
|
+
/**
 * Resolve Antigravity gateway settings from the environment, with the
 * documented defaults (local gateway on port 8045, key "sk-antigravity",
 * model "gemini-3-flash"). Trailing slashes are stripped from the base
 * URL so callers can append paths safely.
 * @returns {{baseUrl: string, apiKey: string, model: string}}
 */
function getAntigravityConfig() {
  const rawBaseUrl = process.env.ANTIGRAVITY_BASE_URL || "http://127.0.0.1:8045";
  return {
    baseUrl: rawBaseUrl.replace(/\/+$/, ""),
    apiKey: process.env.ANTIGRAVITY_API_KEY || "sk-antigravity",
    model: process.env.ANTIGRAVITY_MODEL || "gemini-3-flash"
  };
}
|
|
6743
6904
|
function createProvider(providerId) {
|
|
6744
6905
|
if (providerId === "anthropic") {
|
|
6745
6906
|
return new AnthropicProvider;
|
|
@@ -6750,6 +6911,13 @@ function createProvider(providerId) {
|
|
|
6750
6911
|
baseUrl: process.env.GEMINI_BASE_URL
|
|
6751
6912
|
});
|
|
6752
6913
|
}
|
|
6914
|
+
if (providerId === "antigravity") {
|
|
6915
|
+
const config = getAntigravityConfig();
|
|
6916
|
+
return new OpenAIProvider({
|
|
6917
|
+
apiKey: config.apiKey,
|
|
6918
|
+
baseUrl: `${config.baseUrl}/v1`
|
|
6919
|
+
});
|
|
6920
|
+
}
|
|
6753
6921
|
return new OpenAIProvider({
|
|
6754
6922
|
apiKey: process.env.OPENAI_API_KEY,
|
|
6755
6923
|
baseUrl: process.env.OPENAI_BASE_URL
|
package/dist/index.js
CHANGED
|
@@ -480,6 +480,117 @@ function getProviderEnv() {
|
|
|
480
480
|
}
|
|
481
481
|
};
|
|
482
482
|
}
|
|
483
|
+
// src/utils/retry.ts
|
|
484
|
+
// src/utils/retry.ts
// Baseline retry configuration merged under caller overrides by withRetry().
var DEFAULT_RETRY_OPTIONS = {
  maxAttempts: 3,       // total tries, including the initial call
  initialDelay: 1000,   // ms before the first retry
  maxDelay: 30000,      // ceiling on any backoff delay, in ms
  backoffMultiplier: 2, // exponential growth factor per attempt
  jitter: true,         // randomize delays to avoid thundering-herd retries
  onRetry: () => {},    // hook invoked as (attemptNumber, lastError) before sleeping
  signal: undefined     // optional AbortSignal checked before each attempt
};
|
|
493
|
+
/**
 * Resolve after the given number of milliseconds.
 * @param {number} ms - duration of the pause
 * @returns {Promise<void>}
 */
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
|
496
|
+
/**
 * Compute the backoff delay for a retry attempt.
 * Grows exponentially from initialDelay and is capped at maxDelay; with
 * jitter enabled the result is scaled by a random factor in [0.5, 1.0).
 * @param {number} attempt - zero-based attempt index
 * @param {number} initialDelay - base delay in ms
 * @param {number} maxDelay - upper bound in ms
 * @param {number} backoffMultiplier - exponential growth factor
 * @param {boolean} jitter - whether to randomize the delay
 * @returns {number} delay in milliseconds
 */
function calculateDelay(attempt, initialDelay, maxDelay, backoffMultiplier, jitter) {
  const exponential = initialDelay * Math.pow(backoffMultiplier, attempt);
  const capped = Math.min(exponential, maxDelay);
  return jitter ? capped * (0.5 + Math.random() * 0.5) : capped;
}
|
|
503
|
+
/**
 * HTTP status codes worth retrying: any 5xx server error, plus 429
 * (rate limited).
 * @param {number} status
 * @returns {boolean}
 */
function isRetryableStatus(status) {
  if (status === 429) {
    return true;
  }
  return status >= 500;
}
|
|
506
|
+
/**
 * Heuristically decide whether a thrown value represents a transient
 * failure worth retrying.
 *
 * TypeErrors are treated as network-level fetch failures when their
 * message mentions fetch/network or common socket error codes. Other
 * Errors are retried on rate/usage-limit wording, timeouts, or a
 * standalone 5xx status code embedded in the message.
 *
 * @param {unknown} error - value thrown by the wrapped operation
 * @returns {boolean} true when a retry is reasonable
 */
function isRetryableError(error) {
  if (error instanceof TypeError) {
    // fetch() rejects with TypeError on network-level failures.
    return error.message.includes("fetch") || error.message.includes("network") || error.message.includes("ECONNREFUSED") || error.message.includes("ETIMEDOUT") || error.message.includes("ECONNRESET");
  }
  if (error instanceof Error) {
    const message = error.message.toLowerCase();
    if (message.includes("rate_limit") || message.includes("rate limit")) {
      return true;
    }
    if (message.includes("usage_limit") || message.includes("usage limit")) {
      return true;
    }
    if (message.includes("timeout") || message.includes("timed out")) {
      return true;
    }
    // BUG FIX: the previous check used message.includes("5"), which marked
    // any message containing the digit 5 (e.g. "model gpt-5 not found") as
    // retryable. Only a standalone three-digit 5xx code counts.
    if (/\b5\d\d\b/.test(message)) {
      return true;
    }
  }
  return false;
}
|
|
527
|
+
/**
 * Best-effort extraction of an HTTP status code from an error.
 * Prefers a structured `status` property (attached by fetchWithRetry),
 * falling back to scanning the message for a standalone three-digit
 * number. Word boundaries keep longer digit runs (ids, timestamps)
 * from producing bogus codes.
 * @param {Error} error
 * @returns {number | null} the status code, or null when none is found
 */
function extractStatusCode(error) {
  const status = error.status;
  if (typeof status === "number" && Number.isInteger(status)) {
    return status;
  }
  const match = error.message.match(/\b(\d{3})\b/);
  return match ? Number.parseInt(match[1], 10) : null;
}
|
|
531
|
+
/**
 * Run `fn`, retrying transient failures with exponential backoff.
 * `options2` is merged over DEFAULT_RETRY_OPTIONS. Non-retryable errors
 * and the final failed attempt are rethrown; an already-aborted signal
 * stops further attempts. `onRetry(attemptNumber, error)` fires before
 * each backoff sleep.
 * @param {() => Promise<any>} fn - operation to attempt
 * @param {object} [options2] - partial retry options
 * @returns {Promise<any>} the first successful result of `fn`
 */
async function withRetry(fn, options2 = {}) {
  const opts = { ...DEFAULT_RETRY_OPTIONS, ...options2 };
  let lastError = null;
  let attempt = 0;
  while (attempt < opts.maxAttempts) {
    if (opts.signal?.aborted) {
      throw new Error("Request aborted");
    }
    try {
      return await fn();
    } catch (error) {
      lastError = error instanceof Error ? error : new Error(String(error));
      const isFinalAttempt = attempt >= opts.maxAttempts - 1;
      if (isFinalAttempt) {
        throw lastError;
      }
      // Prefer a concrete status code when one can be extracted; otherwise
      // fall back to message-based heuristics.
      const statusCode = extractStatusCode(lastError);
      const retryable = statusCode !== null ? isRetryableStatus(statusCode) : isRetryableError(lastError);
      if (!retryable) {
        throw lastError;
      }
      const delay = calculateDelay(attempt, opts.initialDelay, opts.maxDelay, opts.backoffMultiplier, opts.jitter);
      opts.onRetry(attempt + 1, lastError);
      await sleep(delay);
    }
    attempt += 1;
  }
  throw lastError || new Error("Max retries exceeded");
}
|
|
557
|
+
/**
 * fetch() wrapped in withRetry. Non-ok responses are converted into
 * Errors that carry `status` and `responseText` so the retry logic can
 * classify them; the request's AbortSignal is forwarded to the retry
 * loop (explicit retryOptions may still override it).
 * @param {string | URL} url - request target
 * @param {RequestInit} init - fetch options (must be provided; its
 *   `signal` is read unconditionally)
 * @param {object} [retryOptions] - partial retry options
 * @returns {Promise<Response>} the first ok response
 */
async function fetchWithRetry(url, init, retryOptions) {
  const attemptFetch = async () => {
    const response = await fetch(url, init);
    if (response.ok) {
      return response;
    }
    const errorText = await response.text().catch(() => "");
    let errorMessage = `HTTP ${response.status}`;
    try {
      // Provider APIs usually return { error: { type, message } } JSON.
      const errorJson = JSON.parse(errorText);
      if (errorJson.error?.type) {
        errorMessage += `: ${errorJson.error.type}`;
      }
      if (errorJson.error?.message) {
        errorMessage += ` - ${errorJson.error.message}`;
      }
    } catch {
      // Not JSON: append the raw body, if any.
      if (errorText) {
        errorMessage += ` ${errorText}`;
      }
    }
    const error = new Error(errorMessage);
    error.status = response.status;
    error.responseText = errorText;
    throw error;
  };
  return withRetry(attemptFetch, {
    signal: init.signal,
    ...retryOptions
  });
}
|
|
587
|
+
/**
 * Read retry tuning from FORMAGENT_RETRY_* environment variables.
 * Malformed, zero, or negative values fall back to the documented
 * defaults (3 attempts, 1000 ms initial delay, 30000 ms max delay);
 * previously a garbage value produced NaN and broke the backoff math.
 * @returns {{maxAttempts: number, initialDelay: number, maxDelay: number}}
 */
function getRetryOptionsFromEnv() {
  // Parse a positive integer from the environment, or use the fallback.
  const readPositiveInt = (name, fallback) => {
    const parsed = Number.parseInt(process.env[name] ?? "", 10);
    return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback;
  };
  return {
    maxAttempts: readPositiveInt("FORMAGENT_RETRY_MAX_ATTEMPTS", 3),
    initialDelay: readPositiveInt("FORMAGENT_RETRY_INITIAL_DELAY", 1000),
    maxDelay: readPositiveInt("FORMAGENT_RETRY_MAX_DELAY", 30000)
  };
}
|
|
483
594
|
// src/utils/truncation.ts
|
|
484
595
|
import * as fs from "node:fs/promises";
|
|
485
596
|
import * as path from "node:path";
|
|
@@ -2785,6 +2896,7 @@ class AnthropicProvider {
|
|
|
2785
2896
|
/^claude-instant/
|
|
2786
2897
|
];
|
|
2787
2898
|
config;
|
|
2899
|
+
defaultRetryOptions;
|
|
2788
2900
|
constructor(config = {}) {
|
|
2789
2901
|
const apiKey = config.apiKey ?? process.env.ANTHROPIC_API_KEY;
|
|
2790
2902
|
if (!apiKey) {
|
|
@@ -2794,7 +2906,15 @@ class AnthropicProvider {
|
|
|
2794
2906
|
apiKey,
|
|
2795
2907
|
baseUrl: config.baseUrl ?? process.env.ANTHROPIC_BASE_URL ?? "https://api.anthropic.com",
|
|
2796
2908
|
apiVersion: config.apiVersion ?? "2023-06-01",
|
|
2797
|
-
defaultMaxTokens: config.defaultMaxTokens ?? 4096
|
|
2909
|
+
defaultMaxTokens: config.defaultMaxTokens ?? 4096,
|
|
2910
|
+
retry: config.retry
|
|
2911
|
+
};
|
|
2912
|
+
this.defaultRetryOptions = {
|
|
2913
|
+
maxAttempts: 3,
|
|
2914
|
+
initialDelay: 1000,
|
|
2915
|
+
maxDelay: 30000,
|
|
2916
|
+
backoffMultiplier: 2,
|
|
2917
|
+
jitter: true
|
|
2798
2918
|
};
|
|
2799
2919
|
}
|
|
2800
2920
|
supportsModel(model) {
|
|
@@ -2802,31 +2922,25 @@ class AnthropicProvider {
|
|
|
2802
2922
|
}
|
|
2803
2923
|
async complete(request) {
|
|
2804
2924
|
const anthropicRequest = this.buildRequest(request, false);
|
|
2805
|
-
const
|
|
2925
|
+
const retryOptions = this.config.retry ?? this.defaultRetryOptions;
|
|
2926
|
+
const response = await fetchWithRetry(`${this.config.baseUrl}/v1/messages`, {
|
|
2806
2927
|
method: "POST",
|
|
2807
2928
|
headers: this.getHeaders(),
|
|
2808
2929
|
body: JSON.stringify(anthropicRequest),
|
|
2809
2930
|
signal: request.abortSignal
|
|
2810
|
-
});
|
|
2811
|
-
if (!response.ok) {
|
|
2812
|
-
const error = await response.text();
|
|
2813
|
-
throw new Error(`Anthropic API error: ${response.status} ${error}`);
|
|
2814
|
-
}
|
|
2931
|
+
}, retryOptions);
|
|
2815
2932
|
const data = await response.json();
|
|
2816
2933
|
return this.convertResponse(data);
|
|
2817
2934
|
}
|
|
2818
2935
|
async stream(request, options2) {
|
|
2819
2936
|
const anthropicRequest = this.buildRequest(request, true);
|
|
2820
|
-
const
|
|
2937
|
+
const retryOptions = this.config.retry ?? this.defaultRetryOptions;
|
|
2938
|
+
const response = await fetchWithRetry(`${this.config.baseUrl}/v1/messages`, {
|
|
2821
2939
|
method: "POST",
|
|
2822
2940
|
headers: this.getHeaders(),
|
|
2823
2941
|
body: JSON.stringify(anthropicRequest),
|
|
2824
2942
|
signal: request.abortSignal
|
|
2825
|
-
});
|
|
2826
|
-
if (!response.ok) {
|
|
2827
|
-
const error = await response.text();
|
|
2828
|
-
throw new Error(`Anthropic API error: ${response.status} ${error}`);
|
|
2829
|
-
}
|
|
2943
|
+
}, retryOptions);
|
|
2830
2944
|
return this.createStreamIterator(response.body, options2);
|
|
2831
2945
|
}
|
|
2832
2946
|
buildRequest(request, stream) {
|
|
@@ -3050,6 +3164,7 @@ class OpenAIProvider {
|
|
|
3050
3164
|
/^chatgpt/
|
|
3051
3165
|
];
|
|
3052
3166
|
config;
|
|
3167
|
+
defaultRetryOptions;
|
|
3053
3168
|
constructor(config = {}) {
|
|
3054
3169
|
const apiKey = config.apiKey ?? process.env.OPENAI_API_KEY;
|
|
3055
3170
|
if (!apiKey) {
|
|
@@ -3059,96 +3174,88 @@ class OpenAIProvider {
|
|
|
3059
3174
|
apiKey,
|
|
3060
3175
|
baseUrl: this.normalizeBaseUrl(config.baseUrl ?? process.env.OPENAI_BASE_URL ?? "https://api.openai.com/v1"),
|
|
3061
3176
|
organization: config.organization,
|
|
3062
|
-
defaultMaxTokens: config.defaultMaxTokens ?? 4096
|
|
3177
|
+
defaultMaxTokens: config.defaultMaxTokens ?? 4096,
|
|
3178
|
+
retry: config.retry
|
|
3179
|
+
};
|
|
3180
|
+
this.defaultRetryOptions = {
|
|
3181
|
+
maxAttempts: 3,
|
|
3182
|
+
initialDelay: 1000,
|
|
3183
|
+
maxDelay: 30000,
|
|
3184
|
+
backoffMultiplier: 2,
|
|
3185
|
+
jitter: true
|
|
3063
3186
|
};
|
|
3064
3187
|
}
|
|
3065
3188
|
supportsModel(model) {
|
|
3066
3189
|
return this.supportedModels.some((pattern) => pattern.test(model));
|
|
3067
3190
|
}
|
|
3068
3191
|
async complete(request) {
|
|
3192
|
+
const retryOptions = this.config.retry ?? this.defaultRetryOptions;
|
|
3069
3193
|
if (this.usesResponsesApi(request.config.model)) {
|
|
3070
3194
|
const openaiRequest2 = this.buildResponsesRequest(request, false);
|
|
3071
|
-
const response2 = await
|
|
3195
|
+
const response2 = await fetchWithRetry(`${this.config.baseUrl}/responses`, {
|
|
3072
3196
|
method: "POST",
|
|
3073
3197
|
headers: this.getHeaders(),
|
|
3074
3198
|
body: JSON.stringify(openaiRequest2),
|
|
3075
3199
|
signal: request.abortSignal
|
|
3076
|
-
});
|
|
3077
|
-
if (!response2.ok) {
|
|
3078
|
-
const error = await response2.text();
|
|
3079
|
-
throw new Error(`OpenAI API error: ${response2.status} ${error}`);
|
|
3080
|
-
}
|
|
3200
|
+
}, retryOptions);
|
|
3081
3201
|
const data2 = await response2.json();
|
|
3082
3202
|
return this.convertResponsesResponse(data2);
|
|
3083
3203
|
}
|
|
3084
3204
|
const openaiRequest = this.buildRequest(request, false);
|
|
3085
|
-
|
|
3205
|
+
const response = await fetchWithRetry(`${this.config.baseUrl}/chat/completions`, {
|
|
3086
3206
|
method: "POST",
|
|
3087
3207
|
headers: this.getHeaders(),
|
|
3088
3208
|
body: JSON.stringify(openaiRequest),
|
|
3089
3209
|
signal: request.abortSignal
|
|
3090
|
-
});
|
|
3091
|
-
if (
|
|
3092
|
-
const
|
|
3093
|
-
if (this.shouldFallbackToResponses(
|
|
3210
|
+
}, retryOptions);
|
|
3211
|
+
if (response.status === 404) {
|
|
3212
|
+
const errorText = await response.clone().text();
|
|
3213
|
+
if (this.shouldFallbackToResponses(404, errorText)) {
|
|
3094
3214
|
const fallbackRequest = this.buildResponsesRequest(request, false);
|
|
3095
|
-
|
|
3215
|
+
const fallbackResponse = await fetchWithRetry(`${this.config.baseUrl}/responses`, {
|
|
3096
3216
|
method: "POST",
|
|
3097
3217
|
headers: this.getHeaders(),
|
|
3098
3218
|
body: JSON.stringify(fallbackRequest),
|
|
3099
3219
|
signal: request.abortSignal
|
|
3100
|
-
});
|
|
3101
|
-
|
|
3102
|
-
const fallbackError = await response.text();
|
|
3103
|
-
throw new Error(`OpenAI API error: ${response.status} ${fallbackError}`);
|
|
3104
|
-
}
|
|
3105
|
-
const data2 = await response.json();
|
|
3220
|
+
}, retryOptions);
|
|
3221
|
+
const data2 = await fallbackResponse.json();
|
|
3106
3222
|
return this.convertResponsesResponse(data2);
|
|
3107
3223
|
}
|
|
3108
|
-
throw new Error(`OpenAI API error: ${response.status} ${error}`);
|
|
3109
3224
|
}
|
|
3110
3225
|
const data = await response.json();
|
|
3111
3226
|
return this.convertResponse(data);
|
|
3112
3227
|
}
|
|
3113
3228
|
async stream(request, options2) {
|
|
3229
|
+
const retryOptions = this.config.retry ?? this.defaultRetryOptions;
|
|
3114
3230
|
if (this.usesResponsesApi(request.config.model)) {
|
|
3115
3231
|
const openaiRequest2 = this.buildResponsesRequest(request, true);
|
|
3116
|
-
const response2 = await
|
|
3232
|
+
const response2 = await fetchWithRetry(`${this.config.baseUrl}/responses`, {
|
|
3117
3233
|
method: "POST",
|
|
3118
3234
|
headers: this.getHeaders(),
|
|
3119
3235
|
body: JSON.stringify(openaiRequest2),
|
|
3120
3236
|
signal: request.abortSignal
|
|
3121
|
-
});
|
|
3122
|
-
if (!response2.ok) {
|
|
3123
|
-
const error = await response2.text();
|
|
3124
|
-
throw new Error(`OpenAI API error: ${response2.status} ${error}`);
|
|
3125
|
-
}
|
|
3237
|
+
}, retryOptions);
|
|
3126
3238
|
return this.createResponsesStreamIterator(response2.body, options2);
|
|
3127
3239
|
}
|
|
3128
3240
|
const openaiRequest = this.buildRequest(request, true);
|
|
3129
|
-
|
|
3241
|
+
const response = await fetchWithRetry(`${this.config.baseUrl}/chat/completions`, {
|
|
3130
3242
|
method: "POST",
|
|
3131
3243
|
headers: this.getHeaders(),
|
|
3132
3244
|
body: JSON.stringify(openaiRequest),
|
|
3133
3245
|
signal: request.abortSignal
|
|
3134
|
-
});
|
|
3135
|
-
if (
|
|
3136
|
-
const
|
|
3137
|
-
if (this.shouldFallbackToResponses(
|
|
3246
|
+
}, retryOptions);
|
|
3247
|
+
if (response.status === 404) {
|
|
3248
|
+
const errorText = await response.clone().text();
|
|
3249
|
+
if (this.shouldFallbackToResponses(404, errorText)) {
|
|
3138
3250
|
const fallbackRequest = this.buildResponsesRequest(request, true);
|
|
3139
|
-
|
|
3251
|
+
const fallbackResponse = await fetchWithRetry(`${this.config.baseUrl}/responses`, {
|
|
3140
3252
|
method: "POST",
|
|
3141
3253
|
headers: this.getHeaders(),
|
|
3142
3254
|
body: JSON.stringify(fallbackRequest),
|
|
3143
3255
|
signal: request.abortSignal
|
|
3144
|
-
});
|
|
3145
|
-
|
|
3146
|
-
const fallbackError = await response.text();
|
|
3147
|
-
throw new Error(`OpenAI API error: ${response.status} ${fallbackError}`);
|
|
3148
|
-
}
|
|
3149
|
-
return this.createResponsesStreamIterator(response.body, options2);
|
|
3256
|
+
}, retryOptions);
|
|
3257
|
+
return this.createResponsesStreamIterator(fallbackResponse.body, options2);
|
|
3150
3258
|
}
|
|
3151
|
-
throw new Error(`OpenAI API error: ${response.status} ${error}`);
|
|
3152
3259
|
}
|
|
3153
3260
|
return this.createStreamIterator(response.body, options2);
|
|
3154
3261
|
}
|
|
@@ -3866,6 +3973,7 @@ class GeminiProvider {
|
|
|
3866
3973
|
name = "Gemini";
|
|
3867
3974
|
supportedModels = [/^gemini-/, /^models\/gemini-/];
|
|
3868
3975
|
config;
|
|
3976
|
+
defaultRetryOptions;
|
|
3869
3977
|
constructor(config = {}) {
|
|
3870
3978
|
const apiKey = config.apiKey ?? process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY;
|
|
3871
3979
|
if (!apiKey) {
|
|
@@ -3874,7 +3982,15 @@ class GeminiProvider {
|
|
|
3874
3982
|
this.config = {
|
|
3875
3983
|
apiKey,
|
|
3876
3984
|
baseUrl: config.baseUrl ?? process.env.GEMINI_BASE_URL ?? "https://generativelanguage.googleapis.com/v1beta",
|
|
3877
|
-
defaultMaxTokens: config.defaultMaxTokens ?? 4096
|
|
3985
|
+
defaultMaxTokens: config.defaultMaxTokens ?? 4096,
|
|
3986
|
+
retry: config.retry
|
|
3987
|
+
};
|
|
3988
|
+
this.defaultRetryOptions = {
|
|
3989
|
+
maxAttempts: 3,
|
|
3990
|
+
initialDelay: 1000,
|
|
3991
|
+
maxDelay: 30000,
|
|
3992
|
+
backoffMultiplier: 2,
|
|
3993
|
+
jitter: true
|
|
3878
3994
|
};
|
|
3879
3995
|
}
|
|
3880
3996
|
supportsModel(model) {
|
|
@@ -3883,16 +3999,13 @@ class GeminiProvider {
|
|
|
3883
3999
|
async complete(request) {
|
|
3884
4000
|
const geminiRequest = this.buildRequest(request);
|
|
3885
4001
|
const url = this.buildUrl(this.getModelPath(request.config.model) + ":generateContent");
|
|
3886
|
-
const
|
|
4002
|
+
const retryOptions = this.config.retry ?? this.defaultRetryOptions;
|
|
4003
|
+
const response = await fetchWithRetry(url, {
|
|
3887
4004
|
method: "POST",
|
|
3888
4005
|
headers: this.getHeaders(),
|
|
3889
4006
|
body: JSON.stringify(geminiRequest),
|
|
3890
4007
|
signal: request.abortSignal
|
|
3891
|
-
});
|
|
3892
|
-
if (!response.ok) {
|
|
3893
|
-
const error = await response.text();
|
|
3894
|
-
throw new Error(`Gemini API error: ${response.status} ${error}`);
|
|
3895
|
-
}
|
|
4008
|
+
}, retryOptions);
|
|
3896
4009
|
const data = await response.json();
|
|
3897
4010
|
return this.convertResponse(data, request.config.model);
|
|
3898
4011
|
}
|
|
@@ -3901,16 +4014,13 @@ class GeminiProvider {
|
|
|
3901
4014
|
const url = this.buildUrl(this.getModelPath(request.config.model) + ":streamGenerateContent", {
|
|
3902
4015
|
alt: "sse"
|
|
3903
4016
|
});
|
|
3904
|
-
const
|
|
4017
|
+
const retryOptions = this.config.retry ?? this.defaultRetryOptions;
|
|
4018
|
+
const response = await fetchWithRetry(url, {
|
|
3905
4019
|
method: "POST",
|
|
3906
4020
|
headers: this.getHeaders(),
|
|
3907
4021
|
body: JSON.stringify(geminiRequest),
|
|
3908
4022
|
signal: request.abortSignal
|
|
3909
|
-
});
|
|
3910
|
-
if (!response.ok) {
|
|
3911
|
-
const error = await response.text();
|
|
3912
|
-
throw new Error(`Gemini API error: ${response.status} ${error}`);
|
|
3913
|
-
}
|
|
4023
|
+
}, retryOptions);
|
|
3914
4024
|
const contentType = response.headers.get("content-type") ?? "";
|
|
3915
4025
|
if (!contentType.includes("text/event-stream")) {
|
|
3916
4026
|
const data = await response.json();
|
|
@@ -4778,10 +4888,13 @@ function isZodSchema(schema) {
|
|
|
4778
4888
|
return typeof schema === "object" && schema !== null && "parse" in schema && "safeParse" in schema && typeof schema.parse === "function" && typeof schema.safeParse === "function";
|
|
4779
4889
|
}
|
|
4780
4890
|
function zodToJsonSchema(zodSchema) {
|
|
4781
|
-
const
|
|
4782
|
-
const typeName =
|
|
4783
|
-
if (typeName === "ZodObject"
|
|
4784
|
-
|
|
4891
|
+
const def = zodSchema._def;
|
|
4892
|
+
const typeName = def?.typeName;
|
|
4893
|
+
if (typeName === "ZodObject") {
|
|
4894
|
+
const shape = typeof def.shape === "function" ? def.shape() : def.shape;
|
|
4895
|
+
if (shape) {
|
|
4896
|
+
return objectShapeToJsonSchema(shape);
|
|
4897
|
+
}
|
|
4785
4898
|
}
|
|
4786
4899
|
return {
|
|
4787
4900
|
type: "object",
|
|
@@ -4794,13 +4907,35 @@ function objectShapeToJsonSchema(shape) {
|
|
|
4794
4907
|
const required = [];
|
|
4795
4908
|
for (const [key, value] of Object.entries(shape)) {
|
|
4796
4909
|
const def = value?._def;
|
|
4797
|
-
|
|
4798
|
-
|
|
4910
|
+
let typeName = def?.typeName;
|
|
4911
|
+
let description = def?.description;
|
|
4799
4912
|
let innerDef = def;
|
|
4800
4913
|
let isOptional = false;
|
|
4801
|
-
|
|
4802
|
-
|
|
4803
|
-
|
|
4914
|
+
while (innerDef) {
|
|
4915
|
+
const innerTypeName = innerDef.typeName;
|
|
4916
|
+
if (innerTypeName === "ZodOptional") {
|
|
4917
|
+
isOptional = true;
|
|
4918
|
+
if (!description && innerDef.description) {
|
|
4919
|
+
description = innerDef.description;
|
|
4920
|
+
}
|
|
4921
|
+
innerDef = innerDef.innerType?._def;
|
|
4922
|
+
} else if (innerTypeName === "ZodDefault") {
|
|
4923
|
+
isOptional = true;
|
|
4924
|
+
if (!description && innerDef.description) {
|
|
4925
|
+
description = innerDef.description;
|
|
4926
|
+
}
|
|
4927
|
+
innerDef = innerDef.innerType?._def;
|
|
4928
|
+
} else if (innerTypeName === "ZodNullable") {
|
|
4929
|
+
if (!description && innerDef.description) {
|
|
4930
|
+
description = innerDef.description;
|
|
4931
|
+
}
|
|
4932
|
+
innerDef = innerDef.innerType?._def;
|
|
4933
|
+
} else {
|
|
4934
|
+
if (!description && innerDef.description) {
|
|
4935
|
+
description = innerDef.description;
|
|
4936
|
+
}
|
|
4937
|
+
break;
|
|
4938
|
+
}
|
|
4804
4939
|
}
|
|
4805
4940
|
const prop = zodDefToJsonSchema(innerDef);
|
|
4806
4941
|
if (description) {
|
|
@@ -4833,8 +4968,10 @@ function zodDefToJsonSchema(def) {
|
|
|
4833
4968
|
type: "array",
|
|
4834
4969
|
items: zodDefToJsonSchema(def.type?._def)
|
|
4835
4970
|
};
|
|
4836
|
-
case "ZodObject":
|
|
4837
|
-
|
|
4971
|
+
case "ZodObject": {
|
|
4972
|
+
const shape = typeof def.shape === "function" ? def.shape() : def.shape;
|
|
4973
|
+
return objectShapeToJsonSchema(shape);
|
|
4974
|
+
}
|
|
4838
4975
|
case "ZodLiteral":
|
|
4839
4976
|
return { type: typeof def.value, enum: [def.value] };
|
|
4840
4977
|
case "ZodUnion":
|
|
@@ -7938,6 +8075,7 @@ function createAgent(config) {
|
|
|
7938
8075
|
}
|
|
7939
8076
|
export {
|
|
7940
8077
|
zodToJsonSchema,
|
|
8078
|
+
withRetry,
|
|
7941
8079
|
webTools,
|
|
7942
8080
|
waitForEvent,
|
|
7943
8081
|
utilityTools,
|
|
@@ -7969,6 +8107,7 @@ export {
|
|
|
7969
8107
|
interactiveTools,
|
|
7970
8108
|
globalCostTracker,
|
|
7971
8109
|
getTodos,
|
|
8110
|
+
getRetryOptionsFromEnv,
|
|
7972
8111
|
getProviderEnv,
|
|
7973
8112
|
getProjectSkillsPath,
|
|
7974
8113
|
getPreset,
|
|
@@ -7985,6 +8124,7 @@ export {
|
|
|
7985
8124
|
generateEnvContext,
|
|
7986
8125
|
forkSession,
|
|
7987
8126
|
fileTools,
|
|
8127
|
+
fetchWithRetry,
|
|
7988
8128
|
extractTitle,
|
|
7989
8129
|
extractDescription,
|
|
7990
8130
|
estimateTokens,
|
package/package.json
CHANGED