formagent-sdk 0.3.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -228,6 +228,8 @@ for await (const event of session.receive()) {
228
228
 
229
229
  ## Session Management
230
230
 
231
+ **Default Behavior:** Sessions use in-memory storage by default. Conversation history is lost when the process exits. Use `FileSessionStorage` for persistence across restarts.
232
+
231
233
  ### Persistent Sessions
232
234
 
233
235
  Enable session persistence with `FileSessionStorage`:
package/dist/cli/index.js CHANGED
@@ -1735,7 +1735,8 @@ class SessionImpl {
1735
1735
  const toolResult = await tool.execute(toolInput, context);
1736
1736
  let content = typeof toolResult.content === "string" ? toolResult.content : JSON.stringify(toolResult.content);
1737
1737
  if (needsTruncation(content)) {
1738
- content = await truncateToolOutput(content);
1738
+ const truncationConfig = this.config.tempDir ? { tempDir: this.config.tempDir } : undefined;
1739
+ content = await truncateToolOutput(content, truncationConfig);
1739
1740
  }
1740
1741
  toolResponse = toolResult;
1741
1742
  result = {
@@ -2115,6 +2116,111 @@ class SessionManagerImpl {
2115
2116
  }
2116
2117
  }
2117
2118
 
2119
// src/utils/retry.ts

// Baseline retry policy: 3 attempts, exponential backoff starting at 1s,
// capped at 30s, with jitter enabled and a no-op retry callback.
var DEFAULT_RETRY_OPTIONS = {
  maxAttempts: 3,
  initialDelay: 1000,
  maxDelay: 30000,
  backoffMultiplier: 2,
  jitter: true,
  onRetry: () => {},
  signal: undefined
};

/**
 * Resolve after `ms` milliseconds.
 * @param {number} ms - delay in milliseconds
 * @returns {Promise<void>}
 */
function sleep(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
2132
/**
 * Compute the backoff delay for a retry attempt.
 *
 * @param {number} attempt - zero-based attempt index
 * @param {number} initialDelay - base delay in ms for attempt 0
 * @param {number} maxDelay - upper cap in ms on the computed delay
 * @param {number} backoffMultiplier - exponential growth factor
 * @param {boolean} jitter - when true, scale the delay by a random factor
 *   in [0.5, 1.0) to de-synchronize concurrent retriers
 * @returns {number} delay in milliseconds
 */
function calculateDelay(attempt, initialDelay, maxDelay, backoffMultiplier, jitter) {
  const exponential = initialDelay * backoffMultiplier ** attempt;
  const capped = Math.min(exponential, maxDelay);
  return jitter ? capped * (0.5 + Math.random() * 0.5) : capped;
}
2139
/**
 * True for HTTP statuses worth retrying: 429 (rate limited) or any 5xx
 * server error. All other statuses are treated as permanent failures.
 * @param {number} status
 * @returns {boolean}
 */
function isRetryableStatus(status) {
  if (status === 429) {
    return true;
  }
  return status >= 500;
}
2142
/**
 * Heuristically classify an error as transient (worth retrying).
 *
 * TypeErrors are treated as network-level fetch failures when their message
 * mentions fetch/network or a common socket error code. Other Errors are
 * matched against rate-limit, usage-limit, timeout, and 5xx-status phrases.
 *
 * @param {unknown} error - value thrown by the failed operation
 * @returns {boolean} true when a retry is likely to help
 */
function isRetryableError(error) {
  if (error instanceof TypeError) {
    // fetch() rejects with TypeError on network failures; match the usual phrasings.
    const networkHints = ["fetch", "network", "ECONNREFUSED", "ETIMEDOUT", "ECONNRESET"];
    return networkHints.some((hint) => error.message.includes(hint));
  }
  if (error instanceof Error) {
    const message = error.message.toLowerCase();
    if (message.includes("rate_limit") || message.includes("rate limit")) {
      return true;
    }
    if (message.includes("usage_limit") || message.includes("usage limit")) {
      return true;
    }
    if (message.includes("timeout") || message.includes("timed out")) {
      return true;
    }
    // BUG FIX: the previous check used message.includes("5"), which classified
    // ANY message containing the digit 5 (e.g. "model gpt-5 not found") as
    // retryable and made the explicit 502/503/504 checks redundant. Only a
    // standalone 5xx token should count as a retryable server error.
    if (/\b5\d{2}\b/.test(message)) {
      return true;
    }
  }
  return false;
}
2163
/**
 * Pull an HTTP status code out of an error message, if one is present.
 *
 * Matches only a standalone three-digit token in the valid HTTP range
 * (100-599). BUG FIX: the previous pattern /(\d{3})/ matched the first three
 * consecutive digits anywhere, so a message like "timed out after 1234 ms"
 * was misread as status 123, which then bypassed the message-based retry
 * heuristics in the caller and wrongly suppressed retries.
 *
 * @param {Error} error - error whose message may embed a status code
 * @returns {number | null} the status code, or null when none is found
 */
function extractStatusCode(error) {
  const match = error.message.match(/\b([1-5]\d{2})\b/);
  return match ? parseInt(match[1], 10) : null;
}
2167
/**
 * Run `fn`, retrying transient failures with exponential backoff.
 *
 * A thrown error is retried only when it looks transient: if a status code
 * can be extracted from its message it is judged by isRetryableStatus,
 * otherwise by the isRetryableError heuristics. Non-retryable errors and the
 * final attempt's error are rethrown as-is. An aborted `options.signal`
 * stops further attempts before they start.
 *
 * @param {() => Promise<any>} fn - operation to attempt
 * @param {object} [options] - overrides merged over DEFAULT_RETRY_OPTIONS
 * @returns {Promise<any>} the first successful result of `fn`
 * @throws the last error once attempts are exhausted or the error is not retryable
 */
async function withRetry(fn, options = {}) {
  const settings = { ...DEFAULT_RETRY_OPTIONS, ...options };
  let failure = null;
  for (let attempt = 0; attempt < settings.maxAttempts; attempt++) {
    if (settings.signal?.aborted) {
      throw new Error("Request aborted");
    }
    try {
      return await fn();
    } catch (thrown) {
      // Normalize non-Error throwables so callers always see an Error.
      failure = thrown instanceof Error ? thrown : new Error(String(thrown));
      const outOfAttempts = attempt >= settings.maxAttempts - 1;
      if (outOfAttempts) {
        throw failure;
      }
      const status = extractStatusCode(failure);
      const retryable = status === null ? isRetryableError(failure) : isRetryableStatus(status);
      if (!retryable) {
        throw failure;
      }
      const waitMs = calculateDelay(attempt, settings.initialDelay, settings.maxDelay, settings.backoffMultiplier, settings.jitter);
      settings.onRetry(attempt + 1, failure);
      await sleep(waitMs);
    }
  }
  throw failure || new Error("Max retries exceeded");
}
2193
/**
 * fetch() wrapper that retries transient failures via withRetry.
 *
 * NOTE(contract): a non-ok response is ALWAYS converted into an Error and
 * thrown — it is never returned to the caller. The thrown error carries
 * `status` (HTTP status code) and `responseText` (raw body) so callers can
 * branch on them (e.g. fall back to another endpoint on 404) by catching.
 *
 * @param {string | URL} url - request URL
 * @param {RequestInit} [init] - fetch options; `init.signal` also aborts retries
 * @param {object} [retryOptions] - overrides merged over DEFAULT_RETRY_OPTIONS
 * @returns {Promise<Response>} the successful (ok) response
 * @throws {Error} immediately for non-retryable failures, or the last error
 *   once retries are exhausted
 */
async function fetchWithRetry(url, init, retryOptions) {
  return withRetry(async () => {
    const response = await fetch(url, init);
    if (!response.ok) {
      // Read the body best-effort so it can be surfaced on the error.
      const errorText = await response.text().catch(() => "");
      let errorMessage = `HTTP ${response.status}`;
      try {
        const errorJson = JSON.parse(errorText);
        if (errorJson.error?.type) {
          errorMessage += `: ${errorJson.error.type}`;
        }
        if (errorJson.error?.message) {
          errorMessage += ` - ${errorJson.error.message}`;
        }
      } catch {
        // Body was not JSON; append it verbatim when present.
        if (errorText) {
          errorMessage += ` ${errorText}`;
        }
      }
      const error = new Error(errorMessage);
      error.status = response.status;
      error.responseText = errorText;
      throw error;
    }
    return response;
  }, {
    // BUG FIX: use optional chaining — the previous `init.signal` threw a
    // TypeError whenever fetchWithRetry was called without an init object.
    signal: init?.signal,
    ...retryOptions
  });
}
2223
+
2118
2224
  // src/llm/anthropic.ts
2119
2225
  class AnthropicProvider {
2120
2226
  id = "anthropic";
@@ -2127,6 +2233,7 @@ class AnthropicProvider {
2127
2233
  /^claude-instant/
2128
2234
  ];
2129
2235
  config;
2236
+ defaultRetryOptions;
2130
2237
  constructor(config = {}) {
2131
2238
  const apiKey = config.apiKey ?? process.env.ANTHROPIC_API_KEY;
2132
2239
  if (!apiKey) {
@@ -2136,7 +2243,15 @@ class AnthropicProvider {
2136
2243
  apiKey,
2137
2244
  baseUrl: config.baseUrl ?? process.env.ANTHROPIC_BASE_URL ?? "https://api.anthropic.com",
2138
2245
  apiVersion: config.apiVersion ?? "2023-06-01",
2139
- defaultMaxTokens: config.defaultMaxTokens ?? 4096
2246
+ defaultMaxTokens: config.defaultMaxTokens ?? 4096,
2247
+ retry: config.retry
2248
+ };
2249
+ this.defaultRetryOptions = {
2250
+ maxAttempts: 3,
2251
+ initialDelay: 1000,
2252
+ maxDelay: 30000,
2253
+ backoffMultiplier: 2,
2254
+ jitter: true
2140
2255
  };
2141
2256
  }
2142
2257
  supportsModel(model) {
@@ -2144,31 +2259,25 @@ class AnthropicProvider {
2144
2259
  }
2145
2260
  async complete(request) {
2146
2261
  const anthropicRequest = this.buildRequest(request, false);
2147
- const response = await fetch(`${this.config.baseUrl}/v1/messages`, {
2262
+ const retryOptions = this.config.retry ?? this.defaultRetryOptions;
2263
+ const response = await fetchWithRetry(`${this.config.baseUrl}/v1/messages`, {
2148
2264
  method: "POST",
2149
2265
  headers: this.getHeaders(),
2150
2266
  body: JSON.stringify(anthropicRequest),
2151
2267
  signal: request.abortSignal
2152
- });
2153
- if (!response.ok) {
2154
- const error = await response.text();
2155
- throw new Error(`Anthropic API error: ${response.status} ${error}`);
2156
- }
2268
+ }, retryOptions);
2157
2269
  const data = await response.json();
2158
2270
  return this.convertResponse(data);
2159
2271
  }
2160
2272
  async stream(request, options) {
2161
2273
  const anthropicRequest = this.buildRequest(request, true);
2162
- const response = await fetch(`${this.config.baseUrl}/v1/messages`, {
2274
+ const retryOptions = this.config.retry ?? this.defaultRetryOptions;
2275
+ const response = await fetchWithRetry(`${this.config.baseUrl}/v1/messages`, {
2163
2276
  method: "POST",
2164
2277
  headers: this.getHeaders(),
2165
2278
  body: JSON.stringify(anthropicRequest),
2166
2279
  signal: request.abortSignal
2167
- });
2168
- if (!response.ok) {
2169
- const error = await response.text();
2170
- throw new Error(`Anthropic API error: ${response.status} ${error}`);
2171
- }
2280
+ }, retryOptions);
2172
2281
  return this.createStreamIterator(response.body, options);
2173
2282
  }
2174
2283
  buildRequest(request, stream) {
@@ -2389,6 +2498,7 @@ class OpenAIProvider {
2389
2498
  /^chatgpt/
2390
2499
  ];
2391
2500
  config;
2501
+ defaultRetryOptions;
2392
2502
  constructor(config = {}) {
2393
2503
  const apiKey = config.apiKey ?? process.env.OPENAI_API_KEY;
2394
2504
  if (!apiKey) {
@@ -2398,96 +2508,88 @@ class OpenAIProvider {
2398
2508
  apiKey,
2399
2509
  baseUrl: this.normalizeBaseUrl(config.baseUrl ?? process.env.OPENAI_BASE_URL ?? "https://api.openai.com/v1"),
2400
2510
  organization: config.organization,
2401
- defaultMaxTokens: config.defaultMaxTokens ?? 4096
2511
+ defaultMaxTokens: config.defaultMaxTokens ?? 4096,
2512
+ retry: config.retry
2513
+ };
2514
+ this.defaultRetryOptions = {
2515
+ maxAttempts: 3,
2516
+ initialDelay: 1000,
2517
+ maxDelay: 30000,
2518
+ backoffMultiplier: 2,
2519
+ jitter: true
2402
2520
  };
2403
2521
  }
2404
2522
  supportsModel(model) {
2405
2523
  return this.supportedModels.some((pattern) => pattern.test(model));
2406
2524
  }
2407
2525
  async complete(request) {
2526
+ const retryOptions = this.config.retry ?? this.defaultRetryOptions;
2408
2527
  if (this.usesResponsesApi(request.config.model)) {
2409
2528
  const openaiRequest2 = this.buildResponsesRequest(request, false);
2410
- const response2 = await fetch(`${this.config.baseUrl}/responses`, {
2529
+ const response2 = await fetchWithRetry(`${this.config.baseUrl}/responses`, {
2411
2530
  method: "POST",
2412
2531
  headers: this.getHeaders(),
2413
2532
  body: JSON.stringify(openaiRequest2),
2414
2533
  signal: request.abortSignal
2415
- });
2416
- if (!response2.ok) {
2417
- const error = await response2.text();
2418
- throw new Error(`OpenAI API error: ${response2.status} ${error}`);
2419
- }
2534
+ }, retryOptions);
2420
2535
  const data2 = await response2.json();
2421
2536
  return this.convertResponsesResponse(data2);
2422
2537
  }
2423
2538
  const openaiRequest = this.buildRequest(request, false);
2424
- let response = await fetch(`${this.config.baseUrl}/chat/completions`, {
2539
+ const response = await fetchWithRetry(`${this.config.baseUrl}/chat/completions`, {
2425
2540
  method: "POST",
2426
2541
  headers: this.getHeaders(),
2427
2542
  body: JSON.stringify(openaiRequest),
2428
2543
  signal: request.abortSignal
2429
- });
2430
- if (!response.ok) {
2431
- const error = await response.text();
2432
- if (this.shouldFallbackToResponses(response.status, error)) {
2544
+ }, retryOptions);
2545
+ if (response.status === 404) {
2546
+ const errorText = await response.clone().text();
2547
+ if (this.shouldFallbackToResponses(404, errorText)) {
2433
2548
  const fallbackRequest = this.buildResponsesRequest(request, false);
2434
- response = await fetch(`${this.config.baseUrl}/responses`, {
2549
+ const fallbackResponse = await fetchWithRetry(`${this.config.baseUrl}/responses`, {
2435
2550
  method: "POST",
2436
2551
  headers: this.getHeaders(),
2437
2552
  body: JSON.stringify(fallbackRequest),
2438
2553
  signal: request.abortSignal
2439
- });
2440
- if (!response.ok) {
2441
- const fallbackError = await response.text();
2442
- throw new Error(`OpenAI API error: ${response.status} ${fallbackError}`);
2443
- }
2444
- const data2 = await response.json();
2554
+ }, retryOptions);
2555
+ const data2 = await fallbackResponse.json();
2445
2556
  return this.convertResponsesResponse(data2);
2446
2557
  }
2447
- throw new Error(`OpenAI API error: ${response.status} ${error}`);
2448
2558
  }
2449
2559
  const data = await response.json();
2450
2560
  return this.convertResponse(data);
2451
2561
  }
2452
2562
  async stream(request, options) {
2563
+ const retryOptions = this.config.retry ?? this.defaultRetryOptions;
2453
2564
  if (this.usesResponsesApi(request.config.model)) {
2454
2565
  const openaiRequest2 = this.buildResponsesRequest(request, true);
2455
- const response2 = await fetch(`${this.config.baseUrl}/responses`, {
2566
+ const response2 = await fetchWithRetry(`${this.config.baseUrl}/responses`, {
2456
2567
  method: "POST",
2457
2568
  headers: this.getHeaders(),
2458
2569
  body: JSON.stringify(openaiRequest2),
2459
2570
  signal: request.abortSignal
2460
- });
2461
- if (!response2.ok) {
2462
- const error = await response2.text();
2463
- throw new Error(`OpenAI API error: ${response2.status} ${error}`);
2464
- }
2571
+ }, retryOptions);
2465
2572
  return this.createResponsesStreamIterator(response2.body, options);
2466
2573
  }
2467
2574
  const openaiRequest = this.buildRequest(request, true);
2468
- let response = await fetch(`${this.config.baseUrl}/chat/completions`, {
2575
+ const response = await fetchWithRetry(`${this.config.baseUrl}/chat/completions`, {
2469
2576
  method: "POST",
2470
2577
  headers: this.getHeaders(),
2471
2578
  body: JSON.stringify(openaiRequest),
2472
2579
  signal: request.abortSignal
2473
- });
2474
- if (!response.ok) {
2475
- const error = await response.text();
2476
- if (this.shouldFallbackToResponses(response.status, error)) {
2580
+ }, retryOptions);
2581
+ if (response.status === 404) {
2582
+ const errorText = await response.clone().text();
2583
+ if (this.shouldFallbackToResponses(404, errorText)) {
2477
2584
  const fallbackRequest = this.buildResponsesRequest(request, true);
2478
- response = await fetch(`${this.config.baseUrl}/responses`, {
2585
+ const fallbackResponse = await fetchWithRetry(`${this.config.baseUrl}/responses`, {
2479
2586
  method: "POST",
2480
2587
  headers: this.getHeaders(),
2481
2588
  body: JSON.stringify(fallbackRequest),
2482
2589
  signal: request.abortSignal
2483
- });
2484
- if (!response.ok) {
2485
- const fallbackError = await response.text();
2486
- throw new Error(`OpenAI API error: ${response.status} ${fallbackError}`);
2487
- }
2488
- return this.createResponsesStreamIterator(response.body, options);
2590
+ }, retryOptions);
2591
+ return this.createResponsesStreamIterator(fallbackResponse.body, options);
2489
2592
  }
2490
- throw new Error(`OpenAI API error: ${response.status} ${error}`);
2491
2593
  }
2492
2594
  return this.createStreamIterator(response.body, options);
2493
2595
  }
@@ -3202,6 +3304,7 @@ class GeminiProvider {
3202
3304
  name = "Gemini";
3203
3305
  supportedModels = [/^gemini-/, /^models\/gemini-/];
3204
3306
  config;
3307
+ defaultRetryOptions;
3205
3308
  constructor(config = {}) {
3206
3309
  const apiKey = config.apiKey ?? process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY;
3207
3310
  if (!apiKey) {
@@ -3210,7 +3313,15 @@ class GeminiProvider {
3210
3313
  this.config = {
3211
3314
  apiKey,
3212
3315
  baseUrl: config.baseUrl ?? process.env.GEMINI_BASE_URL ?? "https://generativelanguage.googleapis.com/v1beta",
3213
- defaultMaxTokens: config.defaultMaxTokens ?? 4096
3316
+ defaultMaxTokens: config.defaultMaxTokens ?? 4096,
3317
+ retry: config.retry
3318
+ };
3319
+ this.defaultRetryOptions = {
3320
+ maxAttempts: 3,
3321
+ initialDelay: 1000,
3322
+ maxDelay: 30000,
3323
+ backoffMultiplier: 2,
3324
+ jitter: true
3214
3325
  };
3215
3326
  }
3216
3327
  supportsModel(model) {
@@ -3219,16 +3330,13 @@ class GeminiProvider {
3219
3330
  async complete(request) {
3220
3331
  const geminiRequest = this.buildRequest(request);
3221
3332
  const url = this.buildUrl(this.getModelPath(request.config.model) + ":generateContent");
3222
- const response = await fetch(url, {
3333
+ const retryOptions = this.config.retry ?? this.defaultRetryOptions;
3334
+ const response = await fetchWithRetry(url, {
3223
3335
  method: "POST",
3224
3336
  headers: this.getHeaders(),
3225
3337
  body: JSON.stringify(geminiRequest),
3226
3338
  signal: request.abortSignal
3227
- });
3228
- if (!response.ok) {
3229
- const error = await response.text();
3230
- throw new Error(`Gemini API error: ${response.status} ${error}`);
3231
- }
3339
+ }, retryOptions);
3232
3340
  const data = await response.json();
3233
3341
  return this.convertResponse(data, request.config.model);
3234
3342
  }
@@ -3237,16 +3345,13 @@ class GeminiProvider {
3237
3345
  const url = this.buildUrl(this.getModelPath(request.config.model) + ":streamGenerateContent", {
3238
3346
  alt: "sse"
3239
3347
  });
3240
- const response = await fetch(url, {
3348
+ const retryOptions = this.config.retry ?? this.defaultRetryOptions;
3349
+ const response = await fetchWithRetry(url, {
3241
3350
  method: "POST",
3242
3351
  headers: this.getHeaders(),
3243
3352
  body: JSON.stringify(geminiRequest),
3244
3353
  signal: request.abortSignal
3245
- });
3246
- if (!response.ok) {
3247
- const error = await response.text();
3248
- throw new Error(`Gemini API error: ${response.status} ${error}`);
3249
- }
3354
+ }, retryOptions);
3250
3355
  const contentType = response.headers.get("content-type") ?? "";
3251
3356
  if (!contentType.includes("text/event-stream")) {
3252
3357
  const data = await response.json();
@@ -4262,6 +4367,7 @@ function checkDirAccess(dirPath, options) {
4262
4367
 
4263
4368
  // src/tools/builtin/bash.ts
4264
4369
  var DEFAULT_TIMEOUT = 120000;
4370
+ var DEFAULT_IDLE_TIMEOUT = 30000;
4265
4371
  var MAX_OUTPUT_LENGTH = 1e5;
4266
4372
  var DEFAULT_BLOCKED_PATTERNS = [
4267
4373
  "\\bsudo\\b",
@@ -4328,21 +4434,39 @@ function createBashTool(options = {}) {
4328
4434
  }
4329
4435
  }
4330
4436
  const actualTimeout = Math.min(timeout, 600000);
4437
+ const idleTimeout = options.idleTimeout ?? DEFAULT_IDLE_TIMEOUT;
4331
4438
  return new Promise((resolve2) => {
4332
4439
  let stdout = "";
4333
4440
  let stderr = "";
4334
4441
  let killed = false;
4442
+ let killedReason = null;
4443
+ let lastOutputTime = Date.now();
4335
4444
  const proc = spawn("bash", ["-c", command], {
4336
4445
  cwd: cwdAccess.resolved,
4337
4446
  env: process.env,
4338
- shell: false
4447
+ shell: false,
4448
+ stdio: ["ignore", "pipe", "pipe"]
4339
4449
  });
4340
4450
  const timer = setTimeout(() => {
4341
4451
  killed = true;
4452
+ killedReason = "timeout";
4342
4453
  proc.kill("SIGTERM");
4343
4454
  setTimeout(() => proc.kill("SIGKILL"), 1000);
4344
4455
  }, actualTimeout);
4456
+ const idleChecker = setInterval(() => {
4457
+ const idleTime = Date.now() - lastOutputTime;
4458
+ if (idleTime >= idleTimeout && !killed) {
4459
+ killed = true;
4460
+ killedReason = "idle";
4461
+ proc.kill("SIGTERM");
4462
+ setTimeout(() => proc.kill("SIGKILL"), 1000);
4463
+ }
4464
+ }, 1000);
4465
+ const updateLastOutputTime = () => {
4466
+ lastOutputTime = Date.now();
4467
+ };
4345
4468
  proc.stdout?.on("data", (data) => {
4469
+ updateLastOutputTime();
4346
4470
  stdout += data.toString();
4347
4471
  if (stdout.length > MAX_OUTPUT_LENGTH) {
4348
4472
  stdout = stdout.slice(0, MAX_OUTPUT_LENGTH) + `
@@ -4351,6 +4475,7 @@ function createBashTool(options = {}) {
4351
4475
  }
4352
4476
  });
4353
4477
  proc.stderr?.on("data", (data) => {
4478
+ updateLastOutputTime();
4354
4479
  stderr += data.toString();
4355
4480
  if (stderr.length > MAX_OUTPUT_LENGTH) {
4356
4481
  stderr = stderr.slice(0, MAX_OUTPUT_LENGTH) + `
@@ -4359,17 +4484,31 @@ function createBashTool(options = {}) {
4359
4484
  });
4360
4485
  proc.on("close", (code) => {
4361
4486
  clearTimeout(timer);
4487
+ clearInterval(idleChecker);
4362
4488
  if (killed) {
4363
- resolve2({
4364
- content: `Command timed out after ${actualTimeout}ms
4489
+ if (killedReason === "idle") {
4490
+ resolve2({
4491
+ content: `Command terminated: no output for ${idleTimeout / 1000} seconds (likely waiting for input)
4365
4492
 
4366
4493
  Partial output:
4367
4494
  ${stdout}
4368
4495
 
4369
4496
  Stderr:
4370
4497
  ${stderr}`,
4371
- isError: true
4372
- });
4498
+ isError: true
4499
+ });
4500
+ } else {
4501
+ resolve2({
4502
+ content: `Command timed out after ${actualTimeout}ms
4503
+
4504
+ Partial output:
4505
+ ${stdout}
4506
+
4507
+ Stderr:
4508
+ ${stderr}`,
4509
+ isError: true
4510
+ });
4511
+ }
4373
4512
  return;
4374
4513
  }
4375
4514
  const output = stdout + (stderr ? `
@@ -4390,6 +4529,7 @@ ${output}`,
4390
4529
  });
4391
4530
  proc.on("error", (error) => {
4392
4531
  clearTimeout(timer);
4532
+ clearInterval(idleChecker);
4393
4533
  resolve2({
4394
4534
  content: `Failed to execute command: ${error.message}`,
4395
4535
  isError: true
@@ -6029,7 +6169,7 @@ ${responseText}`;
6029
6169
  metadata: {
6030
6170
  status: response.status,
6031
6171
  statusText: response.statusText,
6032
- headers: Object.fromEntries(response.headers.entries()),
6172
+ headers: Object.fromEntries(response.headers),
6033
6173
  body: responseBody
6034
6174
  }
6035
6175
  };
@@ -6200,14 +6340,17 @@ ${c.bold("Interactive Commands:")}
6200
6340
  ${c.cyan("/exit")} Exit the CLI
6201
6341
 
6202
6342
  ${c.bold("Environment:")}
6203
- ${c.cyan("ANTHROPIC_API_KEY")} Anthropic API key (for Claude models)
6204
- ${c.cyan("ANTHROPIC_MODEL")} Optional. Claude model (default: claude-sonnet-4-20250514)
6205
- ${c.cyan("GEMINI_API_KEY")} Gemini API key (for Gemini models)
6206
- ${c.cyan("GEMINI_MODEL")} Optional. Gemini model (default: gemini-1.5-pro)
6207
- ${c.cyan("GEMINI_BASE_URL")} Optional. Custom Gemini API base URL
6208
- ${c.cyan("OPENAI_API_KEY")} OpenAI API key (for GPT models)
6209
- ${c.cyan("OPENAI_MODEL")} Optional. OpenAI model (default: gpt-5.2)
6210
- ${c.cyan("OPENAI_BASE_URL")} Optional. Custom OpenAI-compatible API URL
6343
+ ${c.cyan("ANTHROPIC_API_KEY")} Anthropic API key (for Claude models)
6344
+ ${c.cyan("ANTHROPIC_MODEL")} Optional. Claude model (default: claude-sonnet-4-20250514)
6345
+ ${c.cyan("GEMINI_API_KEY")} Gemini API key (for Gemini models)
6346
+ ${c.cyan("GEMINI_MODEL")} Optional. Gemini model (default: gemini-1.5-pro)
6347
+ ${c.cyan("GEMINI_BASE_URL")} Optional. Custom Gemini API base URL
6348
+ ${c.cyan("OPENAI_API_KEY")} OpenAI API key (for GPT models)
6349
+ ${c.cyan("OPENAI_MODEL")} Optional. OpenAI model (default: gpt-5.2)
6350
+ ${c.cyan("OPENAI_BASE_URL")} Optional. Custom OpenAI-compatible API URL
6351
+ ${c.cyan("ANTIGRAVITY_API_KEY")} Antigravity API key (default: sk-antigravity)
6352
+ ${c.cyan("ANTIGRAVITY_MODEL")} Optional. Antigravity model (default: gemini-3-flash)
6353
+ ${c.cyan("ANTIGRAVITY_BASE_URL")} Optional. Antigravity gateway URL (default: http://127.0.0.1:8045)
6211
6354
 
6212
6355
  ${c.bold("Examples:")}
6213
6356
  ${c.dim("# Start interactive mode")}
@@ -6343,6 +6486,7 @@ function printModelsHelp() {
6343
6486
  console.log(` ${c.cyan("/models")} openai gpt-5-mini`);
6344
6487
  console.log(` ${c.cyan("/models")} anthropic claude-sonnet-4-20250514`);
6345
6488
  console.log(` ${c.cyan("/models")} gemini gemini-1.5-pro`);
6489
+ console.log(` ${c.cyan("/models")} antigravity gemini-3-flash`);
6346
6490
  console.log(` ${c.cyan("/models")} gpt-5.2`);
6347
6491
  console.log(` ${c.cyan("/models")} reset`);
6348
6492
  console.log();
@@ -6383,7 +6527,7 @@ async function handleModelsCommand(args) {
6383
6527
  const provider = parseProvider(args[0]);
6384
6528
  if (!provider) {
6385
6529
  console.log(c.yellow(`
6386
- Unknown provider: ${args[0]}. Use "openai", "anthropic", or "gemini".
6530
+ Unknown provider: ${args[0]}. Use "openai", "anthropic", "gemini", or "antigravity".
6387
6531
  `));
6388
6532
  return;
6389
6533
  }
@@ -6527,6 +6671,34 @@ async function listGeminiModels() {
6527
6671
  }
6528
6672
  console.log();
6529
6673
  }
6674
+ async function listAntigravityModels() {
6675
+ const config = getAntigravityConfig();
6676
+ const baseUrl = `${config.baseUrl}/v1`;
6677
+ console.log(c.bold("Antigravity Models:"));
6678
+ console.log(c.dim(" API Type: openai-compatible (Antigravity Gateway)"));
6679
+ console.log(c.dim(` Base URL: ${baseUrl}`));
6680
+ console.log(c.dim(` Default Model: ${config.model}`));
6681
+ const res = await fetch(`${baseUrl}/models`, {
6682
+ headers: {
6683
+ Authorization: `Bearer ${config.apiKey}`,
6684
+ "x-api-key": config.apiKey
6685
+ }
6686
+ });
6687
+ if (!res.ok) {
6688
+ console.log(c.red(` ✗ Failed to fetch models (${res.status})`));
6689
+ console.log(c.dim(` URL: ${baseUrl}/models`));
6690
+ console.log(c.dim(" Make sure Antigravity Manager is running"));
6691
+ console.log();
6692
+ return;
6693
+ }
6694
+ const payload = await res.json();
6695
+ const items = payload.data ?? [];
6696
+ for (const item of items) {
6697
+ const owner = item.owned_by ? ` (${item.owned_by})` : "";
6698
+ console.log(` ${c.green("●")} ${item.id}${owner}`);
6699
+ }
6700
+ console.log();
6701
+ }
6530
6702
  async function listModelsSummary() {
6531
6703
  const provider = getActiveProviderId();
6532
6704
  const apiType = provider ?? "auto";
@@ -6547,6 +6719,12 @@ async function listModelsSummary() {
6547
6719
  console.log(c.red(` ✗ Gemini: ${error instanceof Error ? error.message : String(error)}`));
6548
6720
  console.log();
6549
6721
  }
6722
+ try {
6723
+ await listAntigravityModels();
6724
+ } catch (error) {
6725
+ console.log(c.red(` ✗ Antigravity: ${error instanceof Error ? error.message : String(error)}`));
6726
+ console.log();
6727
+ }
6550
6728
  await listAnthropicModels();
6551
6729
  }
6552
6730
  function printDebug() {
@@ -6566,16 +6744,20 @@ function printDebug() {
6566
6744
  console.log(` ${c.cyan("ANTHROPIC_MODEL:")} ${process.env.ANTHROPIC_MODEL || c.dim("(not set)")}`);
6567
6745
  console.log(` ${c.cyan("GEMINI_MODEL:")} ${process.env.GEMINI_MODEL || c.dim("(not set)")}`);
6568
6746
  console.log(` ${c.cyan("GEMINI_BASE_URL:")} ${process.env.GEMINI_BASE_URL || c.dim("(not set)")}`);
6569
- console.log(` ${c.cyan("OPENAI_MODEL:")} ${process.env.OPENAI_MODEL || c.dim("(not set)")}`);
6570
- console.log(` ${c.cyan("OPENAI_BASE_URL:")} ${process.env.OPENAI_BASE_URL || c.dim("(not set)")}`);
6747
+ console.log(` ${c.cyan("OPENAI_MODEL:")} ${process.env.OPENAI_MODEL || c.dim("(not set)")}`);
6748
+ console.log(` ${c.cyan("OPENAI_BASE_URL:")} ${process.env.OPENAI_BASE_URL || c.dim("(not set)")}`);
6749
+ console.log(` ${c.cyan("ANTIGRAVITY_MODEL:")} ${process.env.ANTIGRAVITY_MODEL || c.dim("(not set)")}`);
6750
+ console.log(` ${c.cyan("ANTIGRAVITY_BASE_URL:")} ${process.env.ANTIGRAVITY_BASE_URL || c.dim("(not set)")}`);
6571
6751
  console.log();
6572
6752
  console.log(c.bold("API Keys:"));
6573
6753
  const anthropicKey = process.env.ANTHROPIC_API_KEY;
6574
6754
  const geminiKey = process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY;
6575
6755
  const openaiKey = process.env.OPENAI_API_KEY;
6576
- console.log(` ${c.cyan("ANTHROPIC_API_KEY:")} ${anthropicKey ? c.green("✓ set") + c.dim(` (${anthropicKey.slice(0, 8)}...${anthropicKey.slice(-4)})`) : c.red("✗ not set")}`);
6577
- console.log(` ${c.cyan("GEMINI_API_KEY:")} ${geminiKey ? c.green("✓ set") + c.dim(` (${geminiKey.slice(0, 8)}...${geminiKey.slice(-4)})`) : c.red("✗ not set")}`);
6578
- console.log(` ${c.cyan("OPENAI_API_KEY:")} ${openaiKey ? c.green("✓ set") + c.dim(` (${openaiKey.slice(0, 8)}...${openaiKey.slice(-4)})`) : c.red("✗ not set")}`);
6756
+ const antigravityKey = process.env.ANTIGRAVITY_API_KEY;
6757
+ console.log(` ${c.cyan("ANTHROPIC_API_KEY:")} ${anthropicKey ? c.green("✓ set") + c.dim(` (${anthropicKey.slice(0, 8)}...${anthropicKey.slice(-4)})`) : c.red("✗ not set")}`);
6758
+ console.log(` ${c.cyan("GEMINI_API_KEY:")} ${geminiKey ? c.green("✓ set") + c.dim(` (${geminiKey.slice(0, 8)}...${geminiKey.slice(-4)})`) : c.red("✗ not set")}`);
6759
+ console.log(` ${c.cyan("OPENAI_API_KEY:")} ${openaiKey ? c.green("✓ set") + c.dim(` (${openaiKey.slice(0, 8)}...${openaiKey.slice(-4)})`) : c.red("✗ not set")}`);
6760
+ console.log(` ${c.cyan("ANTIGRAVITY_API_KEY:")} ${antigravityKey ? c.green("✓ set") + c.dim(` (${antigravityKey.slice(0, 8)}...${antigravityKey.slice(-4)})`) : c.dim("(default: sk-antigravity)")}`);
6579
6761
  console.log();
6580
6762
  console.log(c.bold("Environment:"));
6581
6763
  console.log(` ${c.cyan("Working dir:")} ${cwd}`);
@@ -6639,6 +6821,9 @@ function formatToolInput(name, input) {
6639
6821
  }
6640
6822
  }
6641
6823
  function getDefaultProviderFromEnv() {
6824
+ if (process.env.ANTIGRAVITY_API_KEY || process.env.ANTIGRAVITY_BASE_URL) {
6825
+ return "antigravity";
6826
+ }
6642
6827
  if (process.env.ANTHROPIC_API_KEY) {
6643
6828
  return "anthropic";
6644
6829
  }
@@ -6670,6 +6855,9 @@ function getDefaultModelForProvider(providerId) {
6670
6855
  if (providerId === "gemini") {
6671
6856
  return process.env.GEMINI_MODEL || "gemini-1.5-pro";
6672
6857
  }
6858
+ if (providerId === "antigravity") {
6859
+ return process.env.ANTIGRAVITY_MODEL || "gemini-3-flash";
6860
+ }
6673
6861
  return process.env.OPENAI_MODEL || "gpt-5.2";
6674
6862
  }
6675
6863
  function getActiveProviderId() {
@@ -6702,8 +6890,17 @@ function parseProvider(arg) {
6702
6890
  if (normalized === "gemini" || normalized === "google") {
6703
6891
  return "gemini";
6704
6892
  }
6893
+ if (normalized === "antigravity" || normalized === "ag") {
6894
+ return "antigravity";
6895
+ }
6705
6896
  return null;
6706
6897
  }
6898
+ function getAntigravityConfig() {
6899
+ const baseUrl = (process.env.ANTIGRAVITY_BASE_URL || "http://127.0.0.1:8045").replace(/\/+$/, "");
6900
+ const apiKey = process.env.ANTIGRAVITY_API_KEY || "sk-antigravity";
6901
+ const model = process.env.ANTIGRAVITY_MODEL || "gemini-3-flash";
6902
+ return { baseUrl, apiKey, model };
6903
+ }
6707
6904
  function createProvider(providerId) {
6708
6905
  if (providerId === "anthropic") {
6709
6906
  return new AnthropicProvider;
@@ -6714,6 +6911,13 @@ function createProvider(providerId) {
6714
6911
  baseUrl: process.env.GEMINI_BASE_URL
6715
6912
  });
6716
6913
  }
6914
+ if (providerId === "antigravity") {
6915
+ const config = getAntigravityConfig();
6916
+ return new OpenAIProvider({
6917
+ apiKey: config.apiKey,
6918
+ baseUrl: `${config.baseUrl}/v1`
6919
+ });
6920
+ }
6717
6921
  return new OpenAIProvider({
6718
6922
  apiKey: process.env.OPENAI_API_KEY,
6719
6923
  baseUrl: process.env.OPENAI_BASE_URL