claude-code-openai 0.1.27 → 0.1.29

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/cli.js +30 -15
  2. package/package.json +9 -2
package/dist/cli.js CHANGED
@@ -204660,7 +204660,7 @@ var init_metadata = __esm(() => {
204660
204660
  isClaudeAiAuth: isClaudeAISubscriber(),
204661
204661
  version: "2.1.88-rebuild",
204662
204662
  versionBase: getVersionBase(),
204663
- buildTime: "2026-04-01T21:06:14.024Z",
204663
+ buildTime: "2026-04-01T21:37:21.080Z",
204664
204664
  deploymentEnvironment: env4.detectDeploymentEnvironment(),
204665
204665
  ...isEnvTruthy(process.env.GITHUB_ACTIONS) && {
204666
204666
  githubEventName: process.env.GITHUB_EVENT_NAME,
@@ -592955,7 +592955,7 @@ function getAnthropicEnvMetadata() {
592955
592955
  function getBuildAgeMinutes() {
592956
592956
  if (false)
592957
592957
  ;
592958
- const buildTime = new Date("2026-04-01T21:06:14.024Z").getTime();
592958
+ const buildTime = new Date("2026-04-01T21:37:21.080Z").getTime();
592959
592959
  if (isNaN(buildTime))
592960
592960
  return;
592961
592961
  return Math.floor((Date.now() - buildTime) / 60000);
@@ -595140,6 +595140,8 @@ function convertStopReason(status) {
595140
595140
  return "max_tokens";
595141
595141
  case "failed":
595142
595142
  case "cancelled":
595143
+ case "in_progress":
595144
+ case "queued":
595143
595145
  return null;
595144
595146
  default:
595145
595147
  return null;
@@ -595246,11 +595248,7 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595246
595248
  const oaiTools = convertToolSchemas(tools);
595247
595249
  const hasWebSearch = oaiTools.some((t2) => t2.type === "web_search_preview" || t2.type === "web_search");
595248
595250
  if (!hasWebSearch) {
595249
- if (client3.isOAuthChatgpt) {
595250
- oaiTools.push({ type: "web_search", search_context_size: "medium", external_web_access: true });
595251
- } else {
595252
- oaiTools.push({ type: "web_search_preview", search_context_size: "medium" });
595253
- }
595251
+ oaiTools.push({ type: "web_search", search_context_size: "medium" });
595254
595252
  }
595255
595253
  const toolChoice = convertToolChoice(options.toolChoice);
595256
595254
  const supportsReasoning = /^(gpt-5|o[1-9]|o3)/.test(openaiModel);
@@ -595258,7 +595256,12 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595258
595256
  if (supportsReasoning && thinkingConfig.type !== "disabled") {
595259
595257
  const resolved = resolveAppliedEffort(options.model, options.effortValue);
595260
595258
  const level = resolved ? convertEffortValueToLevel(resolved) : "medium";
595261
- const effort = level === "max" ? "high" : level;
595259
+ let effort;
595260
+ if (level === "max") {
595261
+ effort = /codex/.test(openaiModel) ? "xhigh" : "high";
595262
+ } else {
595263
+ effort = level;
595264
+ }
595262
595265
  reasoning = { effort, summary: "auto" };
595263
595266
  }
595264
595267
  let input;
@@ -595278,6 +595281,7 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595278
595281
  }
595279
595282
  const hasFunctionTools = oaiTools.some((t2) => t2.type === "function");
595280
595283
  let params;
595284
+ const promptCacheKey = getSessionId() || undefined;
595281
595285
  if (client3.isOAuthChatgpt) {
595282
595286
  params = {
595283
595287
  model: openaiModel,
@@ -595289,7 +595293,9 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595289
595293
  reasoning,
595290
595294
  store: false,
595291
595295
  stream: true,
595292
- include: reasoning ? ["reasoning.encrypted_content"] : []
595296
+ include: reasoning ? ["reasoning.encrypted_content"] : [],
595297
+ prompt_cache_key: promptCacheKey,
595298
+ truncation: "auto"
595293
595299
  };
595294
595300
  } else {
595295
595301
  const maxOutputTokens = options.maxOutputTokensOverride || MAX_OUTPUT_TOKENS[openaiModel] || 16384;
@@ -595305,13 +595311,16 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595305
595311
  temperature: options.temperatureOverride ?? 1,
595306
595312
  reasoning,
595307
595313
  previous_response_id: usePreviousResponseId ? _lastResponseId : undefined,
595308
- store: true
595314
+ store: true,
595315
+ prompt_cache_key: promptCacheKey,
595316
+ truncation: "auto"
595309
595317
  };
595310
595318
  }
595311
595319
  logForDebugging(`[OpenAI] Request: model=${openaiModel} input=${input.length} items (${usePreviousResponseId ? "incremental, chain=" + _lastResponseId : "full"}) tools=${oaiTools.length}`);
595312
595320
  const start = Date.now();
595313
595321
  const MAX_RETRIES4 = 3;
595314
595322
  const BASE_DELAY_MS4 = 500;
595323
+ let responseId = null;
595315
595324
  try {
595316
595325
  let resetWatchdog = function() {
595317
595326
  if (watchdogTimer)
@@ -595453,7 +595462,7 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595453
595462
  cache_read_input_tokens: 0
595454
595463
  };
595455
595464
  let isFirstChunk = true;
595456
- let responseId = null;
595465
+ responseId = null;
595457
595466
  let responseModel = openaiModel;
595458
595467
  let responseStatus = "completed";
595459
595468
  const functionCallState = new Map;
@@ -595792,10 +595801,15 @@ Sources:
595792
595801
  requestId: responseId ?? undefined,
595793
595802
  isApiErrorMessage: false
595794
595803
  };
595795
- logForDebugging(`[OpenAI] Complete: ${contentBlocks.length} blocks, ${usage.input_tokens}in/${usage.output_tokens}out tokens, ${Date.now() - start}ms`);
595804
+ const usageAny = usage;
595805
+ const cachedInfo = usageAny.cache_read_input_tokens ? ` (${usageAny.cache_read_input_tokens} cached)` : "";
595806
+ const reasoningInfo = usageAny.reasoning_output_tokens ? ` (${usageAny.reasoning_output_tokens} reasoning)` : "";
595807
+ logForDebugging(`[OpenAI] Complete: ${contentBlocks.length} blocks, ${usageAny.input_tokens}in${cachedInfo}/${usageAny.output_tokens}out${reasoningInfo} tokens, ${Date.now() - start}ms`);
595796
595808
  } catch (err2) {
595797
595809
  if (err2 instanceof Error && err2.name === "AbortError") {
595798
- _lastResponseId = null;
595810
+ if (!responseId) {
595811
+ _lastResponseId = null;
595812
+ }
595799
595813
  return;
595800
595814
  }
595801
595815
  const errorMsg = err2 instanceof Error ? err2.message : String(err2);
@@ -595840,6 +595854,7 @@ var init_openai_query = __esm(() => {
595840
595854
  init_client5();
595841
595855
  init_openai_adapter();
595842
595856
  init_effort();
595857
+ init_state();
595843
595858
  OPENAI_MODEL_MAP = {
595844
595859
  "claude-opus-4-6-20260401": "gpt-5.4",
595845
595860
  "claude-opus-4-5-20250918": "gpt-5.4",
@@ -679559,7 +679574,7 @@ var init_bridge_kick = __esm(() => {
679559
679574
  var call56 = async () => {
679560
679575
  return {
679561
679576
  type: "text",
679562
- value: `${"2.1.88-rebuild"} (built ${"2026-04-01T21:06:14.024Z"})`
679577
+ value: `${"2.1.88-rebuild"} (built ${"2026-04-01T21:37:21.080Z"})`
679563
679578
  };
679564
679579
  }, version6, version_default;
679565
679580
  var init_version = __esm(() => {
@@ -777570,4 +777585,4 @@ async function main2() {
777570
777585
  }
777571
777586
  main2();
777572
777587
 
777573
- //# debugId=40696CE4C1EEC09764756E2164756E21
777588
+ //# debugId=E1FE85DFD19006BC64756E2164756E21
package/package.json CHANGED
@@ -1,12 +1,19 @@
1
1
  {
2
2
  "name": "claude-code-openai",
3
- "version": "0.1.27",
3
+ "version": "0.1.29",
4
4
  "description": "Claude Code CLI with OpenAI GPT-5.4 backend support",
5
5
  "type": "module",
6
6
  "bin": {
7
7
  "claude-openai": "./dist/claude-openai.js"
8
8
  },
9
- "keywords": ["claude", "openai", "gpt-5.4", "cli", "ai", "coding-assistant"],
9
+ "keywords": [
10
+ "claude",
11
+ "openai",
12
+ "gpt-5.4",
13
+ "cli",
14
+ "ai",
15
+ "coding-assistant"
16
+ ],
10
17
  "license": "MIT",
11
18
  "repository": {
12
19
  "type": "git",