claude-code-openai 0.1.18 → 0.1.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2):
  1. package/dist/cli.js +32 -22
  2. package/package.json +1 -1
package/dist/cli.js CHANGED
@@ -204660,7 +204660,7 @@ var init_metadata = __esm(() => {
204660
204660
  isClaudeAiAuth: isClaudeAISubscriber(),
204661
204661
  version: "2.1.88-rebuild",
204662
204662
  versionBase: getVersionBase(),
204663
- buildTime: "2026-04-01T20:05:39.830Z",
204663
+ buildTime: "2026-04-01T20:18:48.855Z",
204664
204664
  deploymentEnvironment: env4.detectDeploymentEnvironment(),
204665
204665
  ...isEnvTruthy(process.env.GITHUB_ACTIONS) && {
204666
204666
  githubEventName: process.env.GITHUB_EVENT_NAME,
@@ -592946,7 +592946,7 @@ function getAnthropicEnvMetadata() {
592946
592946
  function getBuildAgeMinutes() {
592947
592947
  if (false)
592948
592948
  ;
592949
- const buildTime = new Date("2026-04-01T20:05:39.830Z").getTime();
592949
+ const buildTime = new Date("2026-04-01T20:18:48.855Z").getTime();
592950
592950
  if (isNaN(buildTime))
592951
592951
  return;
592952
592952
  return Math.floor((Date.now() - buildTime) / 60000);
@@ -595009,11 +595009,9 @@ function convertAssistantMessage(msg, items) {
595009
595009
  textParts.length = 0;
595010
595010
  }
595011
595011
  const tu = block;
595012
- const callId = tu.id;
595013
595012
  items.push({
595014
595013
  type: "function_call",
595015
- id: callId,
595016
- call_id: callId,
595014
+ call_id: tu.id,
595017
595015
  name: tu.name,
595018
595016
  arguments: typeof tu.input === "string" ? tu.input : JSON.stringify(tu.input)
595019
595017
  });
@@ -595255,26 +595253,38 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595255
595253
  } else {
595256
595254
  input = convertMessages(messages);
595257
595255
  }
595258
- const maxOutputTokens = options.maxOutputTokensOverride || MAX_OUTPUT_TOKENS[openaiModel] || 16384;
595259
595256
  const hasFunctionTools = oaiTools.some((t2) => t2.type === "function");
595260
- const params = {
595261
- model: openaiModel,
595262
- instructions: instructions || undefined,
595263
- input,
595264
- tools: oaiTools.length > 0 ? oaiTools : undefined,
595265
- tool_choice: hasFunctionTools ? toolChoice : undefined,
595266
- parallel_tool_calls: hasFunctionTools ? false : undefined,
595267
- stream: true,
595268
- ...client3.isOAuthChatgpt ? {
595269
- store: false
595270
- } : {
595271
- previous_response_id: usePreviousResponseId ? _lastResponseId : undefined,
595257
+ let params;
595258
+ if (client3.isOAuthChatgpt) {
595259
+ params = {
595260
+ model: openaiModel,
595261
+ instructions: instructions || undefined,
595262
+ input,
595263
+ tools: oaiTools.length > 0 ? oaiTools : undefined,
595264
+ tool_choice: hasFunctionTools ? toolChoice : undefined,
595265
+ parallel_tool_calls: hasFunctionTools ? false : undefined,
595266
+ reasoning,
595267
+ store: false,
595268
+ stream: true,
595269
+ include: reasoning ? ["reasoning.encrypted_content"] : []
595270
+ };
595271
+ } else {
595272
+ const maxOutputTokens = options.maxOutputTokensOverride || MAX_OUTPUT_TOKENS[openaiModel] || 16384;
595273
+ params = {
595274
+ model: openaiModel,
595275
+ instructions: instructions || undefined,
595276
+ input,
595277
+ tools: oaiTools.length > 0 ? oaiTools : undefined,
595278
+ tool_choice: hasFunctionTools ? toolChoice : undefined,
595279
+ parallel_tool_calls: hasFunctionTools ? false : undefined,
595280
+ stream: true,
595272
595281
  max_output_tokens: maxOutputTokens,
595273
595282
  temperature: options.temperatureOverride ?? 1,
595274
595283
  reasoning,
595284
+ previous_response_id: usePreviousResponseId ? _lastResponseId : undefined,
595275
595285
  store: true
595276
- }
595277
- };
595286
+ };
595287
+ }
595278
595288
  logForDebugging(`[OpenAI] Request: model=${openaiModel} input=${input.length} items (${usePreviousResponseId ? "incremental, chain=" + _lastResponseId : "full"}) tools=${oaiTools.length}`);
595279
595289
  const start = Date.now();
595280
595290
  const MAX_RETRIES4 = 3;
@@ -679525,7 +679535,7 @@ var init_bridge_kick = __esm(() => {
679525
679535
  var call56 = async () => {
679526
679536
  return {
679527
679537
  type: "text",
679528
- value: `${"2.1.88-rebuild"} (built ${"2026-04-01T20:05:39.830Z"})`
679538
+ value: `${"2.1.88-rebuild"} (built ${"2026-04-01T20:18:48.855Z"})`
679529
679539
  };
679530
679540
  }, version6, version_default;
679531
679541
  var init_version = __esm(() => {
@@ -777536,4 +777546,4 @@ async function main2() {
777536
777546
  }
777537
777547
  main2();
777538
777548
 
777539
- //# debugId=3878C1B925538A1564756E2164756E21
777549
+ //# debugId=13B36095FE4CE89A64756E2164756E21
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "claude-code-openai",
3
- "version": "0.1.18",
3
+ "version": "0.1.20",
4
4
  "description": "Claude Code CLI with OpenAI GPT-5.4 backend support",
5
5
  "type": "module",
6
6
  "bin": {