claude-code-openai 0.1.17 → 0.1.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/cli.js +32 -20
  2. package/package.json +1 -1
package/dist/cli.js CHANGED
@@ -204660,7 +204660,7 @@ var init_metadata = __esm(() => {
204660
204660
  isClaudeAiAuth: isClaudeAISubscriber(),
204661
204661
  version: "2.1.88-rebuild",
204662
204662
  versionBase: getVersionBase(),
204663
- buildTime: "2026-04-01T20:02:48.011Z",
204663
+ buildTime: "2026-04-01T20:12:34.819Z",
204664
204664
  deploymentEnvironment: env4.detectDeploymentEnvironment(),
204665
204665
  ...isEnvTruthy(process.env.GITHUB_ACTIONS) && {
204666
204666
  githubEventName: process.env.GITHUB_EVENT_NAME,
@@ -592946,7 +592946,7 @@ function getAnthropicEnvMetadata() {
592946
592946
  function getBuildAgeMinutes() {
592947
592947
  if (false)
592948
592948
  ;
592949
- const buildTime = new Date("2026-04-01T20:02:48.011Z").getTime();
592949
+ const buildTime = new Date("2026-04-01T20:12:34.819Z").getTime();
592950
592950
  if (isNaN(buildTime))
592951
592951
  return;
592952
592952
  return Math.floor((Date.now() - buildTime) / 60000);
@@ -595241,7 +595241,7 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595241
595241
  const supportsReasoning = /^(gpt-5|o[1-9]|o3)/.test(openaiModel);
595242
595242
  const reasoning = supportsReasoning ? convertThinkingConfig(thinkingConfig) : undefined;
595243
595243
  let input;
595244
- const usePreviousResponseId = _lastResponseId != null;
595244
+ const usePreviousResponseId = !client3.isOAuthChatgpt && _lastResponseId != null;
595245
595245
  if (usePreviousResponseId) {
595246
595246
  let lastAssistantIdx = -1;
595247
595247
  for (let i4 = messages.length - 1;i4 >= 0; i4--) {
@@ -595255,26 +595255,38 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595255
595255
  } else {
595256
595256
  input = convertMessages(messages);
595257
595257
  }
595258
- const maxOutputTokens = options.maxOutputTokensOverride || MAX_OUTPUT_TOKENS[openaiModel] || 16384;
595259
595258
  const hasFunctionTools = oaiTools.some((t2) => t2.type === "function");
595260
- const params = {
595261
- model: openaiModel,
595262
- instructions: instructions || undefined,
595263
- input,
595264
- tools: oaiTools.length > 0 ? oaiTools : undefined,
595265
- tool_choice: hasFunctionTools ? toolChoice : undefined,
595266
- parallel_tool_calls: hasFunctionTools ? false : undefined,
595267
- stream: true,
595268
- previous_response_id: usePreviousResponseId ? _lastResponseId : undefined,
595269
- ...client3.isOAuthChatgpt ? {
595270
- store: false
595271
- } : {
595259
+ let params;
595260
+ if (client3.isOAuthChatgpt) {
595261
+ params = {
595262
+ model: openaiModel,
595263
+ instructions: instructions || undefined,
595264
+ input,
595265
+ tools: oaiTools.length > 0 ? oaiTools : undefined,
595266
+ tool_choice: hasFunctionTools ? toolChoice : undefined,
595267
+ parallel_tool_calls: hasFunctionTools ? false : undefined,
595268
+ reasoning,
595269
+ store: false,
595270
+ stream: true,
595271
+ include: reasoning ? ["reasoning.encrypted_content"] : []
595272
+ };
595273
+ } else {
595274
+ const maxOutputTokens = options.maxOutputTokensOverride || MAX_OUTPUT_TOKENS[openaiModel] || 16384;
595275
+ params = {
595276
+ model: openaiModel,
595277
+ instructions: instructions || undefined,
595278
+ input,
595279
+ tools: oaiTools.length > 0 ? oaiTools : undefined,
595280
+ tool_choice: hasFunctionTools ? toolChoice : undefined,
595281
+ parallel_tool_calls: hasFunctionTools ? false : undefined,
595282
+ stream: true,
595272
595283
  max_output_tokens: maxOutputTokens,
595273
595284
  temperature: options.temperatureOverride ?? 1,
595274
595285
  reasoning,
595286
+ previous_response_id: usePreviousResponseId ? _lastResponseId : undefined,
595275
595287
  store: true
595276
- }
595277
- };
595288
+ };
595289
+ }
595278
595290
  logForDebugging(`[OpenAI] Request: model=${openaiModel} input=${input.length} items (${usePreviousResponseId ? "incremental, chain=" + _lastResponseId : "full"}) tools=${oaiTools.length}`);
595279
595291
  const start = Date.now();
595280
595292
  const MAX_RETRIES4 = 3;
@@ -679525,7 +679537,7 @@ var init_bridge_kick = __esm(() => {
679525
679537
  var call56 = async () => {
679526
679538
  return {
679527
679539
  type: "text",
679528
- value: `${"2.1.88-rebuild"} (built ${"2026-04-01T20:02:48.011Z"})`
679540
+ value: `${"2.1.88-rebuild"} (built ${"2026-04-01T20:12:34.819Z"})`
679529
679541
  };
679530
679542
  }, version6, version_default;
679531
679543
  var init_version = __esm(() => {
@@ -777536,4 +777548,4 @@ async function main2() {
777536
777548
  }
777537
777549
  main2();
777538
777550
 
777539
- //# debugId=F579BE2E065F285E64756E2164756E21
777551
+ //# debugId=8CA90C6D85CA6DF264756E2164756E21
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "claude-code-openai",
3
- "version": "0.1.17",
3
+ "version": "0.1.19",
4
4
  "description": "Claude Code CLI with OpenAI GPT-5.4 backend support",
5
5
  "type": "module",
6
6
  "bin": {