claude-code-openai 0.1.9 → 0.1.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2):
  1. package/dist/cli.js +126 -23
  2. package/package.json +1 -1
package/dist/cli.js CHANGED
@@ -186389,6 +186389,9 @@ function getCustomHeaders() {
186389
186389
  }
186390
186390
  return customHeaders;
186391
186391
  }
186392
+ function clearOpenAIClientCache() {
186393
+ _cachedOpenAIClient = null;
186394
+ }
186392
186395
  async function getOpenAIClient({
186393
186396
  apiKey
186394
186397
  }) {
@@ -204603,7 +204606,7 @@ var init_metadata = __esm(() => {
204603
204606
  isClaudeAiAuth: isClaudeAISubscriber(),
204604
204607
  version: "2.1.88-rebuild",
204605
204608
  versionBase: getVersionBase(),
204606
- buildTime: "2026-04-01T09:46:44.490Z",
204609
+ buildTime: "2026-04-01T10:13:56.609Z",
204607
204610
  deploymentEnvironment: env4.detectDeploymentEnvironment(),
204608
204611
  ...isEnvTruthy(process.env.GITHUB_ACTIONS) && {
204609
204612
  githubEventName: process.env.GITHUB_EVENT_NAME,
@@ -592889,7 +592892,7 @@ function getAnthropicEnvMetadata() {
592889
592892
  function getBuildAgeMinutes() {
592890
592893
  if (false)
592891
592894
  ;
592892
- const buildTime = new Date("2026-04-01T09:46:44.490Z").getTime();
592895
+ const buildTime = new Date("2026-04-01T10:13:56.609Z").getTime();
592893
592896
  if (isNaN(buildTime))
592894
592897
  return;
592895
592898
  return Math.floor((Date.now() - buildTime) / 60000);
@@ -594982,30 +594985,54 @@ function convertAssistantMessage(msg, items) {
594982
594985
  }
594983
594986
  function enforceStrictSchema(schema) {
594984
594987
  const out = { ...schema };
594988
+ let strictCompatible = true;
594989
+ for (const kw of UNSUPPORTED_SCHEMA_KEYWORDS) {
594990
+ delete out[kw];
594991
+ }
594985
594992
  for (const keyword of ["anyOf", "oneOf", "allOf"]) {
594986
594993
  if (Array.isArray(out[keyword])) {
594987
- out[keyword] = out[keyword].map((s2) => enforceStrictSchema(s2));
594994
+ out[keyword] = out[keyword].map((s2) => {
594995
+ const r2 = enforceStrictSchema(s2);
594996
+ if (!r2.strictCompatible)
594997
+ strictCompatible = false;
594998
+ return r2.schema;
594999
+ });
594988
595000
  }
594989
595001
  }
594990
595002
  if (out.type === "object") {
594991
- out.additionalProperties = false;
594992
- if (out.properties && typeof out.properties === "object") {
594993
- const props = out.properties;
594994
- const newProps = {};
594995
- for (const [key2, val] of Object.entries(props)) {
594996
- newProps[key2] = enforceStrictSchema(val);
594997
- }
594998
- out.properties = newProps;
594999
- const allKeys = Object.keys(newProps);
595000
- if (allKeys.length > 0) {
595001
- out.required = allKeys;
595003
+ const hasAdditionalPropsSchema = out.additionalProperties != null && typeof out.additionalProperties === "object";
595004
+ const hasFixedProperties = out.properties != null && typeof out.properties === "object" && Object.keys(out.properties).length > 0;
595005
+ if (hasAdditionalPropsSchema && !hasFixedProperties) {
595006
+ strictCompatible = false;
595007
+ const apSchema = out.additionalProperties;
595008
+ const r2 = enforceStrictSchema(apSchema);
595009
+ out.additionalProperties = r2.schema;
595010
+ } else {
595011
+ out.additionalProperties = false;
595012
+ if (out.properties && typeof out.properties === "object") {
595013
+ const props = out.properties;
595014
+ const newProps = {};
595015
+ for (const [key2, val] of Object.entries(props)) {
595016
+ const r2 = enforceStrictSchema(val);
595017
+ if (!r2.strictCompatible)
595018
+ strictCompatible = false;
595019
+ newProps[key2] = r2.schema;
595020
+ }
595021
+ out.properties = newProps;
595022
+ const allKeys = Object.keys(newProps);
595023
+ if (allKeys.length > 0) {
595024
+ out.required = allKeys;
595025
+ }
595002
595026
  }
595003
595027
  }
595004
595028
  }
595005
595029
  if (out.type === "array" && out.items && typeof out.items === "object") {
595006
- out.items = enforceStrictSchema(out.items);
595030
+ const r2 = enforceStrictSchema(out.items);
595031
+ if (!r2.strictCompatible)
595032
+ strictCompatible = false;
595033
+ out.items = r2.schema;
595007
595034
  }
595008
- return out;
595035
+ return { schema: out, strictCompatible };
595009
595036
  }
595010
595037
  function convertToolSchemas(tools) {
595011
595038
  const oaiTools = [];
@@ -595016,13 +595043,13 @@ function convertToolSchemas(tools) {
595016
595043
  }
595017
595044
  if (t2.type === "custom" || !("type" in t2) || t2.type === undefined) {
595018
595045
  const tool = t2;
595019
- const strictParams = enforceStrictSchema(tool.input_schema);
595046
+ const { schema: cleanedParams, strictCompatible } = enforceStrictSchema(tool.input_schema);
595020
595047
  oaiTools.push({
595021
595048
  type: "function",
595022
595049
  name: tool.name,
595023
595050
  description: tool.description ?? "",
595024
- parameters: strictParams,
595025
- strict: true
595051
+ parameters: cleanedParams,
595052
+ strict: strictCompatible
595026
595053
  });
595027
595054
  }
595028
595055
  }
@@ -595087,6 +595114,35 @@ function convertThinkingConfig(thinkingConfig) {
595087
595114
  return;
595088
595115
  }
595089
595116
  }
595117
+ var UNSUPPORTED_SCHEMA_KEYWORDS;
595118
+ var init_openai_adapter = __esm(() => {
595119
+ UNSUPPORTED_SCHEMA_KEYWORDS = [
595120
+ "propertyNames",
595121
+ "patternProperties",
595122
+ "if",
595123
+ "then",
595124
+ "else",
595125
+ "dependencies",
595126
+ "dependentRequired",
595127
+ "dependentSchemas",
595128
+ "minProperties",
595129
+ "maxProperties",
595130
+ "contentEncoding",
595131
+ "contentMediaType",
595132
+ "$anchor",
595133
+ "$dynamicAnchor",
595134
+ "$dynamicRef",
595135
+ "unevaluatedProperties",
595136
+ "unevaluatedItems",
595137
+ "prefixItems",
595138
+ "$comment",
595139
+ "examples",
595140
+ "deprecated",
595141
+ "readOnly",
595142
+ "writeOnly",
595143
+ "minLength"
595144
+ ];
595145
+ });
595090
595146
 
595091
595147
  // src/services/api/openai-query.ts
595092
595148
  var exports_openai_query = {};
@@ -595165,6 +595221,15 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595165
595221
  const MAX_RETRIES4 = 3;
595166
595222
  const BASE_DELAY_MS4 = 500;
595167
595223
  try {
595224
+ let resetWatchdog = function() {
595225
+ if (watchdogTimer)
595226
+ clearTimeout(watchdogTimer);
595227
+ watchdogTimer = setTimeout(() => {
595228
+ logForDebugging(`[OpenAI] Stream watchdog triggered — no data for ${STREAM_WATCHDOG_MS / 1000}s, aborting`);
595229
+ watchdogController.abort();
595230
+ reader.cancel().catch(() => {});
595231
+ }, STREAM_WATCHDOG_MS);
595232
+ };
595168
595233
  let response;
595169
595234
  let lastErrorMessage = "";
595170
595235
  let lastStatus = 0;
@@ -595222,8 +595287,30 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595222
595287
  error: "rate_limit"
595223
595288
  });
595224
595289
  } else if (status === 401 || status === 403) {
595290
+ clearOpenAIClientCache();
595291
+ if (!_auth401RetryInProgress) {
595292
+ try {
595293
+ const { loadOpenAITokens: loadOpenAITokens2, refreshOpenAIToken: refreshOpenAIToken2 } = await Promise.resolve().then(() => (init_openai_oauth(), exports_openai_oauth));
595294
+ const tokens = loadOpenAITokens2();
595295
+ if (tokens?.refresh_token) {
595296
+ logForDebugging("[OpenAI] 401 received — attempting OAuth token refresh...");
595297
+ _auth401RetryInProgress = true;
595298
+ try {
595299
+ await refreshOpenAIToken2(tokens.refresh_token);
595300
+ logForDebugging("[OpenAI] Token refreshed, retrying request...");
595301
+ yield* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools, signal, options);
595302
+ return;
595303
+ } finally {
595304
+ _auth401RetryInProgress = false;
595305
+ }
595306
+ }
595307
+ } catch (refreshErr) {
595308
+ _auth401RetryInProgress = false;
595309
+ logForDebugging(`[OpenAI] OAuth token refresh failed: ${refreshErr instanceof Error ? refreshErr.message : String(refreshErr)}`);
595310
+ }
595311
+ }
595225
595312
  yield createAssistantAPIErrorMessage({
595226
- content: `Authentication error: ${lastErrorMessage}. Check your OPENAI_API_KEY.`,
595313
+ content: `Authentication error: ${lastErrorMessage}. Run /login to re-authenticate or check your OPENAI_API_KEY.`,
595227
595314
  error: "authentication_failed"
595228
595315
  });
595229
595316
  } else if (status === 400 && lastErrorMessage.includes("context_length_exceeded")) {
@@ -595281,11 +595368,16 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595281
595368
  const reader = response.body.getReader();
595282
595369
  const decoder = new TextDecoder;
595283
595370
  let buffer = "";
595371
+ const STREAM_WATCHDOG_MS = 90000;
595372
+ let watchdogTimer = null;
595373
+ const watchdogController = new AbortController;
595374
+ resetWatchdog();
595284
595375
  try {
595285
595376
  while (true) {
595286
595377
  const { done, value } = await reader.read();
595287
595378
  if (done)
595288
595379
  break;
595380
+ resetWatchdog();
595289
595381
  buffer += decoder.decode(value, { stream: true });
595290
595382
  const lines2 = buffer.split(`
595291
595383
  `);
@@ -595525,8 +595617,17 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595525
595617
  }
595526
595618
  }
595527
595619
  } finally {
595620
+ if (watchdogTimer)
595621
+ clearTimeout(watchdogTimer);
595528
595622
  reader.releaseLock();
595529
595623
  }
595624
+ if (watchdogController.signal.aborted) {
595625
+ yield createAssistantAPIErrorMessage({
595626
+ content: `Stream timed out — no data received for ${STREAM_WATCHDOG_MS / 1000} seconds. The server may be overloaded. Please try again.`,
595627
+ error: "unknown"
595628
+ });
595629
+ return;
595630
+ }
595530
595631
  if (pendingAnnotations.size > 0) {
595531
595632
  for (const [outputIdx, annotations] of pendingAnnotations) {
595532
595633
  const blockIdx = findBlockIndex(contentBlocks, outputIdx, "text", textState, functionCallState, reasoningState);
@@ -595636,11 +595737,12 @@ async function queryModelOpenAINonStreaming(messages, systemPrompt, thinkingConf
595636
595737
  }
595637
595738
  return result;
595638
595739
  }
595639
- var OPENAI_MODEL_MAP, MAX_OUTPUT_TOKENS, _lastResponseId = null;
595740
+ var OPENAI_MODEL_MAP, MAX_OUTPUT_TOKENS, _lastResponseId = null, _auth401RetryInProgress = false;
595640
595741
  var init_openai_query = __esm(() => {
595641
595742
  init_messages7();
595642
595743
  init_debug();
595643
595744
  init_client5();
595745
+ init_openai_adapter();
595644
595746
  OPENAI_MODEL_MAP = {
595645
595747
  "claude-opus-4-6-20260401": "gpt-5.4",
595646
595748
  "claude-opus-4-5-20250918": "gpt-5.4",
@@ -679360,7 +679462,7 @@ var init_bridge_kick = __esm(() => {
679360
679462
  var call56 = async () => {
679361
679463
  return {
679362
679464
  type: "text",
679363
- value: `${"2.1.88-rebuild"} (built ${"2026-04-01T09:46:44.490Z"})`
679465
+ value: `${"2.1.88-rebuild"} (built ${"2026-04-01T10:13:56.609Z"})`
679364
679466
  };
679365
679467
  }, version6, version_default;
679366
679468
  var init_version = __esm(() => {
@@ -700973,6 +701075,7 @@ var init_sideQuery = __esm(() => {
700973
701075
  init_fingerprint();
700974
701076
  init_model();
700975
701077
  init_providers();
701078
+ init_openai_adapter();
700976
701079
  init_openai_query();
700977
701080
  });
700978
701081
 
@@ -777365,4 +777468,4 @@ async function main2() {
777365
777468
  }
777366
777469
  main2();
777367
777470
 
777368
- //# debugId=DACEE88BBB4178C864756E2164756E21
777471
+ //# debugId=6C6F0DB434BD161F64756E2164756E21
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "claude-code-openai",
3
- "version": "0.1.9",
3
+ "version": "0.1.11",
4
4
  "description": "Claude Code CLI with OpenAI GPT-5.4 backend support",
5
5
  "type": "module",
6
6
  "bin": {