claude-code-openai 0.1.2 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/cli.js +90 -21
  2. package/package.json +1 -1
package/dist/cli.js CHANGED
@@ -204204,7 +204204,7 @@ var init_metadata = __esm(() => {
204204
204204
  isClaudeAiAuth: isClaudeAISubscriber(),
204205
204205
  version: "2.1.88-rebuild",
204206
204206
  versionBase: getVersionBase(),
204207
- buildTime: "2026-04-01T08:06:57.637Z",
204207
+ buildTime: "2026-04-01T08:22:00.019Z",
204208
204208
  deploymentEnvironment: env4.detectDeploymentEnvironment(),
204209
204209
  ...isEnvTruthy(process.env.GITHUB_ACTIONS) && {
204210
204210
  githubEventName: process.env.GITHUB_EVENT_NAME,
@@ -592566,7 +592566,7 @@ function getAnthropicEnvMetadata() {
592566
592566
  function getBuildAgeMinutes() {
592567
592567
  if (false)
592568
592568
  ;
592569
- const buildTime = new Date("2026-04-01T08:06:57.637Z").getTime();
592569
+ const buildTime = new Date("2026-04-01T08:22:00.019Z").getTime();
592570
592570
  if (isNaN(buildTime))
592571
592571
  return;
592572
592572
  return Math.floor((Date.now() - buildTime) / 60000);
@@ -594658,16 +594658,24 @@ function convertAssistantMessage(msg, items) {
594658
594658
  }
594659
594659
  }
594660
594660
  function convertToolSchemas(tools) {
594661
- return tools.filter((t2) => t2.type === "custom" || !("type" in t2) || t2.type === undefined).map((t2) => {
594662
- const tool = t2;
594663
- return {
594664
- type: "function",
594665
- name: tool.name,
594666
- description: tool.description ?? "",
594667
- parameters: tool.input_schema,
594668
- strict: tool.strict === true
594669
- };
594670
- });
594661
+ const oaiTools = [];
594662
+ for (const t2 of tools) {
594663
+ if ("type" in t2 && typeof t2.type === "string" && t2.type.startsWith("web_search")) {
594664
+ oaiTools.push({ type: "web_search_preview", search_context_size: "medium" });
594665
+ continue;
594666
+ }
594667
+ if (t2.type === "custom" || !("type" in t2) || t2.type === undefined) {
594668
+ const tool = t2;
594669
+ oaiTools.push({
594670
+ type: "function",
594671
+ name: tool.name,
594672
+ description: tool.description ?? "",
594673
+ parameters: tool.input_schema,
594674
+ strict: tool.strict === true
594675
+ });
594676
+ }
594677
+ }
594678
+ return oaiTools;
594671
594679
  }
594672
594680
  function convertToolChoice(toolChoice) {
594673
594681
  if (!toolChoice)
@@ -594777,19 +594785,21 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
594777
594785
  } else {
594778
594786
  input = convertMessages(messages);
594779
594787
  }
594780
- const maxOutputTokens = options.maxOutputTokensOverride || 16384;
594788
+ const maxOutputTokens = options.maxOutputTokensOverride || MAX_OUTPUT_TOKENS[openaiModel] || 16384;
594789
+ const hasFunctionTools = oaiTools.some((t2) => t2.type === "function");
594781
594790
  const params = {
594782
594791
  model: openaiModel,
594783
594792
  instructions: instructions || undefined,
594784
594793
  input,
594785
594794
  tools: oaiTools.length > 0 ? oaiTools : undefined,
594786
- tool_choice: oaiTools.length > 0 ? toolChoice : undefined,
594787
- parallel_tool_calls: false,
594795
+ tool_choice: hasFunctionTools ? toolChoice : undefined,
594796
+ parallel_tool_calls: hasFunctionTools ? false : undefined,
594788
594797
  stream: true,
594789
594798
  max_output_tokens: maxOutputTokens,
594790
594799
  temperature: options.temperatureOverride ?? 1,
594791
594800
  reasoning,
594792
- previous_response_id: usePreviousResponseId ? _lastResponseId : undefined
594801
+ previous_response_id: usePreviousResponseId ? _lastResponseId : undefined,
594802
+ store: true
594793
594803
  };
594794
594804
  logForDebugging(`[OpenAI] Request: model=${openaiModel} input=${input.length} items (${usePreviousResponseId ? "incremental, chain=" + _lastResponseId : "full"}) tools=${oaiTools.length}`);
594795
594805
  const start = Date.now();
@@ -594896,6 +594906,8 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
594896
594906
  const functionCallState = new Map;
594897
594907
  const textState = new Map;
594898
594908
  const reasoningState = new Map;
594909
+ const webSearchState = new Map;
594910
+ const pendingAnnotations = new Map;
594899
594911
  const reader = response.body.getReader();
594900
594912
  const decoder = new TextDecoder;
594901
594913
  let buffer = "";
@@ -595008,6 +595020,9 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595008
595020
  content_block: { type: "thinking", thinking: "", signature: "openai-reasoning" }
595009
595021
  }
595010
595022
  };
595023
+ } else if (item.type === "web_search_call") {
595024
+ webSearchState.set(event.output_index, item.id);
595025
+ logForDebugging(`[OpenAI] web_search_call started: ${item.id}`);
595011
595026
  }
595012
595027
  break;
595013
595028
  }
@@ -595117,10 +595132,16 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595117
595132
  responseStatus = "incomplete";
595118
595133
  break;
595119
595134
  }
595135
+ case "response.output_text.done": {
595136
+ if (event.annotations && event.annotations.length > 0) {
595137
+ pendingAnnotations.set(event.output_index, event.annotations);
595138
+ logForDebugging(`[OpenAI] Got ${event.annotations.length} annotations for output_index ${event.output_index}`);
595139
+ }
595140
+ break;
595141
+ }
595120
595142
  case "response.in_progress":
595121
595143
  case "response.content_part.added":
595122
595144
  case "response.content_part.done":
595123
- case "response.output_text.done":
595124
595145
  break;
595125
595146
  case "error": {
595126
595147
  logForDebugging(`[OpenAI] Stream error: ${event.error.message}`);
@@ -595136,6 +595157,36 @@ async function* queryModelOpenAI(messages, systemPrompt, thinkingConfig, tools,
595136
595157
  } finally {
595137
595158
  reader.releaseLock();
595138
595159
  }
595160
+ if (pendingAnnotations.size > 0) {
595161
+ for (const [outputIdx, annotations] of pendingAnnotations) {
595162
+ const blockIdx = findBlockIndex(contentBlocks, outputIdx, "text", textState, functionCallState, reasoningState);
595163
+ if (blockIdx >= 0) {
595164
+ const block = contentBlocks[blockIdx];
595165
+ const uniqueUrls = new Map;
595166
+ for (const ann of annotations) {
595167
+ if (ann.type === "url_citation" && ann.url && !uniqueUrls.has(ann.url)) {
595168
+ uniqueUrls.set(ann.url, ann.title || ann.url);
595169
+ }
595170
+ }
595171
+ if (uniqueUrls.size > 0) {
595172
+ const sourcesText = `
595173
+
595174
+ Sources:
595175
+ ` + [...uniqueUrls].map(([url3, title]) => `- [${title}](${url3})`).join(`
595176
+ `);
595177
+ block.text += sourcesText;
595178
+ yield {
595179
+ type: "stream_event",
595180
+ event: {
595181
+ type: "content_block_delta",
595182
+ index: blockIdx,
595183
+ delta: { type: "text_delta", text: sourcesText }
595184
+ }
595185
+ };
595186
+ }
595187
+ }
595188
+ }
595189
+ }
595139
595190
  const stopReason = convertStopReason(responseStatus);
595140
595191
  yield {
595141
595192
  type: "stream_event",
@@ -595214,18 +595265,36 @@ async function queryModelOpenAINonStreaming(messages, systemPrompt, thinkingConf
595214
595265
  }
595215
595266
  return result;
595216
595267
  }
595217
- var OPENAI_MODEL_MAP, _lastResponseId = null;
595268
+ var OPENAI_MODEL_MAP, MAX_OUTPUT_TOKENS, _lastResponseId = null;
595218
595269
  var init_openai_query = __esm(() => {
595219
595270
  init_messages7();
595220
595271
  init_debug();
595221
595272
  init_client5();
595222
595273
  OPENAI_MODEL_MAP = {
595223
595274
  "claude-opus-4-6-20260401": "gpt-5.4",
595275
+ "claude-opus-4-5-20250918": "gpt-5.4",
595276
+ "claude-opus-4-1-20250415": "gpt-5.4",
595277
+ "claude-opus-4-20250115": "gpt-5.4",
595278
+ "claude-sonnet-4-6-20260401": "gpt-5.4-mini",
595224
595279
  "claude-sonnet-4-5-20250929": "gpt-5.4-mini",
595280
+ "claude-sonnet-4-20250514": "gpt-5.4-mini",
595281
+ "claude-3-7-sonnet-20250219": "gpt-5.4-mini",
595282
+ "claude-3-5-sonnet-20241022": "gpt-5.4-mini",
595225
595283
  "claude-haiku-4-5-20251001": "gpt-4.1-mini",
595284
+ "claude-3-5-haiku-20241022": "gpt-4.1-mini",
595226
595285
  opus: "gpt-5.4",
595227
595286
  sonnet: "gpt-5.4-mini",
595228
- haiku: "gpt-4.1-mini"
595287
+ haiku: "gpt-4.1-mini",
595288
+ "gpt-5.4": "gpt-5.4",
595289
+ "gpt-5.4-mini": "gpt-5.4-mini",
595290
+ "gpt-4.1": "gpt-4.1",
595291
+ "gpt-4.1-mini": "gpt-4.1-mini"
595292
+ };
595293
+ MAX_OUTPUT_TOKENS = {
595294
+ "gpt-5.4": 32768,
595295
+ "gpt-5.4-mini": 16384,
595296
+ "gpt-4.1": 16384,
595297
+ "gpt-4.1-mini": 16384
595229
595298
  };
595230
595299
  });
595231
595300
 
@@ -678920,7 +678989,7 @@ var init_bridge_kick = __esm(() => {
678920
678989
  var call56 = async () => {
678921
678990
  return {
678922
678991
  type: "text",
678923
- value: `${"2.1.88-rebuild"} (built ${"2026-04-01T08:06:57.637Z"})`
678992
+ value: `${"2.1.88-rebuild"} (built ${"2026-04-01T08:22:00.019Z"})`
678924
678993
  };
678925
678994
  }, version6, version_default;
678926
678995
  var init_version = __esm(() => {
@@ -776876,4 +776945,4 @@ async function main2() {
776876
776945
  }
776877
776946
  main2();
776878
776947
 
776879
- //# debugId=0CF638629D23D41464756E2164756E21
776948
+ //# debugId=EA0F0C0F950E52A464756E2164756E21
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "claude-code-openai",
3
- "version": "0.1.2",
3
+ "version": "0.1.3",
4
4
  "description": "Claude Code CLI with OpenAI GPT-5.4 backend support",
5
5
  "type": "module",
6
6
  "bin": {