opencode-aicodewith-auth 0.1.59 → 0.1.62

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +25 -28
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -239,7 +239,7 @@ var AICODEWITH_GEMINI_BASE_URL = "https://api.aicodewith.com/gemini_cli";
239
239
  var GEMINI_USER_AGENT = "GeminiCLI/v25.2.1 (darwin; arm64)";
240
240
  var GEMINI_API_CLIENT = "google-genai-sdk/1.30.0 gl-node/v25.2.1";
241
241
  var GEMINI_PRIVILEGED_USER_ID_ENV = "AICODEWITH_GEMINI_USER_ID";
242
- var USER_AGENT = "codex_cli_rs/0.77.0 (Mac OS 26.2.0; arm64) iTerm.app/3.6.6";
242
+ var USER_AGENT = "codex_cli_rs/0.93.0 (Mac OS 26.2.0; arm64) iTerm.app/3.6.6";
243
243
  var ORIGINATOR = "codex_cli_rs";
244
244
  var SAVE_RAW_RESPONSE_ENV = "SAVE_RAW_RESPONSE";
245
245
  var MODEL_MIGRATIONS = buildModelMigrations();
@@ -624,11 +624,13 @@ var collectCallIds = (input) => {
624
624
  };
625
625
  var normalizeOrphanedToolOutputs = (input) => {
626
626
  const { functionCallIds, localShellCallIds, customToolCallIds } = collectCallIds(input);
627
- return input.map((item) => {
627
+ let orphanCount = 0;
628
+ const result = input.map((item) => {
628
629
  if (item.type === "function_call_output") {
629
630
  const callId = getCallId(item);
630
631
  const hasMatch = !!callId && (functionCallIds.has(callId) || localShellCallIds.has(callId));
631
632
  if (!hasMatch) {
633
+ orphanCount++;
632
634
  return convertOrphanedOutputToMessage(item, callId);
633
635
  }
634
636
  }
@@ -636,6 +638,7 @@ var normalizeOrphanedToolOutputs = (input) => {
636
638
  const callId = getCallId(item);
637
639
  const hasMatch = !!callId && customToolCallIds.has(callId);
638
640
  if (!hasMatch) {
641
+ orphanCount++;
639
642
  return convertOrphanedOutputToMessage(item, callId);
640
643
  }
641
644
  }
@@ -643,11 +646,20 @@ var normalizeOrphanedToolOutputs = (input) => {
643
646
  const callId = getCallId(item);
644
647
  const hasMatch = !!callId && localShellCallIds.has(callId);
645
648
  if (!hasMatch) {
649
+ orphanCount++;
646
650
  return convertOrphanedOutputToMessage(item, callId);
647
651
  }
648
652
  }
649
653
  return item;
650
654
  });
655
+ if (orphanCount > 0 || functionCallIds.size > 0 || localShellCallIds.size > 0 || customToolCallIds.size > 0) {
656
+ logDebug(`normalizeOrphanedToolOutputs: ${orphanCount} orphans converted`, {
657
+ functionCallIds: functionCallIds.size,
658
+ localShellCallIds: localShellCallIds.size,
659
+ customToolCallIds: customToolCallIds.size
660
+ });
661
+ }
662
+ return result;
651
663
  };
652
664
 
653
665
  // lib/request/request-transformer.ts
@@ -683,7 +695,7 @@ function resolveReasoningConfig(modelName, body) {
683
695
  }
684
696
  function resolveTextVerbosity(body) {
685
697
  const providerOpenAI = body.providerOptions?.openai;
686
- return body.text?.verbosity ?? providerOpenAI?.textVerbosity;
698
+ return body.text?.verbosity ?? providerOpenAI?.textVerbosity ?? "medium";
687
699
  }
688
700
  function resolveInclude(body) {
689
701
  const providerOpenAI = body.providerOptions?.openai;
@@ -758,26 +770,18 @@ async function transformRequestBody(body, codexInstructions) {
758
770
  logDebug(`Model lookup: "${originalModel}" -> "${normalizedModel}"`, {
759
771
  hasTools: !!body.tools
760
772
  });
761
- const hadPreviousResponseId = !!(body.previousResponseId || body.previous_response_id);
762
- const hadItemReferences = Array.isArray(body.input) && body.input.some((item) => item.type === "item_reference");
763
773
  body.model = normalizedModel;
764
774
  body.stream = true;
765
775
  body.store = false;
766
- delete body.previousResponseId;
767
- delete body.previous_response_id;
768
- body.instructions = codexInstructions;
776
+ if (!body.instructions) {
777
+ body.instructions = codexInstructions;
778
+ }
769
779
  if (body.input && Array.isArray(body.input)) {
770
780
  body.input = sanitizeItemIds(body.input);
771
781
  body.input = filterOpenCodeSystemPrompts(body.input);
772
782
  body.input = addCodexBridgeMessage(body.input, !!body.tools);
773
783
  if (body.input) {
774
784
  body.input = normalizeOrphanedToolOutputs(body.input);
775
- if (hadPreviousResponseId || hadItemReferences) {
776
- const hasAssistantOrToolHistory = body.input.some((item) => item.role === "assistant" || item.type === "function_call" || item.type === "function_call_output" || item.type === "local_shell_call" || item.type === "local_shell_call_output" || item.type === "custom_tool_call" || item.type === "custom_tool_call_output");
777
- if (!hasAssistantOrToolHistory) {
778
- logDebug("WARNING: Request had previous_response_id/item_reference but input lacks assistant/tool history. " + "Context may be lost in store:false mode. Upstream should send full conversation history in input.", { hadPreviousResponseId, hadItemReferences, inputLength: body.input.length });
779
- }
780
- }
781
785
  }
782
786
  }
783
787
  const reasoningConfig = resolveReasoningConfig(normalizedModel, body);
@@ -786,12 +790,10 @@ async function transformRequestBody(body, codexInstructions) {
786
790
  ...reasoningConfig
787
791
  };
788
792
  const verbosity = resolveTextVerbosity(body);
789
- if (verbosity) {
790
- body.text = {
791
- ...body.text,
792
- verbosity
793
- };
794
- }
793
+ body.text = {
794
+ ...body.text,
795
+ verbosity
796
+ };
795
797
  body.include = resolveInclude(body);
796
798
  body.max_output_tokens = undefined;
797
799
  body.max_completion_tokens = undefined;
@@ -873,15 +875,10 @@ function extractRequestUrl(input) {
873
875
  function sanitizeRequestBody(bodyStr) {
874
876
  try {
875
877
  const body = JSON.parse(bodyStr);
876
- delete body.previousResponseId;
877
- delete body.previous_response_id;
878
+ body.store = false;
878
879
  if (Array.isArray(body.input)) {
879
- body.input = body.input.filter((item) => item.type !== "item_reference").map((item) => {
880
- if (item.type === "function_call" || item.type === "local_shell_call" || item.type === "custom_tool_call")
881
- return item;
882
- const { id, ...rest } = item;
883
- return rest;
884
- });
880
+ body.input = sanitizeItemIds(body.input);
881
+ body.input = normalizeOrphanedToolOutputs(body.input);
885
882
  }
886
883
  return JSON.stringify(body);
887
884
  } catch {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "opencode-aicodewith-auth",
3
- "version": "0.1.59",
3
+ "version": "0.1.62",
4
4
  "description": "OpenCode plugin for AICodewith authentication - Access GPT-5.3 Codex, GPT-5.2, Claude, and Gemini models through AICodewith API",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",