opencode-aicodewith-auth 0.1.35 → 0.1.39

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.ai.md CHANGED
@@ -198,25 +198,27 @@ If the output is NOT `https://registry.npmjs.org/` (e.g., shows taobao, cnpm, or
198
198
 
199
199
  > "安装完成!请重启 OpenCode 以加载插件。
200
200
  > 你现在可以使用以下模型:
201
+ > - `gpt-5.3-codex`
201
202
  > - `gpt-5.2-codex`
202
203
  > - `gpt-5.2`
203
204
 > - `claude-sonnet-4-5-20250929`
204
205
 > - `claude-opus-4-6-20260205`
205
206
  > - `gemini-3-pro`
206
207
  >
207
- > 测试命令: `opencode --model gpt-5.2-codex`"
208
+ > 测试命令: `opencode --model gpt-5.3-codex`"
208
209
 
209
210
  **If using a mirror registry** (taobao, cnpm, etc.), add this warning:
210
211
 
211
212
  > "安装完成!请重启 OpenCode 以加载插件。
212
213
  > 你现在可以使用以下模型:
214
+ > - `gpt-5.3-codex`
213
215
  > - `gpt-5.2-codex`
214
216
  > - `gpt-5.2`
215
217
 > - `claude-sonnet-4-5-20250929`
216
218
 > - `claude-opus-4-6-20260205`
217
219
  > - `gemini-3-pro`
218
220
  >
219
- > 测试命令: `opencode --model gpt-5.2-codex`
221
+ > 测试命令: `opencode --model gpt-5.3-codex`
220
222
  >
221
223
  > ⚠️ **注意**: 检测到你使用的是非官方 npm 源。如果启动 OpenCode 时遇到插件加载失败(如 `BunInstallFailedError`),可能是镜像源尚未同步最新版本。解决方法:
222
224
  > ```bash
package/README.md CHANGED
@@ -61,10 +61,10 @@ User Request → OpenCode → Plugin Auth Hook → Route by Model:
61
61
 
62
62
  | 模型 ID | 显示名称 | 图片输入 | 适合场景 |
63
63
  |---------|---------|:-------:|---------|
64
- | `aicodewith/gpt-5.2-codex` | GPT-5.2 Codex | ✅ | 日常编程、代码生成 |
64
+ | `aicodewith/gpt-5.3-codex` | GPT-5.3 Codex | ✅ | 日常编程、代码生成 |
65
65
  | `aicodewith/gpt-5.2` | GPT-5.2 | ✅ | 架构设计、逻辑推理 |
66
66
  | `aicodewith/claude-sonnet-4-5-20250929` | Claude Sonnet 4.5 | ✅ | 代码审查、文档查询 |
67
- | `aicodewith/claude-opus-4-6-20260205` | Claude Opus 4 | ✅ | 复杂任务、深度思考 |
67
+ | `aicodewith/claude-opus-4-6-20260205` | Claude Opus 4.6 | ✅ | 复杂任务、深度思考 |
68
68
  | `aicodewith/gemini-3-pro` | Gemini 3 Pro | ✅ | 前端 UI、多模态任务 |
69
69
 
70
70
  ---
package/dist/index.js CHANGED
@@ -4,9 +4,231 @@ import { mkdir as mkdir2, readFile as readFile3, writeFile as writeFile3, access
4
4
  import path5 from "path";
5
5
  import os4 from "os";
6
6
 
7
+ // lib/models/registry.ts
8
+ var PROVIDER_ID = "aicodewith";
9
+ var MODELS = [
10
+ {
11
+ id: "gpt-5.3-codex",
12
+ family: "codex",
13
+ displayName: "GPT-5.3 Codex",
14
+ version: "5.3",
15
+ limit: { context: 400000, output: 128000 },
16
+ modalities: { input: ["text", "image"], output: ["text"] },
17
+ reasoning: "xhigh",
18
+ aliases: ["gpt-5.3-codex", "gpt 5.3 codex", "codex"]
19
+ },
20
+ {
21
+ id: "gpt-5.2",
22
+ family: "gpt",
23
+ displayName: "GPT-5.2",
24
+ version: "5.2",
25
+ limit: { context: 400000, output: 128000 },
26
+ modalities: { input: ["text", "image"], output: ["text"] },
27
+ reasoning: "xhigh",
28
+ aliases: ["gpt-5.2", "gpt 5.2"]
29
+ },
30
+ {
31
+ id: "gpt-5.2-codex",
32
+ family: "codex",
33
+ displayName: "GPT-5.2 Codex (deprecated)",
34
+ version: "5.2",
35
+ limit: { context: 400000, output: 128000 },
36
+ modalities: { input: ["text", "image"], output: ["text"] },
37
+ reasoning: "xhigh",
38
+ deprecated: true,
39
+ replacedBy: "gpt-5.3-codex"
40
+ },
41
+ {
42
+ id: "gpt-5.1-codex",
43
+ family: "codex",
44
+ displayName: "GPT-5.1 Codex (deprecated)",
45
+ version: "5.1",
46
+ limit: { context: 400000, output: 128000 },
47
+ modalities: { input: ["text", "image"], output: ["text"] },
48
+ reasoning: "full",
49
+ deprecated: true,
50
+ replacedBy: "gpt-5.3-codex"
51
+ },
52
+ {
53
+ id: "gpt-5.1-codex-max",
54
+ family: "codex",
55
+ displayName: "GPT-5.1 Codex Max (deprecated)",
56
+ version: "5.1",
57
+ limit: { context: 400000, output: 128000 },
58
+ modalities: { input: ["text", "image"], output: ["text"] },
59
+ reasoning: "xhigh",
60
+ deprecated: true,
61
+ replacedBy: "gpt-5.3-codex"
62
+ },
63
+ {
64
+ id: "gpt-5.1-codex-mini",
65
+ family: "codex",
66
+ displayName: "GPT-5.1 Codex Mini (deprecated)",
67
+ version: "5.1",
68
+ limit: { context: 200000, output: 64000 },
69
+ modalities: { input: ["text", "image"], output: ["text"] },
70
+ reasoning: "basic",
71
+ deprecated: true,
72
+ replacedBy: "gpt-5.3-codex"
73
+ },
74
+ {
75
+ id: "gpt-5.1",
76
+ family: "gpt",
77
+ displayName: "GPT-5.1 (deprecated)",
78
+ version: "5.1",
79
+ limit: { context: 400000, output: 128000 },
80
+ modalities: { input: ["text", "image"], output: ["text"] },
81
+ reasoning: "full",
82
+ deprecated: true,
83
+ replacedBy: "gpt-5.2"
84
+ },
85
+ {
86
+ id: "claude-opus-4-6-20260205",
87
+ family: "claude",
88
+ displayName: "Claude Opus 4.6",
89
+ version: "4.6",
90
+ limit: { context: 200000, output: 64000 },
91
+ modalities: { input: ["text", "image"], output: ["text"] }
92
+ },
93
+ {
94
+ id: "claude-opus-4-6-20260205-third-party",
95
+ family: "claude",
96
+ displayName: "Claude Opus 4.6 (third party)",
97
+ version: "4.6",
98
+ limit: { context: 200000, output: 64000 },
99
+ modalities: { input: ["text", "image"], output: ["text"] },
100
+ thirdPartyVariant: true
101
+ },
102
+ {
103
+ id: "claude-sonnet-4-5-20250929",
104
+ family: "claude",
105
+ displayName: "Claude Sonnet 4.5",
106
+ version: "4.5",
107
+ limit: { context: 200000, output: 64000 },
108
+ modalities: { input: ["text", "image"], output: ["text"] }
109
+ },
110
+ {
111
+ id: "claude-sonnet-4-5-20250929-third-party",
112
+ family: "claude",
113
+ displayName: "Claude Sonnet 4.5 (third party)",
114
+ version: "4.5",
115
+ limit: { context: 200000, output: 64000 },
116
+ modalities: { input: ["text", "image"], output: ["text"] },
117
+ thirdPartyVariant: true
118
+ },
119
+ {
120
+ id: "claude-haiku-4-5-20251001",
121
+ family: "claude",
122
+ displayName: "Claude Haiku 4.5",
123
+ version: "4.5",
124
+ limit: { context: 200000, output: 8192 },
125
+ modalities: { input: ["text", "image"], output: ["text"] }
126
+ },
127
+ {
128
+ id: "claude-haiku-4-5-20251001-third-party",
129
+ family: "claude",
130
+ displayName: "Claude Haiku 4.5 (third party)",
131
+ version: "4.5",
132
+ limit: { context: 200000, output: 8192 },
133
+ modalities: { input: ["text", "image"], output: ["text"] },
134
+ thirdPartyVariant: true
135
+ },
136
+ {
137
+ id: "claude-opus-4-5-20251101",
138
+ family: "claude",
139
+ displayName: "Claude Opus 4.5 (deprecated)",
140
+ version: "4.5",
141
+ limit: { context: 200000, output: 64000 },
142
+ modalities: { input: ["text", "image"], output: ["text"] },
143
+ deprecated: true,
144
+ replacedBy: "claude-opus-4-6-20260205"
145
+ },
146
+ {
147
+ id: "claude-opus-4-5-20251101-third-party",
148
+ family: "claude",
149
+ displayName: "Claude Opus 4.5 (third party, deprecated)",
150
+ version: "4.5",
151
+ limit: { context: 200000, output: 64000 },
152
+ modalities: { input: ["text", "image"], output: ["text"] },
153
+ deprecated: true,
154
+ replacedBy: "claude-opus-4-6-20260205-third-party",
155
+ thirdPartyVariant: true
156
+ },
157
+ {
158
+ id: "gemini-3-pro",
159
+ family: "gemini",
160
+ displayName: "Gemini 3 Pro",
161
+ version: "3",
162
+ limit: { context: 1048576, output: 65536 },
163
+ modalities: { input: ["text", "image"], output: ["text"] }
164
+ }
165
+ ];
166
+ var getActiveModels = () => MODELS.filter((m) => !m.deprecated);
167
+ var getDeprecatedModels = () => MODELS.filter((m) => m.deprecated);
168
+ var buildModelMigrations = () => {
169
+ const migrations = {};
170
+ for (const model of getDeprecatedModels()) {
171
+ if (model.replacedBy) {
172
+ migrations[model.id] = model.replacedBy;
173
+ migrations[`${PROVIDER_ID}/${model.id}`] = `${PROVIDER_ID}/${model.replacedBy}`;
174
+ }
175
+ }
176
+ return migrations;
177
+ };
178
+ var buildAliasMap = () => {
179
+ const map = {};
180
+ for (const model of getActiveModels()) {
181
+ map[model.id] = model.id;
182
+ if (model.aliases) {
183
+ for (const alias of model.aliases) {
184
+ map[alias.toLowerCase()] = model.id;
185
+ }
186
+ }
187
+ const effortLevels = ["none", "low", "medium", "high", "xhigh"];
188
+ for (const effort of effortLevels) {
189
+ map[`${model.id}-${effort}`] = model.id;
190
+ }
191
+ }
192
+ return map;
193
+ };
194
+ var getFullModelId = (id) => `${PROVIDER_ID}/${id}`;
195
+ var OMO_MODEL_ASSIGNMENTS = {
196
+ agents: {
197
+ sisyphus: getFullModelId("claude-sonnet-4-5-20250929"),
198
+ hephaestus: getFullModelId("claude-sonnet-4-5-20250929"),
199
+ oracle: getFullModelId("gpt-5.2"),
200
+ librarian: getFullModelId("claude-sonnet-4-5-20250929"),
201
+ explore: getFullModelId("claude-sonnet-4-5-20250929"),
202
+ "multimodal-looker": getFullModelId("gemini-3-pro"),
203
+ prometheus: getFullModelId("gpt-5.2"),
204
+ metis: getFullModelId("gpt-5.2"),
205
+ momus: getFullModelId("gpt-5.2"),
206
+ atlas: getFullModelId("claude-sonnet-4-5-20250929"),
207
+ build: getFullModelId("claude-opus-4-6-20260205"),
208
+ plan: getFullModelId("claude-opus-4-6-20260205"),
209
+ "sisyphus-junior": getFullModelId("claude-sonnet-4-5-20250929"),
210
+ "OpenCode-Builder": getFullModelId("claude-opus-4-6-20260205"),
211
+ general: getFullModelId("claude-sonnet-4-5-20250929"),
212
+ "frontend-ui-ux-engineer": getFullModelId("gemini-3-pro"),
213
+ "document-writer": getFullModelId("gemini-3-pro")
214
+ },
215
+ categories: {
216
+ "visual-engineering": getFullModelId("gemini-3-pro"),
217
+ ultrabrain: getFullModelId("gemini-3-pro"),
218
+ deep: getFullModelId("gemini-3-pro"),
219
+ artistry: getFullModelId("gemini-3-pro"),
220
+ quick: getFullModelId("claude-sonnet-4-5-20250929"),
221
+ "unspecified-low": getFullModelId("claude-sonnet-4-5-20250929"),
222
+ "unspecified-high": getFullModelId("gpt-5.2"),
223
+ writing: getFullModelId("gemini-3-pro"),
224
+ visual: getFullModelId("gemini-3-pro"),
225
+ "business-logic": getFullModelId("gpt-5.2"),
226
+ "data-analysis": getFullModelId("claude-sonnet-4-5-20250929")
227
+ }
228
+ };
7
229
  // lib/constants.ts
8
230
  var PLUGIN_NAME = "opencode-aicodewith-auth";
9
- var PROVIDER_ID = "aicodewith";
231
+ var PROVIDER_ID2 = PROVIDER_ID;
10
232
  var AUTH_METHOD_LABEL = "AICodewith API Key";
11
233
  var CODEX_BASE_URL = "https://api.aicodewith.com/chatgpt/v1";
12
234
  var AICODEWITH_ANTHROPIC_BASE_URL = "https://api.aicodewith.com/v1";
@@ -18,6 +240,7 @@ var GEMINI_PRIVILEGED_USER_ID_ENV = "AICODEWITH_GEMINI_USER_ID";
18
240
  var USER_AGENT = "codex_cli_rs/0.77.0 (Mac OS 26.2.0; arm64) iTerm.app/3.6.6";
19
241
  var ORIGINATOR = "codex_cli_rs";
20
242
  var SAVE_RAW_RESPONSE_ENV = "SAVE_RAW_RESPONSE";
243
+ var MODEL_MIGRATIONS = buildModelMigrations();
21
244
  var HEADER_NAMES = {
22
245
  AUTHORIZATION: "authorization",
23
246
  ORIGINATOR: "originator",
@@ -426,56 +649,10 @@ async function getOpenCodeCodexPrompt() {
426
649
  }
427
650
 
428
651
  // lib/request/helpers/model-map.ts
429
- var MODEL_MAP = {
430
- "gpt-5.1-codex": "gpt-5.1-codex",
431
- "gpt-5.1-codex-low": "gpt-5.1-codex",
432
- "gpt-5.1-codex-medium": "gpt-5.1-codex",
433
- "gpt-5.1-codex-high": "gpt-5.1-codex",
434
- "gpt-5.1-codex-max": "gpt-5.1-codex-max",
435
- "gpt-5.1-codex-max-low": "gpt-5.1-codex-max",
436
- "gpt-5.1-codex-max-medium": "gpt-5.1-codex-max",
437
- "gpt-5.1-codex-max-high": "gpt-5.1-codex-max",
438
- "gpt-5.1-codex-max-xhigh": "gpt-5.1-codex-max",
439
- "gpt-5.2": "gpt-5.2",
440
- "gpt-5.2-none": "gpt-5.2",
441
- "gpt-5.2-low": "gpt-5.2",
442
- "gpt-5.2-medium": "gpt-5.2",
443
- "gpt-5.2-high": "gpt-5.2",
444
- "gpt-5.2-xhigh": "gpt-5.2",
445
- "gpt-5.2-codex": "gpt-5.2-codex",
446
- "gpt-5.2-codex-low": "gpt-5.2-codex",
447
- "gpt-5.2-codex-medium": "gpt-5.2-codex",
448
- "gpt-5.2-codex-high": "gpt-5.2-codex",
449
- "gpt-5.2-codex-xhigh": "gpt-5.2-codex",
450
- "gpt-5.1-codex-mini": "gpt-5.1-codex-mini",
451
- "gpt-5.1-codex-mini-medium": "gpt-5.1-codex-mini",
452
- "gpt-5.1-codex-mini-high": "gpt-5.1-codex-mini",
453
- "gpt-5.1": "gpt-5.1",
454
- "gpt-5.1-none": "gpt-5.1",
455
- "gpt-5.1-low": "gpt-5.1",
456
- "gpt-5.1-medium": "gpt-5.1",
457
- "gpt-5.1-high": "gpt-5.1",
458
- "gpt-5.1-chat-latest": "gpt-5.1",
459
- "gpt-5-codex": "gpt-5.1-codex",
460
- "codex-mini-latest": "gpt-5.1-codex-mini",
461
- "gpt-5-codex-mini": "gpt-5.1-codex-mini",
462
- "gpt-5-codex-mini-medium": "gpt-5.1-codex-mini",
463
- "gpt-5-codex-mini-high": "gpt-5.1-codex-mini",
464
- "gpt-5": "gpt-5.1",
465
- "gpt-5-mini": "gpt-5.1",
466
- "gpt-5-nano": "gpt-5.1"
467
- };
652
+ var MODEL_MAP = buildAliasMap();
468
653
  function getNormalizedModel(modelId) {
469
- try {
470
- if (MODEL_MAP[modelId]) {
471
- return MODEL_MAP[modelId];
472
- }
473
- const lowerModelId = modelId.toLowerCase();
474
- const match = Object.keys(MODEL_MAP).find((key) => key.toLowerCase() === lowerModelId);
475
- return match ? MODEL_MAP[match] : undefined;
476
- } catch {
477
- return;
478
- }
654
+ const lowerModelId = modelId.toLowerCase().trim();
655
+ return MODEL_MAP[lowerModelId];
479
656
  }
480
657
 
481
658
  // lib/request/helpers/input-utils.ts
@@ -651,41 +828,23 @@ var normalizeOrphanedToolOutputs = (input) => {
651
828
  // lib/request/request-transformer.ts
652
829
  function normalizeModel(model) {
653
830
  if (!model)
654
- return "gpt-5.1";
831
+ return "gpt-5.3-codex";
655
832
  const modelId = model.includes("/") ? model.split("/").pop() : model;
656
833
  const mappedModel = getNormalizedModel(modelId);
657
834
  if (mappedModel) {
658
835
  return mappedModel;
659
836
  }
660
837
  const normalized = modelId.toLowerCase();
661
- if (normalized.includes("gpt-5.2-codex") || normalized.includes("gpt 5.2 codex")) {
662
- return "gpt-5.2-codex";
838
+ if (normalized.includes("gpt-5.3-codex") || normalized.includes("gpt 5.3 codex")) {
839
+ return "gpt-5.3-codex";
663
840
  }
664
841
  if (normalized.includes("gpt-5.2") || normalized.includes("gpt 5.2")) {
665
842
  return "gpt-5.2";
666
843
  }
667
- if (normalized.includes("gpt-5.1-codex-max") || normalized.includes("gpt 5.1 codex max")) {
668
- return "gpt-5.1-codex-max";
669
- }
670
- if (normalized.includes("gpt-5.1-codex-mini") || normalized.includes("gpt 5.1 codex mini")) {
671
- return "gpt-5.1-codex-mini";
672
- }
673
- if (normalized.includes("codex-mini-latest") || normalized.includes("gpt-5-codex-mini") || normalized.includes("gpt 5 codex mini")) {
674
- return "codex-mini-latest";
675
- }
676
- if (normalized.includes("gpt-5.1-codex") || normalized.includes("gpt 5.1 codex")) {
677
- return "gpt-5.1-codex";
678
- }
679
- if (normalized.includes("gpt-5.1") || normalized.includes("gpt 5.1")) {
680
- return "gpt-5.1";
681
- }
682
844
  if (normalized.includes("codex")) {
683
- return "gpt-5.1-codex";
845
+ return "gpt-5.3-codex";
684
846
  }
685
- if (normalized.includes("gpt-5") || normalized.includes("gpt 5")) {
686
- return "gpt-5.1";
687
- }
688
- return "gpt-5.1";
847
+ return "gpt-5.3-codex";
689
848
  }
690
849
  function resolveReasoningConfig(modelName, body) {
691
850
  const providerOpenAI = body.providerOptions?.openai;
@@ -752,37 +911,18 @@ function addCodexBridgeMessage(input, hasTools) {
752
911
  }
753
912
  function getReasoningConfig(modelName, userConfig = {}) {
754
913
  const normalizedName = modelName?.toLowerCase() ?? "";
755
- const isGpt52Codex = normalizedName.includes("gpt-5.2-codex") || normalizedName.includes("gpt 5.2 codex");
756
- const isGpt52General = (normalizedName.includes("gpt-5.2") || normalizedName.includes("gpt 5.2")) && !isGpt52Codex;
757
- const isCodexMax = normalizedName.includes("codex-max") || normalizedName.includes("codex max");
758
- const isCodexMini = normalizedName.includes("codex-mini") || normalizedName.includes("codex mini") || normalizedName.includes("codex_mini") || normalizedName.includes("codex-mini-latest");
759
- const isCodex = normalizedName.includes("codex") && !isCodexMini;
760
- const isLightweight = !isCodexMini && (normalizedName.includes("nano") || normalizedName.includes("mini"));
761
- const isGpt51General = (normalizedName.includes("gpt-5.1") || normalizedName.includes("gpt 5.1")) && !isCodex && !isCodexMax && !isCodexMini;
762
- const supportsXhigh = isGpt52General || isGpt52Codex || isCodexMax;
763
- const supportsNone = isGpt52General || isGpt51General;
764
- const defaultEffort = isCodexMini ? "medium" : supportsXhigh ? "high" : isLightweight ? "minimal" : "medium";
914
+ const isGpt53Codex = normalizedName.includes("gpt-5.3-codex") || normalizedName.includes("gpt 5.3 codex");
915
+ const isGpt52General = normalizedName.includes("gpt-5.2") || normalizedName.includes("gpt 5.2");
916
+ const supportsXhigh = isGpt52General || isGpt53Codex;
917
+ const supportsNone = isGpt52General;
918
+ const defaultEffort = supportsXhigh ? "high" : "medium";
765
919
  let effort = userConfig.reasoningEffort || defaultEffort;
766
- if (isCodexMini) {
767
- if (effort === "minimal" || effort === "low" || effort === "none") {
768
- effort = "medium";
769
- }
770
- if (effort === "xhigh") {
771
- effort = "high";
772
- }
773
- if (effort !== "high" && effort !== "medium") {
774
- effort = "medium";
775
- }
776
- }
777
920
  if (!supportsXhigh && effort === "xhigh") {
778
921
  effort = "high";
779
922
  }
780
923
  if (!supportsNone && effort === "none") {
781
924
  effort = "low";
782
925
  }
783
- if (isCodex && effort === "minimal") {
784
- effort = "low";
785
- }
786
926
  return {
787
927
  effort,
788
928
  summary: userConfig.reasoningSummary || "auto"
@@ -1587,6 +1727,18 @@ var syncAgentsAndCategories = (userConfig, defaultConfig) => {
1587
1727
  if (!userConfig.categories) {
1588
1728
  userConfig.categories = {};
1589
1729
  }
1730
+ for (const agent of Object.values(userConfig.agents)) {
1731
+ if (agent.model && MODEL_MIGRATIONS[agent.model]) {
1732
+ agent.model = MODEL_MIGRATIONS[agent.model];
1733
+ changed = true;
1734
+ }
1735
+ }
1736
+ for (const category of Object.values(userConfig.categories)) {
1737
+ if (category.model && MODEL_MIGRATIONS[category.model]) {
1738
+ category.model = MODEL_MIGRATIONS[category.model];
1739
+ changed = true;
1740
+ }
1741
+ }
1590
1742
  if (defaultConfig.agents) {
1591
1743
  for (const [name, defaultAgent] of Object.entries(defaultConfig.agents)) {
1592
1744
  if (!userConfig.agents[name] && defaultAgent.model) {
@@ -1644,52 +1796,153 @@ var syncOmoConfig = async () => {
1644
1796
  // lib/provider-config.json
1645
1797
  var provider_config_default = {
1646
1798
  name: "AICodewith",
1647
- env: ["AICODEWITH_API_KEY"],
1799
+ env: [
1800
+ "AICODEWITH_API_KEY"
1801
+ ],
1648
1802
  models: {
1649
- "gpt-5.2-codex": {
1650
- name: "GPT-5.2 Codex",
1651
- limit: { context: 400000, output: 128000 },
1652
- modalities: { input: ["text", "image"], output: ["text"] }
1803
+ "gpt-5.3-codex": {
1804
+ name: "GPT-5.3 Codex",
1805
+ limit: {
1806
+ context: 400000,
1807
+ output: 128000
1808
+ },
1809
+ modalities: {
1810
+ input: [
1811
+ "text",
1812
+ "image"
1813
+ ],
1814
+ output: [
1815
+ "text"
1816
+ ]
1817
+ }
1653
1818
  },
1654
1819
  "gpt-5.2": {
1655
1820
  name: "GPT-5.2",
1656
- limit: { context: 400000, output: 128000 },
1657
- modalities: { input: ["text", "image"], output: ["text"] }
1821
+ limit: {
1822
+ context: 400000,
1823
+ output: 128000
1824
+ },
1825
+ modalities: {
1826
+ input: [
1827
+ "text",
1828
+ "image"
1829
+ ],
1830
+ output: [
1831
+ "text"
1832
+ ]
1833
+ }
1834
+ },
1835
+ "claude-opus-4-6-20260205": {
1836
+ name: "Claude Opus 4.6",
1837
+ limit: {
1838
+ context: 200000,
1839
+ output: 64000
1840
+ },
1841
+ modalities: {
1842
+ input: [
1843
+ "text",
1844
+ "image"
1845
+ ],
1846
+ output: [
1847
+ "text"
1848
+ ]
1849
+ }
1850
+ },
1851
+ "claude-opus-4-6-20260205-third-party": {
1852
+ name: "Claude Opus 4.6 (third party)",
1853
+ limit: {
1854
+ context: 200000,
1855
+ output: 64000
1856
+ },
1857
+ modalities: {
1858
+ input: [
1859
+ "text",
1860
+ "image"
1861
+ ],
1862
+ output: [
1863
+ "text"
1864
+ ]
1865
+ }
1658
1866
  },
1659
1867
  "claude-sonnet-4-5-20250929": {
1660
1868
  name: "Claude Sonnet 4.5",
1661
- limit: { context: 200000, output: 64000 },
1662
- modalities: { input: ["text", "image"], output: ["text"] }
1663
- },
1664
- "claude-opus-4-6-20260205": {
1665
- name: "Claude Opus 4",
1666
- limit: { context: 200000, output: 64000 },
1667
- modalities: { input: ["text", "image"], output: ["text"] }
1869
+ limit: {
1870
+ context: 200000,
1871
+ output: 64000
1872
+ },
1873
+ modalities: {
1874
+ input: [
1875
+ "text",
1876
+ "image"
1877
+ ],
1878
+ output: [
1879
+ "text"
1880
+ ]
1881
+ }
1668
1882
  },
1669
1883
  "claude-sonnet-4-5-20250929-third-party": {
1670
1884
  name: "Claude Sonnet 4.5 (third party)",
1671
- limit: { context: 200000, output: 64000 },
1672
- modalities: { input: ["text", "image"], output: ["text"] }
1673
- },
1674
- "claude-opus-4-6-20260205-third-party": {
1675
- name: "Claude Opus 4 (third party)",
1676
- limit: { context: 200000, output: 64000 },
1677
- modalities: { input: ["text", "image"], output: ["text"] }
1885
+ limit: {
1886
+ context: 200000,
1887
+ output: 64000
1888
+ },
1889
+ modalities: {
1890
+ input: [
1891
+ "text",
1892
+ "image"
1893
+ ],
1894
+ output: [
1895
+ "text"
1896
+ ]
1897
+ }
1678
1898
  },
1679
1899
  "claude-haiku-4-5-20251001": {
1680
1900
  name: "Claude Haiku 4.5",
1681
- limit: { context: 200000, output: 8192 },
1682
- modalities: { input: ["text", "image"], output: ["text"] }
1901
+ limit: {
1902
+ context: 200000,
1903
+ output: 8192
1904
+ },
1905
+ modalities: {
1906
+ input: [
1907
+ "text",
1908
+ "image"
1909
+ ],
1910
+ output: [
1911
+ "text"
1912
+ ]
1913
+ }
1683
1914
  },
1684
1915
  "claude-haiku-4-5-20251001-third-party": {
1685
1916
  name: "Claude Haiku 4.5 (third party)",
1686
- limit: { context: 200000, output: 8192 },
1687
- modalities: { input: ["text", "image"], output: ["text"] }
1917
+ limit: {
1918
+ context: 200000,
1919
+ output: 8192
1920
+ },
1921
+ modalities: {
1922
+ input: [
1923
+ "text",
1924
+ "image"
1925
+ ],
1926
+ output: [
1927
+ "text"
1928
+ ]
1929
+ }
1688
1930
  },
1689
1931
  "gemini-3-pro": {
1690
1932
  name: "Gemini 3 Pro",
1691
- limit: { context: 1048576, output: 65536 },
1692
- modalities: { input: ["text", "image"], output: ["text"] }
1933
+ limit: {
1934
+ context: 1048576,
1935
+ output: 65536
1936
+ },
1937
+ modalities: {
1938
+ input: [
1939
+ "text",
1940
+ "image"
1941
+ ],
1942
+ output: [
1943
+ "text"
1944
+ ]
1945
+ }
1693
1946
  }
1694
1947
  }
1695
1948
  };
@@ -1764,10 +2017,10 @@ var buildStandardProviderConfig = () => ({
1764
2017
  var applyProviderConfig = (config) => {
1765
2018
  let changed = false;
1766
2019
  const providerMap = config.provider && typeof config.provider === "object" ? config.provider : {};
1767
- const existingProvider = providerMap[PROVIDER_ID];
2020
+ const existingProvider = providerMap[PROVIDER_ID2];
1768
2021
  const standardProvider = buildStandardProviderConfig();
1769
2022
  if (!deepEqual(existingProvider, standardProvider)) {
1770
- providerMap[PROVIDER_ID] = standardProvider;
2023
+ providerMap[PROVIDER_ID2] = standardProvider;
1771
2024
  config.provider = providerMap;
1772
2025
  changed = true;
1773
2026
  }
@@ -1776,6 +2029,10 @@ var applyProviderConfig = (config) => {
1776
2029
  config.plugin = nextPlugins;
1777
2030
  changed = true;
1778
2031
  }
2032
+ if (config.model && MODEL_MIGRATIONS[config.model]) {
2033
+ config.model = MODEL_MIGRATIONS[config.model];
2034
+ changed = true;
2035
+ }
1779
2036
  return changed;
1780
2037
  };
1781
2038
  var ensureConfigFile = async () => {
@@ -1913,7 +2170,7 @@ var AicodewithCodexAuthPlugin = async (ctx) => {
1913
2170
  });
1914
2171
  const autoUpdateHook = createAutoUpdateHook(ctx, { autoUpdate: true });
1915
2172
  const authHook = {
1916
- provider: PROVIDER_ID,
2173
+ provider: PROVIDER_ID2,
1917
2174
  loader: async (getAuth, _provider) => {
1918
2175
  const auth = await getAuth();
1919
2176
  if (auth.type !== "api" || !auth.key) {
@@ -2003,7 +2260,7 @@ var AicodewithCodexAuthPlugin = async (ctx) => {
2003
2260
  },
2004
2261
  event: autoUpdateHook.event,
2005
2262
  "chat.params": async (input, output) => {
2006
- if (input.model.providerID !== PROVIDER_ID)
2263
+ if (input.model.providerID !== PROVIDER_ID2)
2007
2264
  return;
2008
2265
  if (!input.model.id?.startsWith("claude-"))
2009
2266
  return;
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "opencode-aicodewith-auth",
3
- "version": "0.1.35",
4
- "description": "OpenCode plugin for AICodewith authentication - Access GPT-5.2, Claude, and Gemini models through AICodewith API",
3
+ "version": "0.1.39",
4
+ "description": "OpenCode plugin for AICodewith authentication - Access GPT-5.3 Codex, GPT-5.2, Claude, and Gemini models through AICodewith API",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",
7
7
  "types": "dist/index.d.ts",
@@ -19,10 +19,14 @@
19
19
  "dist"
20
20
  ],
21
21
  "scripts": {
22
- "build": "bun build index.ts provider.ts --outdir dist --target bun --format esm --external @ai-sdk/anthropic --external @ai-sdk/google --external @ai-sdk/openai --external @ai-sdk/provider --external @ai-sdk/provider-utils --external @opencode-ai/plugin --external @opencode-ai/sdk",
22
+ "build": "bun run generate:config && bun build index.ts provider.ts --outdir dist --target bun --format esm --external @ai-sdk/anthropic --external @ai-sdk/google --external @ai-sdk/openai --external @ai-sdk/provider --external @ai-sdk/provider-utils --external @opencode-ai/plugin --external @opencode-ai/sdk",
23
23
  "clean": "rm -rf dist",
24
+ "generate:config": "bun scripts/generate-provider-config.ts && bun scripts/generate-omo-config.ts",
24
25
  "prepublishOnly": "bun run clean && bun run build",
25
26
  "typecheck": "bunx tsc --noEmit",
27
+ "test": "vitest run",
28
+ "test:watch": "vitest",
29
+ "test:coverage": "vitest run --coverage",
26
30
  "postinstall": "bun ./scripts/install-opencode-aicodewith.js"
27
31
  },
28
32
  "keywords": [
@@ -58,6 +62,7 @@
58
62
  },
59
63
  "devDependencies": {
60
64
  "@types/node": "^22.19.8",
61
- "typescript": "^5.9.3"
65
+ "typescript": "^5.9.3",
66
+ "vitest": "^4.0.18"
62
67
  }
63
68
  }