opencode-windsurf-codeium 0.1.24 → 0.1.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/README.md +53 -20
  2. package/package.json +4 -4
package/README.md CHANGED
@@ -29,12 +29,12 @@ bun add opencode-windsurf-codeium@beta
29
29
 
30
30
  ## OpenCode Configuration
31
31
 
32
- Add the following to your OpenCode config (typically `~/.config/opencode/config.json`). The plugin starts a local proxy server on port 42100 (falls back to a random free port and updates `chat.params` automatically):
32
+ Add the following to your OpenCode config (typically `~/.config/opencode/config.json`). The plugin starts a local proxy server on port 42100 (falls back to a random free port and updates `chat.params` automatically). The full model list with variants is in `opencode_config_example.json`; thinking vs non-thinking are separate models, while variants are only for performance tiers (low/high/xhigh/etc.).
33
33
 
34
34
  ```json
35
35
  {
36
36
  "$schema": "https://opencode.ai/config.json",
37
- "plugin": ["opencode-windsurf-codeium"],
37
+ "plugin": ["opencode-windsurf-codeium@beta"],
38
38
  "provider": {
39
39
  "windsurf": {
40
40
  "npm": "@ai-sdk/openai-compatible",
@@ -42,25 +42,58 @@ Add the following to your OpenCode config (typically `~/.config/opencode/config.
42
42
  "baseURL": "http://127.0.0.1:42100/v1"
43
43
  },
44
44
  "models": {
45
- "claude-4.5-opus": {
46
- "name": "Claude 4.5 Opus (Windsurf)",
47
- "limit": { "context": 200000, "output": 8192 }
45
+ "claude-4.5-opus-thinking": {
46
+ "name": "Claude 4.5 Opus Thinking (Windsurf)",
47
+ "limit": {
48
+ "context": 200000,
49
+ "output": 8192
50
+ }
48
51
  },
49
- "gpt-5.2-xhigh": {
50
- "name": "GPT 5.2 XHigh (Windsurf)",
51
- "limit": { "context": 128000, "output": 16384 }
52
+ "gpt-5.1-codex-max": {
53
+ "name": "GPT 5.1 Codex Max (Windsurf)",
54
+ "limit": {
55
+ "context": 200000,
56
+ "output": 8192
57
+ },
58
+ "variants": {
59
+ "low": {},
60
+ "medium": {},
61
+ "high": {}
62
+ }
52
63
  },
53
- "gemini-3.0-pro-high": {
54
- "name": "Gemini 3.0 Pro High (Windsurf)",
55
- "limit": { "context": 200000, "output": 8192 }
64
+ "gemini-3.0-pro": {
65
+ "name": "Gemini 3.0 Pro (Windsurf)",
66
+ "limit": {
67
+ "context": 200000,
68
+ "output": 8192
69
+ },
70
+ "variants": {
71
+ "minimal": {},
72
+ "low": {},
73
+ "medium": {},
74
+ "high": {}
75
+ }
56
76
  },
57
- "deepseek-r1": {
58
- "name": "DeepSeek R1 (Windsurf)",
59
- "limit": { "context": 64000, "output": 8192 }
77
+ "minimax-m2.1": {
78
+ "name": "Minimax M2.1 (Windsurf)",
79
+ "limit": {
80
+ "context": 200000,
81
+ "output": 8192
82
+ }
60
83
  },
61
- "swe-1.5": {
62
- "name": "SWE 1.5 (Windsurf)",
63
- "limit": { "context": 128000, "output": 32000 }
84
+ "glm-4.7": {
85
+ "name": "GLM 4.7 (Windsurf)",
86
+ "limit": {
87
+ "context": 200000,
88
+ "output": 8192
89
+ }
90
+ },
91
+ "glm-4.7-fast": {
92
+ "name": "GLM 4.7 Fast (Windsurf)",
93
+ "limit": {
94
+ "context": 200000,
95
+ "output": 8192
96
+ }
64
97
  }
65
98
  }
66
99
  }
@@ -104,11 +137,11 @@ src/
104
137
 
105
138
  **Claude**: `claude-3-opus`, `claude-3-sonnet`, `claude-3-haiku`, `claude-3.5-sonnet`, `claude-3.5-haiku`, `claude-3.7-sonnet`, `claude-3.7-sonnet-thinking`, `claude-4-opus`, `claude-4-opus-thinking`, `claude-4-sonnet`, `claude-4-sonnet-thinking`, `claude-4.1-opus`, `claude-4.1-opus-thinking`, `claude-4.5-sonnet`, `claude-4.5-sonnet-thinking`, `claude-4.5-opus`, `claude-4.5-opus-thinking`, `claude-code`.
106
139
 
107
- **OpenAI GPT**: `gpt-4`, `gpt-4-turbo`, `gpt-4o`, `gpt-4o-mini`, `gpt-4.1`, `gpt-4.1-mini`, `gpt-4.1-nano`, `gpt-5`, `gpt-5-nano`, `gpt-5-low`, `gpt-5-high`, `gpt-5-codex`, `gpt-5.1-codex-mini`, `gpt-5.1-codex`, `gpt-5.1-codex-max`, `gpt-5.2-low`, `gpt-5.2`, `gpt-5.2-high`, `gpt-5.2-xhigh`, `gpt-5.2-priority` (plus the low/high/xhigh priority variants).
140
+ **OpenAI GPT**: `gpt-4`, `gpt-4-turbo`, `gpt-4o`, `gpt-4o-mini`, `gpt-4.1`, `gpt-4.1-mini`, `gpt-4.1-nano`, `gpt-5`, `gpt-5-nano`, `gpt-5-codex`, `gpt-5.1-codex-mini`, `gpt-5.1-codex`, `gpt-5.1-codex-max`, `gpt-5.2` (variants low/medium/high/xhigh + priority tiers). Non-thinking vs thinking are separate model IDs, not variants.
108
141
 
109
142
  **OpenAI O-series**: `o3`, `o3-mini`, `o3-low`, `o3-high`, `o3-pro`, `o3-pro-low`, `o3-pro-high`, `o4-mini`, `o4-mini-low`, `o4-mini-high`.
110
143
 
111
- **Gemini**: `gemini-2.0-flash`, `gemini-2.5-pro`, `gemini-2.5-flash` (+ variants: `thinking`, `lite`), `gemini-3.0-pro` (+ variants: `minimal`, `low`, `medium`, `high`), `gemini-3.0-flash` (+ variants: `minimal`, `low`, `medium`, `high`).
144
+ **Gemini**: `gemini-2.0-flash`, `gemini-2.5-pro`, `gemini-2.5-flash`, `gemini-2.5-flash-thinking`, `gemini-2.5-flash-lite`, `gemini-3.0-pro` (variants: `minimal`, `low`, `medium`, `high`), `gemini-3.0-flash` (variants: `minimal`, `low`, `medium`, `high`). Thinking versions of Gemini 2.5 are separate models.
112
145
 
113
146
  **DeepSeek**: `deepseek-v3`, `deepseek-v3-2`, `deepseek-r1`, `deepseek-r1-fast`, `deepseek-r1-slow`.
114
147
 
@@ -120,7 +153,7 @@ src/
120
153
 
121
154
  **Specialty & Proprietary**: `mistral-7b`, `kimi-k2`, `kimi-k2-thinking`, `glm-4.5`, `glm-4.5-fast`, `glm-4.6`, `glm-4.6-fast`, `glm-4.7`, `glm-4.7-fast`, `minimax-m2`, `minimax-m2.1`, `swe-1.5`, `swe-1.5-thinking`, `swe-1.5-slow`.
122
155
 
123
- Aliases (e.g., `gpt-5.2-low-priority`) are also accepted.
156
+ Aliases (e.g., `gpt-5.2-low-priority`) are also accepted. Variants live under `provider.windsurf.models[model].variants`; thinking/non-thinking are distinct models.
124
157
 
125
158
  ## Development
126
159
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "opencode-windsurf-codeium",
3
- "version": "0.1.24",
3
+ "version": "0.1.26",
4
4
  "description": "OpenCode plugin for Windsurf/Codeium authentication - use Windsurf models in OpenCode",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",
@@ -41,12 +41,12 @@
41
41
  "@bufbuild/protobuf": "^2.0.0",
42
42
  "proper-lockfile": "^4.1.2",
43
43
  "xdg-basedir": "^5.1.0",
44
- "zod": "^3.24.0"
44
+ "zod": "^4.3.5"
45
45
  },
46
46
  "devDependencies": {
47
- "@opencode-ai/plugin": "^0.15.30",
47
+ "@opencode-ai/plugin": "^1.1.21",
48
48
  "@types/bun": "^1.3.6",
49
- "@types/node": "^22.0.0",
49
+ "@types/node": "^25.0.0",
50
50
  "@types/proper-lockfile": "^4.1.4",
51
51
  "typescript": "^5.7.0"
52
52
  },