opencode-windsurf-codeium 0.1.24 → 0.1.25

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/README.md +15 -22
  2. package/package.json +4 -4
package/README.md CHANGED
@@ -29,7 +29,7 @@ bun add opencode-windsurf-codeium@beta
 
 ## OpenCode Configuration
 
-Add the following to your OpenCode config (typically `~/.config/opencode/config.json`). The plugin starts a local proxy server on port 42100 (falls back to a random free port and updates `chat.params` automatically):
+Add the following to your OpenCode config (typically `~/.config/opencode/config.json`). The plugin starts a local proxy server on port 42100 (falls back to a random free port and updates `chat.params` automatically). The full model list with variants is in `opencode_config_example.json`; thinking vs non-thinking are separate models, while variants are only for performance tiers (low/high/xhigh/etc.).
 
 ```json
 {
@@ -42,26 +42,19 @@ Add the following to your OpenCode config (typically `~/.config/opencode/config.
         "baseURL": "http://127.0.0.1:42100/v1"
       },
       "models": {
-        "claude-4.5-opus": {
-          "name": "Claude 4.5 Opus (Windsurf)",
-          "limit": { "context": 200000, "output": 8192 }
+        "claude-4.5-opus": { "name": "Claude 4.5 Opus (Windsurf)", "limit": { "context": 200000, "output": 8192 } },
+        "gpt-5.2": {
+          "name": "GPT 5.2 (Windsurf)",
+          "limit": { "context": 200000, "output": 8192 },
+          "variants": { "low": {}, "medium": {}, "high": {}, "xhigh": {}, "priority": {}, "low-priority": {}, "high-priority": {}, "xhigh-priority": {} }
         },
-        "gpt-5.2-xhigh": {
-          "name": "GPT 5.2 XHigh (Windsurf)",
-          "limit": { "context": 128000, "output": 16384 }
+        "gemini-3.0-pro": {
+          "name": "Gemini 3.0 Pro (Windsurf)",
+          "limit": { "context": 200000, "output": 8192 },
+          "variants": { "minimal": {}, "low": {}, "medium": {}, "high": {} }
         },
-        "gemini-3.0-pro-high": {
-          "name": "Gemini 3.0 Pro High (Windsurf)",
-          "limit": { "context": 200000, "output": 8192 }
-        },
-        "deepseek-r1": {
-          "name": "DeepSeek R1 (Windsurf)",
-          "limit": { "context": 64000, "output": 8192 }
-        },
-        "swe-1.5": {
-          "name": "SWE 1.5 (Windsurf)",
-          "limit": { "context": 128000, "output": 32000 }
-        }
+        "deepseek-r1": { "name": "DeepSeek R1 (Windsurf)", "limit": { "context": 64000, "output": 8192 } },
+        "swe-1.5": { "name": "SWE 1.5 (Windsurf)", "limit": { "context": 128000, "output": 32000 } }
       }
     }
   }
@@ -104,11 +97,11 @@ src/
 
 **Claude**: `claude-3-opus`, `claude-3-sonnet`, `claude-3-haiku`, `claude-3.5-sonnet`, `claude-3.5-haiku`, `claude-3.7-sonnet`, `claude-3.7-sonnet-thinking`, `claude-4-opus`, `claude-4-opus-thinking`, `claude-4-sonnet`, `claude-4-sonnet-thinking`, `claude-4.1-opus`, `claude-4.1-opus-thinking`, `claude-4.5-sonnet`, `claude-4.5-sonnet-thinking`, `claude-4.5-opus`, `claude-4.5-opus-thinking`, `claude-code`.
 
-**OpenAI GPT**: `gpt-4`, `gpt-4-turbo`, `gpt-4o`, `gpt-4o-mini`, `gpt-4.1`, `gpt-4.1-mini`, `gpt-4.1-nano`, `gpt-5`, `gpt-5-nano`, `gpt-5-low`, `gpt-5-high`, `gpt-5-codex`, `gpt-5.1-codex-mini`, `gpt-5.1-codex`, `gpt-5.1-codex-max`, `gpt-5.2-low`, `gpt-5.2`, `gpt-5.2-high`, `gpt-5.2-xhigh`, `gpt-5.2-priority` (plus the low/high/xhigh priority variants).
+**OpenAI GPT**: `gpt-4`, `gpt-4-turbo`, `gpt-4o`, `gpt-4o-mini`, `gpt-4.1`, `gpt-4.1-mini`, `gpt-4.1-nano`, `gpt-5`, `gpt-5-nano`, `gpt-5-codex`, `gpt-5.1-codex-mini`, `gpt-5.1-codex`, `gpt-5.1-codex-max`, `gpt-5.2` (variants low/medium/high/xhigh + priority tiers). Non-thinking vs thinking are separate model IDs, not variants.
 
 **OpenAI O-series**: `o3`, `o3-mini`, `o3-low`, `o3-high`, `o3-pro`, `o3-pro-low`, `o3-pro-high`, `o4-mini`, `o4-mini-low`, `o4-mini-high`.
 
-**Gemini**: `gemini-2.0-flash`, `gemini-2.5-pro`, `gemini-2.5-flash` (+ variants: `thinking`, `lite`), `gemini-3.0-pro` (+ variants: `minimal`, `low`, `medium`, `high`), `gemini-3.0-flash` (+ variants: `minimal`, `low`, `medium`, `high`).
+**Gemini**: `gemini-2.0-flash`, `gemini-2.5-pro`, `gemini-2.5-flash`, `gemini-2.5-flash-thinking`, `gemini-2.5-flash-lite`, `gemini-3.0-pro` (variants: `minimal`, `low`, `medium`, `high`), `gemini-3.0-flash` (variants: `minimal`, `low`, `medium`, `high`). Thinking versions of Gemini 2.5 are separate models.
 
 **DeepSeek**: `deepseek-v3`, `deepseek-v3-2`, `deepseek-r1`, `deepseek-r1-fast`, `deepseek-r1-slow`.
 
@@ -120,7 +113,7 @@ src/
 
 **Specialty & Proprietary**: `mistral-7b`, `kimi-k2`, `kimi-k2-thinking`, `glm-4.5`, `glm-4.5-fast`, `glm-4.6`, `glm-4.6-fast`, `glm-4.7`, `glm-4.7-fast`, `minimax-m2`, `minimax-m2.1`, `swe-1.5`, `swe-1.5-thinking`, `swe-1.5-slow`.
 
-Aliases (e.g., `gpt-5.2-low-priority`) are also accepted.
+Aliases (e.g., `gpt-5.2-low-priority`) are also accepted. Variants live under `provider.windsurf.models[model].variants`; thinking/non-thinking are distinct models.
 
 ## Development
 
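Pieced together, the README's updated provider example reads as in the sketch below. This is only a reading aid assembled from the added (`+`) lines in the hunks above: the `provider.windsurf.models` path is the one the README itself names, the `options` wrapper around `baseURL` is an assumption about the lines outside the hunks, and any other provider fields not shown in the diff are omitted.

```json
{
  "provider": {
    "windsurf": {
      "options": { "baseURL": "http://127.0.0.1:42100/v1" },
      "models": {
        "claude-4.5-opus": { "name": "Claude 4.5 Opus (Windsurf)", "limit": { "context": 200000, "output": 8192 } },
        "gpt-5.2": {
          "name": "GPT 5.2 (Windsurf)",
          "limit": { "context": 200000, "output": 8192 },
          "variants": { "low": {}, "medium": {}, "high": {}, "xhigh": {}, "priority": {}, "low-priority": {}, "high-priority": {}, "xhigh-priority": {} }
        },
        "gemini-3.0-pro": {
          "name": "Gemini 3.0 Pro (Windsurf)",
          "limit": { "context": 200000, "output": 8192 },
          "variants": { "minimal": {}, "low": {}, "medium": {}, "high": {} }
        },
        "deepseek-r1": { "name": "DeepSeek R1 (Windsurf)", "limit": { "context": 64000, "output": 8192 } },
        "swe-1.5": { "name": "SWE 1.5 (Windsurf)", "limit": { "context": 128000, "output": 32000 } }
      }
    }
  }
}
```

The net effect of the README change is that `gpt-5.2` and `gemini-3.0-pro` are now configured once with `variants` for their effort tiers, rather than listing `gpt-5.2-xhigh` and `gemini-3.0-pro-high` as standalone entries; thinking models remain separate IDs.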
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "opencode-windsurf-codeium",
-  "version": "0.1.24",
+  "version": "0.1.25",
   "description": "OpenCode plugin for Windsurf/Codeium authentication - use Windsurf models in OpenCode",
   "type": "module",
   "main": "./dist/index.js",
@@ -41,12 +41,12 @@
     "@bufbuild/protobuf": "^2.0.0",
     "proper-lockfile": "^4.1.2",
     "xdg-basedir": "^5.1.0",
-    "zod": "^3.24.0"
+    "zod": "^4.3.5"
   },
   "devDependencies": {
-    "@opencode-ai/plugin": "^0.15.30",
+    "@opencode-ai/plugin": "^1.1.21",
     "@types/bun": "^1.3.6",
-    "@types/node": "^22.0.0",
+    "@types/node": "^25.0.0",
     "@types/proper-lockfile": "^4.1.4",
     "typescript": "^5.7.0"
   },
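Applied, the visible portion of the dependency blocks in 0.1.25 reads as below. Again a reading aid only: the `dependencies` key itself sits above line 41 of package.json and is assumed, and any entries outside the hunk are omitted.

```json
{
  "dependencies": {
    "@bufbuild/protobuf": "^2.0.0",
    "proper-lockfile": "^4.1.2",
    "xdg-basedir": "^5.1.0",
    "zod": "^4.3.5"
  },
  "devDependencies": {
    "@opencode-ai/plugin": "^1.1.21",
    "@types/bun": "^1.3.6",
    "@types/node": "^25.0.0",
    "@types/proper-lockfile": "^4.1.4",
    "typescript": "^5.7.0"
  }
}
```

Note that `zod` (^3.24.0 → ^4.3.5) and `@opencode-ai/plugin` (^0.15.30 → ^1.1.21) are major-version bumps, while `@types/node` moves from ^22 to ^25.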