claude-evolve 1.8.28 → 1.8.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/ai-cli.sh +13 -0
- package/lib/config.sh +2 -2
- package/package.json +1 -1
- package/templates/config.yaml +6 -4
package/lib/ai-cli.sh
CHANGED
@@ -168,12 +168,25 @@ $prompt"
       ai_output=$(timeout -k 30 600 opencode -m openrouter/anthropic/claude-opus-4.1 run "$prompt" 2>&1)
       local ai_exit_code=$?
       ;;
+    kimi-k2-openrouter)
+      local ai_output
+      # Kimi K2 Thinking via OpenRouter (no separate auth needed)
+      ai_output=$(timeout -k 30 600 opencode -m openrouter/moonshotai/kimi-k2-thinking run "$prompt" 2>&1)
+      local ai_exit_code=$?
+      ;;
     kimi-k2-think-moonshot)
       local ai_output
       # Use kimi CLI directly (assumes kimi is installed and configured)
       ai_output=$(timeout -k 30 600 kimi --print -c "$prompt" 2>&1)
       local ai_exit_code=$?
       ;;
+    kimi-coder)
+      local ai_output
+      # Kimi for Coding model via kimi CLI (fast coding-focused model)
+      # Use --print to see agent actions while still allowing file modifications
+      ai_output=$(timeout -k 30 600 kimi --print -y -m kimi-for-coding -c "$prompt" 2>&1)
+      local ai_exit_code=$?
+      ;;
     codex-oss-local)
       # Codex-OSS via Codex CLI with Ollama backend
       local ai_output
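For context, each model name in ai-cli.sh is one branch of a shell case statement that shells out to an external CLI under a 600-second timeout. The following is a minimal standalone sketch of that dispatch pattern; the run_ai wrapper name and the surrounding structure are assumptions for illustration, not code from the package:

run_ai() {
  local model=$1 prompt=$2
  local ai_output ai_exit_code
  case "$model" in
    kimi-k2-openrouter)
      # Kimi K2 Thinking via OpenRouter (no separate auth needed)
      ai_output=$(timeout -k 30 600 opencode -m openrouter/moonshotai/kimi-k2-thinking run "$prompt" 2>&1)
      ai_exit_code=$?
      ;;
    kimi-coder)
      # Kimi for Coding via the kimi CLI; per the diff's comment, --print
      # surfaces agent actions while still allowing file modifications
      ai_output=$(timeout -k 30 600 kimi --print -y -m kimi-for-coding -c "$prompt" 2>&1)
      ai_exit_code=$?
      ;;
    *)
      echo "unknown model: $model" >&2
      return 1
      ;;
  esac
  printf '%s\n' "$ai_output"
  return "$ai_exit_code"
}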
package/lib/config.sh
CHANGED
@@ -58,9 +58,9 @@ DEFAULT_MEMORY_LIMIT_MB=12288
 DEFAULT_WORKER_MAX_CANDIDATES=3
 
 # Default LLM CLI configuration
-DEFAULT_LLM_RUN="glm-zai glm-zai glm-zai glm-zai glm-zai codex-oss-local haiku"
+DEFAULT_LLM_RUN="glm-zai glm-zai glm-zai kimi-coder glm-zai glm-zai codex-oss-local haiku"
 # Ideate: Commercial models for idea generation + local fallback
-DEFAULT_LLM_IDEATE="opus-openrouter kimi-k2-
+DEFAULT_LLM_IDEATE="opus-openrouter kimi-k2-openrouter gemini-3-pro-preview sonnet-think gpt5high grok-4-openrouter deepseek-openrouter glm-zai"
 
 # Load configuration from a YAML file and update variables
 _load_yaml_config() {
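Per the comments in templates/config.yaml, these space-separated lists are consumed round-robin across worker candidates, with repetition acting as weighting, so kimi-coder now gets one of every eight run slots. A hedged sketch of how such a selection could work in bash; pick_model and the indexing scheme are assumptions, not the package's actual code:

LLM_RUN="glm-zai glm-zai glm-zai kimi-coder glm-zai glm-zai codex-oss-local haiku"

pick_model() {
  local idx=$1
  local -a models=($LLM_RUN)             # intentional word splitting on spaces
  echo "${models[idx % ${#models[@]}]}"  # wrap around the list round-robin
}

pick_model 3   # prints "kimi-coder": candidate 3 maps to the 4th entry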
package/package.json
CHANGED
package/templates/config.yaml
CHANGED
@@ -77,10 +77,10 @@ llm_cli:
   # Models are tried in order, with round-robin distribution across candidates
   # You can repeat models for weighted selection (e.g., "sonnet sonnet gemini" for 2:1 ratio)
 
-  # Default configuration:
+  # Default configuration: Mix of local and commercial code generation, commercial ideation
   # Commented out because these change over time; uncomment to override
-  #run: codex-
-  #ideate: opus-openrouter kimi-k2-
+  #run: glm-zai glm-zai kimi-coder codex-oss-local
+  #ideate: opus-openrouter kimi-k2-openrouter gemini-pro sonnet-think gpt5high grok-4-openrouter deepseek-openrouter glm-zai
 
   # Available models:
   # - sonnet: Claude 3.5 Sonnet via Claude CLI
@@ -97,6 +97,8 @@ llm_cli:
   # - grok-code-fast: Grok Code Fast 1 via OpenRouter
   # - grok-4: Grok 4 via OpenRouter
   # - opus-openrouter: Claude Opus 4.1 via OpenRouter
-  # - kimi-k2-
+  # - kimi-k2-openrouter: Kimi K2 Thinking via OpenRouter (RECOMMENDED - no separate auth)
+  # - kimi-k2-think-moonshot: Kimi K2 Thinking via Moonshot CLI (requires separate kimi CLI setup)
+  # - kimi-coder: Kimi for Coding model via kimi CLI (fast, good for code generation)
   # - codex-qwen3: Qwen3-Coder via Codex + Ollama (local, free, RECOMMENDED)
   # - aider-qwen3: Qwen3-Coder via Aider + Ollama (local, free, experimental)
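To adopt the new models ahead of a future default change, a user could uncomment and edit the run/ideate lines in their config.yaml. A hedged example using GNU sed; the two-space indentation under the llm_cli: section is inferred from the hunk headers above, and the ideate list shown is an illustrative subset:

# Uncomment and override the model lists in an existing config.yaml (sketch)
sed -i \
  -e 's/^  #run: .*/  run: glm-zai glm-zai kimi-coder codex-oss-local/' \
  -e 's/^  #ideate: .*/  ideate: opus-openrouter kimi-k2-openrouter glm-zai/' \
  config.yaml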