claude-evolve 1.8.15 → 1.8.16
- package/lib/ai-cli.sh +10 -0
- package/lib/config.sh +1 -1
- package/package.json +1 -1
- package/templates/config.yaml +3 -1
package/lib/ai-cli.sh
CHANGED
@@ -113,6 +113,16 @@ $prompt"
       ai_output=$(timeout 600 opencode -m openrouter/x-ai/grok-4 run "$prompt" 2>&1)
       local ai_exit_code=$?
       ;;
+    opus-openrouter)
+      local ai_output
+      ai_output=$(timeout 600 opencode -m openrouter/anthropic/claude-opus-4.1 run "$prompt" 2>&1)
+      local ai_exit_code=$?
+      ;;
+    kimi-k2-thinking-openrouter)
+      local ai_output
+      ai_output=$(timeout 600 opencode -m openrouter/moonshotai/kimi-k2-thinking run "$prompt" 2>&1)
+      local ai_exit_code=$?
+      ;;
     codex-oss-local)
       # Codex-OSS via Codex CLI with Ollama backend
       local ai_output
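Both new branches mirror the existing grok-4 OpenRouter case: route the prompt through the opencode CLI with an openrouter/<provider>/<model> slug under a 600-second timeout, capturing the output and exit code. A minimal standalone sketch of that dispatch pattern follows; the wrapper function run_openrouter_backend and its shape are illustrative assumptions, while the backend names and model slugs are taken from this diff.

#!/usr/bin/env bash
# Illustrative sketch only, not the package's actual ai-cli.sh: map a backend
# name to an OpenRouter model slug, then run the prompt through the opencode
# CLI with a 10-minute timeout.
run_openrouter_backend() {
  local backend=$1 prompt=$2
  local model ai_output ai_exit_code
  case "$backend" in
    opus-openrouter)             model="openrouter/anthropic/claude-opus-4.1" ;;
    kimi-k2-thinking-openrouter) model="openrouter/moonshotai/kimi-k2-thinking" ;;
    *) echo "unknown backend: $backend" >&2; return 1 ;;
  esac
  ai_output=$(timeout 600 opencode -m "$model" run "$prompt" 2>&1)
  ai_exit_code=$?
  printf '%s\n' "$ai_output"
  return "$ai_exit_code"
}

# Example: run_openrouter_backend opus-openrouter "Propose three new evolution ideas"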
package/lib/config.sh
CHANGED
@@ -60,7 +60,7 @@ DEFAULT_WORKER_MAX_CANDIDATES=3
 # Default LLM CLI configuration
 DEFAULT_LLM_RUN="glm-zai glm-zai glm-zai glm-zai glm-zai codex-oss-local gemini-flash haiku"
 # Ideate: Commercial models for idea generation + local fallback
-DEFAULT_LLM_IDEATE="opus gemini-pro sonnet-think gpt5high grok-4-openrouter deepseek-openrouter glm-zai"
+DEFAULT_LLM_IDEATE="opus-openrouter kimi-k2-thinking-openrouter gemini-pro sonnet-think gpt5high grok-4-openrouter deepseek-openrouter glm-zai"
 
 # Load configuration from a YAML file and update variables
 _load_yaml_config() {
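DEFAULT_LLM_IDEATE is a plain space-separated list; the new opus-openrouter and kimi-k2-thinking-openrouter entries now lead it, with the previous entries following. One plausible reading of such a list is a left-to-right fallback chain, which the sketch below illustrates under that assumption; it is not the package's actual selection logic, and run_backend is a hypothetical stand-in for the per-model dispatch in ai-cli.sh.

#!/usr/bin/env bash
# Illustrative only: walk a space-separated model list, trying each backend
# in order and falling back to the next on failure. run_backend is assumed
# to be provided elsewhere (e.g. sourced from ai-cli.sh); the real
# claude-evolve scheduler may rotate or weight entries differently.
DEFAULT_LLM_IDEATE="opus-openrouter kimi-k2-thinking-openrouter gemini-pro sonnet-think gpt5high grok-4-openrouter deepseek-openrouter glm-zai"

ideate_with_fallback() {
  local prompt=$1 backend output
  for backend in $DEFAULT_LLM_IDEATE; do   # unquoted on purpose: split on spaces
    if output=$(run_backend "$backend" "$prompt"); then
      printf '%s\n' "$output"
      return 0
    fi
    echo "ideation backend $backend failed; trying next" >&2
  done
  return 1
}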
package/package.json
CHANGED
package/templates/config.yaml
CHANGED
@@ -80,7 +80,7 @@ llm_cli:
   # Default configuration: 100% local code generation, commercial ideation + local fallback
   # Commented out because these change over time; uncomment to override
   #run: codex-qwen3
-  #ideate: opus gemini-pro sonnet-think gpt5high grok-4-openrouter deepseek-openrouter glm-zai
+  #ideate: opus-openrouter kimi-k2-thinking-openrouter gemini-pro sonnet-think gpt5high grok-4-openrouter deepseek-openrouter glm-zai
 
   # Available models:
   # - sonnet: Claude 3.5 Sonnet via Claude CLI
@@ -96,5 +96,7 @@ llm_cli:
   # - glm: GLM-4.6 via OpenCode CLI
   # - grok-code-fast: Grok Code Fast 1 via OpenRouter
   # - grok-4: Grok 4 via OpenRouter
+  # - opus-openrouter: Claude Opus 4.1 via OpenRouter
+  # - kimi-k2-thinking-openrouter: Kimi K2 Thinking via OpenRouter
   # - codex-qwen3: Qwen3-Coder via Codex + Ollama (local, free, RECOMMENDED)
   # - aider-qwen3: Qwen3-Coder via Aider + Ollama (local, free, experimental)
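The template keeps run and ideate commented out so the defaults shipped in config.sh apply; per the template's own note, uncommenting ideate overrides them. As a rough illustration only, not the package's actual _load_yaml_config, an uncommented ideate: value could be picked up roughly like this:

#!/usr/bin/env bash
# Illustrative sketch: read an uncommented "ideate:" value from config.yaml
# and fall back to the config.sh default when the key is still commented out
# or the file is missing. CONFIG_FILE and the parsing are assumptions.
CONFIG_FILE="${1:-config.yaml}"
DEFAULT_LLM_IDEATE="opus-openrouter kimi-k2-thinking-openrouter gemini-pro sonnet-think gpt5high grok-4-openrouter deepseek-openrouter glm-zai"

ideate_override=""
if [ -f "$CONFIG_FILE" ]; then
  # Matches "  ideate: ..." but not the commented "  #ideate: ..." line.
  ideate_override=$(sed -n 's/^[[:space:]]*ideate:[[:space:]]*//p' "$CONFIG_FILE" | head -n 1)
fi
LLM_IDEATE="${ideate_override:-$DEFAULT_LLM_IDEATE}"

echo "ideation models: $LLM_IDEATE"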