calvyn-code 0.14.14 → 0.14.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.py +12 -8
- package/hermes_cli/model_normalize.py +29 -19
- package/hermes_cli/model_switch.py +25 -0
- package/hermes_cli/models.py +2 -0
- package/package.json +1 -1
package/cli.py
CHANGED
|
@@ -3536,18 +3536,22 @@ class HermesCLI:
|
|
|
3536
3536
|
normalize_model_for_provider,
|
|
3537
3537
|
)
|
|
3538
3538
|
|
|
3539
|
-
if resolved_provider not in _AGGREGATOR_PROVIDERS:
|
|
3540
|
-
normalized_model = normalize_model_for_provider(current_model, resolved_provider)
|
|
3541
|
-
if normalized_model and normalized_model != current_model:
|
|
3539
|
+
if resolved_provider not in _AGGREGATOR_PROVIDERS:
|
|
3540
|
+
normalized_model = normalize_model_for_provider(current_model, resolved_provider)
|
|
3541
|
+
if normalized_model and normalized_model != current_model:
|
|
3542
3542
|
if not self._model_is_default:
|
|
3543
3543
|
self._console_print(
|
|
3544
3544
|
f"[yellow]⚠️ Normalized model '{current_model}' to '{normalized_model}' for {resolved_provider}.[/]"
|
|
3545
3545
|
)
|
|
3546
|
-
self.model = normalized_model
|
|
3547
|
-
current_model = normalized_model
|
|
3548
|
-
changed = True
|
|
3549
|
-
except Exception:
|
|
3550
|
-
pass
|
|
3546
|
+
self.model = normalized_model
|
|
3547
|
+
current_model = normalized_model
|
|
3548
|
+
changed = True
|
|
3549
|
+
if resolved_provider == "fireworks" and current_model.startswith("accounts/fireworks/models/"):
|
|
3550
|
+
self._explicit_base_url = "https://api.fireworks.ai/inference/v1"
|
|
3551
|
+
self.base_url = "https://api.fireworks.ai/inference/v1"
|
|
3552
|
+
self.api_mode = "chat_completions"
|
|
3553
|
+
except Exception:
|
|
3554
|
+
pass
|
|
3551
3555
|
|
|
3552
3556
|
if resolved_provider == "copilot":
|
|
3553
3557
|
try:
|
|
package/hermes_cli/model_normalize.py
CHANGED
|
@@ -91,18 +91,19 @@ _AUTHORITATIVE_NATIVE_PROVIDERS: frozenset[str] = frozenset({
|
|
|
91
91
|
|
|
92
92
|
# Direct providers that accept bare native names but should repair a matching
|
|
93
93
|
# provider/ prefix when users copy the aggregator form into config.yaml.
|
|
94
|
-
_MATCHING_PREFIX_STRIP_PROVIDERS: frozenset[str] = frozenset({
|
|
95
|
-
"zai",
|
|
96
|
-
"kimi-coding",
|
|
97
|
-
"kimi-coding-cn",
|
|
98
|
-
"minimax",
|
|
99
|
-
"minimax-oauth",
|
|
100
|
-
"minimax-cn",
|
|
101
|
-
"alibaba",
|
|
102
|
-
"qwen-oauth",
|
|
103
|
-
"xiaomi",
|
|
104
|
-
"arcee",
|
|
105
|
-
"ollama-cloud",
|
|
94
|
+
_MATCHING_PREFIX_STRIP_PROVIDERS: frozenset[str] = frozenset({
|
|
95
|
+
"zai",
|
|
96
|
+
"kimi-coding",
|
|
97
|
+
"kimi-coding-cn",
|
|
98
|
+
"minimax",
|
|
99
|
+
"minimax-oauth",
|
|
100
|
+
"minimax-cn",
|
|
101
|
+
"fireworks",
|
|
102
|
+
"alibaba",
|
|
103
|
+
"qwen-oauth",
|
|
104
|
+
"xiaomi",
|
|
105
|
+
"arcee",
|
|
106
|
+
"ollama-cloud",
|
|
106
107
|
"custom",
|
|
107
108
|
})
|
|
108
109
|
|
|
@@ -450,13 +451,22 @@ def normalize_model_for_provider(model_input: str, target_provider: str) -> str:
|
|
|
450
451
|
return bare
|
|
451
452
|
return _normalize_for_deepseek(bare)
|
|
452
453
|
|
|
453
|
-
# --- Direct providers: repair matching provider prefixes only ---
|
|
454
|
-
if provider in _MATCHING_PREFIX_STRIP_PROVIDERS:
|
|
455
|
-
result = _strip_matching_provider_prefix(name, provider)
|
|
456
|
-
# Some providers require lowercase model IDs (e.g. Xiaomi's API
|
|
457
|
-
# rejects "MiMo-V2.5-Pro" but accepts "mimo-v2.5-pro").
|
|
458
|
-
if provider in _LOWERCASE_MODEL_PROVIDERS:
|
|
459
|
-
result = result.lower()
|
|
454
|
+
# --- Direct providers: repair matching provider prefixes only ---
|
|
455
|
+
if provider in _MATCHING_PREFIX_STRIP_PROVIDERS:
|
|
456
|
+
if provider == "fireworks":
|
|
457
|
+
if name.startswith("fireworks/"):
|
|
458
|
+
return "accounts/fireworks/models/" + name.split("/", 1)[1]
|
|
459
|
+
if name.startswith("accounts/fireworks/models/"):
|
|
460
|
+
return name
|
|
461
|
+
if "/" in name:
|
|
462
|
+
_, bare = name.split("/", 1)
|
|
463
|
+
return f"accounts/fireworks/models/{bare}"
|
|
464
|
+
return f"accounts/fireworks/models/{name}"
|
|
465
|
+
result = _strip_matching_provider_prefix(name, provider)
|
|
466
|
+
# Some providers require lowercase model IDs (e.g. Xiaomi's API
|
|
467
|
+
# rejects "MiMo-V2.5-Pro" but accepts "mimo-v2.5-pro").
|
|
468
|
+
if provider in _LOWERCASE_MODEL_PROVIDERS:
|
|
469
|
+
result = result.lower()
|
|
460
470
|
return result
|
|
461
471
|
|
|
462
472
|
# --- Authoritative native providers: preserve user-facing slugs as-is ---
|
|
package/hermes_cli/model_switch.py
CHANGED
|
@@ -186,6 +186,31 @@ _BUILTIN_DIRECT_ALIASES: dict[str, DirectAlias] = {
|
|
|
186
186
|
provider="openai",
|
|
187
187
|
base_url="",
|
|
188
188
|
),
|
|
189
|
+
"fireworks/gpt-oss-120b": DirectAlias(
|
|
190
|
+
model="accounts/fireworks/models/gpt-oss-120b",
|
|
191
|
+
provider="fireworks",
|
|
192
|
+
base_url="https://api.fireworks.ai/inference/v1",
|
|
193
|
+
),
|
|
194
|
+
"fireworks/gpt-oss-20b": DirectAlias(
|
|
195
|
+
model="accounts/fireworks/models/gpt-oss-20b",
|
|
196
|
+
provider="fireworks",
|
|
197
|
+
base_url="https://api.fireworks.ai/inference/v1",
|
|
198
|
+
),
|
|
199
|
+
"fireworks/glm-5": DirectAlias(
|
|
200
|
+
model="accounts/fireworks/models/glm-5",
|
|
201
|
+
provider="fireworks",
|
|
202
|
+
base_url="https://api.fireworks.ai/inference/v1",
|
|
203
|
+
),
|
|
204
|
+
"fireworks/deepseek-v3p1": DirectAlias(
|
|
205
|
+
model="accounts/fireworks/models/deepseek-v3p1",
|
|
206
|
+
provider="fireworks",
|
|
207
|
+
base_url="https://api.fireworks.ai/inference/v1",
|
|
208
|
+
),
|
|
209
|
+
"fireworks/qwen3-coder-480b-a35b-instruct": DirectAlias(
|
|
210
|
+
model="accounts/fireworks/models/qwen3-coder-480b-a35b-instruct",
|
|
211
|
+
provider="fireworks",
|
|
212
|
+
base_url="https://api.fireworks.ai/inference/v1",
|
|
213
|
+
),
|
|
189
214
|
}
|
|
190
215
|
|
|
191
216
|
# Merged dict (builtins + user config); populated by _load_direct_aliases()
|
package/hermes_cli/models.py
CHANGED
|
@@ -354,6 +354,8 @@ _PROVIDER_MODELS: dict[str, list[str]] = {
|
|
|
354
354
|
"accounts/fireworks/models/llama4-maverick-instruct-basic",
|
|
355
355
|
"accounts/fireworks/models/deepseek-v3p1",
|
|
356
356
|
"accounts/fireworks/models/qwen3-coder-480b-a35b-instruct",
|
|
357
|
+
"accounts/fireworks/models/glm-5",
|
|
358
|
+
"accounts/fireworks/models/kimi-k2-0905",
|
|
357
359
|
],
|
|
358
360
|
"opencode-zen": [
|
|
359
361
|
"kimi-k2.5",
|