opencode-llmstack 0.9.0__tar.gz → 0.9.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/PKG-INFO +1 -1
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/app.py +3 -3
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/models.ini +1 -1
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/opencode_llmstack.egg-info/PKG-INFO +1 -1
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/pyproject.toml +1 -1
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/README.md +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/AGENTS.md +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/__init__.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/__main__.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/_platform.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/backends/__init__.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/backends/bedrock.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/check_models.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/cli.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/commands/__init__.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/commands/_helpers.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/commands/activate.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/commands/check.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/commands/download.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/commands/install.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/commands/install_llama_swap.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/commands/reload.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/commands/restart.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/commands/setup.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/commands/start.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/commands/status.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/commands/stop.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/download/__init__.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/download/binary.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/download/ggufs.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/generators/__init__.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/generators/llama_swap.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/generators/opencode.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/paths.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/shell_env.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/llmstack/tiers.py +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/opencode_llmstack.egg-info/SOURCES.txt +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/opencode_llmstack.egg-info/dependency_links.txt +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/opencode_llmstack.egg-info/entry_points.txt +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/opencode_llmstack.egg-info/requires.txt +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/opencode_llmstack.egg-info/top_level.txt +0 -0
- {opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/setup.cfg +0 -0
|
@@ -170,7 +170,7 @@ MID_FIDELITY_CEILING = int(os.getenv("ROUTER_MID_FIDELITY_CEILING", "32000"))
|
|
|
170
170
|
# Floor the long-context rung at code-smart whenever a tool-call
|
|
171
171
|
# protocol is in play -- 3B models tool-call unreliably regardless of
|
|
172
172
|
# how big their context window is.
|
|
173
|
-
MULTI_TURN_THRESHOLD = int(os.getenv("ROUTER_MULTI_TURN", "
|
|
173
|
+
MULTI_TURN_THRESHOLD = int(os.getenv("ROUTER_MULTI_TURN", "10"))
|
|
174
174
|
AUTO_ALIASES = {"auto", "", None}
|
|
175
175
|
|
|
176
176
|
UNCENSORED_TRIGGERS = re.compile(
|
|
@@ -353,7 +353,7 @@ def classify(body: dict[str, Any]) -> tuple[str, str]:
|
|
|
353
353
|
ULTRA_MODEL, AGENT_MODEL)
|
|
354
354
|
return AGENT_MODEL, f"ultra-trigger->agent ({ULTRA_MODEL} unavailable)"
|
|
355
355
|
|
|
356
|
-
n_turns =
|
|
356
|
+
n_turns = sum(1 for m in (messages or []) if m.get("role") == "user")
|
|
357
357
|
has_code_signal = (
|
|
358
358
|
_matches(CODE_BLOCK, messages, prompt)
|
|
359
359
|
or _matches(AGENT_SIGNALS, messages, prompt)
|
|
@@ -402,7 +402,7 @@ def classify(body: dict[str, Any]) -> tuple[str, str]:
|
|
|
402
402
|
# prevent the step-down (plan tiers strip tools before dispatch,
|
|
403
403
|
# and code-fast is a hosted model that tool-calls reliably).
|
|
404
404
|
if n_turns >= MULTI_TURN_THRESHOLD:
|
|
405
|
-
return AGENT_MODEL, f"long-context tokens~{est}>{MID_FIDELITY_CEILING} (turns={n_turns} floor)"
|
|
405
|
+
return AGENT_MODEL, f"long-context tokens~{est}>{MID_FIDELITY_CEILING} (user-turns={n_turns}>={MULTI_TURN_THRESHOLD} floor)"
|
|
406
406
|
return FAST_MODEL, f"long-context tokens~{est}>{MID_FIDELITY_CEILING}"
|
|
407
407
|
|
|
408
408
|
|
|
@@ -300,7 +300,7 @@ description = Mistral-Small 3.2 24B Heretic - no-filter planning
|
|
|
300
300
|
;
|
|
301
301
|
high_fidelity_ceiling = 12000 ; tokens; below this, top-tier model is still cheap+fast (and ultra ctx_size = 2 * this)
|
|
302
302
|
mid_fidelity_ceiling = 32000 ; tokens; smart's sweet spot up to here, then step down to fast (smart ctx_size = 2 * this)
|
|
303
|
-
multi_turn =
|
|
303
|
+
multi_turn = 10 ; turn count that floors the long-context rung at code-smart
|
|
304
304
|
agent_signal_words = implement, fix bug, write a function, refactor, edit, patch, debug, run tests, build it
|
|
305
305
|
plan_signal_words = design, architect, approach, trade-off, should we, how would you, explain why, think through, compare options, brainstorm, root cause
|
|
306
306
|
uncensored_triggers = [nofilter], [uncensored], [heretic], "uncensored:", "nofilter:" (line start)
|
|
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
|
|
4
4
|
|
|
5
5
|
[project]
|
|
6
6
|
name = "opencode-llmstack"
|
|
7
|
-
version = "0.9.0"
|
|
7
|
+
version = "0.9.1"
|
|
8
8
|
description = "Multi-tier local LLM stack: llama-swap + FastAPI auto-router + opencode wiring."
|
|
9
9
|
readme = "README.md"
|
|
10
10
|
requires-python = ">=3.11"
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/opencode_llmstack.egg-info/dependency_links.txt
RENAMED
|
File without changes
|
{opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/opencode_llmstack.egg-info/entry_points.txt
RENAMED
|
File without changes
|
|
File without changes
|
{opencode_llmstack-0.9.0 → opencode_llmstack-0.9.1}/opencode_llmstack.egg-info/top_level.txt
RENAMED
|
File without changes
|
|
File without changes
|