henosis-cli 0.6.7__py3-none-any.whl → 0.6.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cli.py +119 -53
- {henosis_cli-0.6.7.dist-info → henosis_cli-0.6.8.dist-info}/METADATA +1 -1
- {henosis_cli-0.6.7.dist-info → henosis_cli-0.6.8.dist-info}/RECORD +6 -6
- {henosis_cli-0.6.7.dist-info → henosis_cli-0.6.8.dist-info}/WHEEL +0 -0
- {henosis_cli-0.6.7.dist-info → henosis_cli-0.6.8.dist-info}/entry_points.txt +0 -0
- {henosis_cli-0.6.7.dist-info → henosis_cli-0.6.8.dist-info}/top_level.txt +0 -0
cli.py
CHANGED
@@ -683,7 +683,7 @@ class UI:
         for n, ty, sz in rows:
             print(f"{n:<40} {ty:<8} {sz}")
 
-class ChatCLI:
+class ChatCLI:
     def __init__(
         self,
         server: str,
@@ -997,8 +997,10 @@ class ChatCLI:
         }
         # Track last used model for display
         self._last_used_model: Optional[str] = None
-        # Provider-native history for Kimi (preserve reasoning_content across turns)
-        self._kimi_raw_history: List[Dict[str, Any]] = []
+        # Provider-native history for Kimi (preserve reasoning_content across turns)
+        self._kimi_raw_history: List[Dict[str, Any]] = []
+        # Provider-native history for Gemini (preserve thoughtSignatures + strict tool-call chains across turns)
+        self._gemini_raw_history: List[Dict[str, Any]] = []
         # Last server billing info from /api/usage/commit
         self._last_commit_cost_usd: float = 0.0
         self._last_remaining_credits: Optional[float] = None
@@ -1955,14 +1957,15 @@ class ChatCLI:
             pass
         return data
 
-    def _apply_settings_dict(self, data: Dict[str, Any]) -> None:
-        try:
+    def _apply_settings_dict(self, data: Dict[str, Any]) -> None:
+        try:
+            old_system_prompt = getattr(self, "system_prompt", None)
             self.model = data.get("model", self.model)
             if "save_chat_history" in data:
                 try:
                     self.save_chat_history = bool(data.get("save_chat_history"))
                 except Exception:
-                    pass
+                    pass
             self.requested_tools = data.get("requested_tools", self.requested_tools)
             self.fs_scope = data.get("fs_scope", self.fs_scope)
             self.host_base = data.get("host_base", self.host_base)
@@ -2101,18 +2104,30 @@ class ChatCLI:
                 self.anthropic_cache_ttl = None
             except Exception:
                 pass
-            # Rebuild history if system prompt changed
-
-
-
-
-
-
-
-
-
-
-
+            # Rebuild history if system prompt changed
+            try:
+                system_prompt_changed = old_system_prompt != getattr(self, "system_prompt", None)
+            except Exception:
+                system_prompt_changed = False
+
+            if system_prompt_changed:
+                # Changing the system prompt can materially alter the behavior of the assistant;
+                # warn the user and reset the current conversation history to avoid mixing contexts.
+                try:
+                    self.ui.warn("[settings] System prompt changed - clearing current conversation history.")
+                except Exception:
+                    pass
+                self.history = []
+                if self.system_prompt:
+                    self.history.append({"role": "system", "content": self.system_prompt})
+                # On settings load, do not assume the custom first-turn was injected yet
+                try:
+                    self._did_inject_custom_first_turn = False
+                except Exception:
+                    pass
+            self._apply_model_side_effects()
+        except Exception as e:
+            self.ui.warn(f"Failed to apply settings: {e}")
 
     async def _fetch_server_settings(self) -> Optional[Dict[str, Any]]:
         try:
@@ -3256,7 +3271,7 @@ class ChatCLI:
             "Fonts: San Serif, Inter, Geist, Mona Sans, IBM Plex Sans, Manrope\n"
         )
 
-    def _build_kimi_raw_messages(self, user_input: str) -> List[Dict[str, Any]]:
+    def _build_kimi_raw_messages(self, user_input: str) -> List[Dict[str, Any]]:
         """Build provider-native messages for Kimi preserving prior assistant reasoning_content.
         Includes prior provider-native turns and the current user message with first-turn injections.
         """
@@ -3274,8 +3289,31 @@ class ChatCLI:
         for m in (self._kimi_raw_history or []):
             raw.append(m)
         # Append current user message
-        raw.append({"role": "user", "content": content})
-        return raw
+        raw.append({"role": "user", "content": content})
+        return raw
+
+    def _normalize_gemini_raw_messages(self, rpm: Any) -> List[Dict[str, Any]]:
+        """Normalize Gemini provider-native history.
+
+        Ensures we only send a flat list of dicts back to the server.
+        This prevents accidental nesting like [[{...}, {...}]] which the
+        google-genai SDK rejects with pydantic union validation errors.
+        """
+        out: List[Dict[str, Any]] = []
+        if not isinstance(rpm, list):
+            return out
+        for item in rpm:
+            if item is None:
+                continue
+            if isinstance(item, list):
+                # Flatten one level
+                for sub in item:
+                    if isinstance(sub, dict):
+                        out.append(dict(sub))
+                continue
+            if isinstance(item, dict):
+                out.append(dict(item))
+        return out
 
     def _build_working_memory_injection(self) -> Optional[str]:
         try:
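For reference, here is a minimal standalone sketch of the flattening behavior the new helper implements. The function name `flatten_provider_history` and the sample history are hypothetical and not part of the package; the logic mirrors the method added above.

    # Standalone sketch (hypothetical helper name) mirroring the normalization above:
    # keep only dicts, flatten one accidental level of list nesting, drop everything else.
    from typing import Any, Dict, List

    def flatten_provider_history(rpm: Any) -> List[Dict[str, Any]]:
        out: List[Dict[str, Any]] = []
        if not isinstance(rpm, list):
            return out
        for item in rpm:
            if isinstance(item, list):
                out.extend(dict(sub) for sub in item if isinstance(sub, dict))
            elif isinstance(item, dict):
                out.append(dict(item))
        return out

    # Example: [[{...}, {...}]]-style nesting collapses to a flat list of dicts; None is dropped.
    history = [{"role": "user"}, [{"role": "model"}, {"role": "tool"}], None]
    assert flatten_provider_history(history) == [
        {"role": "user"},
        {"role": "model"},
        {"role": "tool"},
    ]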
@@ -4603,7 +4641,7 @@ class ChatCLI:
             self.ui.success(f"Thread title set to: {self.thread_name}")
             return True
 
-        if cmd == "/clear":
+        if cmd == "/clear":
             self.history = [{"role": "system", "content": self.system_prompt}] if self.system_prompt else []
             self._did_inject_codebase_map = False
             try:
@@ -4615,9 +4653,10 @@ class ChatCLI:
                 self.messages_for_save = []
                 if not self.save_chat_history:
                     self.thread_uid = None
-                self._kimi_raw_history = []
-
-
+                self._kimi_raw_history = []
+                self._gemini_raw_history = []
+            except Exception:
+                pass
             # Reset local cumulative token counters on session clear
             self._cum_input_tokens = 0
             self._cum_output_tokens = 0
@@ -5227,20 +5266,21 @@ class ChatCLI:
             self.save_settings()
             return True
 
-        if choice == "clear_history":
+        if choice == "clear_history":
             self.history = [{"role": "system", "content": self.system_prompt}] if self.system_prompt else []
             self._did_inject_codebase_map = False
             try:
                 self._did_inject_custom_first_turn = False
             except Exception:
                 pass
-            try:
+            try:
                 self.messages_for_save = []
                 if not self.save_chat_history:
                     self.thread_uid = None
-                self._kimi_raw_history = []
-
-
+                self._kimi_raw_history = []
+                self._gemini_raw_history = []
+            except Exception:
+                pass
             # Reset local cumulative token counters on session clear
             self._cum_input_tokens = 0
             self._cum_output_tokens = 0
@@ -5479,13 +5519,23 @@ class ChatCLI:
            headers["X-Request-Timeout"] = str(int(req_timeout_hint))
        except Exception:
            pass
-        # If using a Kimi model, include provider-native messages to preserve reasoning_content
-        try:
-            if isinstance(self.model, str) and self.model.startswith("kimi-"):
-                req_payload = dict(req_payload)
-                req_payload["raw_provider_messages"] = self._build_kimi_raw_messages(user_input)
-        except Exception:
-            pass
+        # If using a Kimi model, include provider-native messages to preserve reasoning_content
+        try:
+            if isinstance(self.model, str) and self.model.startswith("kimi-"):
+                req_payload = dict(req_payload)
+                req_payload["raw_provider_messages"] = self._build_kimi_raw_messages(user_input)
+        except Exception:
+            pass
+        # If using a Gemini model, include provider-native contents to preserve thought signatures
+        # and strict tool-call chains across HTTP turns.
+        try:
+            if isinstance(self.model, str) and self.model.startswith("gemini-"):
+                req_payload = dict(req_payload)
+                hist = self._normalize_gemini_raw_messages(self._gemini_raw_history)
+                if hist:
+                    req_payload["raw_provider_messages"] = hist
+        except Exception:
+            pass
         async with httpx.AsyncClient(timeout=http_timeout, cookies=self.cookies) as client:
             async with client.stream("POST", self.stream_url, json=req_payload, headers=headers, follow_redirects=True) as resp:
                 if resp.status_code == 429:
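For orientation, a hypothetical sketch of the streaming request body once the Gemini branch above runs. Only the raw_provider_messages key and the gemini- model-name check come from the diff; the other keys, the example model id, and the role/parts message shape (suggested by the thoughtSignature comment) are assumptions.

    # Hypothetical request body shape; "messages" and the example model id are assumptions,
    # "raw_provider_messages" is the field populated by the Gemini branch above.
    req_payload = {
        "model": "gemini-2.5-pro",
        "messages": [{"role": "user", "content": "Summarize this repo"}],
        "raw_provider_messages": [
            # Provider-native Gemini contents carried over from self._gemini_raw_history.
            {"role": "user", "parts": [{"text": "Summarize this repo"}]},
            {"role": "model", "parts": [{"text": "...", "thoughtSignature": "..."}]},
        ],
    }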
@@ -6393,10 +6443,17 @@ class ChatCLI:
                        except Exception as e:
                            self.ui.warn(f"tools.callback error: {e}")
 
-                    elif event == "message.completed":
+                    elif event == "message.completed":
                        # Safety: this block handles only 'message.completed'.
                        usage = data.get("usage", {})
-                        model_used = data.get("model") or self.model
+                        model_used = data.get("model") or self.model
+                        # Gemini: server may include an authoritative provider-native history snapshot.
+                        try:
+                            if isinstance(model_used, str) and model_used.startswith("gemini-"):
+                                rpm = data.get("raw_provider_messages")
+                                self._gemini_raw_history = self._normalize_gemini_raw_messages(rpm)
+                        except Exception:
+                            pass
                        # Mark completion for retry controller
                        try:
                            last_completed = True
@@ -7238,20 +7295,29 @@ class ChatCLI:
                            pass
                        return "".join(assistant_buf)
 
-                    elif event == "provider.message":
-                        # Provider-native message snapshot (e.g., Kimi assistant with reasoning_content)
-                        provider = (data.get("provider") or "").lower()
-                        msg = data.get("message")
-if
-#
-try:
-
-
-
-self.
-
-
-
+                    elif event == "provider.message":
+                        # Provider-native message snapshot (e.g., Kimi assistant with reasoning_content)
+                        provider = (data.get("provider") or "").lower()
+                        msg = data.get("message")
+                        if provider == "gemini":
+                            # Always retain Gemini provider-native messages (needed for multi-turn tool calling).
+                            try:
+                                if isinstance(msg, dict):
+                                    self._gemini_raw_history.append(dict(msg))
+                                elif isinstance(msg, list):
+                                    self._gemini_raw_history.extend(self._normalize_gemini_raw_messages(msg))
+                            except Exception:
+                                pass
+                        if bool(getattr(self, "retain_native_tool_results", False)) and provider == "kimi" and isinstance(msg, dict):
+                            # Append as-is to local raw history for the next turn
+                            try:
+                                self._kimi_raw_history.append(dict(msg))
+                            except Exception:
+                                try:
+                                    self._kimi_raw_history.append(msg)  # type: ignore
+                                except Exception:
+                                    pass
+                        continue
 
                    else:
                        # TEMP DEBUG: show unknown/unhandled events
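A hypothetical example of the provider.message server-sent event this handler consumes: the keys read in the diff (provider, message) come from the code, while the surrounding values and the functionCall part are illustrative assumptions.

    # Hypothetical SSE payload for event == "provider.message" (values are illustrative only).
    event = "provider.message"
    data = {
        "provider": "gemini",
        "message": {
            "role": "model",
            "parts": [
                {"functionCall": {"name": "read_file", "args": {"path": "cli.py"}}},
            ],
        },
    }
    # With provider == "gemini" and a dict message, the handler above appends a copy of
    # data["message"] to self._gemini_raw_history before continuing the stream loop.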
{henosis_cli-0.6.7.dist-info → henosis_cli-0.6.8.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: henosis-cli
-Version: 0.6.7
+Version: 0.6.8
 Summary: henosis-cli — interactive CLI for the Henosis multi-provider streaming chat backend, with optional local tools.
 Author-email: henosis <henosis@henosis.us>
 License-Expression: LicenseRef-Proprietary
{henosis_cli-0.6.7.dist-info → henosis_cli-0.6.8.dist-info}/RECORD
CHANGED
@@ -1,11 +1,11 @@
-cli.py,sha256=
+cli.py,sha256=Zfl9XOtxi2-OkVj_653t-gfKvxt0837OqeQek3B0fZ4,504826
 henosis_cli_tools/__init__.py,sha256=x3uaN_ub32uALx_oURna0VnuoSsj7i9NYY6uRsc2ZzM,1147
 henosis_cli_tools/cli_entry.py,sha256=OZTe_s9Hfy3mcsYG77T3RTdtCDod-CSwmhskbXjmmqs,1713
 henosis_cli_tools/input_engine.py,sha256=kGW6AgDGbdcVxlx5mvTPKYe4lYhho5wztvUAw7WlmTs,15286
 henosis_cli_tools/settings_ui.py,sha256=8rWsp0S3wT-dgkP0y20FOBmBBy7jYbDy8AuftmKcp4w,21368
 henosis_cli_tools/tool_impl.py,sha256=0iojZbVZhhPJybcmb2qYAuCesgQMp83JgPL2Py4PjT8,39250
-henosis_cli-0.6.
-henosis_cli-0.6.
-henosis_cli-0.6.
-henosis_cli-0.6.
-henosis_cli-0.6.
+henosis_cli-0.6.8.dist-info/METADATA,sha256=yU7jKZvdNYRSowgJrk7hAAH32QLM3Xey4d8iVjrz2OA,5787
+henosis_cli-0.6.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+henosis_cli-0.6.8.dist-info/entry_points.txt,sha256=KmXDdmIjq1SVMs8FK3wHPA2i89RMaerzZHIetllMLIk,74
+henosis_cli-0.6.8.dist-info/top_level.txt,sha256=u7XMBcJ8Kb0n91WaSU-4Db8yURSUXFuOxGMsXti0a-g,34
+henosis_cli-0.6.8.dist-info/RECORD,,
{henosis_cli-0.6.7.dist-info → henosis_cli-0.6.8.dist-info}/WHEEL
File without changes
{henosis_cli-0.6.7.dist-info → henosis_cli-0.6.8.dist-info}/entry_points.txt
File without changes
{henosis_cli-0.6.7.dist-info → henosis_cli-0.6.8.dist-info}/top_level.txt
File without changes