deepy-cli 0.1.4__tar.gz → 0.1.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/PKG-INFO +39 -10
  2. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/README.md +38 -9
  3. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/pyproject.toml +1 -1
  4. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/__init__.py +1 -1
  5. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/cli.py +80 -36
  6. deepy_cli-0.1.6/src/deepy/config/__init__.py +59 -0
  7. deepy_cli-0.1.6/src/deepy/config/settings.py +437 -0
  8. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/data/tools/AskUserQuestion.md +3 -1
  9. deepy_cli-0.1.6/src/deepy/data/tools/shell.md +13 -0
  10. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/llm/__init__.py +2 -1
  11. deepy_cli-0.1.6/src/deepy/llm/compaction.py +269 -0
  12. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/llm/context.py +14 -47
  13. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/llm/events.py +1 -0
  14. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/llm/runner.py +29 -1
  15. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/prompts/compact.py +40 -2
  16. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/prompts/runtime_context.py +9 -7
  17. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/prompts/system.py +5 -2
  18. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/prompts/tool_docs.py +1 -1
  19. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/sessions/jsonl.py +152 -4
  20. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/sessions/manager.py +13 -15
  21. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/status.py +8 -0
  22. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/tools/agents.py +10 -8
  23. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/tools/builtin.py +161 -35
  24. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/tools/shell_utils.py +84 -0
  25. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/ui/ask_user_question.py +1 -1
  26. deepy_cli-0.1.6/src/deepy/ui/markdown.py +346 -0
  27. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/ui/message_view.py +51 -41
  28. deepy_cli-0.1.6/src/deepy/ui/model_picker.py +178 -0
  29. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/ui/prompt_input.py +24 -16
  30. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/ui/slash_commands.py +4 -0
  31. deepy_cli-0.1.6/src/deepy/ui/styles.py +154 -0
  32. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/ui/terminal.py +615 -77
  33. deepy_cli-0.1.6/src/deepy/ui/theme_picker.py +116 -0
  34. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/ui/welcome.py +36 -23
  35. deepy_cli-0.1.4/src/deepy/config/__init__.py +0 -23
  36. deepy_cli-0.1.4/src/deepy/config/settings.py +0 -234
  37. deepy_cli-0.1.4/src/deepy/data/tools/bash.md +0 -7
  38. deepy_cli-0.1.4/src/deepy/ui/markdown.py +0 -152
  39. deepy_cli-0.1.4/src/deepy/ui/styles.py +0 -21
  40. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/__main__.py +0 -0
  41. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/data/__init__.py +0 -0
  42. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/data/tools/WebFetch.md +0 -0
  43. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/data/tools/WebSearch.md +0 -0
  44. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/data/tools/__init__.py +0 -0
  45. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/data/tools/edit.md +0 -0
  46. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/data/tools/modify.md +0 -0
  47. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/data/tools/read.md +0 -0
  48. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/data/tools/write.md +0 -0
  49. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/errors.py +0 -0
  50. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/llm/agent.py +0 -0
  51. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/llm/model_capabilities.py +0 -0
  52. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/llm/provider.py +0 -0
  53. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/llm/replay.py +0 -0
  54. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/llm/thinking.py +0 -0
  55. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/prompts/__init__.py +0 -0
  56. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/prompts/rules.py +0 -0
  57. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/sessions/__init__.py +0 -0
  58. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/skills.py +0 -0
  59. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/tools/__init__.py +0 -0
  60. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/tools/file_state.py +0 -0
  61. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/tools/result.py +0 -0
  62. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/ui/__init__.py +0 -0
  63. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/ui/app.py +0 -0
  64. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/ui/exit_summary.py +0 -0
  65. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/ui/loading_text.py +0 -0
  66. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/ui/prompt_buffer.py +0 -0
  67. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/ui/session_list.py +0 -0
  68. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/ui/session_picker.py +0 -0
  69. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/ui/thinking_state.py +0 -0
  70. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/update_check.py +0 -0
  71. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/usage.py +0 -0
  72. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/utils/__init__.py +0 -0
  73. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/utils/debug_logger.py +0 -0
  74. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/utils/error_logger.py +0 -0
  75. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/utils/json.py +0 -0
  76. {deepy_cli-0.1.4 → deepy_cli-0.1.6}/src/deepy/utils/notify.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: deepy-cli
3
- Version: 0.1.4
3
+ Version: 0.1.6
4
4
  Summary: Deepy - Vibe coding for DeepSeek models in your terminal
5
5
  Keywords: deepseek,coding-agent,terminal,cli,agents
6
6
  Author: kirineko
@@ -62,7 +62,8 @@ context state visible while the agent works.
62
62
  ## Highlights
63
63
 
64
64
  - DeepSeek-first model setup with `deepseek-v4-pro`, thinking enabled, and
65
- `reasoning_effort=max` by default.
65
+ `reasoning_effort=max` by default; `/model` can switch between V4 Pro and V4
66
+ Flash with `none`, `high`, or `max` thinking strength.
66
67
  - OpenAI Agents SDK integration through `OpenAIChatCompletionsModel`.
67
68
  - Project-aware coding tools for reading files, modifying files, running shell
68
69
  commands, and showing readable diffs.
@@ -71,8 +72,8 @@ context state visible while the agent works.
71
72
  - Session history, `/resume`, `/new`, automatic context tracking, and compacting
72
73
  for long project work.
73
74
  - TOML-only private configuration at `~/.deepy/config.toml`.
74
- - Terminal UI with Markdown rendering, DeepSeek thinking display, per-turn usage,
75
- context window status, and version update checks.
75
+ - Theme-aware terminal UI with Markdown rendering, DeepSeek thinking display,
76
+ per-turn usage, context window status, and version update checks.
76
77
 
77
78
  ## See It Work
78
79
 
@@ -137,13 +138,28 @@ Deepy only uses TOML configuration. JSON config files are intentionally rejected
137
138
 
138
139
  ```toml
139
140
  # ~/.deepy/config.toml
141
+ [model]
140
142
  api_key = "sk-..."
141
- model = "deepseek-v4-pro"
143
+ name = "deepseek-v4-pro"
142
144
  base_url = "https://api.deepseek.com"
143
- context_window_tokens = 1048576
144
- compact_threshold = 0.8
145
+ thinking = true
146
+ reasoning_effort = "max" # high or max when thinking is enabled
147
+
148
+ [context]
149
+ window_tokens = 1048576
150
+ compact_trigger_ratio = 0.8
151
+ reserved_context_tokens = 50000
152
+ compact_preserve_recent_messages = 2
153
+
154
+ [ui]
155
+ theme = "auto" # auto, dark, or light
145
156
  ```
146
157
 
158
+ Supported interactive model choices are `deepseek-v4-pro` and
159
+ `deepseek-v4-flash`. In `/model`, thinking strength `none` saves
160
+ `thinking = false`; `high` and `max` save `thinking = true` with the matching
161
+ `reasoning_effort`.
162
+
147
163
  WebSearch uses Deepy's hosted SearXNG endpoint by default. You can override it
148
164
  with your own SearXNG instance:
149
165
 
@@ -158,11 +174,21 @@ You can also initialize config non-interactively:
158
174
  deepy config init --api-key sk-... --model deepseek-v4-pro
159
175
  ```
160
176
 
177
+ If your terminal uses a light background and parts of the UI look low contrast,
178
+ set the UI theme explicitly:
179
+
180
+ ```bash
181
+ deepy config theme light
182
+ ```
183
+
161
184
  ## Common Commands
162
185
 
163
186
  ```bash
164
187
  deepy --version
165
188
  deepy config setup
189
+ deepy config reset
190
+ deepy config theme
191
+ deepy config theme light
166
192
  deepy doctor
167
193
  deepy doctor --live --json
168
194
  deepy status
@@ -176,8 +202,12 @@ Inside the interactive terminal:
176
202
 
177
203
  ```text
178
204
  /skills List available skills
205
+ /model Select model and thinking strength
179
206
  /new Start a fresh conversation
180
207
  /resume Pick a previous session
208
+ /compact Compact the active session context
209
+ /theme Show or change UI theme
210
+ /reset Delete config and run setup again
181
211
  / Open the command menu
182
212
  Esc Interrupt the current model turn
183
213
  Ctrl+D Press twice to quit
@@ -208,6 +238,5 @@ assets live outside the package directory and are not included in the wheel.
208
238
 
209
239
  ## Release Status
210
240
 
211
- Deepy is preparing its first public `0.1.4` release. The current release path is
212
- GitHub + PyPI. Standalone binaries and npm wrappers can be added later, but the
213
- primary distribution is the Python CLI.
241
+ Deepy `0.1.6` is released through GitHub and PyPI. Standalone binaries and npm
242
+ wrappers can be added later, but the primary distribution is the Python CLI.
@@ -34,7 +34,8 @@ context state visible while the agent works.
34
34
  ## Highlights
35
35
 
36
36
  - DeepSeek-first model setup with `deepseek-v4-pro`, thinking enabled, and
37
- `reasoning_effort=max` by default.
37
+ `reasoning_effort=max` by default; `/model` can switch between V4 Pro and V4
38
+ Flash with `none`, `high`, or `max` thinking strength.
38
39
  - OpenAI Agents SDK integration through `OpenAIChatCompletionsModel`.
39
40
  - Project-aware coding tools for reading files, modifying files, running shell
40
41
  commands, and showing readable diffs.
@@ -43,8 +44,8 @@ context state visible while the agent works.
43
44
  - Session history, `/resume`, `/new`, automatic context tracking, and compacting
44
45
  for long project work.
45
46
  - TOML-only private configuration at `~/.deepy/config.toml`.
46
- - Terminal UI with Markdown rendering, DeepSeek thinking display, per-turn usage,
47
- context window status, and version update checks.
47
+ - Theme-aware terminal UI with Markdown rendering, DeepSeek thinking display,
48
+ per-turn usage, context window status, and version update checks.
48
49
 
49
50
  ## See It Work
50
51
 
@@ -109,13 +110,28 @@ Deepy only uses TOML configuration. JSON config files are intentionally rejected
109
110
 
110
111
  ```toml
111
112
  # ~/.deepy/config.toml
113
+ [model]
112
114
  api_key = "sk-..."
113
- model = "deepseek-v4-pro"
115
+ name = "deepseek-v4-pro"
114
116
  base_url = "https://api.deepseek.com"
115
- context_window_tokens = 1048576
116
- compact_threshold = 0.8
117
+ thinking = true
118
+ reasoning_effort = "max" # high or max when thinking is enabled
119
+
120
+ [context]
121
+ window_tokens = 1048576
122
+ compact_trigger_ratio = 0.8
123
+ reserved_context_tokens = 50000
124
+ compact_preserve_recent_messages = 2
125
+
126
+ [ui]
127
+ theme = "auto" # auto, dark, or light
117
128
  ```
118
129
 
130
+ Supported interactive model choices are `deepseek-v4-pro` and
131
+ `deepseek-v4-flash`. In `/model`, thinking strength `none` saves
132
+ `thinking = false`; `high` and `max` save `thinking = true` with the matching
133
+ `reasoning_effort`.
134
+
119
135
  WebSearch uses Deepy's hosted SearXNG endpoint by default. You can override it
120
136
  with your own SearXNG instance:
121
137
 
@@ -130,11 +146,21 @@ You can also initialize config non-interactively:
130
146
  deepy config init --api-key sk-... --model deepseek-v4-pro
131
147
  ```
132
148
 
149
+ If your terminal uses a light background and parts of the UI look low contrast,
150
+ set the UI theme explicitly:
151
+
152
+ ```bash
153
+ deepy config theme light
154
+ ```
155
+
133
156
  ## Common Commands
134
157
 
135
158
  ```bash
136
159
  deepy --version
137
160
  deepy config setup
161
+ deepy config reset
162
+ deepy config theme
163
+ deepy config theme light
138
164
  deepy doctor
139
165
  deepy doctor --live --json
140
166
  deepy status
@@ -148,8 +174,12 @@ Inside the interactive terminal:
148
174
 
149
175
  ```text
150
176
  /skills List available skills
177
+ /model Select model and thinking strength
151
178
  /new Start a fresh conversation
152
179
  /resume Pick a previous session
180
+ /compact Compact the active session context
181
+ /theme Show or change UI theme
182
+ /reset Delete config and run setup again
153
183
  / Open the command menu
154
184
  Esc Interrupt the current model turn
155
185
  Ctrl+D Press twice to quit
@@ -180,6 +210,5 @@ assets live outside the package directory and are not included in the wheel.
180
210
 
181
211
  ## Release Status
182
212
 
183
- Deepy is preparing its first public `0.1.4` release. The current release path is
184
- GitHub + PyPI. Standalone binaries and npm wrappers can be added later, but the
185
- primary distribution is the Python CLI.
213
+ Deepy `0.1.6` is released through GitHub and PyPI. Standalone binaries and npm
214
+ wrappers can be added later, but the primary distribution is the Python CLI.
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "deepy-cli"
3
- version = "0.1.4"
3
+ version = "0.1.6"
4
4
  description = "Deepy - Vibe coding for DeepSeek models in your terminal"
5
5
  readme = "README.md"
6
6
  authors = [
@@ -1,6 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
- __version__ = "0.1.4"
3
+ __version__ = "0.1.6"
4
4
 
5
5
 
6
6
  def main() -> None:
@@ -2,7 +2,6 @@ from __future__ import annotations
2
2
 
3
3
  import argparse
4
4
  import asyncio
5
- import os
6
5
  import sys
7
6
  from pathlib import Path
8
7
  from typing import Sequence
@@ -10,8 +9,21 @@ from typing import Sequence
10
9
  import tomli_w
11
10
 
12
11
  from . import __version__
13
- from .config import Settings, load_settings, settings_to_toml_dict
14
- from .config.settings import DEFAULT_BASE_URL, DEFAULT_MODEL, DEFAULT_WEB_SEARCH_SEARXNG_URL
12
+ from .config import (
13
+ Settings,
14
+ load_settings,
15
+ settings_to_toml_dict,
16
+ ui_theme_from_selection,
17
+ ui_theme_number,
18
+ update_config_theme,
19
+ write_config,
20
+ )
21
+ from .config.settings import (
22
+ DEFAULT_BASE_URL,
23
+ DEFAULT_MODEL,
24
+ DEFAULT_UI_THEME,
25
+ UI_THEMES,
26
+ )
15
27
  from .errors import format_error_display
16
28
  from .llm.provider import build_provider_bundle
17
29
  from .llm.runner import DEFAULT_MAX_TURNS, run_prompt_once
@@ -20,6 +32,7 @@ from .skills import discover_skills, find_skill, format_skills_for_terminal, rea
20
32
  from .status import build_status_report, format_status_report, status_report_to_dict
21
33
  from .usage import TokenUsage, format_usage_line, usage_from_run_result
22
34
  from .ui import run_interactive
35
+ from .ui.styles import resolve_ui_palette
23
36
  from .utils import json as json_utils
24
37
 
25
38
 
@@ -42,9 +55,13 @@ def _build_parser() -> argparse.ArgumentParser:
42
55
  init_parser.add_argument("--api-key", help="DeepSeek API key.")
43
56
  init_parser.add_argument("--model", default=DEFAULT_MODEL, help="Model name.")
44
57
  init_parser.add_argument("--base-url", default=DEFAULT_BASE_URL, help="OpenAI-compatible base URL.")
58
+ init_parser.add_argument("--theme", default=DEFAULT_UI_THEME, help="UI theme: auto, dark, or light.")
45
59
  init_parser.add_argument("--force", action="store_true", help="Overwrite existing config.")
46
60
  setup_parser = config_sub.add_parser("setup", help="Interactively configure Deepy.")
47
61
  setup_parser.add_argument("--force", action="store_true", help="Overwrite existing config.")
62
+ config_sub.add_parser("reset", help="Delete local config and run interactive setup again.")
63
+ theme_parser = config_sub.add_parser("theme", help="Show or update terminal UI theme.")
64
+ theme_parser.add_argument("theme", nargs="?", help="Theme to save: auto, dark, or light.")
48
65
 
49
66
  doctor_parser = subparsers.add_parser("doctor", help="Validate local Deepy setup.")
50
67
  doctor_parser.add_argument("--json", action="store_true", help="Print JSON diagnostics.")
@@ -101,6 +118,7 @@ def _cmd_config_init(args: argparse.Namespace) -> int:
101
118
  api_key=args.api_key or "",
102
119
  model=args.model,
103
120
  base_url=args.base_url,
121
+ theme=args.theme,
104
122
  )
105
123
  print(f"Wrote {config_path}")
106
124
  return 0
@@ -110,7 +128,7 @@ def _cmd_config_setup(args: argparse.Namespace) -> int:
110
128
  config_path = args.config.expanduser() if args.config else Path.home() / ".deepy" / "config.toml"
111
129
  if config_path.suffix == ".json":
112
130
  raise ValueError("Deepy only supports TOML config files; JSON config is not supported.")
113
- if config_path.exists() and not args.force:
131
+ if config_path.exists():
114
132
  existing = load_settings(config_path)
115
133
  else:
116
134
  existing = Settings(path=config_path)
@@ -119,11 +137,26 @@ def _cmd_config_setup(args: argparse.Namespace) -> int:
119
137
  api_key = _prompt_config_value("API key", default=existing.model.api_key or "", is_password=True)
120
138
  model = _prompt_config_value("Model", default=existing.model.name)
121
139
  base_url = _prompt_config_value("Base URL", default=existing.model.base_url)
122
- _write_config(config_path, api_key=api_key, model=model, base_url=base_url)
140
+ theme = _prompt_theme_value(default=existing.ui.theme)
141
+ _write_config(config_path, api_key=api_key, model=model, base_url=base_url, theme=theme)
123
142
  print(f"Wrote {config_path}")
124
143
  return 0
125
144
 
126
145
 
146
+ def _cmd_config_reset(args: argparse.Namespace) -> int:
147
+ config_path = args.config.expanduser() if args.config else Path.home() / ".deepy" / "config.toml"
148
+ if config_path.suffix == ".json":
149
+ raise ValueError("Deepy only supports TOML config files; JSON config is not supported.")
150
+ if config_path.exists():
151
+ config_path.unlink()
152
+ print(f"Removed {config_path}")
153
+ else:
154
+ print(f"No existing config at {config_path}")
155
+ print("Starting Deepy configuration setup...")
156
+ setup_args = argparse.Namespace(config=args.config, force=True)
157
+ return _cmd_config_setup(setup_args)
158
+
159
+
127
160
  def _prompt_config_value(label: str, *, default: str, is_password: bool = False) -> str:
128
161
  from prompt_toolkit import PromptSession
129
162
 
@@ -136,36 +169,33 @@ def _prompt_config_value(label: str, *, default: str, is_password: bool = False)
136
169
  return value or default
137
170
 
138
171
 
139
- def _write_config(config_path: Path, *, api_key: str, model: str, base_url: str) -> None:
140
- payload = {
141
- "model": {
142
- "name": model,
143
- "base_url": base_url,
144
- "api_key": api_key,
145
- "thinking": True,
146
- "reasoning_effort": "max",
147
- },
148
- "context": {
149
- "window_tokens": 1_048_576,
150
- "compact_trigger_ratio": 0.8,
151
- "compact_prompt_token_threshold": 838_861,
152
- },
153
- "logging": {
154
- "debug": False,
155
- },
156
- "notify": {
157
- "enabled": False,
158
- "command": "",
159
- },
160
- "tools": {
161
- "web_search": {
162
- "searxng_url": DEFAULT_WEB_SEARCH_SEARXNG_URL,
163
- },
164
- },
165
- }
166
- config_path.parent.mkdir(parents=True, exist_ok=True)
167
- config_path.write_text(tomli_w.dumps(payload), encoding="utf-8")
168
- os.chmod(config_path, 0o600)
172
+ def _prompt_theme_value(*, default: str = DEFAULT_UI_THEME) -> str:
173
+ print("UI theme:")
174
+ print("1. auto Detect when possible; falls back to dark")
175
+ print("2. dark Optimized for dark terminal backgrounds")
176
+ print("3. light Optimized for light terminal backgrounds")
177
+ value = _prompt_config_value("UI theme number", default=ui_theme_number(default))
178
+ return ui_theme_from_selection(value, default=default)
179
+
180
+
181
+ def _write_config(config_path: Path, *, api_key: str, model: str, base_url: str, theme: str) -> None:
182
+ write_config(config_path, api_key=api_key, model=model, base_url=base_url, theme=theme)
183
+
184
+
185
+ def _cmd_config_theme(args: argparse.Namespace) -> int:
186
+ settings = load_settings(args.config)
187
+ if args.theme is None:
188
+ palette = resolve_ui_palette(settings.ui.theme)
189
+ print(f"saved: {settings.ui.theme}")
190
+ print(f"resolved: {palette.name}")
191
+ return 0
192
+ if args.theme not in UI_THEMES:
193
+ print("Invalid theme. Usage: deepy config theme [auto|dark|light]", file=sys.stderr)
194
+ return 1
195
+ config_path = settings.path or (args.config.expanduser() if args.config else Path.home() / ".deepy" / "config.toml")
196
+ update_config_theme(config_path, args.theme)
197
+ print(f"Saved UI theme: {args.theme}")
198
+ return 0
169
199
 
170
200
 
171
201
  def _doctor(args: argparse.Namespace) -> tuple[int, dict[str, object]]:
@@ -196,6 +226,11 @@ def _doctor(args: argparse.Namespace) -> tuple[int, dict[str, object]]:
196
226
  or settings.context.resolved_compact_threshold > 0,
197
227
  str(settings.context.resolved_compact_threshold),
198
228
  )
229
+ check(
230
+ "reserved_context",
231
+ settings.context.reserved_context_tokens > 0,
232
+ str(settings.context.reserved_context_tokens),
233
+ )
199
234
 
200
235
  try:
201
236
  build_provider_bundle(settings)
@@ -211,6 +246,7 @@ def _doctor(args: argparse.Namespace) -> tuple[int, dict[str, object]]:
211
246
  "thinking": {
212
247
  "enabled": settings.model.thinking_enabled,
213
248
  "reasoning_effort": settings.model.reasoning_effort,
249
+ "reasoning_mode": settings.model.reasoning_mode,
214
250
  },
215
251
  }
216
252
 
@@ -276,7 +312,7 @@ def _cmd_doctor(args: argparse.Namespace) -> int:
276
312
  status = "ok" if item["ok"] else "fail"
277
313
  print(f"{status:4} {item['name']}: {item['detail']}")
278
314
  thinking = report["thinking"]
279
- print(f"info thinking: enabled={thinking['enabled']} effort={thinking['reasoning_effort']}")
315
+ print(f"info reasoning: mode={thinking['reasoning_mode']}")
280
316
  live = report.get("live")
281
317
  if isinstance(live, dict):
282
318
  if live.get("ok"):
@@ -385,6 +421,10 @@ def main(argv: Sequence[str] | None = None) -> int:
385
421
  return _cmd_config_init(args)
386
422
  if args.config_command == "setup":
387
423
  return _cmd_config_setup(args)
424
+ if args.config_command == "reset":
425
+ return _cmd_config_reset(args)
426
+ if args.config_command == "theme":
427
+ return _cmd_config_theme(args)
388
428
  if args.command == "doctor":
389
429
  return _cmd_doctor(args)
390
430
  if args.command == "run":
@@ -404,6 +444,10 @@ def main(argv: Sequence[str] | None = None) -> int:
404
444
  setup_args = argparse.Namespace(config=args.config, force=True)
405
445
  _cmd_config_setup(setup_args)
406
446
  settings = load_settings(args.config)
447
+ if settings.path is not None and not settings.ui.theme_configured:
448
+ theme = _prompt_theme_value(default=settings.ui.theme)
449
+ update_config_theme(settings.path, theme)
450
+ settings = load_settings(args.config)
407
451
  return run_interactive(settings)
408
452
 
409
453
 
@@ -0,0 +1,59 @@
1
+ from __future__ import annotations
2
+
3
+ from .settings import (
4
+ ContextConfig,
5
+ DEEPSEEK_MODEL_CATALOG,
6
+ DEFAULT_COMPACT_PRESERVE_RECENT_MESSAGES,
7
+ DEFAULT_RESERVED_CONTEXT_TOKENS,
8
+ DEFAULT_UI_THEME,
9
+ DEFAULT_WEB_SEARCH_SEARXNG_URL,
10
+ DeepSeekModelInfo,
11
+ ModelConfig,
12
+ REASONING_MODES,
13
+ Settings,
14
+ SUPPORTED_DEEPSEEK_MODELS,
15
+ UI_THEME_OPTIONS,
16
+ UI_THEMES,
17
+ UiConfig,
18
+ default_config_path,
19
+ is_supported_deepseek_model,
20
+ is_valid_ui_theme,
21
+ is_valid_reasoning_mode,
22
+ load_settings,
23
+ mask_secret,
24
+ settings_to_toml_dict,
25
+ update_config_model_settings,
26
+ update_config_theme,
27
+ ui_theme_from_selection,
28
+ ui_theme_number,
29
+ write_config,
30
+ )
31
+
32
+ __all__ = [
33
+ "ContextConfig",
34
+ "DEEPSEEK_MODEL_CATALOG",
35
+ "DEFAULT_COMPACT_PRESERVE_RECENT_MESSAGES",
36
+ "DEFAULT_RESERVED_CONTEXT_TOKENS",
37
+ "DEFAULT_UI_THEME",
38
+ "DEFAULT_WEB_SEARCH_SEARXNG_URL",
39
+ "DeepSeekModelInfo",
40
+ "ModelConfig",
41
+ "REASONING_MODES",
42
+ "Settings",
43
+ "SUPPORTED_DEEPSEEK_MODELS",
44
+ "UI_THEME_OPTIONS",
45
+ "UI_THEMES",
46
+ "UiConfig",
47
+ "default_config_path",
48
+ "is_supported_deepseek_model",
49
+ "is_valid_ui_theme",
50
+ "is_valid_reasoning_mode",
51
+ "load_settings",
52
+ "mask_secret",
53
+ "settings_to_toml_dict",
54
+ "update_config_model_settings",
55
+ "update_config_theme",
56
+ "ui_theme_from_selection",
57
+ "ui_theme_number",
58
+ "write_config",
59
+ ]