yaicli 0.0.18__tar.gz → 0.0.19__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: yaicli
- Version: 0.0.18
+ Version: 0.0.19
  Summary: A simple CLI tool to interact with LLM
  Project-URL: Homepage, https://github.com/belingud/yaicli
  Project-URL: Repository, https://github.com/belingud/yaicli
@@ -1,6 +1,6 @@
  [project]
  name = "yaicli"
- version = "0.0.18"
+ version = "0.0.19"
  description = "A simple CLI tool to interact with LLM"
  authors = [{ name = "belingud", email = "im.victor@qq.com" }]
  readme = "README.md"
@@ -49,48 +49,54 @@ CHAT_MODE = "chat"
  TEMP_MODE = "temp"

  DEFAULT_CONFIG_MAP = {
-     "BASE_URL": {"value": "https://api.openai.com/v1", "env_key": "YAI_BASE_URL"},
-     "API_KEY": {"value": "", "env_key": "YAI_API_KEY"},
-     "MODEL": {"value": "gpt-4o", "env_key": "YAI_MODEL"},
-     "SHELL_NAME": {"value": "auto", "env_key": "YAI_SHELL_NAME"},
-     "OS_NAME": {"value": "auto", "env_key": "YAI_OS_NAME"},
-     "COMPLETION_PATH": {"value": "chat/completions", "env_key": "YAI_COMPLETION_PATH"},
-     "ANSWER_PATH": {"value": "choices[0].message.content", "env_key": "YAI_ANSWER_PATH"},
-     "STREAM": {"value": "true", "env_key": "YAI_STREAM"},
-     "CODE_THEME": {"value": "monokia", "env_key": "YAI_CODE_THEME"},
-     "TEMPERATURE": {"value": "0.7", "env_key": "YAI_TEMPERATURE"},
-     "TOP_P": {"value": "1.0", "env_key": "YAI_TOP_P"},
-     "MAX_TOKENS": {"value": "1024", "env_key": "YAI_MAX_TOKENS"},
-     "MAX_HISTORY": {"value": "500", "env_key": "YAI_MAX_HISTORY"},
-     "AUTO_SUGGEST": {"value": "true", "env_key": "YAI_AUTO_SUGGEST"},
+     # Core API settings
+     "BASE_URL": {"value": "https://api.openai.com/v1", "env_key": "YAI_BASE_URL", "type": str},
+     "API_KEY": {"value": "", "env_key": "YAI_API_KEY", "type": str},
+     "MODEL": {"value": "gpt-4o", "env_key": "YAI_MODEL", "type": str},
+     # System detection hints
+     "SHELL_NAME": {"value": "auto", "env_key": "YAI_SHELL_NAME", "type": str},
+     "OS_NAME": {"value": "auto", "env_key": "YAI_OS_NAME", "type": str},
+     # API response parsing
+     "COMPLETION_PATH": {"value": "chat/completions", "env_key": "YAI_COMPLETION_PATH", "type": str},
+     "ANSWER_PATH": {"value": "choices[0].message.content", "env_key": "YAI_ANSWER_PATH", "type": str},
+     # API call parameters
+     "STREAM": {"value": "true", "env_key": "YAI_STREAM", "type": bool},
+     "TEMPERATURE": {"value": "0.7", "env_key": "YAI_TEMPERATURE", "type": float},
+     "TOP_P": {"value": "1.0", "env_key": "YAI_TOP_P", "type": float},
+     "MAX_TOKENS": {"value": "1024", "env_key": "YAI_MAX_TOKENS", "type": int},
+     # UI/UX settings
+     "CODE_THEME": {"value": "monokai", "env_key": "YAI_CODE_THEME", "type": str},
+     "MAX_HISTORY": {"value": "500", "env_key": "YAI_MAX_HISTORY", "type": int}, # readline history file limit
+     "AUTO_SUGGEST": {"value": "true", "env_key": "YAI_AUTO_SUGGEST", "type": bool},
  }

- DEFAULT_CONFIG_INI = """[core]
+ DEFAULT_CONFIG_INI = f"""[core]
  PROVIDER=openai
- BASE_URL=https://api.openai.com/v1
- API_KEY=
- MODEL=gpt-4o
+ BASE_URL={DEFAULT_CONFIG_MAP["BASE_URL"]["value"]}
+ API_KEY={DEFAULT_CONFIG_MAP["API_KEY"]["value"]}
+ MODEL={DEFAULT_CONFIG_MAP["MODEL"]["value"]}

- # auto detect shell and os
- SHELL_NAME=auto
- OS_NAME=auto
+ # auto detect shell and os (or specify manually, e.g., bash, zsh, powershell.exe)
+ SHELL_NAME={DEFAULT_CONFIG_MAP["SHELL_NAME"]["value"]}
+ OS_NAME={DEFAULT_CONFIG_MAP["OS_NAME"]["value"]}

- # if you want to use custom completions path, you can set it here
- COMPLETION_PATH=/chat/completions
- # if you want to use custom answer path, you can set it here
- ANSWER_PATH=choices[0].message.content
+ # API paths (usually no need to change for OpenAI compatible APIs)
+ COMPLETION_PATH={DEFAULT_CONFIG_MAP["COMPLETION_PATH"]["value"]}
+ ANSWER_PATH={DEFAULT_CONFIG_MAP["ANSWER_PATH"]["value"]}

- # true: streaming response
- # false: non-streaming response
- STREAM=true
- CODE_THEME=monokia
+ # true: streaming response, false: non-streaming
+ STREAM={DEFAULT_CONFIG_MAP["STREAM"]["value"]}

- TEMPERATURE=0.7
- TOP_P=1.0
- MAX_TOKENS=1024
+ # LLM parameters
+ TEMPERATURE={DEFAULT_CONFIG_MAP["TEMPERATURE"]["value"]}
+ TOP_P={DEFAULT_CONFIG_MAP["TOP_P"]["value"]}
+ MAX_TOKENS={DEFAULT_CONFIG_MAP["MAX_TOKENS"]["value"]}

- MAX_HISTORY=500
- AUTO_SUGGEST=true"""
+ # UI/UX
+ CODE_THEME={DEFAULT_CONFIG_MAP["CODE_THEME"]["value"]}
+ MAX_HISTORY={DEFAULT_CONFIG_MAP["MAX_HISTORY"]["value"]} # Max entries kept in history file
+ AUTO_SUGGEST={DEFAULT_CONFIG_MAP["AUTO_SUGGEST"]["value"]}
+ """

  app = typer.Typer(
      name="yaicli",
@@ -194,7 +200,6 @@ class CLI:
          self.session = PromptSession(
              key_bindings=self.bindings,
              # completer=WordCompleter(["/clear", "/exit", "/his"]),
-             complete_while_typing=True,
              history=LimitedFileHistory(
                  Path("~/.yaicli_history").expanduser(), max_entries=int(self.config["MAX_HISTORY"])
              ),
@@ -209,44 +214,66 @@ class CLI:
          def _(event: KeyPressEvent) -> None:
              self.current_mode = EXEC_MODE if self.current_mode == CHAT_MODE else CHAT_MODE

-     def load_config(self) -> dict[str, str]:
+     def load_config(self) -> dict[str, Any]: # Changed return type hint
          """Load LLM API configuration with priority:
          1. Environment variables (highest priority)
          2. Configuration file
          3. Default values (lowest priority)

+         Applies type conversion based on DEFAULT_CONFIG_MAP after merging sources.
+
          Returns:
-             dict: merged configuration
+             dict: merged configuration with appropriate types
          """
-         boolean_keys = ["STREAM", "AUTO_SUGGEST"]
-         # Start with default configuration (lowest priority)
-         merged_config: Dict[str, Any] = {k: v["value"] for k, v in DEFAULT_CONFIG_MAP.items()}
+         # Start with default configuration string values (lowest priority)
+         # These serve as the base and also for fallback on type errors
+         default_values_str = {k: v["value"] for k, v in DEFAULT_CONFIG_MAP.items()}
+         merged_config: Dict[str, Any] = default_values_str.copy() # Use Any for value type

          # Create default config file if it doesn't exist
          if not self.CONFIG_PATH.exists():
              self.console.print("[bold yellow]Creating default configuration file.[/bold yellow]")
              self.CONFIG_PATH.parent.mkdir(parents=True, exist_ok=True)
-             with open(self.CONFIG_PATH, "w") as f:
+             with open(self.CONFIG_PATH, "w", encoding="utf-8") as f: # Added encoding
                  f.write(DEFAULT_CONFIG_INI)
          else:
              # Load from configuration file (middle priority)
              config_parser = CasePreservingConfigParser()
-             config_parser.read(self.CONFIG_PATH)
+             # Read with UTF-8 encoding
+             config_parser.read(self.CONFIG_PATH, encoding="utf-8")
              if "core" in config_parser:
-                 # Update with non-empty values from config file
-                 merged_config.update({k: v for k, v in config_parser["core"].items() if v.strip()})
+                 # Update with non-empty values from config file (values are strings)
+                 merged_config.update(
+                     {k: v for k, v in config_parser["core"].items() if k in DEFAULT_CONFIG_MAP and v.strip()}
+                 )

          # Override with environment variables (highest priority)
-         for key, config in DEFAULT_CONFIG_MAP.items():
-             env_value = getenv(config["env_key"])
+         for key, config_info in DEFAULT_CONFIG_MAP.items():
+             env_value = getenv(config_info["env_key"])
              if env_value is not None:
+                 # Env values are strings
                  merged_config[key] = env_value
-             # Convert boolean values
-             if key in boolean_keys:
-                 merged_config[key] = str(merged_config[key]).lower() == "true"
+             target_type = config_info["type"]
+             # Fallback, shouldn't be needed here, but safe
+             raw_value: Any = merged_config.get(key, default_values_str.get(key))
+             converted_value = None
+             try:
+                 if target_type is bool:
+                     converted_value = str(raw_value).strip().lower() == "true"
+                 elif target_type in (int, float, str):
+                     converted_value = target_type(raw_value)
+             except (ValueError, TypeError) as e:
+                 self.console.print(
+                     f"[yellow]Warning:[/yellow] Invalid value '{raw_value}' for '{key}'. "
+                     f"Expected type '{target_type.__name__}'. Using default value '{default_values_str[key]}'. Error: {e}",
+                     style="dim",
+                 )
+                 # Fallback to default string value
+                 converted_value = target_type(default_values_str[key])

+             merged_config[key] = converted_value
          self.config = merged_config
-         return merged_config
+         return self.config

      def detect_os(self) -> str:
          """Detect operating system + version"""
@@ -545,7 +572,7 @@ class CLI:
              # Handle clear command
              if user_input.lower() == CMD_CLEAR and self.current_mode == CHAT_MODE:
                  self.history = []
-                 self.console.print("[bold yellow]Chat history cleared[/bold yellow]\n")
+                 self.console.print("Chat history cleared\n", style="bold yellow")
                  continue
              elif user_input.lower() == CMD_HISTORY:
                  self.console.print(self.history)
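
The final hunk appears purely cosmetic: inline Rich markup is replaced by the `style=` keyword. Both calls in the short sketch below print the same bold yellow line; passing `style=` simply keeps the message text free of markup tags.

```python
from rich.console import Console

console = Console()

# Inline markup (0.0.18 style) and the style keyword (0.0.19 style) render identically.
console.print("[bold yellow]Chat history cleared[/bold yellow]")
console.print("Chat history cleared", style="bold yellow")
```
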