yaicli 0.5.9__tar.gz → 0.6.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. {yaicli-0.5.9 → yaicli-0.6.1}/PKG-INFO +238 -32
  2. {yaicli-0.5.9 → yaicli-0.6.1}/README.md +225 -28
  3. {yaicli-0.5.9 → yaicli-0.6.1}/pyproject.toml +35 -12
  4. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/cli.py +31 -20
  5. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/const.py +6 -5
  6. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/entry.py +1 -1
  7. yaicli-0.6.1/yaicli/llms/__init__.py +13 -0
  8. yaicli-0.6.1/yaicli/llms/client.py +120 -0
  9. yaicli-0.6.1/yaicli/llms/provider.py +78 -0
  10. yaicli-0.6.1/yaicli/llms/providers/ai21_provider.py +66 -0
  11. yaicli-0.6.1/yaicli/llms/providers/chatglm_provider.py +139 -0
  12. yaicli-0.6.1/yaicli/llms/providers/chutes_provider.py +14 -0
  13. yaicli-0.6.1/yaicli/llms/providers/cohere_provider.py +298 -0
  14. yaicli-0.6.1/yaicli/llms/providers/deepseek_provider.py +14 -0
  15. yaicli-0.6.1/yaicli/llms/providers/doubao_provider.py +53 -0
  16. yaicli-0.6.1/yaicli/llms/providers/groq_provider.py +16 -0
  17. yaicli-0.6.1/yaicli/llms/providers/infiniai_provider.py +20 -0
  18. yaicli-0.6.1/yaicli/llms/providers/minimax_provider.py +13 -0
  19. yaicli-0.6.1/yaicli/llms/providers/modelscope_provider.py +14 -0
  20. yaicli-0.6.1/yaicli/llms/providers/ollama_provider.py +187 -0
  21. yaicli-0.6.1/yaicli/llms/providers/openai_provider.py +211 -0
  22. yaicli-0.6.1/yaicli/llms/providers/openrouter_provider.py +14 -0
  23. yaicli-0.6.1/yaicli/llms/providers/sambanova_provider.py +30 -0
  24. yaicli-0.6.1/yaicli/llms/providers/siliconflow_provider.py +14 -0
  25. yaicli-0.6.1/yaicli/llms/providers/targon_provider.py +14 -0
  26. yaicli-0.6.1/yaicli/llms/providers/yi_provider.py +14 -0
  27. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/printer.py +4 -16
  28. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/schemas.py +12 -3
  29. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/tools.py +59 -3
  30. yaicli-0.5.9/yaicli/client.py +0 -391
  31. {yaicli-0.5.9 → yaicli-0.6.1}/.gitignore +0 -0
  32. {yaicli-0.5.9 → yaicli-0.6.1}/LICENSE +0 -0
  33. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/__init__.py +0 -0
  34. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/chat.py +0 -0
  35. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/config.py +0 -0
  36. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/console.py +0 -0
  37. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/exceptions.py +0 -0
  38. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/functions/__init__.py +0 -0
  39. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/functions/buildin/execute_shell_command.py +0 -0
  40. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/history.py +0 -0
  41. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/render.py +0 -0
  42. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/role.py +0 -0
  43. {yaicli-0.5.9 → yaicli-0.6.1}/yaicli/utils.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: yaicli
3
- Version: 0.5.9
3
+ Version: 0.6.1
4
4
  Summary: A simple CLI tool to interact with LLM
5
5
  Project-URL: Homepage, https://github.com/belingud/yaicli
6
6
  Project-URL: Repository, https://github.com/belingud/yaicli
@@ -208,8 +208,8 @@ License: Apache License
208
208
  See the License for the specific language governing permissions and
209
209
  limitations under the License.
210
210
  License-File: LICENSE
211
- Keywords: ai,chatgpt,cli,gpt,interact,interact with chatgpt,interact with gpt,interact with llm,interact with llms,interact with openai,interactive,llm,llms,openai,terminal
212
- Classifier: License :: OSI Approved :: MIT License
211
+ Keywords: ai,ai-assistant,ai-chat,ai-interaction,anthropic,chatgpt,claude,cli,cohere,command-line,completion,console-application,conversation,gemini,gpt,groq,inference,interactive,language-model,llm,llms,mistral,nlp,openai,prompt,python-tool,shell-integration,terminal,terminal-interface,text-generation
212
+ Classifier: License :: OSI Approved :: Apache Software License
213
213
  Classifier: Operating System :: OS Independent
214
214
  Classifier: Programming Language :: Python :: 3
215
215
  Requires-Python: >=3.9
@@ -218,12 +218,21 @@ Requires-Dist: distro>=1.9.0
218
218
  Requires-Dist: httpx>=0.28.1
219
219
  Requires-Dist: instructor>=1.7.9
220
220
  Requires-Dist: json-repair>=0.44.1
221
- Requires-Dist: litellm>=1.67.5
222
221
  Requires-Dist: openai>=1.76.0
223
222
  Requires-Dist: prompt-toolkit>=3.0.50
224
223
  Requires-Dist: rich>=13.9.4
225
224
  Requires-Dist: socksio>=1.0.0
226
225
  Requires-Dist: typer>=0.16.0
226
+ Provides-Extra: all
227
+ Requires-Dist: cohere>=5.15.0; extra == 'all'
228
+ Requires-Dist: ollama>=0.5.1; extra == 'all'
229
+ Requires-Dist: volcengine-python-sdk>=3.0.15; extra == 'all'
230
+ Provides-Extra: cohere
231
+ Requires-Dist: cohere>=5.15.0; extra == 'cohere'
232
+ Provides-Extra: doubao
233
+ Requires-Dist: volcengine-python-sdk>=3.0.15; extra == 'doubao'
234
+ Provides-Extra: ollama
235
+ Requires-Dist: ollama>=0.5.1; extra == 'ollama'
227
236
  Description-Content-Type: text/markdown
228
237
 
229
238
  # YAICLI: Your AI assistant in command line.
@@ -308,6 +317,38 @@ pipx install yaicli
308
317
  uv tool install yaicli
309
318
  ```
310
319
 
320
+ Yaicli has several optional dependency groups; you can copy the commands below to install the dependencies you need.
321
+
322
+ ```shell
323
+ # install all dependencies
324
+ pip install 'yaicli[all]'
325
+
326
+ # install with ollama support
327
+ pip install 'yaicli[ollama]'
328
+
329
+ # install with cohere support
330
+ pip install 'yaicli[cohere]'
331
+
332
+ # install with doubao support
333
+ pip install 'yaicli[doubao]'
334
+ ```
335
+
336
+ Install with `uv`:
337
+
338
+ ```shell
339
+ # install all dependencies
340
+ uv tool install 'yaicli[all]'
341
+
342
+ # install with ollama support
343
+ uv tool install 'yaicli[ollama]'
344
+
345
+ # install with cohere support
346
+ uv tool install 'yaicli[cohere]'
347
+
348
+ # install with doubao support
349
+ uv tool install 'yaicli[doubao]'
350
+ ```
351
+
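The extras use standard pip extras syntax, so multiple provider groups can be installed in one command if needed; a minimal sketch (the `ollama,cohere` combination below is only an example):

```shell
# example: install ollama and cohere support in a single command
pip install 'yaicli[ollama,cohere]'

# the same combination with uv
uv tool install 'yaicli[ollama,cohere]'
```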
311
352
  ### Install from Source
312
353
 
313
354
  ```bash
@@ -382,39 +423,41 @@ SHOW_FUNCTION_OUTPUT=true
382
423
 
383
424
  ### Configuration Options Reference
384
425
 
385
- | Option | Description | Default | Env Variable |
386
- | ---------------------- | ------------------------------------------- | --------------------------- | -------------------------- |
387
- | `PROVIDER` | LLM provider (openai, claude, cohere, etc.) | `openai` | `YAI_PROVIDER` |
388
- | `BASE_URL` | API endpoint URL | `https://api.openai.com/v1` | `YAI_BASE_URL` |
389
- | `API_KEY` | Your API key | - | `YAI_API_KEY` |
390
- | `MODEL` | LLM model to use | `gpt-4o` | `YAI_MODEL` |
391
- | `SHELL_NAME` | Shell type | `auto` | `YAI_SHELL_NAME` |
392
- | `OS_NAME` | Operating system | `auto` | `YAI_OS_NAME` |
393
- | `STREAM` | Enable streaming | `true` | `YAI_STREAM` |
394
- | `TIMEOUT` | API timeout (seconds) | `60` | `YAI_TIMEOUT` |
395
- | `EXTRA_HEADERS` | Extra headers | - | `YAI_EXTRA_HEADERS` |
396
- | `EXTRA_BODY` | Extra body | - | `YAI_EXTRA_BODY` |
397
- | `REASONING_EFFORT` | Reasoning effort | - | `YAI_REASONING_EFFORT` |
398
- | `INTERACTIVE_ROUND` | Interactive mode rounds | `25` | `YAI_INTERACTIVE_ROUND` |
399
- | `CODE_THEME` | Syntax highlighting theme | `monokai` | `YAI_CODE_THEME` |
400
- | `TEMPERATURE` | Response randomness | `0.7` | `YAI_TEMPERATURE` |
401
- | `TOP_P` | Top-p sampling | `1.0` | `YAI_TOP_P` |
402
- | `MAX_TOKENS` | Max response tokens | `1024` | `YAI_MAX_TOKENS` |
403
- | `MAX_HISTORY` | Max history entries | `500` | `YAI_MAX_HISTORY` |
404
- | `AUTO_SUGGEST` | Enable history suggestions | `true` | `YAI_AUTO_SUGGEST` |
405
- | `SHOW_REASONING` | Enable reasoning display | `true` | `YAI_SHOW_REASONING` |
406
- | `JUSTIFY` | Text alignment | `default` | `YAI_JUSTIFY` |
407
- | `CHAT_HISTORY_DIR` | Chat history directory | `<tempdir>/yaicli/chats` | `YAI_CHAT_HISTORY_DIR` |
408
- | `MAX_SAVED_CHATS` | Max saved chats | `20` | `YAI_MAX_SAVED_CHATS` |
409
- | `ROLE_MODIFY_WARNING` | Warn user when modifying role | `true` | `YAI_ROLE_MODIFY_WARNING` |
410
- | `ENABLE_FUNCTIONS` | Enable function calling | `true` | `YAI_ENABLE_FUNCTIONS` |
411
- | `SHOW_FUNCTION_OUTPUT` | Show function output in response | `true` | `YAI_SHOW_FUNCTION_OUTPUT` |
426
+ | Option | Description | Default | Env Variable |
427
+ | ---------------------- | ------------------------------------------- | ------------------------ | -------------------------- |
428
+ | `PROVIDER` | LLM provider (openai, claude, cohere, etc.) | `openai` | `YAI_PROVIDER` |
429
+ | `BASE_URL` | API endpoint URL | - | `YAI_BASE_URL` |
430
+ | `API_KEY` | Your API key | - | `YAI_API_KEY` |
431
+ | `MODEL` | LLM model to use | `gpt-4o` | `YAI_MODEL` |
432
+ | `SHELL_NAME` | Shell type | `auto` | `YAI_SHELL_NAME` |
433
+ | `OS_NAME` | Operating system | `auto` | `YAI_OS_NAME` |
434
+ | `STREAM` | Enable streaming | `true` | `YAI_STREAM` |
435
+ | `TIMEOUT` | API timeout (seconds) | `60` | `YAI_TIMEOUT` |
436
+ | `EXTRA_HEADERS` | Extra headers | - | `YAI_EXTRA_HEADERS` |
437
+ | `EXTRA_BODY` | Extra body | - | `YAI_EXTRA_BODY` |
438
+ | `REASONING_EFFORT` | Reasoning effort | - | `YAI_REASONING_EFFORT` |
439
+ | `INTERACTIVE_ROUND` | Interactive mode rounds | `25` | `YAI_INTERACTIVE_ROUND` |
440
+ | `CODE_THEME` | Syntax highlighting theme | `monokai` | `YAI_CODE_THEME` |
441
+ | `TEMPERATURE` | Response randomness | `0.7` | `YAI_TEMPERATURE` |
442
+ | `TOP_P` | Top-p sampling | `1.0` | `YAI_TOP_P` |
443
+ | `MAX_TOKENS` | Max response tokens | `1024` | `YAI_MAX_TOKENS` |
444
+ | `MAX_HISTORY` | Max history entries | `500` | `YAI_MAX_HISTORY` |
445
+ | `AUTO_SUGGEST` | Enable history suggestions | `true` | `YAI_AUTO_SUGGEST` |
446
+ | `SHOW_REASONING` | Enable reasoning display | `true` | `YAI_SHOW_REASONING` |
447
+ | `JUSTIFY` | Text alignment | `default` | `YAI_JUSTIFY` |
448
+ | `CHAT_HISTORY_DIR` | Chat history directory | `<tempdir>/yaicli/chats` | `YAI_CHAT_HISTORY_DIR` |
449
+ | `MAX_SAVED_CHATS` | Max saved chats | `20` | `YAI_MAX_SAVED_CHATS` |
450
+ | `ROLE_MODIFY_WARNING` | Warn user when modifying role | `true` | `YAI_ROLE_MODIFY_WARNING` |
451
+ | `ENABLE_FUNCTIONS` | Enable function calling | `true` | `YAI_ENABLE_FUNCTIONS` |
452
+ | `SHOW_FUNCTION_OUTPUT` | Show function output in response | `true` | `YAI_SHOW_FUNCTION_OUTPUT` |
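Each option maps to the `YAI_`-prefixed environment variable in the last column, so a setting can typically be tried without editing the config file; a short sketch with illustrative values:

```shell
# illustrative one-off overrides for the current shell session
export YAI_MODEL=gpt-4o-mini    # any model your provider supports
export YAI_STREAM=false         # disable streaming output
export YAI_CODE_THEME=dracula   # any Pygments theme name
```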
412
453
 
413
454
  ### LLM Provider Configuration
414
455
 
415
456
  YAICLI works with all major LLM providers. The default configuration is set up for OpenAI, but you can easily switch to
416
457
  other providers.
417
458
 
459
+ Note: a blank `BASE_URL` (or no `BASE_URL` at all) means the provider's default URL is used.
460
+
418
461
  #### Pre-configured Provider Settings
419
462
 
420
463
  | Provider | BASE_URL |
@@ -430,7 +473,7 @@ other providers.
430
473
  > - Google Gemini: https://ai.google.dev/gemini-api/docs/openai
431
474
  > - Claude: https://docs.anthropic.com/en/api/openai-sdk
432
475
 
433
- If you not sure about base_url or just use the default provider base_url, just leave it blank.
476
+ If you are not sure about the base_url, or simply want to use the provider's default base_url, just leave it blank or delete `BASE_URL`.
434
477
 
435
478
  ```ini
436
479
  [core]
@@ -440,6 +483,169 @@ API_KEY=xxx
440
483
  MODEL=command-r-plus
441
484
  ```
442
485
 
486
+ #### OpenAI
487
+
488
+ Yaicli uses `openai` as the default provider and `gpt-4o` as the default model; just add your API key and it works as is.
489
+
490
+ ```ini
491
+ PROVIDER=openai
492
+ BASE_URL=
493
+ API_KEY=
494
+ MODEL=gpt-4o
495
+ ```
496
+
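Since `BASE_URL` is just the API endpoint, the `openai` provider can presumably also target any OpenAI-compatible gateway; a sketch with placeholder values (the URL, key, and model below are illustrative, not defaults):

```ini
PROVIDER=openai
; illustrative: a self-hosted OpenAI-compatible endpoint (e.g. a vLLM or LiteLLM proxy)
BASE_URL=http://localhost:8000/v1
API_KEY=sk-placeholder
MODEL=my-local-model
```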
497
+ #### Deepseek
498
+
499
+ ```ini
500
+ PROVIDER=deepseek
501
+ API_KEY=
502
+ MODEL=deepseek-chat
503
+ ```
504
+
505
+ #### OpenRouter
506
+
507
+ ```ini
508
+ PROVIDER=openrouter
509
+ API_KEY=
510
+ MODEL=deepseek/deepseek-chat-v3-0324
511
+ ```
512
+
513
+ #### Groq
514
+
515
+ ```ini
516
+ PROVIDER=groq
517
+ API_KEY=
518
+ MODEL=llama-3.3-70b-versatile
519
+ ```
520
+
521
+ #### Chatglm
522
+
523
+ ```ini
524
+ PROVIDER=chatglm
525
+ API_KEY=
526
+ MODEL=glm-4-plus
527
+ ```
528
+
529
+ #### Chutes
530
+
531
+ ```ini
532
+ PROVIDER=chutes
533
+ API_KEY=
534
+ MODEL=deepseek-ai/DeepSeek-V3-0324
535
+ ```
536
+
537
+ #### AI21
538
+
539
+ ```ini
540
+ PROVIDER=ai21
541
+ API_KEY=
542
+ MODEL=jamba-large-1.6
543
+ ```
544
+
545
+ #### Sambanova
546
+
547
+ ```ini
548
+ PROVIDER=sambanova
549
+ API_KEY=
550
+ MODEL=DeepSeek-V3-0324
551
+ ```
552
+
553
+ #### ModelScope
554
+
555
+ ```ini
556
+ PROVIDER=modelscope
557
+ API_KEY=
558
+ MODEL=deepseek-ai/DeepSeek-V3-0324
559
+ ```
560
+
561
+ #### InfiniAI
562
+
563
+ ```ini
564
+ PROVIDER=infiniai
565
+ ; BASE_URL=https://cloud.infini-ai.com/maas/v1
566
+ API_KEY=
567
+ MODEL=deepseek-v3
568
+ ```
569
+
570
+ #### Siliconflow
571
+
572
+ ```ini
573
+ PROVIDER=siliconflow
574
+ API_KEY=
575
+ MODEL=Qwen/Qwen3-32B
576
+ ```
577
+
578
+ #### Yi
579
+
580
+ ```ini
581
+ PROVIDER=yi
582
+ API_KEY=
583
+ MODEL=yi-lightning
584
+ ```
585
+
586
+ #### Cohere
587
+
588
+ You have to install the cohere dependencies: `pip install 'yaicli[cohere]'`
589
+
590
+ ```ini
591
+ PROVIDER=cohere
592
+ API_KEY=
593
+ MODEL=command-a-03-2025
594
+ ```
595
+
596
+ #### Doubao
597
+
598
+ You have to install the doubao dependencies: `pip install 'yaicli[doubao]'`
599
+
600
+ When using Doubao's OpenAI-compatible API, you only need to set `API_KEY`.
601
+
602
+ ```ini
603
+ PROVIDER=doubao
604
+ API_KEY=
605
+ MODEL=doubao-1-5-lite-32k-250115
606
+ ```
607
+
608
+ If you are using an access key instead, you have to set `AK` and `SK`.
609
+
610
+ ```ini
611
+ PROVIDER=doubao
612
+ MODEL=doubao-1-5-lite-32k-250115
613
+ AK=XXX
614
+ SK=XXX
615
+ ```
616
+
617
+ #### Ollama
618
+
619
+ You have to install the ollama dependencies: `pip install 'yaicli[ollama]'`
620
+
621
+ ```ini
622
+ PROVIDER=ollama
623
+ ; BASE_URL=http://localhost:11434
624
+ MODEL=qwen3:32b
625
+ ```
626
+
627
+ Supported options:
628
+
629
+ ```ini
630
+ TEMPERATURE=
631
+ TOP_P=
632
+ SEED=
633
+ NUM_PREDICT=
634
+ NUM_CTX=
635
+ NUM_BATCH=
636
+ NUM_GPU=
637
+ MAIN_GPU=
638
+ LOW_VRAM=
639
+ F16_KV=
640
+ LOGITS_ALL=
641
+ VOCAB_ONLY=
642
+ USE_MMAP=
643
+ USE_MLOCK=
644
+ NUM_THREAD=
645
+ ```
646
+
647
+ NOTE: The Ollama provider does not support extra body parameters.
648
+
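For reference, a possible Ollama configuration with a few of the options above filled in (values are illustrative; unset options presumably fall back to Ollama's defaults):

```ini
PROVIDER=ollama
; BASE_URL=http://localhost:11434
MODEL=qwen3:32b
TEMPERATURE=0.7
TOP_P=0.9
; optional tuning knobs: context window, GPU count, memory mapping
NUM_CTX=8192
NUM_GPU=1
USE_MMAP=true
```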
443
649
  ### Syntax Highlighting Themes
444
650
 
445
651
  YAICLI supports all Pygments syntax highlighting themes. You can set your preferred theme in the config file:
@@ -80,6 +80,38 @@ pipx install yaicli
80
80
  uv tool install yaicli
81
81
  ```
82
82
 
83
+ Yaicli has several optional dependency groups; you can copy the commands below to install the dependencies you need.
84
+
85
+ ```shell
86
+ # install all dependencies
87
+ pip install 'yaicli[all]'
88
+
89
+ # install with ollama support
90
+ pip install 'yaicli[ollama]'
91
+
92
+ # install with cohere support
93
+ pip install 'yaicli[cohere]'
94
+
95
+ # install with doubao support
96
+ pip install 'yaicli[doubao]'
97
+ ```
98
+
99
+ Install with `uv`:
100
+
101
+ ```shell
102
+ # install all dependencies
103
+ uv tool install 'yaicli[all]'
104
+
105
+ # install with ollama support
106
+ uv tool install 'yaicli[ollama]'
107
+
108
+ # install with cohere support
109
+ uv tool install 'yaicli[cohere]'
110
+
111
+ # install with doubao support
112
+ uv tool install 'yaicli[doubao]'
113
+ ```
114
+
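The extras use standard pip extras syntax, so multiple provider groups can be installed in one command if needed; a minimal sketch (the `ollama,cohere` combination below is only an example):

```shell
# example: install ollama and cohere support in a single command
pip install 'yaicli[ollama,cohere]'

# the same combination with uv
uv tool install 'yaicli[ollama,cohere]'
```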
83
115
  ### Install from Source
84
116
 
85
117
  ```bash
@@ -154,39 +186,41 @@ SHOW_FUNCTION_OUTPUT=true
154
186
 
155
187
  ### Configuration Options Reference
156
188
 
157
- | Option | Description | Default | Env Variable |
158
- | ---------------------- | ------------------------------------------- | --------------------------- | -------------------------- |
159
- | `PROVIDER` | LLM provider (openai, claude, cohere, etc.) | `openai` | `YAI_PROVIDER` |
160
- | `BASE_URL` | API endpoint URL | `https://api.openai.com/v1` | `YAI_BASE_URL` |
161
- | `API_KEY` | Your API key | - | `YAI_API_KEY` |
162
- | `MODEL` | LLM model to use | `gpt-4o` | `YAI_MODEL` |
163
- | `SHELL_NAME` | Shell type | `auto` | `YAI_SHELL_NAME` |
164
- | `OS_NAME` | Operating system | `auto` | `YAI_OS_NAME` |
165
- | `STREAM` | Enable streaming | `true` | `YAI_STREAM` |
166
- | `TIMEOUT` | API timeout (seconds) | `60` | `YAI_TIMEOUT` |
167
- | `EXTRA_HEADERS` | Extra headers | - | `YAI_EXTRA_HEADERS` |
168
- | `EXTRA_BODY` | Extra body | - | `YAI_EXTRA_BODY` |
169
- | `REASONING_EFFORT` | Reasoning effort | - | `YAI_REASONING_EFFORT` |
170
- | `INTERACTIVE_ROUND` | Interactive mode rounds | `25` | `YAI_INTERACTIVE_ROUND` |
171
- | `CODE_THEME` | Syntax highlighting theme | `monokai` | `YAI_CODE_THEME` |
172
- | `TEMPERATURE` | Response randomness | `0.7` | `YAI_TEMPERATURE` |
173
- | `TOP_P` | Top-p sampling | `1.0` | `YAI_TOP_P` |
174
- | `MAX_TOKENS` | Max response tokens | `1024` | `YAI_MAX_TOKENS` |
175
- | `MAX_HISTORY` | Max history entries | `500` | `YAI_MAX_HISTORY` |
176
- | `AUTO_SUGGEST` | Enable history suggestions | `true` | `YAI_AUTO_SUGGEST` |
177
- | `SHOW_REASONING` | Enable reasoning display | `true` | `YAI_SHOW_REASONING` |
178
- | `JUSTIFY` | Text alignment | `default` | `YAI_JUSTIFY` |
179
- | `CHAT_HISTORY_DIR` | Chat history directory | `<tempdir>/yaicli/chats` | `YAI_CHAT_HISTORY_DIR` |
180
- | `MAX_SAVED_CHATS` | Max saved chats | `20` | `YAI_MAX_SAVED_CHATS` |
181
- | `ROLE_MODIFY_WARNING` | Warn user when modifying role | `true` | `YAI_ROLE_MODIFY_WARNING` |
182
- | `ENABLE_FUNCTIONS` | Enable function calling | `true` | `YAI_ENABLE_FUNCTIONS` |
183
- | `SHOW_FUNCTION_OUTPUT` | Show function output in response | `true` | `YAI_SHOW_FUNCTION_OUTPUT` |
189
+ | Option | Description | Default | Env Variable |
190
+ | ---------------------- | ------------------------------------------- | ------------------------ | -------------------------- |
191
+ | `PROVIDER` | LLM provider (openai, claude, cohere, etc.) | `openai` | `YAI_PROVIDER` |
192
+ | `BASE_URL` | API endpoint URL | - | `YAI_BASE_URL` |
193
+ | `API_KEY` | Your API key | - | `YAI_API_KEY` |
194
+ | `MODEL` | LLM model to use | `gpt-4o` | `YAI_MODEL` |
195
+ | `SHELL_NAME` | Shell type | `auto` | `YAI_SHELL_NAME` |
196
+ | `OS_NAME` | Operating system | `auto` | `YAI_OS_NAME` |
197
+ | `STREAM` | Enable streaming | `true` | `YAI_STREAM` |
198
+ | `TIMEOUT` | API timeout (seconds) | `60` | `YAI_TIMEOUT` |
199
+ | `EXTRA_HEADERS` | Extra headers | - | `YAI_EXTRA_HEADERS` |
200
+ | `EXTRA_BODY` | Extra body | - | `YAI_EXTRA_BODY` |
201
+ | `REASONING_EFFORT` | Reasoning effort | - | `YAI_REASONING_EFFORT` |
202
+ | `INTERACTIVE_ROUND` | Interactive mode rounds | `25` | `YAI_INTERACTIVE_ROUND` |
203
+ | `CODE_THEME` | Syntax highlighting theme | `monokai` | `YAI_CODE_THEME` |
204
+ | `TEMPERATURE` | Response randomness | `0.7` | `YAI_TEMPERATURE` |
205
+ | `TOP_P` | Top-p sampling | `1.0` | `YAI_TOP_P` |
206
+ | `MAX_TOKENS` | Max response tokens | `1024` | `YAI_MAX_TOKENS` |
207
+ | `MAX_HISTORY` | Max history entries | `500` | `YAI_MAX_HISTORY` |
208
+ | `AUTO_SUGGEST` | Enable history suggestions | `true` | `YAI_AUTO_SUGGEST` |
209
+ | `SHOW_REASONING` | Enable reasoning display | `true` | `YAI_SHOW_REASONING` |
210
+ | `JUSTIFY` | Text alignment | `default` | `YAI_JUSTIFY` |
211
+ | `CHAT_HISTORY_DIR` | Chat history directory | `<tempdir>/yaicli/chats` | `YAI_CHAT_HISTORY_DIR` |
212
+ | `MAX_SAVED_CHATS` | Max saved chats | `20` | `YAI_MAX_SAVED_CHATS` |
213
+ | `ROLE_MODIFY_WARNING` | Warn user when modifying role | `true` | `YAI_ROLE_MODIFY_WARNING` |
214
+ | `ENABLE_FUNCTIONS` | Enable function calling | `true` | `YAI_ENABLE_FUNCTIONS` |
215
+ | `SHOW_FUNCTION_OUTPUT` | Show function output in response | `true` | `YAI_SHOW_FUNCTION_OUTPUT` |
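Each option maps to the `YAI_`-prefixed environment variable in the last column, so a setting can typically be tried without editing the config file; a short sketch with illustrative values:

```shell
# illustrative one-off overrides for the current shell session
export YAI_MODEL=gpt-4o-mini    # any model your provider supports
export YAI_STREAM=false         # disable streaming output
export YAI_CODE_THEME=dracula   # any Pygments theme name
```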
184
216
 
185
217
  ### LLM Provider Configuration
186
218
 
187
219
  YAICLI works with all major LLM providers. The default configuration is set up for OpenAI, but you can easily switch to
188
220
  other providers.
189
221
 
222
+ Note: a blank `BASE_URL` (or no `BASE_URL` at all) means the provider's default URL is used.
223
+
190
224
  #### Pre-configured Provider Settings
191
225
 
192
226
  | Provider | BASE_URL |
@@ -202,7 +236,7 @@ other providers.
202
236
  > - Google Gemini: https://ai.google.dev/gemini-api/docs/openai
203
237
  > - Claude: https://docs.anthropic.com/en/api/openai-sdk
204
238
 
205
- If you not sure about base_url or just use the default provider base_url, just leave it blank.
239
+ If you are not sure about the base_url, or simply want to use the provider's default base_url, just leave it blank or delete `BASE_URL`.
206
240
 
207
241
  ```ini
208
242
  [core]
@@ -212,6 +246,169 @@ API_KEY=xxx
212
246
  MODEL=command-r-plus
213
247
  ```
214
248
 
249
+ #### OpenAI
250
+
251
+ Yaicli uses `openai` as the default provider and `gpt-4o` as the default model; just add your API key and it works as is.
252
+
253
+ ```ini
254
+ PROVIDER=openai
255
+ BASE_URL=
256
+ API_KEY=
257
+ MODEL=gpt-4o
258
+ ```
259
+
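Since `BASE_URL` is just the API endpoint, the `openai` provider can presumably also target any OpenAI-compatible gateway; a sketch with placeholder values (the URL, key, and model below are illustrative, not defaults):

```ini
PROVIDER=openai
; illustrative: a self-hosted OpenAI-compatible endpoint (e.g. a vLLM or LiteLLM proxy)
BASE_URL=http://localhost:8000/v1
API_KEY=sk-placeholder
MODEL=my-local-model
```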
260
+ #### Deepseek
261
+
262
+ ```ini
263
+ PROVIDER=deepseek
264
+ API_KEY=
265
+ MODEL=deepseek-chat
266
+ ```
267
+
268
+ #### OpenRouter
269
+
270
+ ```ini
271
+ PROVIDER=openrouter
272
+ API_KEY=
273
+ MODEL=deepseek/deepseek-chat-v3-0324
274
+ ```
275
+
276
+ #### Groq
277
+
278
+ ```ini
279
+ PROVIDER=groq
280
+ API_KEY=
281
+ MODEL=llama-3.3-70b-versatile
282
+ ```
283
+
284
+ #### Chatglm
285
+
286
+ ```ini
287
+ PROVIDER=chatglm
288
+ API_KEY=
289
+ MODEL=glm-4-plus
290
+ ```
291
+
292
+ #### Chutes
293
+
294
+ ```ini
295
+ PROVIDER=chutes
296
+ API_KEY=
297
+ MODEL=deepseek-ai/DeepSeek-V3-0324
298
+ ```
299
+
300
+ #### AI21
301
+
302
+ ```ini
303
+ PROVIDER=ai21
304
+ API_KEY=
305
+ MODEL=jamba-large-1.6
306
+ ```
307
+
308
+ #### Sambanova
309
+
310
+ ```ini
311
+ PROVIDER=sambanova
312
+ API_KEY=
313
+ MODEL=DeepSeek-V3-0324
314
+ ```
315
+
316
+ #### ModelScope
317
+
318
+ ```ini
319
+ PROVIDER=modelscope
320
+ API_KEY=
321
+ MODEL=deepseek-ai/DeepSeek-V3-0324
322
+ ```
323
+
324
+ #### InfiniAI
325
+
326
+ ```ini
327
+ PROVIDER=infiniai
328
+ ; BASE_URL=https://cloud.infini-ai.com/maas/v1
329
+ API_KEY=
330
+ MODEL=deepseek-v3
331
+ ```
332
+
333
+ #### Siliconflow
334
+
335
+ ```ini
336
+ PROVIDER=siliconflow
337
+ API_KEY=
338
+ MODEL=Qwen/Qwen3-32B
339
+ ```
340
+
341
+ #### Yi
342
+
343
+ ```ini
344
+ PROVIDER=yi
345
+ API_KEY=
346
+ MODEL=yi-lightning
347
+ ```
348
+
349
+ #### Cohere
350
+
351
+ You have to install the cohere dependencies: `pip install 'yaicli[cohere]'`
352
+
353
+ ```ini
354
+ PROVIDER=cohere
355
+ API_KEY=
356
+ MODEL=command-a-03-2025
357
+ ```
358
+
359
+ #### Doubao
360
+
361
+ You have to install the doubao dependencies: `pip install 'yaicli[doubao]'`
362
+
363
+ When using Doubao's OpenAI-compatible API, you only need to set `API_KEY`.
364
+
365
+ ```ini
366
+ PROVIDER=doubao
367
+ API_KEY=
368
+ MODEL=doubao-1-5-lite-32k-250115
369
+ ```
370
+
371
+ If you are using an access key instead, you have to set `AK` and `SK`.
372
+
373
+ ```ini
374
+ PROVIDER=doubao
375
+ MODEL=doubao-1-5-lite-32k-250115
376
+ AK=XXX
377
+ SK=XXX
378
+ ```
379
+
380
+ #### Ollama
381
+
382
+ You have to install the ollama dependencies: `pip install 'yaicli[ollama]'`
383
+
384
+ ```ini
385
+ PROVIDER=ollama
386
+ ; BASE_URL=http://localhost:11434
387
+ MODEL=qwen3:32b
388
+ ```
389
+
390
+ Supported options:
391
+
392
+ ```ini
393
+ TEMPERATURE=
394
+ TOP_P=
395
+ SEED=
396
+ NUM_PREDICT=
397
+ NUM_CTX=
398
+ NUM_BATCH=
399
+ NUM_GPU=
400
+ MAIN_GPU=
401
+ LOW_VRAM=
402
+ F16_KV=
403
+ LOGITS_ALL=
404
+ VOCAB_ONLY=
405
+ USE_MMAP=
406
+ USE_MLOCK=
407
+ NUM_THREAD=
408
+ ```
409
+
410
+ NOTE: The Ollama provider does not support extra body parameters.
411
+
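For reference, a possible Ollama configuration with a few of the options above filled in (values are illustrative; unset options presumably fall back to Ollama's defaults):

```ini
PROVIDER=ollama
; BASE_URL=http://localhost:11434
MODEL=qwen3:32b
TEMPERATURE=0.7
TOP_P=0.9
; optional tuning knobs: context window, GPU count, memory mapping
NUM_CTX=8192
NUM_GPU=1
USE_MMAP=true
```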
215
412
  ### Syntax Highlighting Themes
216
413
 
217
414
  YAICLI supports all Pygments syntax highlighting themes. You can set your preferred theme in the config file: