llms-py 2.0.14__tar.gz → 2.0.16__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. llms_py-2.0.16/MANIFEST.in +4 -0
  2. {llms_py-2.0.14/llms_py.egg-info → llms_py-2.0.16}/PKG-INFO +8 -35
  3. {llms_py-2.0.14 → llms_py-2.0.16}/README.md +7 -34
  4. llms_py-2.0.16/llms/__init__.py +2 -0
  5. llms_py-2.0.16/llms/__main__.py +9 -0
  6. llms_py-2.0.16/llms/__pycache__/__init__.cpython-312.pyc +0 -0
  7. llms_py-2.0.16/llms/__pycache__/__init__.cpython-313.pyc +0 -0
  8. llms_py-2.0.16/llms/__pycache__/__init__.cpython-314.pyc +0 -0
  9. llms_py-2.0.16/llms/__pycache__/__main__.cpython-312.pyc +0 -0
  10. llms_py-2.0.16/llms/__pycache__/__main__.cpython-314.pyc +0 -0
  11. llms_py-2.0.16/llms/__pycache__/llms.cpython-312.pyc +0 -0
  12. llms_py-2.0.16/llms/__pycache__/main.cpython-312.pyc +0 -0
  13. llms_py-2.0.16/llms/__pycache__/main.cpython-313.pyc +0 -0
  14. llms_py-2.0.16/llms/__pycache__/main.cpython-314.pyc +0 -0
  15. {llms_py-2.0.14 → llms_py-2.0.16/llms}/index.html +5 -1
  16. llms_py-2.0.16/llms/llms.json +1102 -0
  17. llms_py-2.0.14/llms.py → llms_py-2.0.16/llms/main.py +252 -14
  18. llms_py-2.0.16/llms/ui/Analytics.mjs +1483 -0
  19. llms_py-2.0.16/llms/ui/Brand.mjs +34 -0
  20. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/ChatPrompt.mjs +58 -36
  21. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/Main.mjs +205 -5
  22. llms_py-2.0.16/llms/ui/ModelSelector.mjs +60 -0
  23. llms_py-2.0.16/llms/ui/ProviderIcon.mjs +29 -0
  24. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/Sidebar.mjs +20 -4
  25. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/ai.mjs +1 -1
  26. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/app.css +211 -64
  27. llms_py-2.0.16/llms/ui/lib/chart.js +14 -0
  28. llms_py-2.0.16/llms/ui/lib/charts.mjs +20 -0
  29. llms_py-2.0.16/llms/ui/lib/color.js +14 -0
  30. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/tailwind.input.css +1 -1
  31. llms_py-2.0.16/llms/ui/threadStore.mjs +524 -0
  32. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/utils.mjs +36 -0
  33. {llms_py-2.0.14 → llms_py-2.0.16/llms_py.egg-info}/PKG-INFO +8 -35
  34. llms_py-2.0.16/llms_py.egg-info/SOURCES.txt +63 -0
  35. llms_py-2.0.16/llms_py.egg-info/entry_points.txt +2 -0
  36. {llms_py-2.0.14 → llms_py-2.0.16}/pyproject.toml +4 -4
  37. {llms_py-2.0.14 → llms_py-2.0.16}/setup.py +12 -45
  38. llms_py-2.0.14/MANIFEST.in +0 -7
  39. llms_py-2.0.14/llms.json +0 -447
  40. llms_py-2.0.14/llms_py.egg-info/SOURCES.txt +0 -47
  41. llms_py-2.0.14/llms_py.egg-info/entry_points.txt +0 -2
  42. llms_py-2.0.14/ui/Brand.mjs +0 -23
  43. llms_py-2.0.14/ui/ModelSelector.mjs +0 -29
  44. llms_py-2.0.14/ui/threadStore.mjs +0 -273
  45. {llms_py-2.0.14 → llms_py-2.0.16}/LICENSE +0 -0
  46. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/App.mjs +0 -0
  47. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/Avatar.mjs +0 -0
  48. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/ProviderStatus.mjs +0 -0
  49. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/Recents.mjs +0 -0
  50. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/SettingsDialog.mjs +0 -0
  51. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/SignIn.mjs +0 -0
  52. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/SystemPromptEditor.mjs +0 -0
  53. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/SystemPromptSelector.mjs +0 -0
  54. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/Welcome.mjs +0 -0
  55. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/fav.svg +0 -0
  56. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/lib/highlight.min.mjs +0 -0
  57. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/lib/idb.min.mjs +0 -0
  58. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/lib/marked.min.mjs +0 -0
  59. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/lib/servicestack-client.mjs +0 -0
  60. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/lib/servicestack-vue.mjs +0 -0
  61. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/lib/vue-router.min.mjs +0 -0
  62. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/lib/vue.min.mjs +0 -0
  63. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/lib/vue.mjs +0 -0
  64. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/markdown.mjs +0 -0
  65. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui/typography.css +0 -0
  66. {llms_py-2.0.14 → llms_py-2.0.16/llms}/ui.json +0 -0
  67. {llms_py-2.0.14 → llms_py-2.0.16}/llms_py.egg-info/dependency_links.txt +0 -0
  68. {llms_py-2.0.14 → llms_py-2.0.16}/llms_py.egg-info/not-zip-safe +0 -0
  69. {llms_py-2.0.14 → llms_py-2.0.16}/llms_py.egg-info/requires.txt +0 -0
  70. {llms_py-2.0.14 → llms_py-2.0.16}/llms_py.egg-info/top_level.txt +0 -0
  71. {llms_py-2.0.14 → llms_py-2.0.16}/requirements.txt +0 -0
  72. {llms_py-2.0.14 → llms_py-2.0.16}/setup.cfg +0 -0
@@ -0,0 +1,4 @@
1
+ include README.md
2
+ include LICENSE
3
+ include requirements.txt
4
+ recursive-include llms *
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: llms-py
3
- Version: 2.0.14
3
+ Version: 2.0.16
4
4
  Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
5
5
  Home-page: https://github.com/ServiceStack/llms
6
6
  Author: ServiceStack
@@ -42,7 +42,7 @@ Dynamic: requires-python
42
42
 
43
43
  Lightweight CLI and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers.
44
44
 
45
- Configure additional providers and models in [llms.json](llms.json)
45
+ Configure additional providers and models in [llms.json](llms/llms.json)
46
46
  - Mix and match local models with models from different API providers
47
47
  Requests automatically routed to available providers that support the requested model (in defined order)
48
48
  - Define free/cheapest/local providers first to save on costs
@@ -73,28 +73,10 @@ Read the [Introductory Blog Post](https://servicestack.net/posts/llms-py-ui).
73
73
 
74
74
  ## Installation
75
75
 
76
- ### Option 1: Install from PyPI
77
-
78
76
  ```bash
79
77
  pip install llms-py
80
78
  ```
81
79
 
82
- ### Option 2: Download directly
83
-
84
- 1. Download [llms.py](llms.py)
85
-
86
- ```bash
87
- curl -O https://raw.githubusercontent.com/ServiceStack/llms/main/llms.py
88
- chmod +x llms.py
89
- mv llms.py ~/.local/bin/llms
90
- ```
91
-
92
- 2. Install single dependency:
93
-
94
- ```bash
95
- pip install aiohttp
96
- ```
97
-
98
80
  ## Quick Start
99
81
 
100
82
  ### 1. Set API Keys
@@ -102,12 +84,12 @@ pip install aiohttp
102
84
  Set environment variables for the providers you want to use:
103
85
 
104
86
  ```bash
105
- export OPENROUTER_FREE_API_KEY="..."
87
+ export OPENROUTER_API_KEY="..."
106
88
  ```
107
89
 
108
90
  | Provider | Variable | Description | Example |
109
91
  |-----------------|---------------------------|---------------------|---------|
110
- | openrouter_free | `OPENROUTER_FREE_API_KEY` | OpenRouter FREE models API key | `sk-or-...` |
92
+ | openrouter_free | `OPENROUTER_API_KEY` | OpenRouter FREE models API key | `sk-or-...` |
111
93
  | groq | `GROQ_API_KEY` | Groq API key | `gsk_...` |
112
94
  | google_free | `GOOGLE_FREE_API_KEY` | Google FREE API key | `AIza...` |
113
95
  | codestral | `CODESTRAL_API_KEY` | Codestral API key | `...` |
@@ -151,7 +133,7 @@ llms "What is the capital of France?"
151
133
 
152
134
  ## Configuration
153
135
 
154
- The configuration file [llms.json](llms.json) is saved to `~/.llms/llms.json` and defines available providers, models, and default settings. Key sections:
136
+ The configuration file [llms.json](llms/llms.json) is saved to `~/.llms/llms.json` and defines available providers, models, and default settings. Key sections:
155
137
 
156
138
  ### Defaults
157
139
  - `headers`: Common HTTP headers for all requests
@@ -193,7 +175,7 @@ llms "Explain quantum computing" --raw
193
175
 
194
176
  ### Using a Chat Template
195
177
 
196
- By default llms uses the `defaults/text` chat completion request defined in [llms.json](llms.json).
178
+ By default llms uses the `defaults/text` chat completion request defined in [llms.json](llms/llms.json).
197
179
 
198
180
  You can instead use a custom chat completion request with `--chat`, e.g:
199
181
 
@@ -485,19 +467,10 @@ llms --default grok-4
485
467
 
486
468
  ### Update
487
469
 
488
- 1. Installed from PyPI
489
-
490
470
  ```bash
491
471
  pip install llms-py --upgrade
492
472
  ```
493
473
 
494
- 2. Using Direct Download
495
-
496
- ```bash
497
- # Update to latest version (Downloads latest llms.py)
498
- llms --update
499
- ```
500
-
501
474
  ### Advanced Options
502
475
 
503
476
  ```bash
@@ -596,7 +569,7 @@ llms --update
596
569
  ```
597
570
 
598
571
  This command:
599
- - Downloads the latest `llms.py` from `https://raw.githubusercontent.com/ServiceStack/llms/refs/heads/main/llms.py`
572
+ - Downloads the latest `llms.py` from `github.com/ServiceStack/llms/blob/main/llms/main.py`
600
573
  - Overwrites your current `llms.py` file with the latest version
601
574
  - Preserves your existing configuration file (`llms.json`)
602
575
  - Requires an internet connection to download the update
@@ -633,7 +606,7 @@ or directly in your `llms.json`.
633
606
 
634
607
  | Provider | Variable | Description | Example |
635
608
  |-----------------|---------------------------|---------------------|---------|
636
- | openrouter_free | `OPENROUTER_FREE_API_KEY` | OpenRouter FREE models API key | `sk-or-...` |
609
+ | openrouter_free | `OPENROUTER_API_KEY` | OpenRouter FREE models API key | `sk-or-...` |
637
610
  | groq | `GROQ_API_KEY` | Groq API key | `gsk_...` |
638
611
  | google_free | `GOOGLE_FREE_API_KEY` | Google FREE API key | `AIza...` |
639
612
  | codestral | `CODESTRAL_API_KEY` | Codestral API key | `...` |
@@ -2,7 +2,7 @@
2
2
 
3
3
  Lightweight CLI and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers.
4
4
 
5
- Configure additional providers and models in [llms.json](llms.json)
5
+ Configure additional providers and models in [llms.json](llms/llms.json)
6
6
  - Mix and match local models with models from different API providers
7
7
  Requests automatically routed to available providers that support the requested model (in defined order)
8
8
  - Define free/cheapest/local providers first to save on costs
@@ -33,28 +33,10 @@ Read the [Introductory Blog Post](https://servicestack.net/posts/llms-py-ui).
33
33
 
34
34
  ## Installation
35
35
 
36
- ### Option 1: Install from PyPI
37
-
38
36
  ```bash
39
37
  pip install llms-py
40
38
  ```
41
39
 
42
- ### Option 2: Download directly
43
-
44
- 1. Download [llms.py](llms.py)
45
-
46
- ```bash
47
- curl -O https://raw.githubusercontent.com/ServiceStack/llms/main/llms.py
48
- chmod +x llms.py
49
- mv llms.py ~/.local/bin/llms
50
- ```
51
-
52
- 2. Install single dependency:
53
-
54
- ```bash
55
- pip install aiohttp
56
- ```
57
-
58
40
  ## Quick Start
59
41
 
60
42
  ### 1. Set API Keys
@@ -62,12 +44,12 @@ pip install aiohttp
62
44
  Set environment variables for the providers you want to use:
63
45
 
64
46
  ```bash
65
- export OPENROUTER_FREE_API_KEY="..."
47
+ export OPENROUTER_API_KEY="..."
66
48
  ```
67
49
 
68
50
  | Provider | Variable | Description | Example |
69
51
  |-----------------|---------------------------|---------------------|---------|
70
- | openrouter_free | `OPENROUTER_FREE_API_KEY` | OpenRouter FREE models API key | `sk-or-...` |
52
+ | openrouter_free | `OPENROUTER_API_KEY` | OpenRouter FREE models API key | `sk-or-...` |
71
53
  | groq | `GROQ_API_KEY` | Groq API key | `gsk_...` |
72
54
  | google_free | `GOOGLE_FREE_API_KEY` | Google FREE API key | `AIza...` |
73
55
  | codestral | `CODESTRAL_API_KEY` | Codestral API key | `...` |
@@ -111,7 +93,7 @@ llms "What is the capital of France?"
111
93
 
112
94
  ## Configuration
113
95
 
114
- The configuration file [llms.json](llms.json) is saved to `~/.llms/llms.json` and defines available providers, models, and default settings. Key sections:
96
+ The configuration file [llms.json](llms/llms.json) is saved to `~/.llms/llms.json` and defines available providers, models, and default settings. Key sections:
115
97
 
116
98
  ### Defaults
117
99
  - `headers`: Common HTTP headers for all requests
@@ -153,7 +135,7 @@ llms "Explain quantum computing" --raw
153
135
 
154
136
  ### Using a Chat Template
155
137
 
156
- By default llms uses the `defaults/text` chat completion request defined in [llms.json](llms.json).
138
+ By default llms uses the `defaults/text` chat completion request defined in [llms.json](llms/llms.json).
157
139
 
158
140
  You can instead use a custom chat completion request with `--chat`, e.g:
159
141
 
@@ -445,19 +427,10 @@ llms --default grok-4
445
427
 
446
428
  ### Update
447
429
 
448
- 1. Installed from PyPI
449
-
450
430
  ```bash
451
431
  pip install llms-py --upgrade
452
432
  ```
453
433
 
454
- 2. Using Direct Download
455
-
456
- ```bash
457
- # Update to latest version (Downloads latest llms.py)
458
- llms --update
459
- ```
460
-
461
434
  ### Advanced Options
462
435
 
463
436
  ```bash
@@ -556,7 +529,7 @@ llms --update
556
529
  ```
557
530
 
558
531
  This command:
559
- - Downloads the latest `llms.py` from `https://raw.githubusercontent.com/ServiceStack/llms/refs/heads/main/llms.py`
532
+ - Downloads the latest `llms.py` from `github.com/ServiceStack/llms/blob/main/llms/main.py`
560
533
  - Overwrites your current `llms.py` file with the latest version
561
534
  - Preserves your existing configuration file (`llms.json`)
562
535
  - Requires an internet connection to download the update
@@ -593,7 +566,7 @@ or directly in your `llms.json`.
593
566
 
594
567
  | Provider | Variable | Description | Example |
595
568
  |-----------------|---------------------------|---------------------|---------|
596
- | openrouter_free | `OPENROUTER_FREE_API_KEY` | OpenRouter FREE models API key | `sk-or-...` |
569
+ | openrouter_free | `OPENROUTER_API_KEY` | OpenRouter FREE models API key | `sk-or-...` |
597
570
  | groq | `GROQ_API_KEY` | Groq API key | `gsk_...` |
598
571
  | google_free | `GOOGLE_FREE_API_KEY` | Google FREE API key | `AIza...` |
599
572
  | codestral | `CODESTRAL_API_KEY` | Codestral API key | `...` |
@@ -0,0 +1,2 @@
1
+ # Import the main module content
2
+ from .main import *
@@ -0,0 +1,9 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ Entry point for running llms as a module: python -m llms
4
+ """
5
+
6
+ from .main import main
7
+
8
+ if __name__ == "__main__":
9
+ main()
@@ -29,7 +29,9 @@
29
29
  "@servicestack/vue": "/ui/lib/servicestack-vue.mjs",
30
30
  "idb": "/ui/lib/idb.min.mjs",
31
31
  "marked": "/ui/lib/marked.min.mjs",
32
- "highlight.js": "/ui/lib/highlight.min.mjs"
32
+ "highlight.js": "/ui/lib/highlight.min.mjs",
33
+ "chart.js": "/ui/lib/chart.js",
34
+ "color.js": "/ui/lib/color.js"
33
35
  }
34
36
  }
35
37
  </script>
@@ -47,6 +49,7 @@ import SettingsDialog from '/ui/SettingsDialog.mjs'
47
49
  const { config, models } = await ai.init()
48
50
  const MainComponent = defineAsyncComponent(() => import(ai.base + '/ui/Main.mjs'))
49
51
  const RecentsComponent = defineAsyncComponent(() => import(ai.base + '/ui/Recents.mjs'))
52
+ const AnalyticsComponent = defineAsyncComponent(() => import(ai.base + '/ui/Analytics.mjs'))
50
53
 
51
54
  const Components = {
52
55
  SettingsDialog,
@@ -56,6 +59,7 @@ const routes = [
56
59
  { path: '/', component: MainComponent },
57
60
  { path: '/c/:id', component: MainComponent },
58
61
  { path: '/recents', component: RecentsComponent },
62
+ { path: '/analytics', component: AnalyticsComponent },
59
63
  { path: '/:fallback(.*)*', component: MainComponent }
60
64
  ]
61
65
  routes.forEach(r => r.path = ai.base + r.path)