llms-py 2.0.18__tar.gz → 2.0.20__tar.gz
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- {llms_py-2.0.18/llms_py.egg-info → llms_py-2.0.20}/PKG-INFO +14 -5
- {llms_py-2.0.18 → llms_py-2.0.20}/README.md +13 -4
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/llms.json +0 -1
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/main.py +1 -13
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/ai.mjs +1 -1
- {llms_py-2.0.18 → llms_py-2.0.20/llms_py.egg-info}/PKG-INFO +14 -5
- {llms_py-2.0.18 → llms_py-2.0.20}/llms_py.egg-info/SOURCES.txt +0 -20
- {llms_py-2.0.18 → llms_py-2.0.20}/pyproject.toml +1 -1
- {llms_py-2.0.18 → llms_py-2.0.20}/setup.py +1 -1
- llms_py-2.0.18/llms/__pycache__/__init__.cpython-312.pyc +0 -0
- llms_py-2.0.18/llms/__pycache__/__init__.cpython-313.pyc +0 -0
- llms_py-2.0.18/llms/__pycache__/__init__.cpython-314.pyc +0 -0
- llms_py-2.0.18/llms/__pycache__/__main__.cpython-312.pyc +0 -0
- llms_py-2.0.18/llms/__pycache__/__main__.cpython-314.pyc +0 -0
- llms_py-2.0.18/llms/__pycache__/llms.cpython-312.pyc +0 -0
- llms_py-2.0.18/llms/__pycache__/main.cpython-312.pyc +0 -0
- llms_py-2.0.18/llms/__pycache__/main.cpython-313.pyc +0 -0
- llms_py-2.0.18/llms/__pycache__/main.cpython-314.pyc +0 -0
- llms_py-2.0.18/llms/ui/lib/chart.js +0 -14
- llms_py-2.0.18/llms/ui/lib/charts.mjs +0 -20
- llms_py-2.0.18/llms/ui/lib/color.js +0 -14
- llms_py-2.0.18/llms/ui/lib/highlight.min.mjs +0 -1243
- llms_py-2.0.18/llms/ui/lib/idb.min.mjs +0 -8
- llms_py-2.0.18/llms/ui/lib/marked.min.mjs +0 -8
- llms_py-2.0.18/llms/ui/lib/servicestack-client.mjs +0 -1
- llms_py-2.0.18/llms/ui/lib/servicestack-vue.mjs +0 -37
- llms_py-2.0.18/llms/ui/lib/vue-router.min.mjs +0 -6
- llms_py-2.0.18/llms/ui/lib/vue.min.mjs +0 -12
- llms_py-2.0.18/llms/ui/lib/vue.mjs +0 -18369
- {llms_py-2.0.18 → llms_py-2.0.20}/LICENSE +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/MANIFEST.in +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/__init__.py +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/__main__.py +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/index.html +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/Analytics.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/App.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/Avatar.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/Brand.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/ChatPrompt.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/Main.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/ModelSelector.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/ProviderIcon.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/ProviderStatus.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/Recents.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/SettingsDialog.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/Sidebar.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/SignIn.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/SystemPromptEditor.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/SystemPromptSelector.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/Welcome.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/app.css +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/fav.svg +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/markdown.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/tailwind.input.css +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/threadStore.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/typography.css +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui/utils.mjs +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms/ui.json +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms_py.egg-info/dependency_links.txt +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms_py.egg-info/entry_points.txt +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms_py.egg-info/not-zip-safe +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms_py.egg-info/requires.txt +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/llms_py.egg-info/top_level.txt +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/requirements.txt +0 -0
- {llms_py-2.0.18 → llms_py-2.0.20}/setup.cfg +0 -0
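In summary, 2.0.20 is a small maintenance release: the version string is bumped from 2.0.18 to 2.0.20 across PKG-INFO, main.py, pyproject.toml, and setup.py; the README/PKG-INFO description is expanded with the Built-in Analytics feature and analytics screenshots; the `claude-opus-4-1` model alias is dropped from llms.json; the self-update mechanism (the `--update` flag and `update_llms()`) is removed from main.py; and the sdist no longer ships stray `__pycache__/*.pyc` files or the bundled `llms/ui/lib/*` JavaScript assets.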
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: llms-py
-Version: 2.0.18
+Version: 2.0.20
 Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
 Home-page: https://github.com/ServiceStack/llms
 Author: ServiceStack
@@ -40,7 +40,7 @@ Dynamic: requires-python
 
 # llms.py
 
-Lightweight CLI and
+Lightweight CLI, API and ChatGPT-like alternative to Open WebUI for accessing multiple LLMs, entirely offline, with all data kept private in browser storage.
 
 Configure additional providers and models in [llms.json](llms/llms.json)
 - Mix and match local models with models from different API providers
@@ -53,6 +53,7 @@ Configure additional providers and models in [llms.json](llms/llms.json)
 - **Lightweight**: Single [llms.py](llms.py) Python file with single `aiohttp` dependency
 - **Multi-Provider Support**: OpenRouter, Ollama, Anthropic, Google, OpenAI, Grok, Groq, Qwen, Z.ai, Mistral
 - **OpenAI-Compatible API**: Works with any client that supports OpenAI's chat completion API
+- **Built-in Analytics**: Built-in analytics UI to visualize costs, requests, and token usage
 - **Configuration Management**: Easy provider enable/disable and configuration management
 - **CLI Interface**: Simple command-line interface for quick interactions
 - **Server Mode**: Run an OpenAI-compatible HTTP server at `http://localhost:{PORT}/v1/chat/completions`
@@ -65,11 +66,19 @@ Configure additional providers and models in [llms.json](llms/llms.json)
 
 ## llms.py UI
 
-
+Access all your local all remote LLMs with a single ChatGPT-like UI:
 
 [![llms.py UI](https://servicestack.net/img/posts/llms-py-ui/llms-py.webp)](https://servicestack.net/posts/llms-py-ui)
 
-
+**Monthly Costs Analysis**
+
+[![Monthly Costs Analysis](https://servicestack.net/img/posts/llms-py-ui/llms-py-analytics.webp)](https://servicestack.net/posts/llms-py-ui)
+
+**Monthly Activity Log**
+
+[![Monthly Activity Log](https://servicestack.net/img/posts/llms-py-ui/llms-py-analytics2.webp)](https://servicestack.net/posts/llms-py-ui)
+
+[More Features and Screenshots](https://servicestack.net/posts/llms-py-ui).
 
 ## Installation
 
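The feature list above advertises an OpenAI-compatible endpoint at `/v1/chat/completions`. As a minimal sketch of what that compatibility means — assuming a llms.py server listening on localhost port 8000 (the port and the use of the `gpt-5` alias from llms.json are illustrative assumptions, not taken from this diff) — any OpenAI-style chat payload should work:

```python
# Illustrative client only; the endpoint path comes from the README, while
# the port (8000) and model alias ("gpt-5") are assumptions for this sketch.
import asyncio
import aiohttp

async def main():
    payload = {
        "model": "gpt-5",  # any alias configured in llms.json
        "messages": [{"role": "user", "content": "Hello!"}],
    }
    async with aiohttp.ClientSession() as session:
        # POST an OpenAI-style chat completion request to the local server
        async with session.post(
            "http://localhost:8000/v1/chat/completions", json=payload
        ) as resp:
            data = await resp.json()
            print(data["choices"][0]["message"]["content"])

asyncio.run(main())
```

Because the response mirrors OpenAI's chat completion schema, existing OpenAI SDK clients pointed at the local base URL should also work unchanged.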
README.md

@@ -1,6 +1,6 @@
 # llms.py
 
-Lightweight CLI and
+Lightweight CLI, API and ChatGPT-like alternative to Open WebUI for accessing multiple LLMs, entirely offline, with all data kept private in browser storage.
 
 Configure additional providers and models in [llms.json](llms/llms.json)
 - Mix and match local models with models from different API providers
@@ -13,6 +13,7 @@ Configure additional providers and models in [llms.json](llms/llms.json)
 - **Lightweight**: Single [llms.py](llms.py) Python file with single `aiohttp` dependency
 - **Multi-Provider Support**: OpenRouter, Ollama, Anthropic, Google, OpenAI, Grok, Groq, Qwen, Z.ai, Mistral
 - **OpenAI-Compatible API**: Works with any client that supports OpenAI's chat completion API
+- **Built-in Analytics**: Built-in analytics UI to visualize costs, requests, and token usage
 - **Configuration Management**: Easy provider enable/disable and configuration management
 - **CLI Interface**: Simple command-line interface for quick interactions
 - **Server Mode**: Run an OpenAI-compatible HTTP server at `http://localhost:{PORT}/v1/chat/completions`
@@ -25,11 +26,19 @@ Configure additional providers and models in [llms.json](llms/llms.json)
 
 ## llms.py UI
 
-
+Access all your local all remote LLMs with a single ChatGPT-like UI:
 
 [![llms.py UI](https://servicestack.net/img/posts/llms-py-ui/llms-py.webp)](https://servicestack.net/posts/llms-py-ui)
 
-
+**Monthly Costs Analysis**
+
+[![Monthly Costs Analysis](https://servicestack.net/img/posts/llms-py-ui/llms-py-analytics.webp)](https://servicestack.net/posts/llms-py-ui)
+
+**Monthly Activity Log**
+
+[![Monthly Activity Log](https://servicestack.net/img/posts/llms-py-ui/llms-py-analytics2.webp)](https://servicestack.net/posts/llms-py-ui)
+
+[More Features and Screenshots](https://servicestack.net/posts/llms-py-ui).
 
 ## Installation
 
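These README.md hunks are identical to the PKG-INFO hunks above: pyproject.toml declares `readme = "README.md"`, so setuptools embeds the README as the package's long description when it regenerates PKG-INFO, and the same changes appear a third time below in llms_py.egg-info/PKG-INFO.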
llms/llms.json

@@ -261,7 +261,6 @@
     "nova-micro": "amazon/nova-micro-v1",
     "nova-lite": "amazon/nova-lite-v1",
     "nova-pro": "amazon/nova-pro-v1",
-    "claude-opus-4-1": "anthropic/claude-opus-4.1",
     "claude-sonnet-4-5": "anthropic/claude-sonnet-4.5",
     "claude-sonnet-4-0": "anthropic/claude-sonnet-4",
     "gpt-5": "openai/gpt-5",
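Removing `"claude-opus-4-1"` only drops the shortcut name, not access to the model itself; anyone depending on the alias can presumably restore it by re-adding the single mapping `"claude-opus-4-1": "anthropic/claude-opus-4.1"` to the same alias map in a local llms.json, per the README's note on configuring additional providers and models.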
llms/main.py

@@ -22,7 +22,7 @@ from aiohttp import web
 from pathlib import Path
 from importlib import resources # Py≥3.9 (pip install importlib_resources for 3.7/3.8)
 
-VERSION = "2.0.18"
+VERSION = "2.0.20"
 _ROOT = None
 g_config_path = None
 g_ui_path = None
@@ -938,12 +938,6 @@ async def save_default_config(config_path):
     config_json = await save_text(github_url("llms.json"), config_path)
     g_config = json.loads(config_json)
 
-async def update_llms():
-    """
-    Update llms.py from GitHub
-    """
-    await save_text(github_url("llms.py"), __file__)
-
 def provider_status():
     enabled = list(g_handlers.keys())
     disabled = [provider for provider in g_config['providers'].keys() if provider not in enabled]
@@ -1291,7 +1285,6 @@ def main():
     parser.add_argument('--root', default=None, help='Change root directory for UI files', metavar='PATH')
     parser.add_argument('--logprefix', default="", help='Prefix used in log messages', metavar='PREFIX')
     parser.add_argument('--verbose', action='store_true', help='Verbose output')
-    parser.add_argument('--update', action='store_true', help='Update to latest version')
 
     cli_args, extra_args = parser.parse_known_args()
     if cli_args.verbose:
@@ -1617,11 +1610,6 @@ def main():
         print(f"\nDefault model set to: {default_model}")
         exit(0)
 
-    if cli_args.update:
-        asyncio.run(update_llms())
-        print(f"{__file__} updated")
-        exit(0)
-
     if cli_args.chat is not None or cli_args.image is not None or cli_args.audio is not None or cli_args.file is not None or len(extra_args) > 0:
        try:
            chat = g_config['defaults']['text']
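With `update_llms()` and the `--update` flag gone, llms.py no longer overwrites its own source file with the copy on GitHub — a mechanism that sits awkwardly with pip-managed installs, where upgrades should go through `pip install --upgrade llms-py`. A stdlib-only sketch of what a replacement check could look like (a hypothetical helper, not part of llms.py; it assumes only PyPI's standard JSON endpoint):

```python
# Hypothetical stand-in for the removed --update flag: ask PyPI whether a
# newer llms-py release exists and point at pip, instead of overwriting the
# installed source from GitHub. Stdlib only; not part of llms.py itself.
import json
import urllib.request

VERSION = "2.0.20"  # mirrors the constant bumped in this release

def check_for_update() -> None:
    with urllib.request.urlopen("https://pypi.org/pypi/llms-py/json") as resp:
        latest = json.load(resp)["info"]["version"]
    if latest != VERSION:
        print(f"llms-py {latest} is available: pip install --upgrade llms-py")
    else:
        print("llms-py is up to date")

if __name__ == "__main__":
    check_for_update()
```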
llms_py.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: llms-py
-Version: 2.0.18
+Version: 2.0.20
 Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
 Home-page: https://github.com/ServiceStack/llms
 Author: ServiceStack
@@ -40,7 +40,7 @@ Dynamic: requires-python
 
 # llms.py
 
-Lightweight CLI and
+Lightweight CLI, API and ChatGPT-like alternative to Open WebUI for accessing multiple LLMs, entirely offline, with all data kept private in browser storage.
 
 Configure additional providers and models in [llms.json](llms/llms.json)
 - Mix and match local models with models from different API providers
@@ -53,6 +53,7 @@ Configure additional providers and models in [llms.json](llms/llms.json)
 - **Lightweight**: Single [llms.py](llms.py) Python file with single `aiohttp` dependency
 - **Multi-Provider Support**: OpenRouter, Ollama, Anthropic, Google, OpenAI, Grok, Groq, Qwen, Z.ai, Mistral
 - **OpenAI-Compatible API**: Works with any client that supports OpenAI's chat completion API
+- **Built-in Analytics**: Built-in analytics UI to visualize costs, requests, and token usage
 - **Configuration Management**: Easy provider enable/disable and configuration management
 - **CLI Interface**: Simple command-line interface for quick interactions
 - **Server Mode**: Run an OpenAI-compatible HTTP server at `http://localhost:{PORT}/v1/chat/completions`
@@ -65,11 +66,19 @@ Configure additional providers and models in [llms.json](llms/llms.json)
 
 ## llms.py UI
 
-
+Access all your local all remote LLMs with a single ChatGPT-like UI:
 
 [![llms.py UI](https://servicestack.net/img/posts/llms-py-ui/llms-py.webp)](https://servicestack.net/posts/llms-py-ui)
 
-
+**Monthly Costs Analysis**
+
+[![Monthly Costs Analysis](https://servicestack.net/img/posts/llms-py-ui/llms-py-analytics.webp)](https://servicestack.net/posts/llms-py-ui)
+
+**Monthly Activity Log**
+
+[![Monthly Activity Log](https://servicestack.net/img/posts/llms-py-ui/llms-py-analytics2.webp)](https://servicestack.net/posts/llms-py-ui)
+
+[More Features and Screenshots](https://servicestack.net/posts/llms-py-ui).
 
 ## Installation
 
llms_py.egg-info/SOURCES.txt

@@ -10,15 +10,6 @@ llms/index.html
 llms/llms.json
 llms/main.py
 llms/ui.json
-llms/__pycache__/__init__.cpython-312.pyc
-llms/__pycache__/__init__.cpython-313.pyc
-llms/__pycache__/__init__.cpython-314.pyc
-llms/__pycache__/__main__.cpython-312.pyc
-llms/__pycache__/__main__.cpython-314.pyc
-llms/__pycache__/llms.cpython-312.pyc
-llms/__pycache__/main.cpython-312.pyc
-llms/__pycache__/main.cpython-313.pyc
-llms/__pycache__/main.cpython-314.pyc
 llms/ui/Analytics.mjs
 llms/ui/App.mjs
 llms/ui/Avatar.mjs
@@ -43,17 +34,6 @@ llms/ui/tailwind.input.css
 llms/ui/threadStore.mjs
 llms/ui/typography.css
 llms/ui/utils.mjs
-llms/ui/lib/chart.js
-llms/ui/lib/charts.mjs
-llms/ui/lib/color.js
-llms/ui/lib/highlight.min.mjs
-llms/ui/lib/idb.min.mjs
-llms/ui/lib/marked.min.mjs
-llms/ui/lib/servicestack-client.mjs
-llms/ui/lib/servicestack-vue.mjs
-llms/ui/lib/vue-router.min.mjs
-llms/ui/lib/vue.min.mjs
-llms/ui/lib/vue.mjs
 llms_py.egg-info/PKG-INFO
 llms_py.egg-info/SOURCES.txt
 llms_py.egg-info/dependency_links.txt
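Dropping the `__pycache__/*.pyc` entries fixes a common packaging slip: compiled bytecode had been swept into the sdist manifest alongside the sources. A typical guard against regressions is a `global-exclude *.py[cod]` rule in MANIFEST.in, though this diff shows only the entries being removed, not how the fix was made. The `llms/ui/lib/*` removals likewise confirm the bundled third-party JavaScript (Vue, vue-router, marked, highlight.js, chart.js, idb, and the servicestack libraries) is no longer packaged in the sdist.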
pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "llms-py"
-version = "2.0.18"
+version = "2.0.20"
 description = "A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers"
 readme = "README.md"
 license = "BSD-3-Clause"
setup.py

@@ -16,7 +16,7 @@ with open(os.path.join(this_directory, "requirements.txt"), encoding="utf-8") as
 
 setup(
     name="llms-py",
-    version="2.0.18",
+    version="2.0.20",
     author="ServiceStack",
     author_email="team@servicestack.net",
     description="A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers",
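This release bumps the same version string in four places — PKG-INFO is generated, but `VERSION` in main.py, `version` in pyproject.toml, and `version=` in setup.py are each edited by hand — which invites drift between them. One common remedy, shown here as a hypothetical sketch rather than anything this package does, is to single-source the version by parsing the `VERSION` constant out of llms/main.py at build time:

```python
# Hypothetical single-sourcing helper for setup.py: read VERSION from
# llms/main.py so package metadata can never disagree with the runtime constant.
import re
from pathlib import Path

def read_version() -> str:
    text = Path("llms/main.py").read_text(encoding="utf-8")
    # Match the module-level constant, e.g. VERSION = "2.0.20"
    match = re.search(r'^VERSION = "([^"]+)"', text, flags=re.M)
    if not match:
        raise RuntimeError("VERSION not found in llms/main.py")
    return match.group(1)

# setup(..., version=read_version(), ...) would then replace the literal.
```

setuptools offers the same idea declaratively via `dynamic = ["version"]` with an `attr:` lookup in pyproject.toml.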
The remaining nine entries are the deleted `llms/__pycache__/*.pyc` files listed at the top, which are binary, so no textual diff is shown for them.