llms-py 2.0.17.tar.gz → 2.0.19.tar.gz

This diff shows the content changes between publicly available package versions as released to their public registry, and is provided for informational purposes only.
Files changed (65)
  1. {llms_py-2.0.17/llms_py.egg-info → llms_py-2.0.19}/PKG-INFO +1 -1
  2. {llms_py-2.0.17 → llms_py-2.0.19}/llms/llms.json +19 -21
  3. {llms_py-2.0.17 → llms_py-2.0.19}/llms/main.py +1 -1
  4. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/ai.mjs +1 -1
  5. {llms_py-2.0.17 → llms_py-2.0.19/llms_py.egg-info}/PKG-INFO +1 -1
  6. {llms_py-2.0.17 → llms_py-2.0.19}/llms_py.egg-info/SOURCES.txt +0 -20
  7. {llms_py-2.0.17 → llms_py-2.0.19}/pyproject.toml +1 -1
  8. {llms_py-2.0.17 → llms_py-2.0.19}/setup.py +1 -1
  9. llms_py-2.0.17/llms/__pycache__/__init__.cpython-312.pyc +0 -0
  10. llms_py-2.0.17/llms/__pycache__/__init__.cpython-313.pyc +0 -0
  11. llms_py-2.0.17/llms/__pycache__/__init__.cpython-314.pyc +0 -0
  12. llms_py-2.0.17/llms/__pycache__/__main__.cpython-312.pyc +0 -0
  13. llms_py-2.0.17/llms/__pycache__/__main__.cpython-314.pyc +0 -0
  14. llms_py-2.0.17/llms/__pycache__/llms.cpython-312.pyc +0 -0
  15. llms_py-2.0.17/llms/__pycache__/main.cpython-312.pyc +0 -0
  16. llms_py-2.0.17/llms/__pycache__/main.cpython-313.pyc +0 -0
  17. llms_py-2.0.17/llms/__pycache__/main.cpython-314.pyc +0 -0
  18. llms_py-2.0.17/llms/ui/lib/chart.js +0 -14
  19. llms_py-2.0.17/llms/ui/lib/charts.mjs +0 -20
  20. llms_py-2.0.17/llms/ui/lib/color.js +0 -14
  21. llms_py-2.0.17/llms/ui/lib/highlight.min.mjs +0 -1243
  22. llms_py-2.0.17/llms/ui/lib/idb.min.mjs +0 -8
  23. llms_py-2.0.17/llms/ui/lib/marked.min.mjs +0 -8
  24. llms_py-2.0.17/llms/ui/lib/servicestack-client.mjs +0 -1
  25. llms_py-2.0.17/llms/ui/lib/servicestack-vue.mjs +0 -37
  26. llms_py-2.0.17/llms/ui/lib/vue-router.min.mjs +0 -6
  27. llms_py-2.0.17/llms/ui/lib/vue.min.mjs +0 -12
  28. llms_py-2.0.17/llms/ui/lib/vue.mjs +0 -18369
  29. {llms_py-2.0.17 → llms_py-2.0.19}/LICENSE +0 -0
  30. {llms_py-2.0.17 → llms_py-2.0.19}/MANIFEST.in +0 -0
  31. {llms_py-2.0.17 → llms_py-2.0.19}/README.md +0 -0
  32. {llms_py-2.0.17 → llms_py-2.0.19}/llms/__init__.py +0 -0
  33. {llms_py-2.0.17 → llms_py-2.0.19}/llms/__main__.py +0 -0
  34. {llms_py-2.0.17 → llms_py-2.0.19}/llms/index.html +0 -0
  35. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/Analytics.mjs +0 -0
  36. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/App.mjs +0 -0
  37. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/Avatar.mjs +0 -0
  38. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/Brand.mjs +0 -0
  39. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/ChatPrompt.mjs +0 -0
  40. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/Main.mjs +0 -0
  41. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/ModelSelector.mjs +0 -0
  42. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/ProviderIcon.mjs +0 -0
  43. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/ProviderStatus.mjs +0 -0
  44. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/Recents.mjs +0 -0
  45. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/SettingsDialog.mjs +0 -0
  46. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/Sidebar.mjs +0 -0
  47. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/SignIn.mjs +0 -0
  48. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/SystemPromptEditor.mjs +0 -0
  49. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/SystemPromptSelector.mjs +0 -0
  50. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/Welcome.mjs +0 -0
  51. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/app.css +0 -0
  52. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/fav.svg +0 -0
  53. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/markdown.mjs +0 -0
  54. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/tailwind.input.css +0 -0
  55. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/threadStore.mjs +0 -0
  56. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/typography.css +0 -0
  57. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui/utils.mjs +0 -0
  58. {llms_py-2.0.17 → llms_py-2.0.19}/llms/ui.json +0 -0
  59. {llms_py-2.0.17 → llms_py-2.0.19}/llms_py.egg-info/dependency_links.txt +0 -0
  60. {llms_py-2.0.17 → llms_py-2.0.19}/llms_py.egg-info/entry_points.txt +0 -0
  61. {llms_py-2.0.17 → llms_py-2.0.19}/llms_py.egg-info/not-zip-safe +0 -0
  62. {llms_py-2.0.17 → llms_py-2.0.19}/llms_py.egg-info/requires.txt +0 -0
  63. {llms_py-2.0.17 → llms_py-2.0.19}/llms_py.egg-info/top_level.txt +0 -0
  64. {llms_py-2.0.17 → llms_py-2.0.19}/requirements.txt +0 -0
  65. {llms_py-2.0.17 → llms_py-2.0.19}/setup.cfg +0 -0
--- llms_py-2.0.17/llms_py.egg-info/PKG-INFO
+++ llms_py-2.0.19/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: llms-py
- Version: 2.0.17
+ Version: 2.0.19
  Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
  Home-page: https://github.com/ServiceStack/llms
  Author: ServiceStack
--- llms_py-2.0.17/llms/llms.json
+++ llms_py-2.0.19/llms/llms.json
@@ -93,10 +93,6 @@
  }
  ],
  "max_completion_tokens": 16,
- "reasoning": {
- "max_tokens": 16,
- "reasoning_effort": "low"
- },
  "stream": false
  }
  },
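The block edited above is one of llms.json's "check" entries: an OpenAI-style chat-completion request body (a trivial prompt, a small max_completion_tokens, streaming disabled) that is presumably replayed to confirm a provider responds; 2.0.19 drops the default reasoning settings from it. For reference, a minimal sketch of issuing such a check against any OpenAI-compatible /v1/chat/completions endpoint is shown below; the base URL, API key, and model id are placeholders, not values taken from this diff.

    import json
    import urllib.request

    # A minimal sketch, assuming an OpenAI-compatible /v1/chat/completions
    # endpoint is reachable. BASE_URL, API_KEY and the model id are
    # placeholders, not values taken from this diff.
    BASE_URL = "http://localhost:8000"
    API_KEY = "sk-placeholder"

    check_payload = {
        "model": "claude-sonnet-4-5",
        "messages": [
            {"role": "user", "content": [{"type": "text", "text": "1+1="}]}
        ],
        "max_completion_tokens": 16,
        "stream": False,
    }

    req = urllib.request.Request(
        f"{BASE_URL}/v1/chat/completions",
        data=json.dumps(check_payload).encode("utf-8"),
        headers={
            "Content-Type": "application/json",
            "Authorization": f"Bearer {API_KEY}",
        },
    )
    with urllib.request.urlopen(req) as resp:
        body = json.loads(resp.read())
        print(body["choices"][0]["message"]["content"])

The remaining llms.json hunks below drop gemma2:9b from the Groq model map and claude-opus-4-1 from the Anthropic one, add a reasoning-aware "check" to the Anthropic provider, and remove the provider-specific "check" under openai.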
--- llms_py-2.0.17/llms/llms.json
+++ llms_py-2.0.19/llms/llms.json
@@ -147,7 +143,6 @@
  "api_key": "$GROQ_API_KEY",
  "models": {
  "allam-2-7b": "allam-2-7b",
- "gemma2:9b": "gemma2-9b-it",
  "compound": "groq/compound",
  "compound-mini": "groq/compound-mini",
  "llama3.1:8b": "llama-3.1-8b-instant",
@@ -599,7 +594,6 @@
  "base_url": "https://api.anthropic.com",
  "api_key": "$ANTHROPIC_API_KEY",
  "models": {
- "claude-opus-4-1": "claude-opus-4-1",
  "claude-sonnet-4-5": "claude-sonnet-4-5",
  "claude-sonnet-4-0": "claude-sonnet-4-0",
  "claude-3-7-sonnet": "claude-3-7-sonnet-latest",
@@ -640,6 +634,25 @@
  "input": "0.00000025",
  "output": "0.00000125"
  }
+ },
+ "check": {
+ "messages": [
+ {
+ "role": "user",
+ "content": [
+ {
+ "type": "text",
+ "text": "1+1="
+ }
+ ]
+ }
+ ],
+ "max_completion_tokens": 512,
+ "reasoning": {
+ "max_tokens": 128,
+ "reasoning_effort": "low"
+ },
+ "stream": false
  }
  },
  "openai": {
@@ -735,21 +748,6 @@
  "input": "0.000003",
  "output": "0.000006"
  }
- },
- "check": {
- "messages": [
- {
- "role": "user",
- "content": [
- {
- "type": "text",
- "text": "1+1="
- }
- ]
- }
- ],
- "max_completion_tokens": 16,
- "stream": false
  }
  },
  "grok": {
--- llms_py-2.0.17/llms/main.py
+++ llms_py-2.0.19/llms/main.py
@@ -22,7 +22,7 @@ from aiohttp import web
  from pathlib import Path
  from importlib import resources # Py≥3.9 (pip install importlib_resources for 3.7/3.8)

- VERSION = "2.0.17"
+ VERSION = "2.0.19"
  _ROOT = None
  g_config_path = None
  g_ui_path = None
--- llms_py-2.0.17/llms/ui/ai.mjs
+++ llms_py-2.0.19/llms/ui/ai.mjs
@@ -6,7 +6,7 @@ const headers = { 'Accept': 'application/json' }
  const prefsKey = 'llms.prefs'

  export const o = {
- version: '2.0.17',
+ version: '2.0.19',
  base,
  prefsKey,
  welcome: 'Welcome to llms.py',
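The two hunks above show that the version string is embedded in both the Python server (llms/main.py) and the web UI module (llms/ui/ai.mjs), and that both are bumped in lockstep with the packaging metadata. A minimal sketch of checking that the two strings stay in sync, assuming it is run from the root of a source checkout laid out as in the file list above:

    import re
    from pathlib import Path

    # A minimal sketch, assuming a source checkout of the llms repository.
    # The regexes mirror the VERSION / version lines changed in the hunks above.
    py_src = Path("llms/main.py").read_text(encoding="utf-8")
    js_src = Path("llms/ui/ai.mjs").read_text(encoding="utf-8")

    py_version = re.search(r'VERSION = "([^"]+)"', py_src).group(1)
    js_version = re.search(r"version: '([^']+)'", js_src).group(1)

    assert py_version == js_version, (py_version, js_version)
    print("version strings in sync:", py_version)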
--- llms_py-2.0.17/PKG-INFO
+++ llms_py-2.0.19/llms_py.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: llms-py
- Version: 2.0.17
+ Version: 2.0.19
  Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
  Home-page: https://github.com/ServiceStack/llms
  Author: ServiceStack
--- llms_py-2.0.17/llms_py.egg-info/SOURCES.txt
+++ llms_py-2.0.19/llms_py.egg-info/SOURCES.txt
@@ -10,15 +10,6 @@ llms/index.html
  llms/llms.json
  llms/main.py
  llms/ui.json
- llms/__pycache__/__init__.cpython-312.pyc
- llms/__pycache__/__init__.cpython-313.pyc
- llms/__pycache__/__init__.cpython-314.pyc
- llms/__pycache__/__main__.cpython-312.pyc
- llms/__pycache__/__main__.cpython-314.pyc
- llms/__pycache__/llms.cpython-312.pyc
- llms/__pycache__/main.cpython-312.pyc
- llms/__pycache__/main.cpython-313.pyc
- llms/__pycache__/main.cpython-314.pyc
  llms/ui/Analytics.mjs
  llms/ui/App.mjs
  llms/ui/Avatar.mjs
@@ -43,17 +34,6 @@ llms/ui/tailwind.input.css
  llms/ui/threadStore.mjs
  llms/ui/typography.css
  llms/ui/utils.mjs
- llms/ui/lib/chart.js
- llms/ui/lib/charts.mjs
- llms/ui/lib/color.js
- llms/ui/lib/highlight.min.mjs
- llms/ui/lib/idb.min.mjs
- llms/ui/lib/marked.min.mjs
- llms/ui/lib/servicestack-client.mjs
- llms/ui/lib/servicestack-vue.mjs
- llms/ui/lib/vue-router.min.mjs
- llms/ui/lib/vue.min.mjs
- llms/ui/lib/vue.mjs
  llms_py.egg-info/PKG-INFO
  llms_py.egg-info/SOURCES.txt
  llms_py.egg-info/dependency_links.txt
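With the __pycache__ bytecode and the vendored llms/ui/lib assets removed from SOURCES.txt, they should no longer be packaged into the 2.0.19 sdist. A minimal sketch of verifying that a locally built archive is clean, assuming it was produced with python -m build --sdist (the archive path is illustrative):

    import tarfile

    # A minimal sketch: list the sdist members and flag anything that should
    # no longer be packaged. The archive path is illustrative.
    with tarfile.open("dist/llms_py-2.0.19.tar.gz") as sdist:
        names = sdist.getnames()

    leftovers = [n for n in names if "__pycache__" in n or "/ui/lib/" in n]
    print("clean sdist" if not leftovers else f"unexpected files: {leftovers}")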
--- llms_py-2.0.17/pyproject.toml
+++ llms_py-2.0.19/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "llms-py"
- version = "2.0.17"
+ version = "2.0.19"
  description = "A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers"
  readme = "README.md"
  license = "BSD-3-Clause"
--- llms_py-2.0.17/setup.py
+++ llms_py-2.0.19/setup.py
@@ -16,7 +16,7 @@ with open(os.path.join(this_directory, "requirements.txt"), encoding="utf-8") as

  setup(
  name="llms-py",
- version="2.0.17",
+ version="2.0.19",
  author="ServiceStack",
  author_email="team@servicestack.net",
  description="A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers",
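The remaining hunks only bump packaging metadata, so moving to the new release is an ordinary pip install --upgrade llms-py. A minimal sketch of confirming the installed distribution afterwards, using only the standard library:

    from importlib.metadata import version

    # A minimal sketch: read the installed distribution's metadata and
    # confirm it matches the version bumped throughout this diff.
    installed = version("llms-py")
    assert installed == "2.0.19", installed
    print("llms-py", installed)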