llms-py 2.0.7__tar.gz → 2.0.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. {llms_py-2.0.7/llms_py.egg-info → llms_py-2.0.8}/PKG-INFO +1 -1
  2. {llms_py-2.0.7 → llms_py-2.0.8}/llms.json +21 -0
  3. {llms_py-2.0.7 → llms_py-2.0.8}/llms.py +8 -2
  4. {llms_py-2.0.7 → llms_py-2.0.8/llms_py.egg-info}/PKG-INFO +1 -1
  5. {llms_py-2.0.7 → llms_py-2.0.8}/pyproject.toml +1 -1
  6. {llms_py-2.0.7 → llms_py-2.0.8}/setup.py +1 -1
  7. {llms_py-2.0.7 → llms_py-2.0.8}/ui/Main.mjs +1 -1
  8. {llms_py-2.0.7 → llms_py-2.0.8}/ui/Recents.mjs +31 -2
  9. {llms_py-2.0.7 → llms_py-2.0.8}/LICENSE +0 -0
  10. {llms_py-2.0.7 → llms_py-2.0.8}/MANIFEST.in +0 -0
  11. {llms_py-2.0.7 → llms_py-2.0.8}/README.md +0 -0
  12. {llms_py-2.0.7 → llms_py-2.0.8}/index.html +0 -0
  13. {llms_py-2.0.7 → llms_py-2.0.8}/llms_py.egg-info/SOURCES.txt +0 -0
  14. {llms_py-2.0.7 → llms_py-2.0.8}/llms_py.egg-info/dependency_links.txt +0 -0
  15. {llms_py-2.0.7 → llms_py-2.0.8}/llms_py.egg-info/entry_points.txt +0 -0
  16. {llms_py-2.0.7 → llms_py-2.0.8}/llms_py.egg-info/not-zip-safe +0 -0
  17. {llms_py-2.0.7 → llms_py-2.0.8}/llms_py.egg-info/requires.txt +0 -0
  18. {llms_py-2.0.7 → llms_py-2.0.8}/llms_py.egg-info/top_level.txt +0 -0
  19. {llms_py-2.0.7 → llms_py-2.0.8}/requirements.txt +0 -0
  20. {llms_py-2.0.7 → llms_py-2.0.8}/setup.cfg +0 -0
  21. {llms_py-2.0.7 → llms_py-2.0.8}/ui/App.mjs +0 -0
  22. {llms_py-2.0.7 → llms_py-2.0.8}/ui/ChatPrompt.mjs +0 -0
  23. {llms_py-2.0.7 → llms_py-2.0.8}/ui/Sidebar.mjs +0 -0
  24. {llms_py-2.0.7 → llms_py-2.0.8}/ui/app.css +0 -0
  25. {llms_py-2.0.7 → llms_py-2.0.8}/ui/fav.svg +0 -0
  26. {llms_py-2.0.7 → llms_py-2.0.8}/ui/lib/highlight.min.mjs +0 -0
  27. {llms_py-2.0.7 → llms_py-2.0.8}/ui/lib/idb.min.mjs +0 -0
  28. {llms_py-2.0.7 → llms_py-2.0.8}/ui/lib/marked.min.mjs +0 -0
  29. {llms_py-2.0.7 → llms_py-2.0.8}/ui/lib/servicestack-client.min.mjs +0 -0
  30. {llms_py-2.0.7 → llms_py-2.0.8}/ui/lib/servicestack-vue.min.mjs +0 -0
  31. {llms_py-2.0.7 → llms_py-2.0.8}/ui/lib/vue-router.min.mjs +0 -0
  32. {llms_py-2.0.7 → llms_py-2.0.8}/ui/lib/vue.min.mjs +0 -0
  33. {llms_py-2.0.7 → llms_py-2.0.8}/ui/lib/vue.mjs +0 -0
  34. {llms_py-2.0.7 → llms_py-2.0.8}/ui/markdown.mjs +0 -0
  35. {llms_py-2.0.7 → llms_py-2.0.8}/ui/tailwind.input.css +0 -0
  36. {llms_py-2.0.7 → llms_py-2.0.8}/ui/threadStore.mjs +0 -0
  37. {llms_py-2.0.7 → llms_py-2.0.8}/ui/typography.css +0 -0
  38. {llms_py-2.0.7 → llms_py-2.0.8}/ui/utils.mjs +0 -0
  39. {llms_py-2.0.7 → llms_py-2.0.8}/ui.json +0 -0
{llms_py-2.0.7/llms_py.egg-info → llms_py-2.0.8}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: llms-py
- Version: 2.0.7
+ Version: 2.0.8
  Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
  Home-page: https://github.com/ServiceStack/llms
  Author: ServiceStack
{llms_py-2.0.7 → llms_py-2.0.8}/llms.json

@@ -193,6 +193,9 @@
  "nova-micro": "amazon/nova-micro-v1",
  "nova-lite": "amazon/nova-lite-v1",
  "nova-pro": "amazon/nova-pro-v1",
+ "claude-opus-4-1": "anthropic/claude-opus-4.1",
+ "claude-sonnet-4-5": "anthropic/claude-sonnet-4.5",
+ "claude-sonnet-4-0": "anthropic/claude-sonnet-4",
  "gpt-5": "openai/gpt-5",
  "gpt-5-chat": "openai/gpt-5-chat",
  "gpt-5-mini": "openai/gpt-5-mini",
@@ -210,11 +213,13 @@
  "grok-4": "x-ai/grok-4",
  "grok-4-fast": "x-ai/grok-4-fast",
  "grok-code-fast-1": "x-ai/grok-code-fast-1",
+ "glm-4.6": "z-ai/glm-4.6",
  "glm-4.5v": "z-ai/glm-4.5v",
  "glm-4.5": "z-ai/glm-4.5",
  "glm-4.5-air": "z-ai/glm-4.5-air",
  "kimi-k2": "moonshotai/kimi-k2",
  "deepseek-v3.1:671b": "deepseek/deepseek-chat",
+ "deepseek-v3.2-exp": "deepseek/deepseek-v3.2-exp",
  "deepseek-chat-v3.1:671b": "deepseek/deepseek-chat-v3.1:free",
  "deepseek-r1:671b": "deepseek/deepseek-r1",
  "deepseek-v3.1-terminus": "deepseek/deepseek-v3.1-terminus",
@@ -263,6 +268,7 @@
  "models": {
  "claude-opus-4-1": "claude-opus-4-1",
  "claude-opus-4": "claude-opus-4",
+ "claude-sonnet-4-5": "claude-sonnet-4-5",
  "claude-sonnet-4-0": "claude-sonnet-4-0",
  "claude-3-7-sonnet": "claude-3-7-sonnet-latest",
  "claude-3-5-haiku": "claude-3-5-haiku-latest",
@@ -385,6 +391,21 @@
  "qwen2.5-omni:7b": "qwen2.5-omni-7b"
  }
  },
+ "z.ai": {
+ "enabled": false,
+ "type": "OpenAiProvider",
+ "base_url": "https://api.z.ai/api/paas/v4",
+ "api_key": "$ZAI_API_KEY",
+ "models": {
+ "glm-4.6": "glm-4.6",
+ "glm-4.5": "glm-4.5",
+ "glm-4.5-air": "glm-4.5-air",
+ "glm-4.5-x": "glm-4.5-x",
+ "glm-4.5-airx": "glm-4.5-airx",
+ "glm-4.5-flash": "glm-4.5-flash",
+ "glm-4:32b": "glm-4-32b-0414-128k"
+ }
+ },
  "mistral": {
  "enabled": false,
  "type": "OpenAiProvider",
{llms_py-2.0.7 → llms_py-2.0.8}/llms.py

@@ -21,7 +21,7 @@ from aiohttp import web
  from pathlib import Path
  from importlib import resources # Py≥3.9 (pip install importlib_resources for 3.7/3.8)

- VERSION = "2.0.7"
+ VERSION = "2.0.8"
  _ROOT = None
  g_config_path = None
  g_ui_path = None
@@ -219,7 +219,13 @@ class OpenAiProvider:
  self.api_key = api_key
  self.models = models

- self.chat_url = f"{base_url}/v1/chat/completions"
+ # check if base_url ends with /v{\d} to handle providers with different versions (e.g. z.ai uses /v4)
+ last_segment = base_url.rsplit('/',1)[1]
+ if last_segment.startswith('v') and last_segment[1:].isdigit():
+     self.chat_url = f"{base_url}/chat/completions"
+ else:
+     self.chat_url = f"{base_url}/v1/chat/completions"
+
  self.headers = kwargs['headers'] if 'headers' in kwargs else {
  "Content-Type": "application/json",
  }
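
The second llms.py hunk changes how the chat completions endpoint is built: when the configured base_url already ends in a version segment such as /v4, the default /v1 is no longer appended. A standalone sketch of that branch for quick verification; the helper name and the non-z.ai example URL are illustrative, not taken from the package.

```python
def resolve_chat_url(base_url: str) -> str:
    # Mirror of the new OpenAiProvider logic: if the last path segment is
    # already a version marker like "v4", reuse it instead of adding "/v1".
    last_segment = base_url.rsplit('/', 1)[-1]
    if last_segment.startswith('v') and last_segment[1:].isdigit():
        return f"{base_url}/chat/completions"
    return f"{base_url}/v1/chat/completions"

assert resolve_chat_url("https://api.z.ai/api/paas/v4") == \
    "https://api.z.ai/api/paas/v4/chat/completions"
assert resolve_chat_url("https://api.example.com") == \
    "https://api.example.com/v1/chat/completions"
```

Reusing an existing /vN segment lets the single OpenAiProvider class serve providers that version their API path differently (z.ai's /v4) without a per-provider URL override.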
{llms_py-2.0.7 → llms_py-2.0.8/llms_py.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: llms-py
- Version: 2.0.7
+ Version: 2.0.8
  Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
  Home-page: https://github.com/ServiceStack/llms
  Author: ServiceStack
{llms_py-2.0.7 → llms_py-2.0.8}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "llms-py"
- version = "2.0.7"
+ version = "2.0.8"
  description = "A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers"
  readme = "README.md"
  license = "BSD-3-Clause"
{llms_py-2.0.7 → llms_py-2.0.8}/setup.py

@@ -16,7 +16,7 @@ with open(os.path.join(this_directory, "requirements.txt"), encoding="utf-8") as

  setup(
  name="llms-py",
- version="2.0.7",
+ version="2.0.8",
  author="ServiceStack",
  author_email="team@servicestack.net",
  description="A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers",
{llms_py-2.0.7 → llms_py-2.0.8}/ui/Main.mjs

@@ -20,7 +20,7 @@ const ProviderStatus = {
  <span class="text-red-700">{{(config.status.disabled||[]).length}}</span>
  </div>
  </button>
- <div v-if="showPopover" ref="popoverRef" class="absolute right-0 mt-2 w-72 max-h-112 overflow-y-auto bg-white border border-gray-200 rounded-md shadow-lg z-10">
+ <div v-if="showPopover" ref="popoverRef" class="absolute right-0 mt-2 w-72 max-h-116 overflow-y-auto bg-white border border-gray-200 rounded-md shadow-lg z-10">
  <div class="divide-y divide-gray-100">
  <div v-for="p in allProviders" :key="p" class="flex items-center justify-between px-3 py-2">
  <label :for="'chk_' + p" class="cursor-pointer text-sm text-gray-900 truncate mr-2" :title="p">{{ p }}</label>
{llms_py-2.0.7 → llms_py-2.0.8}/ui/Recents.mjs

@@ -1,5 +1,5 @@
- import { ref, computed, onMounted, watch, inject } from 'vue'
- import { useRouter } from 'vue-router'
+ import { ref, onMounted, watch, inject } from 'vue'
+ import { useRouter, useRoute } from 'vue-router'
  import { useThreadStore } from './threadStore.mjs'
  import { renderMarkdown } from './markdown.mjs'

@@ -169,7 +169,36 @@ export default {
  </div>
  `,
  setup() {
+ const router = useRouter()
+ const route = useRoute()
  const q = ref('')
+
+ // Initialize search query from URL parameter
+ onMounted(() => {
+     const urlQuery = route.query.q || ''
+     q.value = urlQuery
+ })
+
+ // Watch for changes in the search input and update URL
+ watch(q, (newQuery) => {
+     const currentQuery = route.query.q || ''
+     if (newQuery !== currentQuery) {
+         // Update URL without triggering navigation
+         router.replace({
+             path: route.path,
+             query: newQuery ? { q: newQuery } : {}
+         })
+     }
+ })
+
+ // Watch for URL changes (browser back/forward) and update search input
+ watch(() => route.query.q, (newQuery) => {
+     const urlQuery = newQuery || ''
+     if (q.value !== urlQuery) {
+         q.value = urlQuery
+     }
+ })
+
  return {
  q,
  }