llms-py 2.0.13__tar.gz → 2.0.14__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. {llms_py-2.0.13/llms_py.egg-info → llms_py-2.0.14}/PKG-INFO +1 -1
  2. {llms_py-2.0.13 → llms_py-2.0.14}/llms.py +16 -16
  3. {llms_py-2.0.13 → llms_py-2.0.14/llms_py.egg-info}/PKG-INFO +1 -1
  4. {llms_py-2.0.13 → llms_py-2.0.14}/pyproject.toml +1 -1
  5. {llms_py-2.0.13 → llms_py-2.0.14}/setup.py +1 -1
  6. {llms_py-2.0.13 → llms_py-2.0.14}/ui/ai.mjs +1 -1
  7. {llms_py-2.0.13 → llms_py-2.0.14}/LICENSE +0 -0
  8. {llms_py-2.0.13 → llms_py-2.0.14}/MANIFEST.in +0 -0
  9. {llms_py-2.0.13 → llms_py-2.0.14}/README.md +0 -0
  10. {llms_py-2.0.13 → llms_py-2.0.14}/index.html +0 -0
  11. {llms_py-2.0.13 → llms_py-2.0.14}/llms.json +0 -0
  12. {llms_py-2.0.13 → llms_py-2.0.14}/llms_py.egg-info/SOURCES.txt +0 -0
  13. {llms_py-2.0.13 → llms_py-2.0.14}/llms_py.egg-info/dependency_links.txt +0 -0
  14. {llms_py-2.0.13 → llms_py-2.0.14}/llms_py.egg-info/entry_points.txt +0 -0
  15. {llms_py-2.0.13 → llms_py-2.0.14}/llms_py.egg-info/not-zip-safe +0 -0
  16. {llms_py-2.0.13 → llms_py-2.0.14}/llms_py.egg-info/requires.txt +0 -0
  17. {llms_py-2.0.13 → llms_py-2.0.14}/llms_py.egg-info/top_level.txt +0 -0
  18. {llms_py-2.0.13 → llms_py-2.0.14}/requirements.txt +0 -0
  19. {llms_py-2.0.13 → llms_py-2.0.14}/setup.cfg +0 -0
  20. {llms_py-2.0.13 → llms_py-2.0.14}/ui/App.mjs +0 -0
  21. {llms_py-2.0.13 → llms_py-2.0.14}/ui/Avatar.mjs +0 -0
  22. {llms_py-2.0.13 → llms_py-2.0.14}/ui/Brand.mjs +0 -0
  23. {llms_py-2.0.13 → llms_py-2.0.14}/ui/ChatPrompt.mjs +0 -0
  24. {llms_py-2.0.13 → llms_py-2.0.14}/ui/Main.mjs +0 -0
  25. {llms_py-2.0.13 → llms_py-2.0.14}/ui/ModelSelector.mjs +0 -0
  26. {llms_py-2.0.13 → llms_py-2.0.14}/ui/ProviderStatus.mjs +0 -0
  27. {llms_py-2.0.13 → llms_py-2.0.14}/ui/Recents.mjs +0 -0
  28. {llms_py-2.0.13 → llms_py-2.0.14}/ui/SettingsDialog.mjs +0 -0
  29. {llms_py-2.0.13 → llms_py-2.0.14}/ui/Sidebar.mjs +0 -0
  30. {llms_py-2.0.13 → llms_py-2.0.14}/ui/SignIn.mjs +0 -0
  31. {llms_py-2.0.13 → llms_py-2.0.14}/ui/SystemPromptEditor.mjs +0 -0
  32. {llms_py-2.0.13 → llms_py-2.0.14}/ui/SystemPromptSelector.mjs +0 -0
  33. {llms_py-2.0.13 → llms_py-2.0.14}/ui/Welcome.mjs +0 -0
  34. {llms_py-2.0.13 → llms_py-2.0.14}/ui/app.css +0 -0
  35. {llms_py-2.0.13 → llms_py-2.0.14}/ui/fav.svg +0 -0
  36. {llms_py-2.0.13 → llms_py-2.0.14}/ui/lib/highlight.min.mjs +0 -0
  37. {llms_py-2.0.13 → llms_py-2.0.14}/ui/lib/idb.min.mjs +0 -0
  38. {llms_py-2.0.13 → llms_py-2.0.14}/ui/lib/marked.min.mjs +0 -0
  39. {llms_py-2.0.13 → llms_py-2.0.14}/ui/lib/servicestack-client.mjs +0 -0
  40. {llms_py-2.0.13 → llms_py-2.0.14}/ui/lib/servicestack-vue.mjs +0 -0
  41. {llms_py-2.0.13 → llms_py-2.0.14}/ui/lib/vue-router.min.mjs +0 -0
  42. {llms_py-2.0.13 → llms_py-2.0.14}/ui/lib/vue.min.mjs +0 -0
  43. {llms_py-2.0.13 → llms_py-2.0.14}/ui/lib/vue.mjs +0 -0
  44. {llms_py-2.0.13 → llms_py-2.0.14}/ui/markdown.mjs +0 -0
  45. {llms_py-2.0.13 → llms_py-2.0.14}/ui/tailwind.input.css +0 -0
  46. {llms_py-2.0.13 → llms_py-2.0.14}/ui/threadStore.mjs +0 -0
  47. {llms_py-2.0.13 → llms_py-2.0.14}/ui/typography.css +0 -0
  48. {llms_py-2.0.13 → llms_py-2.0.14}/ui/utils.mjs +0 -0
  49. {llms_py-2.0.13 → llms_py-2.0.14}/ui.json +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: llms-py
3
- Version: 2.0.13
3
+ Version: 2.0.14
4
4
  Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
5
5
  Home-page: https://github.com/ServiceStack/llms
6
6
  Author: ServiceStack
@@ -22,7 +22,7 @@ from aiohttp import web
22
22
  from pathlib import Path
23
23
  from importlib import resources # Py≥3.9 (pip install importlib_resources for 3.7/3.8)
24
24
 
25
- VERSION = "2.0.13"
25
+ VERSION = "2.0.14"
26
26
  _ROOT = None
27
27
  g_config_path = None
28
28
  g_ui_path = None
@@ -1279,6 +1279,20 @@ def main():
1279
1279
  raise web.HTTPNotFound
1280
1280
 
1281
1281
  app.router.add_get("/ui/{path:.*}", ui_static, name="ui_static")
1282
+
1283
+ async def ui_config_handler(request):
1284
+ with open(g_ui_path, "r") as f:
1285
+ ui = json.load(f)
1286
+ if 'defaults' not in ui:
1287
+ ui['defaults'] = g_config['defaults']
1288
+ enabled, disabled = provider_status()
1289
+ ui['status'] = {
1290
+ "all": list(g_config['providers'].keys()),
1291
+ "enabled": enabled,
1292
+ "disabled": disabled
1293
+ }
1294
+ return web.json_response(ui)
1295
+ app.router.add_get('/config', ui_config_handler)
1282
1296
 
1283
1297
  async def not_found_handler(request):
1284
1298
  return web.Response(text="404: Not Found", status=404)
@@ -1294,23 +1308,9 @@ def main():
1294
1308
 
1295
1309
  # Serve index.html as fallback route (SPA routing)
1296
1310
  app.router.add_route('*', '/{tail:.*}', index_handler)
1297
-
1298
- async def ui_config_handler(request):
1299
- with open(g_ui_path, "r") as f:
1300
- ui = json.load(f)
1301
- if 'defaults' not in ui:
1302
- ui['defaults'] = g_config['defaults']
1303
- enabled, disabled = provider_status()
1304
- ui['status'] = {
1305
- "all": list(g_config['providers'].keys()),
1306
- "enabled": enabled,
1307
- "disabled": disabled
1308
- }
1309
- return web.json_response(ui)
1310
- app.router.add_get('/config', ui_config_handler)
1311
1311
 
1312
1312
  print(f"Starting server on port {port}...")
1313
- web.run_app(app, host='0.0.0.0', port=port)
1313
+ web.run_app(app, host='0.0.0.0', port=port, print=_log)
1314
1314
  exit(0)
1315
1315
 
1316
1316
  if cli_args.enable is not None:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: llms-py
3
- Version: 2.0.13
3
+ Version: 2.0.14
4
4
  Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
5
5
  Home-page: https://github.com/ServiceStack/llms
6
6
  Author: ServiceStack
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "llms-py"
7
- version = "2.0.13"
7
+ version = "2.0.14"
8
8
  description = "A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers"
9
9
  readme = "README.md"
10
10
  license = "BSD-3-Clause"
@@ -16,7 +16,7 @@ with open(os.path.join(this_directory, "requirements.txt"), encoding="utf-8") as
16
16
 
17
17
  setup(
18
18
  name="llms-py",
19
- version="2.0.13",
19
+ version="2.0.14",
20
20
  author="ServiceStack",
21
21
  author_email="team@servicestack.net",
22
22
  description="A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers",
@@ -6,7 +6,7 @@ const headers = { 'Accept': 'application/json' }
6
6
  const prefsKey = 'llms.prefs'
7
7
 
8
8
  export const o = {
9
- version: '2.0.13',
9
+ version: '2.0.14',
10
10
  base,
11
11
  prefsKey,
12
12
  welcome: 'Welcome to llms.py',
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes