llms-py 2.0.0__tar.gz → 2.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. {llms_py-2.0.0/llms_py.egg-info → llms_py-2.0.1}/PKG-INFO +1 -1
  2. {llms_py-2.0.0 → llms_py-2.0.1}/llms.py +54 -12
  3. {llms_py-2.0.0 → llms_py-2.0.1/llms_py.egg-info}/PKG-INFO +1 -1
  4. {llms_py-2.0.0 → llms_py-2.0.1}/pyproject.toml +1 -1
  5. {llms_py-2.0.0 → llms_py-2.0.1}/setup.py +1 -1
  6. {llms_py-2.0.0 → llms_py-2.0.1}/LICENSE +0 -0
  7. {llms_py-2.0.0 → llms_py-2.0.1}/MANIFEST.in +0 -0
  8. {llms_py-2.0.0 → llms_py-2.0.1}/README.md +0 -0
  9. {llms_py-2.0.0 → llms_py-2.0.1}/index.html +0 -0
  10. {llms_py-2.0.0 → llms_py-2.0.1}/llms.json +0 -0
  11. {llms_py-2.0.0 → llms_py-2.0.1}/llms_py.egg-info/SOURCES.txt +0 -0
  12. {llms_py-2.0.0 → llms_py-2.0.1}/llms_py.egg-info/dependency_links.txt +0 -0
  13. {llms_py-2.0.0 → llms_py-2.0.1}/llms_py.egg-info/entry_points.txt +0 -0
  14. {llms_py-2.0.0 → llms_py-2.0.1}/llms_py.egg-info/not-zip-safe +0 -0
  15. {llms_py-2.0.0 → llms_py-2.0.1}/llms_py.egg-info/requires.txt +0 -0
  16. {llms_py-2.0.0 → llms_py-2.0.1}/llms_py.egg-info/top_level.txt +0 -0
  17. {llms_py-2.0.0 → llms_py-2.0.1}/requirements.txt +0 -0
  18. {llms_py-2.0.0 → llms_py-2.0.1}/setup.cfg +0 -0
  19. {llms_py-2.0.0 → llms_py-2.0.1}/ui/App.mjs +0 -0
  20. {llms_py-2.0.0 → llms_py-2.0.1}/ui/ChatPrompt.mjs +0 -0
  21. {llms_py-2.0.0 → llms_py-2.0.1}/ui/Main.mjs +0 -0
  22. {llms_py-2.0.0 → llms_py-2.0.1}/ui/Recents.mjs +0 -0
  23. {llms_py-2.0.0 → llms_py-2.0.1}/ui/Sidebar.mjs +0 -0
  24. {llms_py-2.0.0 → llms_py-2.0.1}/ui/app.css +0 -0
  25. {llms_py-2.0.0 → llms_py-2.0.1}/ui/fav.svg +0 -0
  26. {llms_py-2.0.0 → llms_py-2.0.1}/ui/lib/highlight.min.mjs +0 -0
  27. {llms_py-2.0.0 → llms_py-2.0.1}/ui/lib/idb.min.mjs +0 -0
  28. {llms_py-2.0.0 → llms_py-2.0.1}/ui/lib/marked.min.mjs +0 -0
  29. {llms_py-2.0.0 → llms_py-2.0.1}/ui/lib/servicestack-client.min.mjs +0 -0
  30. {llms_py-2.0.0 → llms_py-2.0.1}/ui/lib/servicestack-vue.min.mjs +0 -0
  31. {llms_py-2.0.0 → llms_py-2.0.1}/ui/lib/vue-router.min.mjs +0 -0
  32. {llms_py-2.0.0 → llms_py-2.0.1}/ui/lib/vue.min.mjs +0 -0
  33. {llms_py-2.0.0 → llms_py-2.0.1}/ui/lib/vue.mjs +0 -0
  34. {llms_py-2.0.0 → llms_py-2.0.1}/ui/markdown.mjs +0 -0
  35. {llms_py-2.0.0 → llms_py-2.0.1}/ui/tailwind.input.css +0 -0
  36. {llms_py-2.0.0 → llms_py-2.0.1}/ui/threadStore.mjs +0 -0
  37. {llms_py-2.0.0 → llms_py-2.0.1}/ui/typography.css +0 -0
  38. {llms_py-2.0.0 → llms_py-2.0.1}/ui/utils.mjs +0 -0
  39. {llms_py-2.0.0 → llms_py-2.0.1}/ui.json +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: llms-py
3
- Version: 2.0.0
3
+ Version: 2.0.1
4
4
  Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
5
5
  Home-page: https://github.com/ServiceStack/llms
6
6
  Author: ServiceStack
@@ -16,7 +16,10 @@ import traceback
16
16
  import aiohttp
17
17
  from aiohttp import web
18
18
 
19
- VERSION = "2.0.0"
19
+ from pathlib import Path
20
+ from importlib import resources # Py≥3.9 (pip install importlib_resources for 3.7/3.8)
21
+
22
+ VERSION = "2.0.1"
20
23
  g_config_path = None
21
24
  g_ui_path = None
22
25
  g_config = None
@@ -782,6 +785,7 @@ def disable_provider(provider):
782
785
  save_config(g_config)
783
786
  init_llms(g_config)
784
787
 
788
+
785
789
  def main():
786
790
  global g_verbose, g_default_model, g_logprefix, g_config_path, g_ui_path
787
791
 
@@ -822,18 +826,29 @@ def main():
822
826
  if cli_args.config is not None:
823
827
  g_config_path = os.path.join(os.path.dirname(__file__), cli_args.config)
824
828
 
829
+ try:
830
+ from importlib.resources import files
831
+ _ROOT = files("llms-py")
832
+ except ModuleNotFoundError: # package not installed
833
+ # __file__ is *this* module; climb two levels to repo root
834
+ _ROOT = Path(__file__).resolve().parent.parent / "llms"
835
+ _log(f"ROOT: {_ROOT}")
836
+
825
837
  g_config_path = os.path.join(os.path.dirname(__file__), cli_args.config) if cli_args.config else get_config_path()
826
838
  g_ui_path = get_ui_path()
827
839
 
840
+ home_config_path = home_llms_path("llms.json")
841
+ resource_config_path = _ROOT / "llms.json"
842
+ home_ui_path = home_llms_path("ui.json")
843
+ resource_ui_path = _ROOT / "ui.json"
844
+
828
845
  if cli_args.init:
829
- home_config_path = home_llms_path("llms.json")
830
846
  if os.path.exists(home_config_path):
831
847
  print(f"llms.json already exists at {home_config_path}")
832
848
  else:
833
849
  asyncio.run(save_default_config(home_config_path))
834
850
  print(f"Created default config at {home_config_path}")
835
851
 
836
- home_ui_path = home_llms_path("ui.json")
837
852
  if os.path.exists(home_ui_path):
838
853
  print(f"ui.json already exists at {home_ui_path}")
839
854
  else:
@@ -841,9 +856,26 @@ def main():
841
856
  print(f"Created default ui config at {home_ui_path}")
842
857
  exit(0)
843
858
 
844
- if not os.path.exists(g_config_path):
845
- print("Config file not found. Create one with --init or use --config <path>")
846
- exit(1)
859
+ if not g_config_path or not os.path.exists(g_config_path):
860
+ # copy llms.json and ui.json to llms_home
861
+ if not os.path.exists(home_config_path) and os.path.exists(resource_config_path):
862
+ llms_home = os.path.dirname(home_config_path)
863
+ os.makedirs(llms_home, exist_ok=True)
864
+ with open(resource_config_path, "r") as f:
865
+ config_json = f.read()
866
+ with open(home_config_path, "w") as f:
867
+ f.write(config_json)
868
+ _log(f"Created default config at {home_config_path}")
869
+
870
+ if not os.path.exists(home_ui_path) and os.path.exists(resource_ui_path):
871
+ with open(resource_ui_path, "r") as f:
872
+ ui_json = f.read()
873
+ with open(home_ui_path, "w") as f:
874
+ f.write(ui_json)
875
+ _log(f"Created default ui config at {home_ui_path}")
876
+ else:
877
+ print("Config file not found. Create one with --init or use --config <path>")
878
+ exit(1)
847
879
 
848
880
  # read contents
849
881
  with open(g_config_path, "r") as f:
@@ -923,18 +955,28 @@ def main():
923
955
  })
924
956
  app.router.add_post('/providers/{provider}', provider_handler)
925
957
 
926
- # Serve static files from ui/ directory
927
- script_dir = os.path.dirname(os.path.abspath(__file__))
928
- ui_path = os.path.join(script_dir, 'ui')
929
- if os.path.exists(ui_path):
930
- app.router.add_static('/ui/', ui_path, name='ui')
958
+ async def ui_static(request: web.Request) -> web.Response:
959
+ path = Path(request.match_info["path"])
960
+ resource = _ROOT / "ui" / path
961
+ if not resource.is_file():
962
+ raise web.HTTPNotFound
963
+ try:
964
+ resource.relative_to(Path(_ROOT)) # basic directory-traversal guard
965
+ except ValueError:
966
+ raise web.HTTPBadRequest(text="Invalid path")
967
+ content_type, _ = mimetypes.guess_type(resource.name)
968
+ if content_type is None:
969
+ content_type = "application/octet-stream"
970
+ return web.Response(body=resource.read_bytes(), content_type=content_type)
971
+
972
+ app.router.add_get("/ui/{path:.*}", ui_static, name="ui_static")
931
973
 
932
974
  async def not_found_handler(request):
933
975
  return web.Response(text="404: Not Found", status=404)
934
976
  app.router.add_get('/favicon.ico', not_found_handler)
935
977
 
936
978
  # Serve index.html from root
937
- index_path = os.path.join(script_dir, 'index.html')
979
+ index_path = os.path.join(_ROOT, 'index.html')
938
980
  if os.path.exists(index_path):
939
981
  async def index_handler(request):
940
982
  return web.FileResponse(index_path)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: llms-py
3
- Version: 2.0.0
3
+ Version: 2.0.1
4
4
  Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
5
5
  Home-page: https://github.com/ServiceStack/llms
6
6
  Author: ServiceStack
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "llms-py"
7
- version = "2.0.0"
7
+ version = "2.0.1"
8
8
  description = "A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers"
9
9
  readme = "README.md"
10
10
  license = "BSD-3-Clause"
@@ -16,7 +16,7 @@ with open(os.path.join(this_directory, "requirements.txt"), encoding="utf-8") as
16
16
 
17
17
  setup(
18
18
  name="llms-py",
19
- version="2.0.0",
19
+ version="2.0.1",
20
20
  author="ServiceStack",
21
21
  author_email="team@servicestack.net",
22
22
  description="A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers",
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes