llms-py 3.0.0b9__py3-none-any.whl → 3.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. llms/extensions/app/README.md +20 -0
  2. llms/extensions/app/__init__.py +16 -15
  3. llms/extensions/app/db.py +7 -6
  4. llms/extensions/app/ui/index.mjs +1 -1
  5. llms/extensions/app/ui/threadStore.mjs +21 -17
  6. llms/extensions/core_tools/CALCULATOR.md +32 -0
  7. llms/extensions/core_tools/__init__.py +1 -1
  8. llms/extensions/core_tools/ui/index.mjs +4 -4
  9. llms/extensions/gallery/README.md +61 -0
  10. llms/extensions/gallery/ui/index.mjs +1 -0
  11. llms/extensions/katex/README.md +39 -0
  12. llms/extensions/system_prompts/README.md +22 -0
  13. llms/extensions/system_prompts/ui/index.mjs +21 -26
  14. llms/extensions/system_prompts/ui/prompts.json +5 -5
  15. llms/llms.json +9 -12
  16. llms/main.py +25 -5
  17. llms/providers.json +1 -1
  18. llms/ui/ai.mjs +20 -5
  19. llms/ui/ctx.mjs +25 -1
  20. llms/ui/modules/chat/ChatBody.mjs +43 -27
  21. llms/ui/modules/chat/index.mjs +17 -29
  22. {llms_py-3.0.0b9.dist-info → llms_py-3.0.1.dist-info}/METADATA +1 -1
  23. {llms_py-3.0.0b9.dist-info → llms_py-3.0.1.dist-info}/RECORD +27 -53
  24. llms/__pycache__/__init__.cpython-312.pyc +0 -0
  25. llms/__pycache__/__init__.cpython-313.pyc +0 -0
  26. llms/__pycache__/__init__.cpython-314.pyc +0 -0
  27. llms/__pycache__/__main__.cpython-312.pyc +0 -0
  28. llms/__pycache__/__main__.cpython-314.pyc +0 -0
  29. llms/__pycache__/llms.cpython-312.pyc +0 -0
  30. llms/__pycache__/main.cpython-312.pyc +0 -0
  31. llms/__pycache__/main.cpython-313.pyc +0 -0
  32. llms/__pycache__/main.cpython-314.pyc +0 -0
  33. llms/__pycache__/plugins.cpython-314.pyc +0 -0
  34. llms/extensions/app/__pycache__/__init__.cpython-314.pyc +0 -0
  35. llms/extensions/app/__pycache__/db.cpython-314.pyc +0 -0
  36. llms/extensions/app/__pycache__/db_manager.cpython-314.pyc +0 -0
  37. llms/extensions/app/requests.json +0 -9073
  38. llms/extensions/app/threads.json +0 -15290
  39. llms/extensions/core_tools/__pycache__/__init__.cpython-314.pyc +0 -0
  40. llms/extensions/core_tools/ui/codemirror/lib/codemirror.css +0 -344
  41. llms/extensions/core_tools/ui/codemirror/lib/codemirror.js +0 -9884
  42. llms/extensions/gallery/__pycache__/__init__.cpython-314.pyc +0 -0
  43. llms/extensions/gallery/__pycache__/db.cpython-314.pyc +0 -0
  44. llms/extensions/katex/__pycache__/__init__.cpython-314.pyc +0 -0
  45. llms/extensions/providers/__pycache__/__init__.cpython-314.pyc +0 -0
  46. llms/extensions/providers/__pycache__/anthropic.cpython-314.pyc +0 -0
  47. llms/extensions/providers/__pycache__/chutes.cpython-314.pyc +0 -0
  48. llms/extensions/providers/__pycache__/google.cpython-314.pyc +0 -0
  49. llms/extensions/providers/__pycache__/nvidia.cpython-314.pyc +0 -0
  50. llms/extensions/providers/__pycache__/openai.cpython-314.pyc +0 -0
  51. llms/extensions/providers/__pycache__/openrouter.cpython-314.pyc +0 -0
  52. llms/extensions/system_prompts/__pycache__/__init__.cpython-314.pyc +0 -0
  53. llms/extensions/tools/__pycache__/__init__.cpython-314.pyc +0 -0
  54. llms/ui/modules/chat/HomeTools.mjs +0 -12
  55. {llms_py-3.0.0b9.dist-info → llms_py-3.0.1.dist-info}/WHEEL +0 -0
  56. {llms_py-3.0.0b9.dist-info → llms_py-3.0.1.dist-info}/entry_points.txt +0 -0
  57. {llms_py-3.0.0b9.dist-info → llms_py-3.0.1.dist-info}/licenses/LICENSE +0 -0
  58. {llms_py-3.0.0b9.dist-info → llms_py-3.0.1.dist-info}/top_level.txt +0 -0
llms/llms.json CHANGED
@@ -9,14 +9,11 @@
  "restrict_to": "GITHUB_USERS"
  }
  },
- "disable_extensions": [
- "xmas",
- "duckduckgo"
- ],
+ "disable_extensions": [],
  "defaults": {
  "headers": {
  "Content-Type": "application/json",
- "User-Agent": "llmspy.org/1.0"
+ "User-Agent": "llmspy.org/3.0"
  },
  "text": {
  "model": "kimi-k2",
@@ -95,7 +92,7 @@
  ]
  },
  "out:image": {
- "model": "black-forest-labs/flux.1-dev",
+ "model": "gemini-2.5-flash-image",
  "messages": [
  {
  "role": "user",
@@ -202,10 +199,10 @@
  }
  },
  "github-copilot": {
- "enabled": false
+ "enabled": true
  },
  "github-models": {
- "enabled": false,
+ "enabled": true,
  "check": {
  "messages": [
  {
@@ -230,13 +227,13 @@
  "temperature": 1.0
  },
  "ollama": {
- "enabled": true,
+ "enabled": false,
  "id": "ollama",
  "npm": "ollama",
  "api": "http://localhost:11434"
  },
  "lmstudio": {
- "enabled": true,
+ "enabled": false,
  "npm": "lmstudio",
  "api": "http://127.0.0.1:1234/v1",
  "models": {}
@@ -351,7 +348,7 @@
  "enabled": true
  },
  "moonshotai": {
- "enabled": false
+ "enabled": true
  },
  "nvidia": {
  "enabled": true,
@@ -372,7 +369,7 @@
  "enabled": true
  },
  "fireworks-ai": {
- "enabled": false
+ "enabled": true
  },
  "openrouter": {
  "enabled": true,
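
These defaults now ship with several hosted providers (github-copilot, github-models, moonshotai, fireworks-ai) enabled and the local ollama and lmstudio providers disabled, so installs that relied on a local server need to switch them back on. A minimal sketch of re-enabling them in Python, assuming the user config lives at ~/.llms/llms.json and that providers sit under a top-level "providers" key (both the path and the key are assumptions, not confirmed by this diff):

    import json
    from pathlib import Path

    # Assumed config location for llms.py; adjust if your install differs.
    config_path = Path.home() / ".llms" / "llms.json"
    config = json.loads(config_path.read_text())

    # Re-enable the local providers that 3.0.1 disables by default.
    for provider in ("ollama", "lmstudio"):
        config.setdefault("providers", {}).setdefault(provider, {})["enabled"] = True

    config_path.write_text(json.dumps(config, indent=2))
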
llms/main.py CHANGED
@@ -29,7 +29,7 @@ from importlib import resources # Py≥3.9 (pip install importlib_resources fo
  from io import BytesIO
  from pathlib import Path
  from typing import get_type_hints
- from urllib.parse import parse_qs, urlencode
+ from urllib.parse import parse_qs, urlencode, urljoin

  import aiohttp
  from aiohttp import web
@@ -41,7 +41,7 @@ try:
  except ImportError:
  HAS_PIL = False

- VERSION = "3.0.0b9"
+ VERSION = "3.0.1"
  _ROOT = None
  DEBUG = os.getenv("DEBUG") == "1"
  MOCK = os.getenv("MOCK") == "1"
@@ -204,6 +204,12 @@ def id_to_name(id):
  return id.replace("-", " ").title()


+ def pluralize(word, count):
+ if count == 1:
+ return word
+ return word + "s"
+
+
  def get_file_mime_type(filename):
  mime_type, _ = mimetypes.guess_type(filename)
  return mime_type or "application/octet-stream"
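
The new pluralize helper simply appends an "s" when the count is not 1; it is used by the on_chat_tool debug logging added further down. For example:

    pluralize("message", 1)  # -> "message"
    pluralize("message", 3)  # -> "messages"
    pluralize("filter", 0)   # -> "filters"
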
@@ -368,6 +374,9 @@ async def process_chat(chat, provider_id=None):
  raise Exception("No chat provided")
  if "stream" not in chat:
  chat["stream"] = False
+ # Some providers don't support empty tools
+ if "tools" in chat and len(chat["tools"]) == 0:
+ del chat["tools"]
  if "messages" not in chat:
  return chat
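
The new guard strips an empty tools list before the request is dispatched, since some provider APIs reject "tools": []. Applied to a hypothetical payload (the model name is just the text default from llms.json above):

    chat = {
        "model": "kimi-k2",
        "messages": [{"role": "user", "content": "hi"}],
        "tools": [],  # no tools ended up being selected for this request
    }
    if "tools" in chat and len(chat["tools"]) == 0:
        del chat["tools"]
    # chat now has no "tools" key, so the provider never sees an empty tools field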
 
@@ -700,6 +709,7 @@ def save_image_to_cache(base64_data, filename, image_info, ignore_info=False):
  async def response_json(response):
  text = await response.text()
  if response.status >= 400:
+ _dbg(f"HTTP {response.status} {response.reason}: {text}")
  raise HTTPError(response.status, reason=response.reason, body=text, headers=dict(response.headers))
  response.raise_for_status()
  body = json.loads(text)
@@ -1427,6 +1437,8 @@ async def g_chat_completion(chat, context=None):
  current_chat["messages"].append(message)
  tool_history.append(message)

+ await g_app.on_chat_tool(current_chat, context)
+
  for tool_call in tool_calls:
  function_name = tool_call["function"]["name"]
  try:
@@ -1450,8 +1462,7 @@ async def g_chat_completion(chat, context=None):
  current_chat["messages"].append(tool_msg)
  tool_history.append(tool_msg)

- for filter_func in g_app.chat_tool_filters:
- await filter_func(current_chat, context)
+ await g_app.on_chat_tool(current_chat, context)

  if should_cancel_thread(context):
  return
@@ -1606,7 +1617,7 @@ async def cli_chat(chat, tools=None, image=None, audio=None, file=None, args=No
  for file in generated_files:
  if file.startswith("/~cache"):
  print(get_cache_path(file[8:]))
- print(f"http://localhost:8000/{file}")
+ print(urljoin("http://localhost:8000", file))
  else:
  print(file)
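
Switching from the f-string to urljoin fixes the doubled slash in the printed URL: generated cache paths already start with "/", so the old format produced "http://localhost:8000//~cache/...". Standard library behavior, with an illustrative file name:

    from urllib.parse import urljoin

    file = "/~cache/output.png"
    f"http://localhost:8000/{file}"         # -> "http://localhost:8000//~cache/output.png"
    urljoin("http://localhost:8000", file)  # -> "http://localhost:8000/~cache/output.png"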
 
@@ -2443,6 +2454,15 @@ class AppExtensions:
  except Exception as e:
  _err("chat error filter failed", e)

+ async def on_chat_tool(self, chat, context):
+ m_len = len(chat.get("messages", []))
+ t_len = len(self.chat_tool_filters)
+ _dbg(
+ f"on_tool_call for thread {context.get('threadId', None)} with {m_len} {pluralize('message', m_len)}, invoking {t_len} {pluralize('filter', t_len)}:"
+ )
+ for filter_func in self.chat_tool_filters:
+ await filter_func(chat, context)
+
  def exit(self, exit_code=0):
  if len(self.shutdown_handlers) > 0:
  _dbg(f"running {len(self.shutdown_handlers)} shutdown handlers...")
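
Both tool-call sites in g_chat_completion now route through this new AppExtensions.on_chat_tool method, which logs the thread id and message count before awaiting each registered chat tool filter with (chat, context). A minimal sketch of such a filter, assuming registration works by appending to the chat_tool_filters list on the app object (g_app in main.py); the registration step is an assumption, only the call signature is shown in this diff:

    # Hypothetical filter: runs after each round of tool results is appended,
    # before the next completion request is sent.
    async def log_tool_progress(chat, context):
        thread_id = context.get("threadId") if context else None
        print(f"thread {thread_id}: {len(chat.get('messages', []))} messages so far")

    # Assumed registration: on_chat_tool iterates self.chat_tool_filters.
    g_app.chat_tool_filters.append(log_tool_progress)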