llms-py 3.0.16__py3-none-any.whl → 3.0.17__py3-none-any.whl

This diff compares the contents of two publicly released package versions exactly as they appear in their public registry. It is provided for informational purposes only.
llms/extensions/app/__init__.py CHANGED
@@ -199,7 +199,6 @@ def install(ctx):
         "model": thread.get("model"),
         "messages": thread.get("messages"),
         "modalities": thread.get("modalities"),
-        "systemPrompt": thread.get("systemPrompt"),
         "tools": thread.get("tools"),  # tools request
         "metadata": metadata,
     }
llms/extensions/app/db.py CHANGED
@@ -1,5 +1,6 @@
 import json
 import os
+import time
 from datetime import datetime, timedelta
 from typing import Any, Dict
 
@@ -344,9 +345,12 @@ class AppDB:
         else:
             thread["createdAt"] = now
         thread["updatedAt"] = now
+        initial_timestamp = int(time.time() * 1000) + 1
         if "messages" in thread:
-            for m in thread["messages"]:
+            for idx, m in enumerate(thread["messages"]):
                 self.ctx.cache_message_inline_data(m)
+                if "timestamp" not in m:
+                    m["timestamp"] = initial_timestamp + idx
         return with_user(thread, user=user)
 
     def create_thread(self, thread: Dict[str, Any], user=None):
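
The db.py hunk above backfills a millisecond timestamp onto any stored message that lacks one, using the message's index as an offset so the original ordering is preserved. A minimal standalone sketch of that backfill logic (the thread/message shapes mirror the diff; the helper name backfill_timestamps is illustrative only):

import time

def backfill_timestamps(thread):
    # Base value in milliseconds; the diff adds 1 so backfilled stamps sort after "now".
    initial_timestamp = int(time.time() * 1000) + 1
    for idx, message in enumerate(thread.get("messages", [])):
        # Only messages without an explicit timestamp are touched;
        # the index offset keeps their relative order stable.
        if "timestamp" not in message:
            message["timestamp"] = initial_timestamp + idx
    return thread

thread = {"messages": [{"role": "user", "content": "hi"}, {"role": "assistant", "content": "hello"}]}
backfill_timestamps(thread)  # both messages now carry increasing millisecond timestamps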
llms/extensions/providers/cerebras.py CHANGED
@@ -31,7 +31,6 @@ def install_cerebras(ctx):
                 clean_chat["messages"].append(new_msg)
 
             clean_chat.pop("modalities", None)
-            clean_chat.pop("systemPrompt", None)
             return await super().chat(clean_chat, context)
 
     ctx.add_provider(CerebrasProvider)
llms/main.py CHANGED
@@ -57,7 +57,7 @@ try:
 except ImportError:
     HAS_PIL = False
 
-VERSION = "3.0.16"
+VERSION = "3.0.17"
 _ROOT = None
 DEBUG = os.getenv("DEBUG") == "1"
 MOCK = os.getenv("MOCK") == "1"
@@ -1216,8 +1216,8 @@ class OpenAiCompatible:
     def chat_summary(self, chat):
        return chat_summary(chat)
 
-    def process_chat(self, chat, provider_id=None):
-        return process_chat(chat, provider_id)
+    async def process_chat(self, chat, provider_id=None):
+        return await process_chat(chat, provider_id)
 
     async def chat(self, chat, context=None):
         chat["model"] = self.provider_model(chat["model"]) or chat["model"]
@@ -1272,7 +1272,7 @@ class OpenAiCompatible:
         if self.enable_thinking is not None:
             chat["enable_thinking"] = self.enable_thinking
 
-        chat = await process_chat(chat, provider_id=self.id)
+        chat = await self.process_chat(chat, provider_id=self.id)
         _log(f"POST {self.chat_url}")
         _log(chat_summary(chat))
         # remove metadata if any (conflicts with some providers, e.g. Z.ai)
@@ -1304,6 +1304,15 @@ class GroqProvider(OpenAiCompatible):
         kwargs["api"] = "https://api.groq.com/openai/v1"
         super().__init__(**kwargs)
 
+    async def process_chat(self, chat, provider_id=None):
+        ret = await process_chat(chat, provider_id)
+        chat.pop("modalities", None)  # groq doesn't support modalities
+        messages = chat.get("messages", []).copy()
+        for message in messages:
+            message.pop("timestamp", None)  # groq doesn't support timestamp
+        ret["messages"] = messages
+        return ret
+
 
 class XaiProvider(OpenAiCompatible):
     sdk = "@ai-sdk/xai"
@@ -2974,6 +2983,11 @@ class AppExtensions:
         if "tools" not in current_chat:
             current_chat["tools"] = []
 
+        _dbg(
+            f"create_chat_with_tools: all_tools:{include_all_tools}, only_tools:{only_tools_list}, chat tools: "
+            + str(len(current_chat["tools"]))
+        )
+
         existing_tools = {t["function"]["name"] for t in current_chat["tools"]}
         for tool_def in self.tool_definitions:
             name = tool_def["function"]["name"]
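
In main.py, process_chat becomes an async method on OpenAiCompatible and the base chat() call now routes through self.process_chat(...), so a subclass such as GroqProvider can drop request fields its API rejects (modalities, per-message timestamps) before the request is sent. A minimal sketch of that override pattern (the stub classes BaseProvider and StrictProvider and the example request are illustrative only, not the package's actual classes):

import asyncio

class BaseProvider:
    async def process_chat(self, chat, provider_id=None):
        # Default hook: return the request unchanged.
        return chat

    async def chat(self, chat):
        # The shared request path calls the overridable hook before sending.
        chat = await self.process_chat(chat, provider_id="base")
        return chat  # a real provider would POST this to its API here

class StrictProvider(BaseProvider):
    async def process_chat(self, chat, provider_id=None):
        ret = await super().process_chat(chat, provider_id)
        # Strip fields this provider's API does not accept.
        ret.pop("modalities", None)
        ret["messages"] = [
            {k: v for k, v in m.items() if k != "timestamp"}
            for m in ret.get("messages", [])
        ]
        return ret

req = {"model": "m", "modalities": ["text"], "messages": [{"role": "user", "content": "hi", "timestamp": 1}]}
print(asyncio.run(StrictProvider().chat(req)))  # modalities and timestamps removed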
llms/ui/ai.mjs CHANGED
@@ -6,7 +6,7 @@ const headers = { 'Accept': 'application/json' }
 const prefsKey = 'llms.prefs'
 
 export const o = {
-    version: '3.0.16',
+    version: '3.0.17',
     base,
     prefsKey,
     welcome: 'Welcome to llms.py',
llms_py-3.0.16.dist-info/METADATA → llms_py-3.0.17.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: llms-py
-Version: 3.0.16
+Version: 3.0.17
 Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
 Home-page: https://github.com/ServiceStack/llms
 Author: ServiceStack
llms_py-3.0.16.dist-info/RECORD → llms_py-3.0.17.dist-info/RECORD
@@ -3,13 +3,13 @@ llms/__main__.py,sha256=hrBulHIt3lmPm1BCyAEVtB6DQ0Hvc3gnIddhHCmJasg,151
 llms/db.py,sha256=oozp5I5lECVO8oZEFwcZl3ES5mARqWeR1BkoqG5kSqM,11687
 llms/index.html,sha256=nGk1Djtn9p7l6LuKp4Kg0JIB9fCzxtTWXFfmDb4ggpc,1658
 llms/llms.json,sha256=NEr9kJRkUGZ2YZHbWC-haGPlVVL2Qtnx4kKZENGH1wk,11494
-llms/main.py,sha256=mce_QW7YCiV0oCNOP2GUCyngkIKIsscf_F9_qFX6bRc,182958
+llms/main.py,sha256=GyuQiCJNY23j2GPDR5obucpHENbOLCTyqSE41Hwrvd8,183589
 llms/providers-extra.json,sha256=_6DmGBiQY9LM6_Y0zOiObYn7ba4g3akSNQfmHcYlENc,11101
 llms/providers.json,sha256=yjhDurlwo70xqfV0HNLiZaCpw3WvtIgkjoLahQIKX2w,282530
 llms/extensions/analytics/ui/index.mjs,sha256=m1XwaqYCLwK267JAUCAltkN_nOXep0GxfpvGNS5i4_w,69547
 llms/extensions/app/README.md,sha256=TKoblZpHlheLCh_dfXOxqTc5OvxlgMBa-vKo8Hqb2gg,1370
-llms/extensions/app/__init__.py,sha256=aU8Bfliw--Xj1bsKL3PSoX6MY1ZNgweNyMWS1V_YG4s,20855
-llms/extensions/app/db.py,sha256=DU8YZ25yFsBI-O6msxh2GgzbwaqKqXkAHJLwQKcmFPI,21533
+llms/extensions/app/__init__.py,sha256=5TX1QZ0c4CpTpZQvoBebNLfBToTP_Sdi2tiCvbwFMd4,20799
+llms/extensions/app/db.py,sha256=eVUHkMo5va1pCkd3dP-4_DnkfYGZZ6oxOKOEI8hEKsM,21719
 llms/extensions/app/ui/Recents.mjs,sha256=2ypAKUp9_Oqcive1nUWZ8I2PQTBomBg_Pkjygi4oPgs,9261
 llms/extensions/app/ui/index.mjs,sha256=sB9176LLNuKFsZ28yL-tROA6J4xePNtvxtSrzFcinRo,13271
 llms/extensions/app/ui/threadStore.mjs,sha256=QS6mLqysw9Je_ixUKpbhAELGq-As8aFk6Qm_vO5hvUQ,12515
@@ -138,7 +138,7 @@ llms/extensions/katex/ui/fonts/KaTeX_Typewriter-Regular.woff,sha256=4U_tArGrp86f
 llms/extensions/katex/ui/fonts/KaTeX_Typewriter-Regular.woff2,sha256=cdUX1ngneHz6vfGGkUzDNY7aU543kxlB8rL9SiH2jAs,13568
 llms/extensions/providers/__init__.py,sha256=C5zOBQEOB2L96rAZdjV42fPVk_dZxSh2Dv30Kb1w3lE,534
 llms/extensions/providers/anthropic.py,sha256=ey3G9D3drhjzaNTKC8SS_XVSjdi3K7uqYTskmf26Aic,12011
-llms/extensions/providers/cerebras.py,sha256=HaeFW0GwbD6V6Zrrwqyv78kQb0VXg9oHmykvJfIOOYE,1417
+llms/extensions/providers/cerebras.py,sha256=iKPzsaRIBqQWXsgPQ50jsNPGx8Ud704VVUAXzDBHu7k,1368
 llms/extensions/providers/chutes.py,sha256=5ZrfbqoOhgzKLQy_qULcp4jlvW5WXPR0jP9kN2Jzb9g,6229
 llms/extensions/providers/google.py,sha256=rRmpmtSjTM04mZGNyEV2jcDxdDM99GNj_X68dNT1H20,27719
 llms/extensions/providers/nvidia.py,sha256=C6cwqn3EufYDfRIgbc8MDkQNyD6w3c7hbjfYaHJSDik,4279
@@ -160,7 +160,7 @@ llms/extensions/system_prompts/ui/prompts.json,sha256=t5DD3bird-87wFa4OlW-bC2wdo
 llms/extensions/tools/__init__.py,sha256=PRZe0QMfsOymJ3jTqO0VFppNEWI4f2bYSOImK_YrGQM,2036
 llms/extensions/tools/ui/index.mjs,sha256=4gT0mHKuzcLWe8BmrYeVNS3VMd5Me9CX6Q0A_YLyLck,38633
 llms/ui/App.mjs,sha256=CoUzO9mV__-jV19NKHYIbwHsjWMnO11jyNSbnJhe1gQ,7486
-llms/ui/ai.mjs,sha256=A5fGDgVa7mRC6aagXkKuXrSM_MlgAF1gsDTWHEZk2No,6541
+llms/ui/ai.mjs,sha256=6oL6leYL7INGxJKyjz9RjU8nmUc86qDrigzwsg_cn-c,6541
 llms/ui/app.css,sha256=kKKICFM_85H1BDFdUYJzo93S__1OZcj2cO6zL-4VdJs,209085
 llms/ui/ctx.mjs,sha256=g1mmv87bhKCFyMbAImvX6mArmGliAoGPPupFEaMMf7c,14500
 llms/ui/fav.svg,sha256=_R6MFeXl6wBFT0lqcUxYQIDWgm246YH_3hSTW0oO8qw,734
@@ -186,9 +186,9 @@ llms/ui/modules/model-selector.mjs,sha256=6U4rAZ7vmQELFRQGWk4YEtq02v3lyHdMq6yUOp
 llms/ui/modules/chat/ChatBody.mjs,sha256=Rwyr7JeqBn6LUn-VtHB9qj7kBsLsOr34SbHcK0twIZ0,58118
 llms/ui/modules/chat/SettingsDialog.mjs,sha256=HMBJTwrapKrRIAstIIqp0QlJL5O-ho4hzgvfagPfsX8,19930
 llms/ui/modules/chat/index.mjs,sha256=nS_L6G1RSuCybgnA6n-q8Sn3OeSbQWL2iW3-zCIFqJk,39548
-llms_py-3.0.16.dist-info/licenses/LICENSE,sha256=bus9cuAOWeYqBk2OuhSABVV1P4z7hgrEFISpyda_H5w,1532
-llms_py-3.0.16.dist-info/METADATA,sha256=zP_aLsJ0caYROZZ-ch-oyoI0HlS86R4M-ZvY64_OmAg,2195
-llms_py-3.0.16.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-llms_py-3.0.16.dist-info/entry_points.txt,sha256=WswyE7PfnkZMIxboC-MS6flBD6wm-CYU7JSUnMhqMfM,40
-llms_py-3.0.16.dist-info/top_level.txt,sha256=gC7hk9BKSeog8gyg-EM_g2gxm1mKHwFRfK-10BxOsa4,5
-llms_py-3.0.16.dist-info/RECORD,,
+llms_py-3.0.17.dist-info/licenses/LICENSE,sha256=bus9cuAOWeYqBk2OuhSABVV1P4z7hgrEFISpyda_H5w,1532
+llms_py-3.0.17.dist-info/METADATA,sha256=iwwWkuCRwIXXHAcTzqHyuYW1fNnJezrDjoYE_T3nwHU,2195
+llms_py-3.0.17.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+llms_py-3.0.17.dist-info/entry_points.txt,sha256=WswyE7PfnkZMIxboC-MS6flBD6wm-CYU7JSUnMhqMfM,40
+llms_py-3.0.17.dist-info/top_level.txt,sha256=gC7hk9BKSeog8gyg-EM_g2gxm1mKHwFRfK-10BxOsa4,5
+llms_py-3.0.17.dist-info/RECORD,,