llms-py 3.0.0b7__py3-none-any.whl → 3.0.0b9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llms/__pycache__/main.cpython-314.pyc +0 -0
- llms/extensions/analytics/ui/index.mjs +51 -162
- llms/extensions/app/__init__.py +519 -0
- llms/extensions/app/__pycache__/__init__.cpython-314.pyc +0 -0
- llms/extensions/app/__pycache__/db.cpython-314.pyc +0 -0
- llms/extensions/app/__pycache__/db_manager.cpython-314.pyc +0 -0
- llms/extensions/app/db.py +643 -0
- llms/extensions/app/db_manager.py +195 -0
- llms/extensions/app/requests.json +9073 -0
- llms/extensions/app/threads.json +15290 -0
- llms/{ui/modules/threads → extensions/app/ui}/Recents.mjs +82 -55
- llms/{ui/modules/threads → extensions/app/ui}/index.mjs +78 -9
- llms/extensions/app/ui/threadStore.mjs +407 -0
- llms/extensions/core_tools/__init__.py +272 -32
- llms/extensions/core_tools/__pycache__/__init__.cpython-314.pyc +0 -0
- llms/extensions/core_tools/ui/codemirror/addon/edit/closebrackets.js +201 -0
- llms/extensions/core_tools/ui/codemirror/addon/edit/closetag.js +185 -0
- llms/extensions/core_tools/ui/codemirror/addon/edit/continuelist.js +101 -0
- llms/extensions/core_tools/ui/codemirror/addon/edit/matchbrackets.js +160 -0
- llms/extensions/core_tools/ui/codemirror/addon/edit/matchtags.js +66 -0
- llms/extensions/core_tools/ui/codemirror/addon/edit/trailingspace.js +27 -0
- llms/extensions/core_tools/ui/codemirror/addon/selection/active-line.js +72 -0
- llms/extensions/core_tools/ui/codemirror/addon/selection/mark-selection.js +119 -0
- llms/extensions/core_tools/ui/codemirror/addon/selection/selection-pointer.js +98 -0
- llms/extensions/core_tools/ui/codemirror/doc/docs.css +225 -0
- llms/extensions/core_tools/ui/codemirror/doc/source_sans.woff +0 -0
- llms/extensions/core_tools/ui/codemirror/lib/codemirror.css +344 -0
- llms/extensions/core_tools/ui/codemirror/lib/codemirror.js +9884 -0
- llms/extensions/core_tools/ui/codemirror/mode/clike/clike.js +942 -0
- llms/extensions/core_tools/ui/codemirror/mode/javascript/index.html +118 -0
- llms/extensions/core_tools/ui/codemirror/mode/javascript/javascript.js +962 -0
- llms/extensions/core_tools/ui/codemirror/mode/javascript/typescript.html +62 -0
- llms/extensions/core_tools/ui/codemirror/mode/python/python.js +402 -0
- llms/extensions/core_tools/ui/codemirror/theme/dracula.css +40 -0
- llms/extensions/core_tools/ui/codemirror/theme/mocha.css +135 -0
- llms/extensions/core_tools/ui/index.mjs +650 -0
- llms/extensions/gallery/__pycache__/db.cpython-314.pyc +0 -0
- llms/extensions/gallery/db.py +4 -4
- llms/extensions/gallery/ui/index.mjs +2 -1
- llms/extensions/katex/__init__.py +6 -0
- llms/extensions/katex/__pycache__/__init__.cpython-314.pyc +0 -0
- llms/extensions/katex/ui/README.md +125 -0
- llms/extensions/katex/ui/contrib/auto-render.js +338 -0
- llms/extensions/katex/ui/contrib/auto-render.min.js +1 -0
- llms/extensions/katex/ui/contrib/auto-render.mjs +244 -0
- llms/extensions/katex/ui/contrib/copy-tex.js +127 -0
- llms/extensions/katex/ui/contrib/copy-tex.min.js +1 -0
- llms/extensions/katex/ui/contrib/copy-tex.mjs +105 -0
- llms/extensions/katex/ui/contrib/mathtex-script-type.js +109 -0
- llms/extensions/katex/ui/contrib/mathtex-script-type.min.js +1 -0
- llms/extensions/katex/ui/contrib/mathtex-script-type.mjs +24 -0
- llms/extensions/katex/ui/contrib/mhchem.js +3213 -0
- llms/extensions/katex/ui/contrib/mhchem.min.js +1 -0
- llms/extensions/katex/ui/contrib/mhchem.mjs +3109 -0
- llms/extensions/katex/ui/contrib/render-a11y-string.js +887 -0
- llms/extensions/katex/ui/contrib/render-a11y-string.min.js +1 -0
- llms/extensions/katex/ui/contrib/render-a11y-string.mjs +800 -0
- llms/extensions/katex/ui/fonts/KaTeX_AMS-Regular.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_AMS-Regular.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_AMS-Regular.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Caligraphic-Bold.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Caligraphic-Bold.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Caligraphic-Bold.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Caligraphic-Regular.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Caligraphic-Regular.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Caligraphic-Regular.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Fraktur-Bold.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Fraktur-Bold.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Fraktur-Bold.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Fraktur-Regular.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Fraktur-Regular.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Fraktur-Regular.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Main-Bold.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Main-Bold.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Main-Bold.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Main-BoldItalic.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Main-BoldItalic.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Main-BoldItalic.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Main-Italic.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Main-Italic.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Main-Italic.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Main-Regular.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Main-Regular.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Main-Regular.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Math-BoldItalic.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Math-BoldItalic.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Math-BoldItalic.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Math-Italic.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Math-Italic.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Math-Italic.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_SansSerif-Bold.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_SansSerif-Bold.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_SansSerif-Bold.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_SansSerif-Italic.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_SansSerif-Italic.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_SansSerif-Italic.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_SansSerif-Regular.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_SansSerif-Regular.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_SansSerif-Regular.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Script-Regular.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Script-Regular.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Script-Regular.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Size1-Regular.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Size1-Regular.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Size1-Regular.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Size2-Regular.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Size2-Regular.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Size2-Regular.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Size3-Regular.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Size3-Regular.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Size3-Regular.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Size4-Regular.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Size4-Regular.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Size4-Regular.woff2 +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Typewriter-Regular.ttf +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Typewriter-Regular.woff +0 -0
- llms/extensions/katex/ui/fonts/KaTeX_Typewriter-Regular.woff2 +0 -0
- llms/extensions/katex/ui/index.mjs +92 -0
- llms/extensions/katex/ui/katex-swap.css +1230 -0
- llms/extensions/katex/ui/katex-swap.min.css +1 -0
- llms/extensions/katex/ui/katex.css +1230 -0
- llms/extensions/katex/ui/katex.js +19080 -0
- llms/extensions/katex/ui/katex.min.css +1 -0
- llms/extensions/katex/ui/katex.min.js +1 -0
- llms/extensions/katex/ui/katex.min.mjs +1 -0
- llms/extensions/katex/ui/katex.mjs +18547 -0
- llms/extensions/providers/__pycache__/anthropic.cpython-314.pyc +0 -0
- llms/extensions/providers/anthropic.py +44 -1
- llms/extensions/system_prompts/ui/index.mjs +2 -1
- llms/extensions/tools/__init__.py +5 -0
- llms/extensions/tools/__pycache__/__init__.cpython-314.pyc +0 -0
- llms/extensions/tools/ui/index.mjs +8 -8
- llms/index.html +26 -38
- llms/llms.json +4 -1
- llms/main.py +492 -103
- llms/ui/App.mjs +2 -3
- llms/ui/ai.mjs +29 -13
- llms/ui/app.css +255 -289
- llms/ui/ctx.mjs +84 -6
- llms/ui/index.mjs +4 -6
- llms/ui/lib/vue.min.mjs +10 -9
- llms/ui/lib/vue.mjs +1796 -1635
- llms/ui/markdown.mjs +4 -2
- llms/ui/modules/chat/ChatBody.mjs +90 -86
- llms/ui/modules/chat/HomeTools.mjs +0 -242
- llms/ui/modules/chat/index.mjs +103 -170
- llms/ui/modules/model-selector.mjs +2 -2
- llms/ui/tailwind.input.css +35 -1
- llms/ui/utils.mjs +12 -0
- {llms_py-3.0.0b7.dist-info → llms_py-3.0.0b9.dist-info}/METADATA +1 -1
- llms_py-3.0.0b9.dist-info/RECORD +198 -0
- llms/ui/modules/threads/threadStore.mjs +0 -640
- llms_py-3.0.0b7.dist-info/RECORD +0 -80
- {llms_py-3.0.0b7.dist-info → llms_py-3.0.0b9.dist-info}/WHEEL +0 -0
- {llms_py-3.0.0b7.dist-info → llms_py-3.0.0b9.dist-info}/entry_points.txt +0 -0
- {llms_py-3.0.0b7.dist-info → llms_py-3.0.0b9.dist-info}/licenses/LICENSE +0 -0
- {llms_py-3.0.0b7.dist-info → llms_py-3.0.0b9.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,643 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
import threading
|
|
4
|
+
from datetime import datetime, timedelta
|
|
5
|
+
from typing import Any, Dict
|
|
6
|
+
|
|
7
|
+
from .db_manager import DbManager
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def with_user(data, user):
    """Attach or strip the "user" key on *data* in place and return it.

    When *user* is None any existing "user" entry is removed; otherwise
    the entry is set to *user*. The same dict is returned for chaining.
    """
    if user is not None:
        data["user"] = user
        return data
    data.pop("user", None)
    return data
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def valid_columns(all_columns, fields):
    """Return the subset of *fields* that name known columns.

    *fields* may be a list of names or a comma-separated string; each
    entry is stripped of surrounding whitespace and unknown names are
    dropped. Falsy input yields an empty list.
    """
    if not fields:
        return []
    names = fields if isinstance(fields, list) else fields.split(",")
    return [name for name in (n.strip() for n in names) if name in all_columns]
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def table_columns(all_columns, fields):
    """Render a SQL column list from *fields*, falling back to all columns.

    Unknown names are filtered out first; when nothing valid remains,
    every column in *all_columns* is used instead.
    """
    selected = valid_columns(all_columns, fields)
    if not selected:
        selected = all_columns
    return ", ".join(selected)
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def select_columns(all_columns, fields, select=None):
    """Build the SELECT clause for a query over *all_columns*.

    Passing select="distinct" emits SELECT DISTINCT; any other value
    emits a plain SELECT over the columns chosen by *fields*.
    """
    column_list = table_columns(all_columns, fields)
    keyword = "SELECT DISTINCT" if select == "distinct" else "SELECT"
    return f"{keyword} {column_list}"
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def order_by(all_columns, sort):
    """Build an ORDER BY clause from a comma-separated *sort* spec.

    Each entry names a column, optionally prefixed with "-" for
    descending order (e.g. "-id,createdAt"). Unknown columns are
    ignored. Returns "" when nothing valid remains; otherwise the
    clause carries a trailing space because callers concatenate it
    directly against "LIMIT ...".
    """
    cols = []
    for k in sort.split(","):
        k = k.strip()
        if not k:
            # Skip empty entries ("" input, "a,,b", trailing commas) which
            # previously raised IndexError on k[0] below.
            continue
        by = ""
        if k[0] == "-":
            by = " DESC"
            k = k[1:]
        if k in all_columns:
            cols.append(f"{k}{by}")
    return f"ORDER BY {', '.join(cols)} " if cols else ""
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class AppDB:
|
|
59
|
+
    def __init__(self, ctx, db_path):
        """Open (creating if needed) the app database at *db_path*.

        ctx: application context used for logging (err/log) and inline
            message caching; stored for use by the other methods.
        db_path: filesystem path to the SQLite database file; parent
            directories are created on demand.

        Raises:
            ValueError: when db_path is None.
        """
        if db_path is None:
            raise ValueError("db_path is required")

        self.ctx = ctx
        self.db_path = str(db_path)

        # Ensure the parent directory exists before DbManager opens the file.
        dirname = os.path.dirname(self.db_path)
        if dirname:
            os.makedirs(dirname, exist_ok=True)

        self.db = DbManager(ctx, self.db_path)
        # Logical schema: table -> {column name -> declared SQLite type}.
        # Columns typed "JSON" are serialized with json.dumps before insert
        # (see import_thread / import_request; presumably also db.value —
        # TODO confirm against DbManager).
        self.columns = {
            "thread": {
                "id": "INTEGER",
                "user": "TEXT",
                "createdAt": "TIMESTAMP",
                "updatedAt": "TIMESTAMP",
                "title": "TEXT",
                "systemPrompt": "TEXT",
                "model": "TEXT",
                "modelInfo": "JSON",
                "modalities": "JSON",
                "messages": "JSON",
                "args": "JSON",
                "toolHistory": "JSON",
                "cost": "REAL",
                "inputTokens": "INTEGER",
                "outputTokens": "INTEGER",
                "stats": "JSON",
                "provider": "TEXT",
                "providerModel": "TEXT",
                "publishedAt": "TIMESTAMP",
                "startedAt": "TIMESTAMP",
                "completedAt": "TIMESTAMP",
                "error": "TEXT",
                "ref": "TEXT",
            },
            "request": {
                "id": "INTEGER",
                "user": "TEXT",
                "threadId": "INTEGER",
                "createdAt": "TIMESTAMP",
                "updatedAt": "TIMESTAMP",
                "title": "TEXT",
                "model": "TEXT",
                "duration": "INTEGER",
                "cost": "REAL",
                "inputPrice": "REAL",
                "inputTokens": "INTEGER",
                "inputCachedTokens": "INTEGER",
                "outputPrice": "REAL",
                "outputTokens": "INTEGER",
                "totalTokens": "INTEGER",
                "usage": "JSON",
                "provider": "TEXT",
                "providerModel": "TEXT",
                "providerRef": "TEXT",
                "finishReason": "TEXT",
                "startedAt": "TIMESTAMP",
                "completedAt": "TIMESTAMP",
                "error": "TEXT",
                "stackTrace": "TEXT",
                "ref": "TEXT",
            },
        }
        # Create/migrate the schema eagerly so later reads never see a
        # missing table.
        with self.create_writer_connection() as conn:
            self.init_db(conn)
|
|
127
|
+
|
|
128
|
+
    def get_connection(self):
        """Alias for create_reader_connection (read-only access)."""
        return self.create_reader_connection()

    def create_reader_connection(self):
        """Open a reader connection via the underlying DbManager."""
        return self.db.create_reader_connection()

    def create_writer_connection(self):
        """Open a writer connection via the underlying DbManager."""
        return self.db.create_writer_connection()
|
|
136
|
+
|
|
137
|
+
# Check for missing columns and migrate if necessary
|
|
138
|
+
def add_missing_columns(self, conn, table):
|
|
139
|
+
cur = self.db.exec(conn, f"PRAGMA table_info({table})")
|
|
140
|
+
columns = {row[1] for row in cur.fetchall()}
|
|
141
|
+
|
|
142
|
+
for col, dtype in self.columns[table].items():
|
|
143
|
+
if col not in columns:
|
|
144
|
+
try:
|
|
145
|
+
self.db.exec(conn, f"ALTER TABLE {table} ADD COLUMN {col} {dtype}")
|
|
146
|
+
except Exception as e:
|
|
147
|
+
self.ctx.err(f"adding {table} column {col}", e)
|
|
148
|
+
|
|
149
|
+
    def init_db(self, conn):
        """Create the thread and request tables and their indexes.

        Idempotent: uses CREATE ... IF NOT EXISTS plus add_missing_columns,
        so it is safe to run against an existing database.
        """
        # Create table with all columns
        # Note: default SQLite timestamp has different tz to datetime.now()
        overrides = {
            "id": "INTEGER PRIMARY KEY AUTOINCREMENT",
            "createdAt": "TIMESTAMP DEFAULT CURRENT_TIMESTAMP",
            "updatedAt": "TIMESTAMP DEFAULT CURRENT_TIMESTAMP",
        }
        sql_columns = ",".join([f"{col} {overrides.get(col, dtype)}" for col, dtype in self.columns["thread"].items()])
        self.db.exec(
            conn,
            f"""
            CREATE TABLE IF NOT EXISTS thread (
                {sql_columns}
            )
            """,
        )
        # Upgrade tables created by older versions that lack newer columns.
        self.add_missing_columns(conn, "thread")
        self.db.exec(conn, "CREATE INDEX IF NOT EXISTS idx_thread_user ON thread(user)")
        self.db.exec(conn, "CREATE INDEX IF NOT EXISTS idx_thread_createdat ON thread(createdAt)")
        self.db.exec(conn, "CREATE INDEX IF NOT EXISTS idx_thread_updatedat ON thread(updatedAt)")
        self.db.exec(conn, "CREATE INDEX IF NOT EXISTS idx_thread_model ON thread(model)")
        self.db.exec(conn, "CREATE INDEX IF NOT EXISTS idx_thread_cost ON thread(cost)")

        sql_columns = ",".join([f"{col} {overrides.get(col, dtype)}" for col, dtype in self.columns["request"].items()])
        self.db.exec(
            conn,
            f"""
            CREATE TABLE IF NOT EXISTS request (
                {sql_columns}
            )
            """,
        )
        self.add_missing_columns(conn, "request")
        self.db.exec(conn, "CREATE INDEX IF NOT EXISTS idx_request_user ON request(user)")
        self.db.exec(conn, "CREATE INDEX IF NOT EXISTS idx_request_createdat ON request(createdAt)")
        self.db.exec(conn, "CREATE INDEX IF NOT EXISTS idx_request_cost ON request(cost)")
        self.db.exec(conn, "CREATE INDEX IF NOT EXISTS idx_request_threadid ON request(threadId)")
|
|
187
|
+
|
|
188
|
+
    def import_db(self, threads, requests):
        """Replace the database contents with legacy *threads* and *requests*.

        Drops and recreates both tables, re-inserts every record, and
        remaps each request's threadId from the legacy thread id to the
        freshly assigned AUTOINCREMENT id.
        """
        self.ctx.log("import threads and requests")
        with self.create_writer_connection() as conn:
            conn.execute("DROP TABLE IF EXISTS thread")
            conn.execute("DROP TABLE IF EXISTS request")
            self.init_db(conn)
            # old thread id -> new row id, consumed by import_request below
            thread_id_map = {}
            for thread in threads:
                thread_id = self.import_thread(conn, thread)
                thread_id_map[thread["id"]] = thread_id
            self.ctx.log(f"imported {len(threads)} threads")
            for request in requests:
                self.import_request(conn, request, thread_id_map)
            self.ctx.log(f"imported {len(requests)} requests")
|
|
202
|
+
|
|
203
|
+
def import_date(self, date):
|
|
204
|
+
# "1765794035" or "2025-12-31T05:41:46.686Z" or "2026-01-02 05:00:16"
|
|
205
|
+
str = date or datetime.now().isoformat()
|
|
206
|
+
if isinstance(str, int):
|
|
207
|
+
return datetime.fromtimestamp(str)
|
|
208
|
+
if isinstance(str, float):
|
|
209
|
+
return datetime.fromtimestamp(str)
|
|
210
|
+
return (
|
|
211
|
+
datetime.strptime(str, "%Y-%m-%dT%H:%M:%S.%fZ")
|
|
212
|
+
if "T" in str
|
|
213
|
+
else datetime.strptime(str, "%Y-%m-%d %H:%M:%S")
|
|
214
|
+
)
|
|
215
|
+
|
|
216
|
+
def import_thread(self, conn, orig):
|
|
217
|
+
thread = orig.copy()
|
|
218
|
+
thread["refId"] = thread["id"]
|
|
219
|
+
del thread["id"]
|
|
220
|
+
|
|
221
|
+
info = thread.get("modelInfo", thread.get("info", {}))
|
|
222
|
+
created_at = self.import_date(thread.get("createdAt"))
|
|
223
|
+
thread["createdAt"] = created_at
|
|
224
|
+
if "updateAt" not in thread:
|
|
225
|
+
thread["updateAt"] = created_at
|
|
226
|
+
thread["modelInfo"] = info
|
|
227
|
+
if "modalities" not in thread:
|
|
228
|
+
if "modalities" in info:
|
|
229
|
+
modalities = info["modalities"]
|
|
230
|
+
if isinstance(modalities, dict):
|
|
231
|
+
input = modalities.get("input", ["text"])
|
|
232
|
+
output = modalities.get("output", ["text"])
|
|
233
|
+
thread["modalities"] = list(set(input + output))
|
|
234
|
+
else:
|
|
235
|
+
thread["modalities"] = modalities
|
|
236
|
+
else:
|
|
237
|
+
thread["modalities"] = ["text"]
|
|
238
|
+
if "provider" not in thread and "provider" in info:
|
|
239
|
+
thread["provider"] = info["provider"]
|
|
240
|
+
if "providerModel" not in thread and "id" in info:
|
|
241
|
+
thread["providerModel"] = info["id"]
|
|
242
|
+
|
|
243
|
+
stats = thread.get("stats", {})
|
|
244
|
+
if "inputTokens" not in thread and "inputTokens" in stats:
|
|
245
|
+
thread["inputTokens"] = stats["inputTokens"]
|
|
246
|
+
if "outputTokens" not in thread and "outputTokens" in stats:
|
|
247
|
+
thread["outputTokens"] = stats["outputTokens"]
|
|
248
|
+
if "cost" not in thread and "cost" in stats:
|
|
249
|
+
thread["cost"] = stats["cost"]
|
|
250
|
+
if "completedAt" not in thread:
|
|
251
|
+
thread["completedAt"] = created_at + timedelta(milliseconds=stats.get("duration", 0))
|
|
252
|
+
|
|
253
|
+
sql_columns = []
|
|
254
|
+
sql_params = []
|
|
255
|
+
columns = self.columns["thread"]
|
|
256
|
+
for col in columns:
|
|
257
|
+
if col == "id":
|
|
258
|
+
continue
|
|
259
|
+
sql_columns.append(col)
|
|
260
|
+
val = thread.get(col, None)
|
|
261
|
+
if columns[col] == "JSON" and val is not None:
|
|
262
|
+
val = json.dumps(val)
|
|
263
|
+
sql_params.append(val)
|
|
264
|
+
|
|
265
|
+
return conn.execute(
|
|
266
|
+
f"INSERT INTO thread ({', '.join(sql_columns)}) VALUES ({', '.join(['?'] * len(sql_params))})",
|
|
267
|
+
sql_params,
|
|
268
|
+
).lastrowid
|
|
269
|
+
|
|
270
|
+
# run on startup
|
|
271
|
+
def import_request(self, conn, orig, id_map):
|
|
272
|
+
request = orig.copy()
|
|
273
|
+
del request["id"]
|
|
274
|
+
thread_id = request.get("threadId")
|
|
275
|
+
if thread_id:
|
|
276
|
+
request["threadId"] = id_map.get(thread_id, None)
|
|
277
|
+
|
|
278
|
+
created_at = self.import_date(request.get("created"))
|
|
279
|
+
request["createdAt"] = created_at
|
|
280
|
+
if "updateAt" not in request:
|
|
281
|
+
request["updateAt"] = created_at
|
|
282
|
+
if "completedAt" not in request:
|
|
283
|
+
request["completedAt"] = created_at + timedelta(milliseconds=request.get("duration", 0))
|
|
284
|
+
|
|
285
|
+
sql_columns = []
|
|
286
|
+
sql_params = []
|
|
287
|
+
columns = self.columns["request"]
|
|
288
|
+
for col in columns:
|
|
289
|
+
if col == "id":
|
|
290
|
+
continue
|
|
291
|
+
sql_columns.append(col)
|
|
292
|
+
val = request.get(col, None)
|
|
293
|
+
if columns[col] == "JSON" and val is not None:
|
|
294
|
+
val = json.dumps(val)
|
|
295
|
+
sql_params.append(val)
|
|
296
|
+
|
|
297
|
+
return conn.execute(
|
|
298
|
+
f"INSERT INTO request ({', '.join(sql_columns)}) VALUES ({', '.join(['?'] * len(sql_params))})",
|
|
299
|
+
sql_params,
|
|
300
|
+
).lastrowid
|
|
301
|
+
|
|
302
|
+
def get_user_filter(self, user=None, params=None):
|
|
303
|
+
if user is None:
|
|
304
|
+
return "WHERE user IS NULL", params or {}
|
|
305
|
+
else:
|
|
306
|
+
args = params.copy() if params else {}
|
|
307
|
+
args.update({"user": user})
|
|
308
|
+
return "WHERE user = :user", args
|
|
309
|
+
|
|
310
|
+
def get_thread(self, id, user=None):
|
|
311
|
+
try:
|
|
312
|
+
sql_where, params = self.get_user_filter(user, {"id": id})
|
|
313
|
+
return self.db.one(f"SELECT * FROM thread {sql_where} AND id = :id", params)
|
|
314
|
+
except Exception as e:
|
|
315
|
+
self.ctx.err(f"get_thread ({id}, {user})", e)
|
|
316
|
+
return None
|
|
317
|
+
|
|
318
|
+
def get_thread_column(self, id, column, user=None):
|
|
319
|
+
if column not in self.columns["thread"]:
|
|
320
|
+
self.ctx.err(f"get_thread_column invalid column ({id}, {column}, {user})", None)
|
|
321
|
+
return None
|
|
322
|
+
|
|
323
|
+
try:
|
|
324
|
+
sql_where, params = self.get_user_filter(user, {"id": id})
|
|
325
|
+
return self.db.scalar(f"SELECT {column} FROM thread {sql_where} AND id = :id", params)
|
|
326
|
+
except Exception as e:
|
|
327
|
+
self.ctx.err(f"get_thread_column ({id}, {column}, {user})", e)
|
|
328
|
+
return None
|
|
329
|
+
|
|
330
|
+
def query_threads(self, query: Dict[str, Any], user=None):
|
|
331
|
+
try:
|
|
332
|
+
columns = self.columns["thread"]
|
|
333
|
+
all_columns = columns.keys()
|
|
334
|
+
|
|
335
|
+
take = min(int(query.get("take", "50")), 1000)
|
|
336
|
+
skip = int(query.get("skip", "0"))
|
|
337
|
+
sort = query.get("sort", "-id")
|
|
338
|
+
|
|
339
|
+
# always filter by user
|
|
340
|
+
sql_where, params = self.get_user_filter(user, {"take": take, "skip": skip})
|
|
341
|
+
|
|
342
|
+
filter = {}
|
|
343
|
+
for k in query:
|
|
344
|
+
if k in all_columns:
|
|
345
|
+
filter[k] = query[k]
|
|
346
|
+
params[k] = query[k]
|
|
347
|
+
|
|
348
|
+
if len(filter) > 0:
|
|
349
|
+
sql_where += " AND " + " AND ".join([f"{k} = :{k}" for k in filter])
|
|
350
|
+
|
|
351
|
+
if "null" in query:
|
|
352
|
+
cols = valid_columns(all_columns, query["null"])
|
|
353
|
+
if len(cols) > 0:
|
|
354
|
+
sql_where += " AND " + " AND ".join([f"{k} IS NULL" for k in cols])
|
|
355
|
+
|
|
356
|
+
if "not_null" in query:
|
|
357
|
+
cols = valid_columns(all_columns, query.get("not_null"))
|
|
358
|
+
if len(cols) > 0:
|
|
359
|
+
sql_where += " AND " + " AND ".join([f"{k} IS NOT NULL" for k in cols])
|
|
360
|
+
|
|
361
|
+
if "q" in query:
|
|
362
|
+
sql_where += " AND " if sql_where else "WHERE "
|
|
363
|
+
sql_where += "(title LIKE :q OR messages LIKE :q)"
|
|
364
|
+
params["q"] = f"%{query['q']}%"
|
|
365
|
+
|
|
366
|
+
sql = f"{select_columns(all_columns, query.get('fields'), select=query.get('select'))} FROM thread {sql_where} {order_by(all_columns, sort)} LIMIT :take OFFSET :skip"
|
|
367
|
+
|
|
368
|
+
if query.get("as") == "column":
|
|
369
|
+
return self.db.column(sql, params)
|
|
370
|
+
else:
|
|
371
|
+
return self.db.all(sql, params)
|
|
372
|
+
|
|
373
|
+
except Exception as e:
|
|
374
|
+
self.ctx.err(f"query_threads ({take}, {skip})", e)
|
|
375
|
+
return []
|
|
376
|
+
|
|
377
|
+
    def insert(self, table, info, callback=None):
        """Queue an INSERT of *info* into *table* on the DB writer.

        Only keys matching known schema columns are used, and "id" is
        always excluded so SQLite assigns it. Values pass through
        self.db.value for normalization. *callback*, when given, is
        invoked on completion with (lastrowid, rowcount[, error]) — see
        insert_async for the expected signature.

        Raises:
            Exception: when *info* is empty.
        """
        if not info:
            raise Exception("info is required")

        columns = self.columns[table]
        args = {}
        known_columns = columns.keys()
        for k, val in info.items():
            if k in known_columns and k != "id":
                args[k] = self.db.value(val)

        insert_keys = list(args.keys())
        insert_body = ", ".join(insert_keys)
        insert_values = ", ".join(["?" for _ in insert_keys])

        sql = f"INSERT INTO {table} ({insert_body}) VALUES ({insert_values})"

        # Positional parameters must follow insert_keys order exactly.
        self.db.write(sql, tuple(args[k] for k in insert_keys), callback)
|
|
395
|
+
|
|
396
|
+
async def insert_async(self, table, info):
|
|
397
|
+
event = threading.Event()
|
|
398
|
+
|
|
399
|
+
ret = [None]
|
|
400
|
+
|
|
401
|
+
def cb(lastrowid, rowcount, error=None):
|
|
402
|
+
nonlocal ret
|
|
403
|
+
if error:
|
|
404
|
+
raise error
|
|
405
|
+
ret[0] = lastrowid
|
|
406
|
+
event.set()
|
|
407
|
+
|
|
408
|
+
self.insert(table, info, cb)
|
|
409
|
+
event.wait()
|
|
410
|
+
return ret[0]
|
|
411
|
+
|
|
412
|
+
    def update(self, table, info, callback=None):
        """Queue an UPDATE of row info["id"] in *table* on the DB writer.

        Only keys matching known schema columns (other than "id") become
        SET clauses; values pass through self.db.value. *callback*, when
        given, is invoked on completion (see update_async for the
        expected signature).

        Raises:
            Exception: when *info* is empty.
            KeyError: when *info* has no "id" entry.
        """
        if not info:
            raise Exception("info is required")

        columns = self.columns[table]
        args = {}
        known_columns = columns.keys()
        for k, val in info.items():
            if k in known_columns and k != "id":
                args[k] = self.db.value(val)

        update_keys = list(args.keys())
        update_body = ", ".join([f"{k} = :{k}" for k in update_keys])

        # Target row; "id" was deliberately excluded from the SET clause.
        args["id"] = info["id"]
        sql = f"UPDATE {table} SET {update_body} WHERE id = :id"

        self.db.write(sql, args, callback)
|
|
430
|
+
|
|
431
|
+
async def update_async(self, table, info):
|
|
432
|
+
event = threading.Event()
|
|
433
|
+
|
|
434
|
+
ret = [None]
|
|
435
|
+
|
|
436
|
+
def cb(lastrowid, rowcount, error=None):
|
|
437
|
+
nonlocal ret
|
|
438
|
+
if error:
|
|
439
|
+
raise error
|
|
440
|
+
ret[0] = rowcount
|
|
441
|
+
event.set()
|
|
442
|
+
|
|
443
|
+
self.update(table, info, cb)
|
|
444
|
+
event.wait()
|
|
445
|
+
return ret[0]
|
|
446
|
+
|
|
447
|
+
def prepare_thread(self, thread, id=None):
|
|
448
|
+
now = datetime.now()
|
|
449
|
+
if id:
|
|
450
|
+
thread["id"] = id
|
|
451
|
+
else:
|
|
452
|
+
thread["createdAt"] = now
|
|
453
|
+
thread["updatedAt"] = now
|
|
454
|
+
if "messages" in thread:
|
|
455
|
+
for m in thread["messages"]:
|
|
456
|
+
self.ctx.cache_message_inline_data(m)
|
|
457
|
+
return thread
|
|
458
|
+
|
|
459
|
+
    def create_thread(self, thread: Dict[str, Any], user=None):
        """Queue creation of a thread owned by *user* (fire-and-forget)."""
        return self.insert("thread", with_user(self.prepare_thread(thread), user=user))

    async def create_thread_async(self, thread: Dict[str, Any], user=None):
        """Create a thread owned by *user* and return the new row id."""
        return await self.insert_async("thread", with_user(self.prepare_thread(thread), user=user))

    def update_thread(self, id, thread: Dict[str, Any], user=None):
        """Queue an update of thread *id* for *user* (fire-and-forget)."""
        return self.update("thread", with_user(self.prepare_thread(thread, id), user=user))

    async def update_thread_async(self, id, thread: Dict[str, Any], user=None):
        """Update thread *id* for *user* and return the affected row count."""
        return await self.update_async("thread", with_user(self.prepare_thread(thread, id), user=user))

    def delete_thread(self, id, user=None, callback=None):
        """Queue deletion of thread *id*, scoped to *user*."""
        sql_where, params = self.get_user_filter(user, {"id": id})
        self.db.write(f"DELETE FROM thread {sql_where} AND id = :id", params, callback)
|
|
474
|
+
|
|
475
|
+
def query_requests(self, query: Dict[str, Any], user=None):
|
|
476
|
+
try:
|
|
477
|
+
columns = self.columns["request"]
|
|
478
|
+
all_columns = columns.keys()
|
|
479
|
+
|
|
480
|
+
take = min(int(query.get("take", "50")), 10000)
|
|
481
|
+
skip = int(query.get("skip", 0))
|
|
482
|
+
sort = query.get("sort", "-id")
|
|
483
|
+
|
|
484
|
+
# always filter by user
|
|
485
|
+
sql_where, params = self.get_user_filter(user, {"take": take, "skip": skip})
|
|
486
|
+
|
|
487
|
+
filter = {}
|
|
488
|
+
for k in query:
|
|
489
|
+
if k in all_columns:
|
|
490
|
+
filter[k] = query[k]
|
|
491
|
+
params[k] = query[k]
|
|
492
|
+
|
|
493
|
+
if len(filter) > 0:
|
|
494
|
+
sql_where += " AND " + " AND ".join([f"{k} = :{k}" for k in filter])
|
|
495
|
+
|
|
496
|
+
if "null" in query:
|
|
497
|
+
cols = valid_columns(all_columns, query["null"])
|
|
498
|
+
if len(cols) > 0:
|
|
499
|
+
sql_where += " AND " + " AND ".join([f"{k} IS NULL" for k in cols])
|
|
500
|
+
|
|
501
|
+
if "not_null" in query:
|
|
502
|
+
cols = valid_columns(all_columns, query.get("not_null"))
|
|
503
|
+
if len(cols) > 0:
|
|
504
|
+
sql_where += " AND " + " AND ".join([f"{k} IS NOT NULL" for k in cols])
|
|
505
|
+
|
|
506
|
+
if "q" in query:
|
|
507
|
+
sql_where += " AND " if sql_where else "WHERE "
|
|
508
|
+
sql_where += "(title LIKE :q)"
|
|
509
|
+
params["q"] = f"%{query['q']}%"
|
|
510
|
+
|
|
511
|
+
if "month" in query:
|
|
512
|
+
sql_where += " AND strftime('%Y-%m', createdAt) = :month"
|
|
513
|
+
params["month"] = query["month"]
|
|
514
|
+
|
|
515
|
+
sql = f"{select_columns(all_columns, query.get('fields'), select=query.get('select'))} FROM request {sql_where} {order_by(all_columns, sort)}LIMIT :take OFFSET :skip"
|
|
516
|
+
|
|
517
|
+
if query.get("as") == "column":
|
|
518
|
+
return self.db.column(sql, params)
|
|
519
|
+
else:
|
|
520
|
+
return self.db.all(sql, params)
|
|
521
|
+
except Exception as e:
|
|
522
|
+
self.ctx.err(f"query_requests ({take}, {skip})", e)
|
|
523
|
+
return []
|
|
524
|
+
|
|
525
|
+
    def get_request_summary(self, user=None):
        """Aggregate request counts, cost and token totals per day for *user*.

        Returns rows of (date, requests, cost, inputTokens, outputTokens)
        grouped by calendar day, oldest first; [] on error.
        """
        try:
            sql_where, params = self.get_user_filter(user)
            # Use strftime to format date as YYYY-MM-DD
            sql = f"""
                SELECT
                    strftime('%Y-%m-%d', createdAt) as date,
                    count(id) as requests,
                    sum(cost) as cost,
                    sum(inputTokens) as inputTokens,
                    sum(outputTokens) as outputTokens
                FROM request
                {sql_where}
                GROUP BY date
                ORDER BY date
            """
            return self.db.all(sql, params)
        except Exception as e:
            self.ctx.err(f"get_request_summary ({user})", e)
            return []
|
|
545
|
+
|
|
546
|
+
    def get_daily_request_summary(self, day, user=None):
        """Aggregate request metrics for one calendar day.

        day: date string in "YYYY-MM-DD" form, matched against createdAt.

        Returns {"modelData": {...}, "providerData": {...}}, keyed by
        model / provider name, each entry holding count, cost, duration
        and token totals (NULL sums coalesced to 0). Empty dicts on error.
        """
        try:
            sql_where, params = self.get_user_filter(user)
            # Add date filter
            sql_where += " AND strftime('%Y-%m-%d', createdAt) = :day"
            params["day"] = day

            # Model aggregation
            sql_model = f"""
                SELECT
                    model,
                    count(id) as count,
                    sum(cost) as cost,
                    sum(duration) as duration,
                    sum(inputTokens + outputTokens) as tokens,
                    sum(inputTokens) as inputTokens,
                    sum(outputTokens) as outputTokens
                FROM request
                {sql_where}
                GROUP BY model
            """
            model_data = {}
            for row in self.db.all(sql_model, params):
                # SUM() over an all-NULL group yields NULL -> default to 0.
                model_data[row["model"]] = {
                    "cost": row["cost"] or 0,
                    "count": row["count"],
                    "duration": row["duration"] or 0,
                    "tokens": row["tokens"] or 0,
                    "inputTokens": row["inputTokens"] or 0,
                    "outputTokens": row["outputTokens"] or 0,
                }

            # Provider aggregation
            sql_provider = f"""
                SELECT
                    provider,
                    count(id) as count,
                    sum(cost) as cost,
                    sum(duration) as duration,
                    sum(inputTokens + outputTokens) as tokens,
                    sum(inputTokens) as inputTokens,
                    sum(outputTokens) as outputTokens
                FROM request
                {sql_where}
                AND provider IS NOT NULL
                GROUP BY provider
            """
            provider_data = {}
            for row in self.db.all(sql_provider, params):
                provider_data[row["provider"]] = {
                    "cost": row["cost"] or 0,
                    "count": row["count"],
                    "duration": row["duration"] or 0,
                    "tokens": row["tokens"] or 0,
                    "inputTokens": row["inputTokens"] or 0,
                    "outputTokens": row["outputTokens"] or 0,
                }

            return {"modelData": model_data, "providerData": provider_data}
        except Exception as e:
            self.ctx.err(f"get_daily_request_summary ({day}, {user})", e)
            return {"modelData": {}, "providerData": {}}
|
|
608
|
+
|
|
609
|
+
    def create_request(self, request: Dict[str, Any], user=None):
        """Queue creation of a request row for *user* (fire-and-forget)."""
        request["createdAt"] = request["updatedAt"] = datetime.now()
        return self.insert("request", with_user(request, user=user))

    async def create_request_async(self, request: Dict[str, Any], user=None):
        """Create a request row for *user* and return the new row id."""
        request["createdAt"] = request["updatedAt"] = datetime.now()
        return await self.insert_async("request", with_user(request, user=user))

    def update_request(self, id, request: Dict[str, Any], user=None):
        """Queue an update of request *id* for *user* (fire-and-forget)."""
        request["id"] = id
        request["updatedAt"] = datetime.now()
        return self.update("request", with_user(request, user=user))

    async def update_request_async(self, id, request: Dict[str, Any], user=None):
        """Update request *id* for *user* and return the affected row count."""
        request["id"] = id
        request["updatedAt"] = datetime.now()
        return await self.update_async("request", with_user(request, user=user))

    def delete_request(self, id, user=None, callback=None):
        """Queue deletion of request *id*, scoped to *user*."""
        sql_where, params = self.get_user_filter(user, {"id": id})
        self.db.write(f"DELETE FROM request {sql_where} AND id = :id", params, callback)
|
|
630
|
+
|
|
631
|
+
    def close(self):
        """Shut down the database, marking any in-flight rows as failed."""
        self.db.close()

        # complete all in progress tasks
        # NOTE(review): a writer connection is opened *after* self.db.close().
        # Presumably DbManager.close() only stops its background workers and
        # the file remains writable — confirm against DbManager; otherwise
        # these updates should run before close().
        with self.db.create_writer_connection() as conn:
            conn.execute(
                "UPDATE thread SET completedAt = :completedAt, error = :error WHERE completedAt IS NULL",
                {"completedAt": datetime.now(), "error": "Server Shutdown"},
            )
            conn.execute(
                "UPDATE request SET completedAt = :completedAt, error = :error WHERE completedAt IS NULL",
                {"completedAt": datetime.now(), "error": "Server Shutdown"},
            )
|