chainlit 2.7.0__py3-none-any.whl → 2.7.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of chainlit might be problematic.
- {chainlit-2.7.0.dist-info → chainlit-2.7.1.dist-info}/METADATA +1 -1
- chainlit-2.7.1.dist-info/RECORD +4 -0
- chainlit/__init__.py +0 -207
- chainlit/__main__.py +0 -4
- chainlit/_utils.py +0 -8
- chainlit/action.py +0 -33
- chainlit/auth/__init__.py +0 -95
- chainlit/auth/cookie.py +0 -197
- chainlit/auth/jwt.py +0 -42
- chainlit/cache.py +0 -45
- chainlit/callbacks.py +0 -433
- chainlit/chat_context.py +0 -64
- chainlit/chat_settings.py +0 -34
- chainlit/cli/__init__.py +0 -235
- chainlit/config.py +0 -621
- chainlit/context.py +0 -112
- chainlit/data/__init__.py +0 -111
- chainlit/data/acl.py +0 -19
- chainlit/data/base.py +0 -107
- chainlit/data/chainlit_data_layer.py +0 -687
- chainlit/data/dynamodb.py +0 -616
- chainlit/data/literalai.py +0 -501
- chainlit/data/sql_alchemy.py +0 -741
- chainlit/data/storage_clients/__init__.py +0 -0
- chainlit/data/storage_clients/azure.py +0 -84
- chainlit/data/storage_clients/azure_blob.py +0 -94
- chainlit/data/storage_clients/base.py +0 -28
- chainlit/data/storage_clients/gcs.py +0 -101
- chainlit/data/storage_clients/s3.py +0 -88
- chainlit/data/utils.py +0 -29
- chainlit/discord/__init__.py +0 -6
- chainlit/discord/app.py +0 -364
- chainlit/element.py +0 -454
- chainlit/emitter.py +0 -450
- chainlit/hello.py +0 -12
- chainlit/input_widget.py +0 -182
- chainlit/langchain/__init__.py +0 -6
- chainlit/langchain/callbacks.py +0 -682
- chainlit/langflow/__init__.py +0 -25
- chainlit/llama_index/__init__.py +0 -6
- chainlit/llama_index/callbacks.py +0 -206
- chainlit/logger.py +0 -16
- chainlit/markdown.py +0 -57
- chainlit/mcp.py +0 -99
- chainlit/message.py +0 -619
- chainlit/mistralai/__init__.py +0 -50
- chainlit/oauth_providers.py +0 -835
- chainlit/openai/__init__.py +0 -53
- chainlit/py.typed +0 -0
- chainlit/secret.py +0 -9
- chainlit/semantic_kernel/__init__.py +0 -111
- chainlit/server.py +0 -1616
- chainlit/session.py +0 -304
- chainlit/sidebar.py +0 -55
- chainlit/slack/__init__.py +0 -6
- chainlit/slack/app.py +0 -427
- chainlit/socket.py +0 -381
- chainlit/step.py +0 -490
- chainlit/sync.py +0 -43
- chainlit/teams/__init__.py +0 -6
- chainlit/teams/app.py +0 -348
- chainlit/translations/bn.json +0 -214
- chainlit/translations/el-GR.json +0 -214
- chainlit/translations/en-US.json +0 -214
- chainlit/translations/fr-FR.json +0 -214
- chainlit/translations/gu.json +0 -214
- chainlit/translations/he-IL.json +0 -214
- chainlit/translations/hi.json +0 -214
- chainlit/translations/ja.json +0 -214
- chainlit/translations/kn.json +0 -214
- chainlit/translations/ml.json +0 -214
- chainlit/translations/mr.json +0 -214
- chainlit/translations/nl.json +0 -214
- chainlit/translations/ta.json +0 -214
- chainlit/translations/te.json +0 -214
- chainlit/translations/zh-CN.json +0 -214
- chainlit/translations.py +0 -60
- chainlit/types.py +0 -334
- chainlit/user.py +0 -43
- chainlit/user_session.py +0 -153
- chainlit/utils.py +0 -173
- chainlit/version.py +0 -8
- chainlit-2.7.0.dist-info/RECORD +0 -84
- {chainlit-2.7.0.dist-info → chainlit-2.7.1.dist-info}/WHEEL +0 -0
- {chainlit-2.7.0.dist-info → chainlit-2.7.1.dist-info}/entry_points.txt +0 -0
chainlit/data/sql_alchemy.py
DELETED
@@ -1,741 +0,0 @@

```python
import json
import ssl
import uuid
from dataclasses import asdict
from datetime import datetime
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union

import aiofiles
import aiohttp
from sqlalchemy import text
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker

from chainlit.data.base import BaseDataLayer
from chainlit.data.storage_clients.base import BaseStorageClient
from chainlit.data.utils import queue_until_user_message
from chainlit.element import ElementDict
from chainlit.logger import logger
from chainlit.step import StepDict
from chainlit.types import (
    Feedback,
    FeedbackDict,
    PageInfo,
    PaginatedResponse,
    Pagination,
    ThreadDict,
    ThreadFilter,
)
from chainlit.user import PersistedUser, User

if TYPE_CHECKING:
    from chainlit.element import Element, ElementDict
    from chainlit.step import StepDict


class SQLAlchemyDataLayer(BaseDataLayer):
    def __init__(
        self,
        conninfo: str,
        connect_args: Optional[dict[str, Any]] = None,
        ssl_require: bool = False,
        storage_provider: Optional[BaseStorageClient] = None,
        user_thread_limit: Optional[int] = 1000,
        show_logger: Optional[bool] = False,
    ):
        self._conninfo = conninfo
        self.user_thread_limit = user_thread_limit
        self.show_logger = show_logger
        if connect_args is None:
            connect_args = {}
        if ssl_require:
            # Create an SSL context to require an SSL connection
            ssl_context = ssl.create_default_context()
            ssl_context.check_hostname = False
            ssl_context.verify_mode = ssl.CERT_NONE
            connect_args["ssl"] = ssl_context
        self.engine: AsyncEngine = create_async_engine(
            self._conninfo, connect_args=connect_args
        )
        self.async_session = sessionmaker(
            bind=self.engine, expire_on_commit=False, class_=AsyncSession
        )  # type: ignore
        if storage_provider:
            self.storage_provider: Optional[BaseStorageClient] = storage_provider
            if self.show_logger:
                logger.info("SQLAlchemyDataLayer storage client initialized")
        else:
            self.storage_provider = None
            logger.warning(
                "SQLAlchemyDataLayer storage client is not initialized and elements will not be persisted!"
            )

    async def build_debug_url(self) -> str:
        return ""

    ###### SQL Helpers ######
    async def execute_sql(
        self, query: str, parameters: dict
    ) -> Union[List[Dict[str, Any]], int, None]:
        parameterized_query = text(query)
        async with self.async_session() as session:
            try:
                await session.begin()
                result = await session.execute(parameterized_query, parameters)
                await session.commit()
                if result.returns_rows:
                    json_result = [dict(row._mapping) for row in result.fetchall()]
                    clean_json_result = self.clean_result(json_result)
                    assert isinstance(clean_json_result, list) or isinstance(
                        clean_json_result, int
                    )
                    return clean_json_result
                else:
                    return result.rowcount
            except SQLAlchemyError as e:
                await session.rollback()
                logger.warning(f"An error occurred: {e}")
                return None
            except Exception as e:
                await session.rollback()
                logger.warning(f"An unexpected error occurred: {e}")
                return None

    async def get_current_timestamp(self) -> str:
        return datetime.now().isoformat() + "Z"

    def clean_result(self, obj):
        """Recursively change UUID -> str and serialize dictionaries"""
        if isinstance(obj, dict):
            return {k: self.clean_result(v) for k, v in obj.items()}
        elif isinstance(obj, list):
            return [self.clean_result(item) for item in obj]
        elif isinstance(obj, uuid.UUID):
            return str(obj)
        return obj

    ###### User ######
    async def get_user(self, identifier: str) -> Optional[PersistedUser]:
        if self.show_logger:
            logger.info(f"SQLAlchemy: get_user, identifier={identifier}")
        query = "SELECT * FROM users WHERE identifier = :identifier"
        parameters = {"identifier": identifier}
        result = await self.execute_sql(query=query, parameters=parameters)
        if result and isinstance(result, list):
            user_data = result[0]

            # SQLite returns JSON as string, we most convert it. (#1137)
            metadata = user_data.get("metadata", {})
            if isinstance(metadata, str):
                metadata = json.loads(metadata)

            assert isinstance(metadata, dict)
            assert isinstance(user_data["id"], str)
            assert isinstance(user_data["identifier"], str)
            assert isinstance(user_data["createdAt"], str)

            return PersistedUser(
                id=user_data["id"],
                identifier=user_data["identifier"],
                createdAt=user_data["createdAt"],
                metadata=metadata,
            )
        return None

    async def _get_user_identifer_by_id(self, user_id: str) -> str:
        if self.show_logger:
            logger.info(f"SQLAlchemy: _get_user_identifer_by_id, user_id={user_id}")
        query = "SELECT identifier FROM users WHERE id = :user_id"
        parameters = {"user_id": user_id}
        result = await self.execute_sql(query=query, parameters=parameters)

        assert result
        assert isinstance(result, list)

        return result[0]["identifier"]

    async def _get_user_id_by_thread(self, thread_id: str) -> Optional[str]:
        if self.show_logger:
            logger.info(f"SQLAlchemy: _get_user_id_by_thread, thread_id={thread_id}")
        query = """SELECT "userId" FROM threads WHERE id = :thread_id"""
        parameters = {"thread_id": thread_id}
        result = await self.execute_sql(query=query, parameters=parameters)
        if result:
            assert isinstance(result, list)
            return result[0]["userId"]

        return None

    async def create_user(self, user: User) -> Optional[PersistedUser]:
        if self.show_logger:
            logger.info(f"SQLAlchemy: create_user, user_identifier={user.identifier}")
        existing_user: Optional[PersistedUser] = await self.get_user(user.identifier)
        user_dict: Dict[str, Any] = {
            "identifier": str(user.identifier),
            "metadata": json.dumps(user.metadata) or {},
        }
        if not existing_user:  # create the user
            if self.show_logger:
                logger.info("SQLAlchemy: create_user, creating the user")
            user_dict["id"] = str(uuid.uuid4())
            user_dict["createdAt"] = await self.get_current_timestamp()
            query = """INSERT INTO users ("id", "identifier", "createdAt", "metadata") VALUES (:id, :identifier, :createdAt, :metadata)"""
            await self.execute_sql(query=query, parameters=user_dict)
        else:  # update the user
            if self.show_logger:
                logger.info("SQLAlchemy: update user metadata")
            query = """UPDATE users SET "metadata" = :metadata WHERE "identifier" = :identifier"""
            await self.execute_sql(
                query=query, parameters=user_dict
            )  # We want to update the metadata
        return await self.get_user(user.identifier)

    ###### Threads ######
    async def get_thread_author(self, thread_id: str) -> str:
        if self.show_logger:
            logger.info(f"SQLAlchemy: get_thread_author, thread_id={thread_id}")
        query = """SELECT "userIdentifier" FROM threads WHERE "id" = :id"""
        parameters = {"id": thread_id}
        result = await self.execute_sql(query=query, parameters=parameters)
        if isinstance(result, list) and result:
            author_identifier = result[0].get("userIdentifier")
            if author_identifier is not None:
                return author_identifier
        raise ValueError(f"Author not found for thread_id {thread_id}")

    async def get_thread(self, thread_id: str) -> Optional[ThreadDict]:
        if self.show_logger:
            logger.info(f"SQLAlchemy: get_thread, thread_id={thread_id}")
        user_threads: Optional[List[ThreadDict]] = await self.get_all_user_threads(
            thread_id=thread_id
        )
        if user_threads:
            return user_threads[0]
        else:
            return None

    async def update_thread(
        self,
        thread_id: str,
        name: Optional[str] = None,
        user_id: Optional[str] = None,
        metadata: Optional[Dict] = None,
        tags: Optional[List[str]] = None,
    ):
        if self.show_logger:
            logger.info(f"SQLAlchemy: update_thread, thread_id={thread_id}")

        user_identifier = None
        if user_id:
            user_identifier = await self._get_user_identifer_by_id(user_id)

        data = {
            "id": thread_id,
            "createdAt": (
                await self.get_current_timestamp() if metadata is None else None
            ),
            "name": (
                name
                if name is not None
                else (metadata.get("name") if metadata and "name" in metadata else None)
            ),
            "userId": user_id,
            "userIdentifier": user_identifier,
            "tags": tags,
            "metadata": json.dumps(metadata) if metadata else None,
        }
        parameters = {
            key: value for key, value in data.items() if value is not None
        }  # Remove keys with None values
        columns = ", ".join(f'"{key}"' for key in parameters.keys())
        values = ", ".join(f":{key}" for key in parameters.keys())
        updates = ", ".join(
            f'"{key}" = EXCLUDED."{key}"' for key in parameters.keys() if key != "id"
        )
        query = f"""
            INSERT INTO threads ({columns})
            VALUES ({values})
            ON CONFLICT ("id") DO UPDATE
            SET {updates};
        """
        await self.execute_sql(query=query, parameters=parameters)

    async def delete_thread(self, thread_id: str):
        if self.show_logger:
            logger.info(f"SQLAlchemy: delete_thread, thread_id={thread_id}")

        elements_query = """SELECT * FROM elements WHERE "threadId" = :id"""
        elements = await self.execute_sql(elements_query, {"id": thread_id})

        if self.storage_provider is not None and isinstance(elements, list):
            for elem in filter(lambda x: x["objectKey"], elements):
                await self.storage_provider.delete_file(object_key=elem["objectKey"])

        # Delete feedbacks/elements/steps/thread
        feedbacks_query = """DELETE FROM feedbacks WHERE "forId" IN (SELECT "id" FROM steps WHERE "threadId" = :id)"""
        elements_query = """DELETE FROM elements WHERE "threadId" = :id"""
        steps_query = """DELETE FROM steps WHERE "threadId" = :id"""
        thread_query = """DELETE FROM threads WHERE "id" = :id"""
        parameters = {"id": thread_id}
        await self.execute_sql(query=feedbacks_query, parameters=parameters)
        await self.execute_sql(query=elements_query, parameters=parameters)
        await self.execute_sql(query=steps_query, parameters=parameters)
        await self.execute_sql(query=thread_query, parameters=parameters)

    async def list_threads(
        self, pagination: Pagination, filters: ThreadFilter
    ) -> PaginatedResponse:
        if self.show_logger:
            logger.info(
                f"SQLAlchemy: list_threads, pagination={pagination}, filters={filters}"
            )
        if not filters.userId:
            raise ValueError("userId is required")
        all_user_threads: List[ThreadDict] = (
            await self.get_all_user_threads(user_id=filters.userId) or []
        )

        search_keyword = filters.search.lower() if filters.search else None
        feedback_value = int(filters.feedback) if filters.feedback else None

        filtered_threads = []
        for thread in all_user_threads:
            keyword_match = True
            feedback_match = True
            if search_keyword or feedback_value is not None:
                if search_keyword:
                    keyword_match = any(
                        search_keyword in step["output"].lower()
                        for step in thread["steps"]
                        if "output" in step
                    )
                if feedback_value is not None:
                    feedback_match = False  # Assume no match until found
                    for step in thread["steps"]:
                        feedback = step.get("feedback")
                        if feedback and feedback.get("value") == feedback_value:
                            feedback_match = True
                            break
            if keyword_match and feedback_match:
                filtered_threads.append(thread)

        start = 0
        if pagination.cursor:
            for i, thread in enumerate(filtered_threads):
                if (
                    thread["id"] == pagination.cursor
                ):  # Find the start index using pagination.cursor
                    start = i + 1
                    break
        end = start + pagination.first
        paginated_threads = filtered_threads[start:end] or []

        has_next_page = len(filtered_threads) > end
        start_cursor = paginated_threads[0]["id"] if paginated_threads else None
        end_cursor = paginated_threads[-1]["id"] if paginated_threads else None

        return PaginatedResponse(
            pageInfo=PageInfo(
                hasNextPage=has_next_page,
                startCursor=start_cursor,
                endCursor=end_cursor,
            ),
            data=paginated_threads,
        )

    ###### Steps ######
    @queue_until_user_message()
    async def create_step(self, step_dict: "StepDict"):
        await self.update_thread(step_dict["threadId"])

        if self.show_logger:
            logger.info(f"SQLAlchemy: create_step, step_id={step_dict.get('id')}")

        step_dict["showInput"] = (
            str(step_dict.get("showInput", "")).lower()
            if "showInput" in step_dict
            else None
        )
        parameters = {
            key: value
            for key, value in step_dict.items()
            if value is not None and not (isinstance(value, dict) and not value)
        }
        parameters["metadata"] = json.dumps(step_dict.get("metadata", {}))
        parameters["generation"] = json.dumps(step_dict.get("generation", {}))
        columns = ", ".join(f'"{key}"' for key in parameters.keys())
        values = ", ".join(f":{key}" for key in parameters.keys())
        updates = ", ".join(
            f'"{key}" = :{key}' for key in parameters.keys() if key != "id"
        )
        query = f"""
            INSERT INTO steps ({columns})
            VALUES ({values})
            ON CONFLICT (id) DO UPDATE
            SET {updates};
        """
        await self.execute_sql(query=query, parameters=parameters)

    @queue_until_user_message()
    async def update_step(self, step_dict: "StepDict"):
        if self.show_logger:
            logger.info(f"SQLAlchemy: update_step, step_id={step_dict.get('id')}")
        await self.create_step(step_dict)

    @queue_until_user_message()
    async def delete_step(self, step_id: str):
        if self.show_logger:
            logger.info(f"SQLAlchemy: delete_step, step_id={step_id}")
        # Delete feedbacks/elements/steps
        feedbacks_query = """DELETE FROM feedbacks WHERE "forId" = :id"""
        elements_query = """DELETE FROM elements WHERE "forId" = :id"""
        steps_query = """DELETE FROM steps WHERE "id" = :id"""
        parameters = {"id": step_id}
        await self.execute_sql(query=feedbacks_query, parameters=parameters)
        await self.execute_sql(query=elements_query, parameters=parameters)
        await self.execute_sql(query=steps_query, parameters=parameters)

    ###### Feedback ######
    async def upsert_feedback(self, feedback: Feedback) -> str:
        if self.show_logger:
            logger.info(f"SQLAlchemy: upsert_feedback, feedback_id={feedback.id}")
        feedback.id = feedback.id or str(uuid.uuid4())
        feedback_dict = asdict(feedback)
        parameters = {
            key: value for key, value in feedback_dict.items() if value is not None
        }

        columns = ", ".join(f'"{key}"' for key in parameters.keys())
        values = ", ".join(f":{key}" for key in parameters.keys())
        updates = ", ".join(
            f'"{key}" = :{key}' for key in parameters.keys() if key != "id"
        )
        query = f"""
            INSERT INTO feedbacks ({columns})
            VALUES ({values})
            ON CONFLICT (id) DO UPDATE
            SET {updates};
        """
        await self.execute_sql(query=query, parameters=parameters)
        return feedback.id

    async def delete_feedback(self, feedback_id: str) -> bool:
        if self.show_logger:
            logger.info(f"SQLAlchemy: delete_feedback, feedback_id={feedback_id}")
        query = """DELETE FROM feedbacks WHERE "id" = :feedback_id"""
        parameters = {"feedback_id": feedback_id}
        await self.execute_sql(query=query, parameters=parameters)
        return True

    ###### Elements ######
    async def get_element(
        self, thread_id: str, element_id: str
    ) -> Optional["ElementDict"]:
        if self.show_logger:
            logger.info(
                f"SQLAlchemy: get_element, thread_id={thread_id}, element_id={element_id}"
            )
        query = """SELECT * FROM elements WHERE "threadId" = :thread_id AND "id" = :element_id"""
        parameters = {"thread_id": thread_id, "element_id": element_id}
        element: Union[List[Dict[str, Any]], int, None] = await self.execute_sql(
            query=query, parameters=parameters
        )
        if isinstance(element, list) and element:
            element_dict: Dict[str, Any] = element[0]
            return ElementDict(
                id=element_dict["id"],
                threadId=element_dict.get("threadId"),
                type=element_dict["type"],
                chainlitKey=element_dict.get("chainlitKey"),
                url=element_dict.get("url"),
                objectKey=element_dict.get("objectKey"),
                name=element_dict["name"],
                props=json.loads(element_dict.get("props", "{}")),
                display=element_dict["display"],
                size=element_dict.get("size"),
                language=element_dict.get("language"),
                page=element_dict.get("page"),
                autoPlay=element_dict.get("autoPlay"),
                playerConfig=element_dict.get("playerConfig"),
                forId=element_dict.get("forId"),
                mime=element_dict.get("mime"),
            )
        else:
            return None

    @queue_until_user_message()
    async def create_element(self, element: "Element"):
        if self.show_logger:
            logger.info(f"SQLAlchemy: create_element, element_id = {element.id}")

        if not self.storage_provider:
            logger.warning(
                "SQLAlchemy: create_element error. No blob_storage_client is configured!"
            )
            return
        if not element.for_id:
            return

        content: Optional[Union[bytes, str]] = None

        if element.path:
            async with aiofiles.open(element.path, "rb") as f:
                content = await f.read()
        elif element.url:
            async with aiohttp.ClientSession() as session:
                async with session.get(element.url) as response:
                    if response.status == 200:
                        content = await response.read()
                    else:
                        content = None
        elif element.content:
            content = element.content
        else:
            raise ValueError("Element url, path or content must be provided")
        if content is None:
            raise ValueError("Content is None, cannot upload file")

        user_id: str = await self._get_user_id_by_thread(element.thread_id) or "unknown"
        file_object_key = f"{user_id}/{element.id}" + (
            f"/{element.name}" if element.name else ""
        )

        if not element.mime:
            element.mime = "application/octet-stream"

        uploaded_file = await self.storage_provider.upload_file(
            object_key=file_object_key, data=content, mime=element.mime, overwrite=True
        )
        if not uploaded_file:
            raise ValueError(
                "SQLAlchemy Error: create_element, Failed to persist data in storage_provider"
            )

        element_dict: ElementDict = element.to_dict()

        element_dict["url"] = uploaded_file.get("url")
        element_dict["objectKey"] = uploaded_file.get("object_key")

        element_dict_cleaned = {k: v for k, v in element_dict.items() if v is not None}
        if "props" in element_dict_cleaned:
            element_dict_cleaned["props"] = json.dumps(element_dict_cleaned["props"])

        columns = ", ".join(f'"{column}"' for column in element_dict_cleaned.keys())
        placeholders = ", ".join(f":{column}" for column in element_dict_cleaned.keys())
        updates = ", ".join(
            f'"{column}" = :{column}'
            for column in element_dict_cleaned.keys()
            if column != "id"
        )
        query = f"INSERT INTO elements ({columns}) VALUES ({placeholders}) ON CONFLICT (id) DO UPDATE SET {updates};"
        await self.execute_sql(query=query, parameters=element_dict_cleaned)

    @queue_until_user_message()
    async def delete_element(self, element_id: str, thread_id: Optional[str] = None):
        if self.show_logger:
            logger.info(f"SQLAlchemy: delete_element, element_id={element_id}")

        query = """SELECT * FROM elements WHERE "id" = :id"""
        elements = await self.execute_sql(query, {"id": element_id})

        if (
            self.storage_provider is not None
            and isinstance(elements, list)
            and len(elements) > 0
            and elements[0]["objectKey"]
        ):
            await self.storage_provider.delete_file(object_key=elements[0]["objectKey"])

        query = """DELETE FROM elements WHERE "id" = :id"""
        parameters = {"id": element_id}

        await self.execute_sql(query=query, parameters=parameters)

    async def get_all_user_threads(
        self, user_id: Optional[str] = None, thread_id: Optional[str] = None
    ) -> Optional[List[ThreadDict]]:
        """Fetch all user threads up to self.user_thread_limit, or one thread by id if thread_id is provided."""
        if self.show_logger:
            logger.info("SQLAlchemy: get_all_user_threads")
        user_threads_query = """
            SELECT
                t."id" AS thread_id,
                t."createdAt" AS thread_createdat,
                t."name" AS thread_name,
                t."userId" AS user_id,
                t."userIdentifier" AS user_identifier,
                t."tags" AS thread_tags,
                t."metadata" AS thread_metadata,
                MAX(s."createdAt") AS updatedAt
            FROM threads t
            LEFT JOIN steps s ON t."id" = s."threadId"
            WHERE t."userId" = :user_id OR t."id" = :thread_id
            GROUP BY
                t."id",
                t."createdAt",
                t."name",
                t."userId",
                t."userIdentifier",
                t."tags",
                t."metadata"
            ORDER BY updatedAt DESC NULLS LAST
            LIMIT :limit
        """
        user_threads = await self.execute_sql(
            query=user_threads_query,
            parameters={
                "user_id": user_id,
                "limit": self.user_thread_limit,
                "thread_id": thread_id,
            },
        )
        if not isinstance(user_threads, list):
            return None
        if not user_threads:
            return []
        else:
            thread_ids = (
                "('"
                + "','".join(map(str, [thread["thread_id"] for thread in user_threads]))
                + "')"
            )

            steps_feedbacks_query = f"""
                SELECT
                    s."id" AS step_id,
                    s."name" AS step_name,
                    s."type" AS step_type,
                    s."threadId" AS step_threadid,
                    s."parentId" AS step_parentid,
                    s."streaming" AS step_streaming,
                    s."waitForAnswer" AS step_waitforanswer,
                    s."isError" AS step_iserror,
                    s."metadata" AS step_metadata,
                    s."tags" AS step_tags,
                    s."input" AS step_input,
                    s."output" AS step_output,
                    s."createdAt" AS step_createdat,
                    s."start" AS step_start,
                    s."end" AS step_end,
                    s."generation" AS step_generation,
                    s."showInput" AS step_showinput,
                    s."language" AS step_language,
                    f."value" AS feedback_value,
                    f."comment" AS feedback_comment,
                    f."id" AS feedback_id
                FROM steps s LEFT JOIN feedbacks f ON s."id" = f."forId"
                WHERE s."threadId" IN {thread_ids}
                ORDER BY s."createdAt" ASC
            """
            steps_feedbacks = await self.execute_sql(
                query=steps_feedbacks_query, parameters={}
            )

            elements_query = f"""
                SELECT
                    e."id" AS element_id,
                    e."threadId" as element_threadid,
                    e."type" AS element_type,
                    e."chainlitKey" AS element_chainlitkey,
                    e."url" AS element_url,
                    e."objectKey" as element_objectkey,
                    e."name" AS element_name,
                    e."display" AS element_display,
                    e."size" AS element_size,
                    e."language" AS element_language,
                    e."page" AS element_page,
                    e."forId" AS element_forid,
                    e."mime" AS element_mime,
                    e."props" AS props
                FROM elements e
                WHERE e."threadId" IN {thread_ids}
            """
            elements = await self.execute_sql(query=elements_query, parameters={})

            thread_dicts = {}
            for thread in user_threads:
                thread_id = thread["thread_id"]
                if thread_id is not None:
                    thread_dicts[thread_id] = ThreadDict(
                        id=thread_id,
                        createdAt=thread["thread_createdat"],
                        name=thread["thread_name"],
                        userId=thread["user_id"],
                        userIdentifier=thread["user_identifier"],
                        tags=thread["thread_tags"],
                        metadata=thread["thread_metadata"],
                        steps=[],
                        elements=[],
                    )
            # Process steps_feedbacks to populate the steps in the corresponding ThreadDict
            if isinstance(steps_feedbacks, list):
                for step_feedback in steps_feedbacks:
                    thread_id = step_feedback["step_threadid"]
                    if thread_id is not None:
                        feedback = None
                        if step_feedback["feedback_value"] is not None:
                            feedback = FeedbackDict(
                                forId=step_feedback["step_id"],
                                id=step_feedback.get("feedback_id"),
                                value=step_feedback["feedback_value"],
                                comment=step_feedback.get("feedback_comment"),
                            )
                        step_dict = StepDict(
                            id=step_feedback["step_id"],
                            name=step_feedback["step_name"],
                            type=step_feedback["step_type"],
                            threadId=thread_id,
                            parentId=step_feedback.get("step_parentid"),
                            streaming=step_feedback.get("step_streaming", False),
                            waitForAnswer=step_feedback.get("step_waitforanswer"),
                            isError=step_feedback.get("step_iserror"),
                            metadata=(
                                step_feedback["step_metadata"]
                                if step_feedback.get("step_metadata") is not None
                                else {}
                            ),
                            tags=step_feedback.get("step_tags"),
                            input=(
                                step_feedback.get("step_input", "")
                                if step_feedback.get("step_showinput")
                                not in [None, "false"]
                                else ""
                            ),
                            output=step_feedback.get("step_output", ""),
                            createdAt=step_feedback.get("step_createdat"),
                            start=step_feedback.get("step_start"),
                            end=step_feedback.get("step_end"),
                            generation=step_feedback.get("step_generation"),
                            showInput=step_feedback.get("step_showinput"),
                            language=step_feedback.get("step_language"),
                            feedback=feedback,
                        )
                        # Append the step to the steps list of the corresponding ThreadDict
                        thread_dicts[thread_id]["steps"].append(step_dict)

            if isinstance(elements, list):
                for element in elements:
                    thread_id = element["element_threadid"]
                    if thread_id is not None:
                        element_dict = ElementDict(
                            id=element["element_id"],
                            threadId=thread_id,
                            type=element["element_type"],
                            chainlitKey=element.get("element_chainlitkey"),
                            url=element.get("element_url"),
                            objectKey=element.get("element_objectkey"),
                            name=element["element_name"],
                            display=element["element_display"],
                            size=element.get("element_size"),
                            language=element.get("element_language"),
                            autoPlay=element.get("element_autoPlay"),
                            playerConfig=element.get("element_playerconfig"),
                            page=element.get("element_page"),
                            props=element.get("props", "{}"),
                            forId=element.get("element_forid"),
                            mime=element.get("element_mime"),
                        )
                        thread_dicts[thread_id]["elements"].append(element_dict)  # type: ignore

            return list(thread_dicts.values())
```
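For context, here is a minimal sketch (not part of the diff) of how a data layer like the `SQLAlchemyDataLayer` above is typically registered in a Chainlit app via the `@cl.data_layer` hook; the connection string is a placeholder and the keyword arguments come from the constructor shown in the deleted file.

```python
# Hedged usage sketch: wires the SQLAlchemyDataLayer shown above into a Chainlit app.
# The asyncpg connection string is a placeholder, not a value from this release.
import chainlit as cl
from chainlit.data.sql_alchemy import SQLAlchemyDataLayer


@cl.data_layer
def get_data_layer():
    return SQLAlchemyDataLayer(
        conninfo="postgresql+asyncpg://user:password@localhost:5432/chainlit",
        show_logger=True,
    )
```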