chainlit 1.0.501__py3-none-any.whl → 1.0.503__py3-none-any.whl
This diff shows the published contents of two package versions as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release: this version of chainlit has been flagged by the registry scanner.
- chainlit/cli/__init__.py +1 -1
- chainlit/copilot/dist/index.js +1 -1
- chainlit/data/__init__.py +16 -4
- chainlit/data/sql_alchemy.py +494 -0
- chainlit/data/storage_clients.py +58 -0
- chainlit/emitter.py +4 -0
- chainlit/frontend/dist/assets/{index-e306c2e5.js → index-a8e1b559.js} +2 -2
- chainlit/frontend/dist/assets/{react-plotly-cc656f1c.js → react-plotly-b225b63c.js} +1 -1
- chainlit/frontend/dist/index.html +1 -1
- chainlit/socket.py +3 -0
- chainlit/types.py +53 -1
- {chainlit-1.0.501.dist-info → chainlit-1.0.503.dist-info}/METADATA +2 -2
- {chainlit-1.0.501.dist-info → chainlit-1.0.503.dist-info}/RECORD +15 -13
- {chainlit-1.0.501.dist-info → chainlit-1.0.503.dist-info}/WHEEL +0 -0
- {chainlit-1.0.501.dist-info → chainlit-1.0.503.dist-info}/entry_points.txt +0 -0
chainlit/data/sql_alchemy.py
ADDED
@@ -0,0 +1,494 @@
+import uuid
+import ssl
+from datetime import datetime, timezone
+import json
+from typing import Any, Dict, List, Optional, Union, TYPE_CHECKING
+import aiofiles
+import aiohttp
+from dataclasses import asdict
+from sqlalchemy import text
+from sqlalchemy.exc import SQLAlchemyError
+from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, AsyncEngine
+from sqlalchemy.orm import sessionmaker
+from chainlit.context import context
+from chainlit.logger import logger
+from chainlit.data import BaseDataLayer, BaseStorageClient, queue_until_user_message
+from chainlit.user import User, PersistedUser
+from chainlit.types import Feedback, FeedbackDict, Pagination, ThreadDict, ThreadFilter, PageInfo, PaginatedResponse
+from chainlit.step import StepDict
+from chainlit.element import ElementDict, Avatar
+
+if TYPE_CHECKING:
+    from chainlit.element import Element, ElementDict
+    from chainlit.step import StepDict
+
+class SQLAlchemyDataLayer(BaseDataLayer):
+    def __init__(self, conninfo: str, ssl_require: bool = False, storage_provider: Optional[BaseStorageClient] = None, user_thread_limit: Optional[int] = 1000):
+        self._conninfo = conninfo
+        self.user_thread_limit = user_thread_limit
+        ssl_args = {}
+        if ssl_require:
+            # Create an SSL context to require an SSL connection
+            ssl_context = ssl.create_default_context()
+            ssl_context.check_hostname = False
+            ssl_context.verify_mode = ssl.CERT_NONE
+            ssl_args['ssl'] = ssl_context
+        self.engine: AsyncEngine = create_async_engine(self._conninfo, connect_args=ssl_args)
+        self.async_session = sessionmaker(bind=self.engine, expire_on_commit=False, class_=AsyncSession)  # type: ignore
+        if storage_provider:
+            self.storage_provider = storage_provider
+            logger.info("SQLAlchemyDataLayer storage client initialized")
+        else:
+            self.storage_provider = None  # ensure the attribute exists; create_element checks it
+            logger.warn("SQLAlchemyDataLayer storage client is not initialized and elements will not be persisted!")
+
+    ###### SQL Helpers ######
+    async def execute_sql(self, query: str, parameters: dict) -> Union[List[Dict[str, Any]], int, None]:
+        parameterized_query = text(query)
+        async with self.async_session() as session:
+            try:
+                await session.begin()
+                result = await session.execute(parameterized_query, parameters)
+                await session.commit()
+                if result.returns_rows:
+                    json_result = [dict(row._mapping) for row in result.fetchall()]
+                    clean_json_result = self.clean_result(json_result)
+                    return clean_json_result
+                else:
+                    return result.rowcount
+            except SQLAlchemyError as e:
+                await session.rollback()
+                logger.warn(f"An error occurred: {e}")
+                return None
+            except Exception as e:
+                await session.rollback()
+                logger.warn(f"An unexpected error occurred: {e}")
+                return None
+
+    async def get_current_timestamp(self) -> str:
+        return datetime.now().isoformat() + "Z"
+
+    def clean_result(self, obj):
+        """Recursively change UUID -> str and serialize dictionaries"""
+        if isinstance(obj, dict):
+            return {k: self.clean_result(v) for k, v in obj.items()}
+        elif isinstance(obj, list):
+            return [self.clean_result(item) for item in obj]
+        elif isinstance(obj, uuid.UUID):
+            return str(obj)
+        return obj
+
+    ###### User ######
+    async def get_user(self, identifier: str) -> Optional[PersistedUser]:
+        logger.info(f"SQLAlchemy: get_user, identifier={identifier}")
+        query = "SELECT * FROM users WHERE identifier = :identifier"
+        parameters = {"identifier": identifier}
+        result = await self.execute_sql(query=query, parameters=parameters)
+        if result and isinstance(result, list):
+            user_data = result[0]
+            return PersistedUser(**user_data)
+        return None
+
+    async def create_user(self, user: User) -> Optional[PersistedUser]:
+        logger.info(f"SQLAlchemy: create_user, user_identifier={user.identifier}")
+        existing_user: Optional['PersistedUser'] = await self.get_user(user.identifier)
+        user_dict: Dict[str, Any] = {
+            "identifier": str(user.identifier),
+            "metadata": json.dumps(user.metadata) or {}
+        }
+        if not existing_user:  # create the user
+            logger.info("SQLAlchemy: create_user, creating the user")
+            user_dict['id'] = str(uuid.uuid4())
+            user_dict['createdAt'] = await self.get_current_timestamp()
+            query = """INSERT INTO users ("id", "identifier", "createdAt", "metadata") VALUES (:id, :identifier, :createdAt, :metadata)"""
+            await self.execute_sql(query=query, parameters=user_dict)
+        else:  # update the user
+            logger.info("SQLAlchemy: update user metadata")
+            query = """UPDATE users SET "metadata" = :metadata WHERE "identifier" = :identifier"""
+            await self.execute_sql(query=query, parameters=user_dict)  # We want to update the metadata
+        return await self.get_user(user.identifier)
+
+    ###### Threads ######
+    async def get_thread_author(self, thread_id: str) -> str:
+        logger.info(f"SQLAlchemy: get_thread_author, thread_id={thread_id}")
+        query = """SELECT "userIdentifier" FROM threads WHERE "id" = :id"""
+        parameters = {"id": thread_id}
+        result = await self.execute_sql(query=query, parameters=parameters)
+        if isinstance(result, list) and result[0]:
+            author_identifier = result[0].get('userIdentifier')
+            if author_identifier is not None:
+                print(f'Author found: {author_identifier}')
+                return author_identifier
+        raise ValueError(f"Author not found for thread_id {thread_id}")
+
+    async def get_thread(self, thread_id: str) -> Optional[ThreadDict]:
+        logger.info(f"SQLAlchemy: get_thread, thread_id={thread_id}")
+        user_threads: Optional[List[ThreadDict]] = await self.get_all_user_threads(thread_id=thread_id)
+        if user_threads:
+            return user_threads[0]
+        else:
+            return None
+
+    async def update_thread(self, thread_id: str, name: Optional[str] = None, user_id: Optional[str] = None, metadata: Optional[Dict] = None, tags: Optional[List[str]] = None):
+        logger.info(f"SQLAlchemy: update_thread, thread_id={thread_id}")
+        if context.session.user is not None:
+            user_identifier = context.session.user.identifier
+        else:
+            raise ValueError("User not found in session context")
+        data = {
+            "id": thread_id,
+            "createdAt": await self.get_current_timestamp() if metadata is None else None,
+            "name": name if name is not None else (metadata.get('name') if metadata and 'name' in metadata else None),
+            "userId": user_id,
+            "userIdentifier": user_identifier,
+            "tags": tags,
+            "metadata": json.dumps(metadata) if metadata else None,
+        }
+        parameters = {key: value for key, value in data.items() if value is not None}  # Remove keys with None values
+        columns = ', '.join(f'"{key}"' for key in parameters.keys())
+        values = ', '.join(f':{key}' for key in parameters.keys())
+        updates = ', '.join(f'"{key}" = EXCLUDED."{key}"' for key in parameters.keys() if key != 'id')
+        query = f"""
+            INSERT INTO threads ({columns})
+            VALUES ({values})
+            ON CONFLICT ("id") DO UPDATE
+            SET {updates};
+        """
+        await self.execute_sql(query=query, parameters=parameters)
+
+    async def delete_thread(self, thread_id: str):
+        logger.info(f"SQLAlchemy: delete_thread, thread_id={thread_id}")
+        # Delete feedbacks/elements/steps/thread
+        feedbacks_query = """DELETE FROM feedbacks WHERE "forId" IN (SELECT "id" FROM steps WHERE "threadId" = :id)"""
+        elements_query = """DELETE FROM elements WHERE "threadId" = :id"""
+        steps_query = """DELETE FROM steps WHERE "threadId" = :id"""
+        thread_query = """DELETE FROM threads WHERE "id" = :id"""
+        parameters = {"id": thread_id}
+        await self.execute_sql(query=feedbacks_query, parameters=parameters)
+        await self.execute_sql(query=elements_query, parameters=parameters)
+        await self.execute_sql(query=steps_query, parameters=parameters)
+        await self.execute_sql(query=thread_query, parameters=parameters)
+
+    async def list_threads(self, pagination: Pagination, filters: ThreadFilter) -> PaginatedResponse:
+        logger.info(f"SQLAlchemy: list_threads, pagination={pagination}, filters={filters}")
+        if not filters.userId:
+            raise ValueError("userId is required")
+        all_user_threads: List[ThreadDict] = await self.get_all_user_threads(user_id=filters.userId) or []
+
+        search_keyword = filters.search.lower() if filters.search else None
+        feedback_value = int(filters.feedback) if filters.feedback else None
+
+        filtered_threads = []
+        for thread in all_user_threads:
+            keyword_match = True
+            feedback_match = True
+            if search_keyword or feedback_value is not None:
+                if search_keyword:
+                    keyword_match = any(search_keyword in step['output'].lower() for step in thread['steps'] if 'output' in step)
+                if feedback_value is not None:
+                    feedback_match = False  # Assume no match until found
+                    for step in thread['steps']:
+                        feedback = step.get('feedback')
+                        if feedback and feedback.get('value') == feedback_value:
+                            feedback_match = True
+                            break
+            if keyword_match and feedback_match:
+                filtered_threads.append(thread)
+
+        start = 0
+        if pagination.cursor:
+            for i, thread in enumerate(filtered_threads):
+                if thread['id'] == pagination.cursor:  # Find the start index using pagination.cursor
+                    start = i + 1
+                    break
+        end = start + pagination.first
+        paginated_threads = filtered_threads[start:end] or []
+
+        has_next_page = len(filtered_threads) > end
+        start_cursor = paginated_threads[0]['id'] if paginated_threads else None
+        end_cursor = paginated_threads[-1]['id'] if paginated_threads else None
+
+        return PaginatedResponse(
+            pageInfo=PageInfo(hasNextPage=has_next_page, startCursor=start_cursor, endCursor=end_cursor),
+            data=paginated_threads
+        )
+
+    ###### Steps ######
+    @queue_until_user_message()
+    async def create_step(self, step_dict: 'StepDict'):
+        logger.info(f"SQLAlchemy: create_step, step_id={step_dict.get('id')}")
+        if not getattr(context.session.user, 'id', None):
+            raise ValueError("No authenticated user in context")
+        step_dict['showInput'] = str(step_dict.get('showInput', '')).lower() if 'showInput' in step_dict else None
+        parameters = {key: value for key, value in step_dict.items() if value is not None and not (isinstance(value, dict) and not value)}
+        parameters['metadata'] = json.dumps(step_dict.get('metadata', {}))
+        columns = ', '.join(f'"{key}"' for key in parameters.keys())
+        values = ', '.join(f':{key}' for key in parameters.keys())
+        updates = ', '.join(f'"{key}" = :{key}' for key in parameters.keys() if key != 'id')
+        query = f"""
+            INSERT INTO steps ({columns})
+            VALUES ({values})
+            ON CONFLICT (id) DO UPDATE
+            SET {updates};
+        """
+        await self.execute_sql(query=query, parameters=parameters)
+
+    @queue_until_user_message()
+    async def update_step(self, step_dict: 'StepDict'):
+        logger.info(f"SQLAlchemy: update_step, step_id={step_dict.get('id')}")
+        await self.create_step(step_dict)
+
+    @queue_until_user_message()
+    async def delete_step(self, step_id: str):
+        logger.info(f"SQLAlchemy: delete_step, step_id={step_id}")
+        # Delete feedbacks/elements/steps
+        feedbacks_query = """DELETE FROM feedbacks WHERE "forId" = :id"""
+        elements_query = """DELETE FROM elements WHERE "forId" = :id"""
+        steps_query = """DELETE FROM steps WHERE "id" = :id"""
+        parameters = {"id": step_id}
+        await self.execute_sql(query=feedbacks_query, parameters=parameters)
+        await self.execute_sql(query=elements_query, parameters=parameters)
+        await self.execute_sql(query=steps_query, parameters=parameters)
+
+    ###### Feedback ######
+    async def upsert_feedback(self, feedback: Feedback) -> str:
+        logger.info(f"SQLAlchemy: upsert_feedback, feedback_id={feedback.id}")
+        feedback.id = feedback.id or str(uuid.uuid4())
+        feedback_dict = asdict(feedback)
+        parameters = {key: value for key, value in feedback_dict.items() if value is not None}
+
+        columns = ', '.join(f'"{key}"' for key in parameters.keys())
+        values = ', '.join(f':{key}' for key in parameters.keys())
+        updates = ', '.join(f'"{key}" = :{key}' for key in parameters.keys() if key != 'id')
+        query = f"""
+            INSERT INTO feedbacks ({columns})
+            VALUES ({values})
+            ON CONFLICT (id) DO UPDATE
+            SET {updates};
+        """
+        await self.execute_sql(query=query, parameters=parameters)
+        return feedback.id
+
+    async def delete_feedback(self, feedback_id: str) -> bool:
+        logger.info(f"SQLAlchemy: delete_feedback, feedback_id={feedback_id}")
+        query = """DELETE FROM feedbacks WHERE "id" = :feedback_id"""
+        parameters = {"feedback_id": feedback_id}
+        await self.execute_sql(query=query, parameters=parameters)
+        return True
+
+    ###### Elements ######
+    @queue_until_user_message()
+    async def create_element(self, element: 'Element'):
+        logger.info(f"SQLAlchemy: create_element, element_id = {element.id}")
+        if not getattr(context.session.user, 'id', None):
+            raise ValueError("No authenticated user in context")
+        if isinstance(element, Avatar):  # Skip creating elements of type avatar
+            return
+        if not self.storage_provider:
+            logger.warn(f"SQLAlchemy: create_element error. No blob_storage_client is configured!")
+            return
+        if not element.for_id:
+            return
+
+        content: Optional[Union[bytes, str]] = None
+
+        if element.path:
+            async with aiofiles.open(element.path, "rb") as f:
+                content = await f.read()
+        elif element.url:
+            async with aiohttp.ClientSession() as session:
+                async with session.get(element.url) as response:
+                    if response.status == 200:
+                        content = await response.read()
+                    else:
+                        content = None
+        elif element.content:
+            content = element.content
+        else:
+            raise ValueError("Element url, path or content must be provided")
+        if content is None:
+            raise ValueError("Content is None, cannot upload file")
+
+        context_user = context.session.user
+
+        user_folder = getattr(context_user, 'id', 'unknown')
+        file_object_key = f"{user_folder}/{element.id}" + (f"/{element.name}" if element.name else "")
+
+        if not element.mime:
+            element.mime = "application/octet-stream"
+
+        uploaded_file = await self.storage_provider.upload_file(object_key=file_object_key, data=content, mime=element.mime, overwrite=True)
+        if not uploaded_file:
+            raise ValueError("SQLAlchemy Error: create_element, Failed to persist data in storage_provider")
+
+        element_dict: ElementDict = element.to_dict()
+
+        element_dict['url'] = uploaded_file.get('url')
+        element_dict['objectKey'] = uploaded_file.get('object_key')
+        element_dict_cleaned = {k: v for k, v in element_dict.items() if v is not None}
+
+        columns = ', '.join(f'"{column}"' for column in element_dict_cleaned.keys())
+        placeholders = ', '.join(f':{column}' for column in element_dict_cleaned.keys())
+        query = f"INSERT INTO elements ({columns}) VALUES ({placeholders})"
+        await self.execute_sql(query=query, parameters=element_dict_cleaned)
+
+    @queue_until_user_message()
+    async def delete_element(self, element_id: str):
+        logger.info(f"SQLAlchemy: delete_element, element_id={element_id}")
+        query = """DELETE FROM elements WHERE "id" = :id"""
+        parameters = {"id": element_id}
+        await self.execute_sql(query=query, parameters=parameters)
+
+    async def delete_user_session(self, id: str) -> bool:
+        return False  # Not sure why documentation wants this
+
+    async def get_all_user_threads(self, user_id: Optional[str] = None, thread_id: Optional[str] = None) -> Optional[List[ThreadDict]]:
+        """Fetch all user threads up to self.user_thread_limit, or one thread by id if thread_id is provided."""
+        logger.info(f"SQLAlchemy: get_all_user_threads")
+        user_threads_query = """
+            SELECT
+                "id" AS thread_id,
+                "createdAt" AS thread_createdat,
+                "name" AS thread_name,
+                "userId" AS user_id,
+                "userIdentifier" AS user_identifier,
+                "tags" AS thread_tags,
+                "metadata" AS thread_metadata
+            FROM threads
+            WHERE "userId" = :user_id OR "id" = :thread_id
+            ORDER BY "createdAt" DESC
+            LIMIT :limit
+        """
+        user_threads = await self.execute_sql(query=user_threads_query, parameters={"user_id": user_id, "limit": self.user_thread_limit, "thread_id": thread_id})
+        if not isinstance(user_threads, list):
+            return None
+        if not user_threads:
+            return []
+        else:
+            thread_ids = "('" + "','".join(map(str, [thread['thread_id'] for thread in user_threads])) + "')"
+
+        steps_feedbacks_query = f"""
+            SELECT
+                s."id" AS step_id,
+                s."name" AS step_name,
+                s."type" AS step_type,
+                s."threadId" AS step_threadid,
+                s."parentId" AS step_parentid,
+                s."disableFeedback" AS step_disablefeedback,
+                s."streaming" AS step_streaming,
+                s."waitForAnswer" AS step_waitforanswer,
+                s."isError" AS step_iserror,
+                s."metadata" AS step_metadata,
+                s."tags" AS step_tags,
+                s."input" AS step_input,
+                s."output" AS step_output,
+                s."createdAt" AS step_createdat,
+                s."start" AS step_start,
+                s."end" AS step_end,
+                s."generation" AS step_generation,
+                s."showInput" AS step_showinput,
+                s."language" AS step_language,
+                s."indent" AS step_indent,
+                f."value" AS feedback_value,
+                f."comment" AS feedback_comment
+            FROM steps s LEFT JOIN feedbacks f ON s."id" = f."forId"
+            WHERE s."threadId" IN {thread_ids}
+            ORDER BY s."createdAt" ASC
+        """
+        steps_feedbacks = await self.execute_sql(query=steps_feedbacks_query, parameters={})
+
+        elements_query = f"""
+            SELECT
+                e."id" AS element_id,
+                e."threadId" AS element_threadid,
+                e."type" AS element_type,
+                e."chainlitKey" AS element_chainlitkey,
+                e."url" AS element_url,
+                e."objectKey" AS element_objectkey,
+                e."name" AS element_name,
+                e."display" AS element_display,
+                e."size" AS element_size,
+                e."language" AS element_language,
+                e."page" AS element_page,
+                e."forId" AS element_forid,
+                e."mime" AS element_mime
+            FROM elements e
+            WHERE e."threadId" IN {thread_ids}
+        """
+        elements = await self.execute_sql(query=elements_query, parameters={})
+
+        thread_dicts = {}
+        for thread in user_threads:
+            thread_id = thread['thread_id']
+            if thread_id is not None:
+                thread_dicts[thread_id] = ThreadDict(
+                    id=thread_id,
+                    createdAt=thread['thread_createdat'],
+                    name=thread['thread_name'],
+                    userId=thread['user_id'],
+                    userIdentifier=thread['user_identifier'],
+                    tags=thread['thread_tags'],
+                    metadata=thread['thread_metadata'],
+                    steps=[],
+                    elements=[]
+                )
+        # Process steps_feedbacks to populate the steps in the corresponding ThreadDict
+        if isinstance(steps_feedbacks, list):
+            for step_feedback in steps_feedbacks:
+                thread_id = step_feedback['step_threadid']
+                if thread_id is not None:
+                    feedback = None
+                    if step_feedback['feedback_value'] is not None:
+                        feedback = FeedbackDict(
+                            forId=step_feedback['step_id'],
+                            id=step_feedback.get('feedback_id'),
+                            value=step_feedback['feedback_value'],
+                            comment=step_feedback.get('feedback_comment')
+                        )
+                    step_dict = StepDict(
+                        id=step_feedback['step_id'],
+                        name=step_feedback['step_name'],
+                        type=step_feedback['step_type'],
+                        threadId=thread_id,
+                        parentId=step_feedback.get('step_parentid'),
+                        disableFeedback=step_feedback.get('step_disablefeedback', False),
+                        streaming=step_feedback.get('step_streaming', False),
+                        waitForAnswer=step_feedback.get('step_waitforanswer'),
+                        isError=step_feedback.get('step_iserror'),
+                        metadata=step_feedback['step_metadata'] if step_feedback.get('step_metadata') is not None else {},
+                        tags=step_feedback.get('step_tags'),
+                        input=step_feedback.get('step_input', '') if step_feedback['step_showinput'] else '',
+                        output=step_feedback.get('step_output', ''),
+                        createdAt=step_feedback.get('step_createdat'),
+                        start=step_feedback.get('step_start'),
+                        end=step_feedback.get('step_end'),
+                        generation=step_feedback.get('step_generation'),
+                        showInput=step_feedback.get('step_showinput'),
+                        language=step_feedback.get('step_language'),
+                        indent=step_feedback.get('step_indent'),
+                        feedback=feedback
+                    )
+                    # Append the step to the steps list of the corresponding ThreadDict
+                    thread_dicts[thread_id]['steps'].append(step_dict)
+
+        if isinstance(elements, list):
+            for element in elements:
+                thread_id = element['element_threadid']
+                if thread_id is not None:
+                    element_dict = ElementDict(
+                        id=element['element_id'],
+                        threadId=thread_id,
+                        type=element['element_type'],
+                        chainlitKey=element.get('element_chainlitkey'),
+                        url=element.get('element_url'),
+                        objectKey=element.get('element_objectkey'),
+                        name=element['element_name'],
+                        display=element['element_display'],
+                        size=element.get('element_size'),
+                        language=element.get('element_language'),
+                        page=element.get('element_page'),
+                        forId=element.get('element_forid'),
+                        mime=element.get('element_mime'),
+                    )
+                    thread_dicts[thread_id]['elements'].append(element_dict)  # type: ignore
+
+        return list(thread_dicts.values())
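Note that the data layer above only reads and writes these tables; nothing in this release creates them. For orientation, the sketch below provisions a matching Postgres schema with the same async engine. It is an illustration only: the table and column names come straight from the queries above, but every column type, the DSN, and the constraint choices are assumptions, not part of the package.

# provision_schema.py -- hypothetical helper, not shipped with chainlit.
# Table/column names mirror the queries in sql_alchemy.py above; types are guesses.
import asyncio
from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine

DDL = [
    """CREATE TABLE IF NOT EXISTS users (
        "id" UUID PRIMARY KEY,
        "identifier" TEXT NOT NULL UNIQUE,
        "metadata" JSONB NOT NULL,
        "createdAt" TEXT
    )""",
    """CREATE TABLE IF NOT EXISTS threads (
        "id" UUID PRIMARY KEY,
        "createdAt" TEXT,
        "name" TEXT,
        "userId" UUID,
        "userIdentifier" TEXT,
        "tags" TEXT[],
        "metadata" JSONB
    )""",
    """CREATE TABLE IF NOT EXISTS steps (
        "id" UUID PRIMARY KEY,
        "name" TEXT,
        "type" TEXT,
        "threadId" UUID,
        "parentId" UUID,
        "disableFeedback" BOOLEAN,
        "streaming" BOOLEAN,
        "waitForAnswer" BOOLEAN,
        "isError" BOOLEAN,
        "metadata" JSONB,
        "tags" TEXT[],
        "input" TEXT,
        "output" TEXT,
        "createdAt" TEXT,
        "start" TEXT,
        "end" TEXT,
        "generation" JSONB,
        "showInput" TEXT,
        "language" TEXT,
        "indent" INT
    )""",
    """CREATE TABLE IF NOT EXISTS elements (
        "id" UUID PRIMARY KEY,
        "threadId" UUID,
        "type" TEXT,
        "chainlitKey" TEXT,
        "url" TEXT,
        "objectKey" TEXT,
        "name" TEXT,
        "display" TEXT,
        "size" TEXT,
        "language" TEXT,
        "page" INT,
        "forId" UUID,
        "mime" TEXT
    )""",
    """CREATE TABLE IF NOT EXISTS feedbacks (
        "id" UUID PRIMARY KEY,
        "forId" UUID,
        "value" INT,
        "comment" TEXT
    )""",
]

async def main():
    # Placeholder DSN; any SQLAlchemy async driver should work.
    engine = create_async_engine("postgresql+asyncpg://user:pass@localhost:5432/chainlit")
    async with engine.begin() as conn:
        for statement in DDL:
            await conn.execute(text(statement))

asyncio.run(main())

If your Feedback dataclass carries more fields than the four inserted here, upsert_feedback will need matching columns.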
chainlit/data/storage_clients.py
ADDED
@@ -0,0 +1,58 @@
+from chainlit.data import BaseStorageClient
+from chainlit.logger import logger
+from typing import TYPE_CHECKING, Optional, Dict, Union, Any
+from azure.storage.filedatalake import DataLakeServiceClient, FileSystemClient, DataLakeFileClient, ContentSettings
+import boto3  # type: ignore
+
+if TYPE_CHECKING:
+    from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential
+
+class AzureStorageClient(BaseStorageClient):
+    """
+    Class to enable Azure Data Lake Storage (ADLS) Gen2
+
+    params:
+        account_url: "https://<your_account>.dfs.core.windows.net"
+        credential: Access credential (AzureKeyCredential)
+        sas_token: Optionally include SAS token to append to urls
+    """
+    def __init__(self, account_url: str, container: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]], sas_token: Optional[str] = None):
+        try:
+            self.data_lake_client = DataLakeServiceClient(account_url=account_url, credential=credential)
+            self.container_client: FileSystemClient = self.data_lake_client.get_file_system_client(file_system=container)
+            self.sas_token = sas_token
+            logger.info("AzureStorageClient initialized")
+        except Exception as e:
+            logger.warn(f"AzureStorageClient initialization error: {e}")
+
+    async def upload_file(self, object_key: str, data: Union[bytes, str], mime: str = 'application/octet-stream', overwrite: bool = True) -> Dict[str, Any]:
+        try:
+            file_client: DataLakeFileClient = self.container_client.get_file_client(object_key)
+            content_settings = ContentSettings(content_type=mime)
+            file_client.upload_data(data, overwrite=overwrite, content_settings=content_settings)
+            url = f"{file_client.url}{self.sas_token}" if self.sas_token else file_client.url
+            return {"object_key": object_key, "url": url}
+        except Exception as e:
+            logger.warn(f"AzureStorageClient, upload_file error: {e}")
+            return {}
+
+class S3StorageClient(BaseStorageClient):
+    """
+    Class to enable Amazon S3 storage provider
+    """
+    def __init__(self, bucket: str):
+        try:
+            self.bucket = bucket
+            self.client = boto3.client("s3")
+            logger.info("S3StorageClient initialized")
+        except Exception as e:
+            logger.warn(f"S3StorageClient initialization error: {e}")
+
+    async def upload_file(self, object_key: str, data: Union[bytes, str], mime: str = 'application/octet-stream', overwrite: bool = True) -> Dict[str, Any]:
+        try:
+            self.client.put_object(Bucket=self.bucket, Key=object_key, Body=data, ContentType=mime)
+            url = f"https://{self.bucket}.s3.amazonaws.com/{object_key}"
+            return {"object_key": object_key, "url": url}
+        except Exception as e:
+            logger.warn(f"S3StorageClient, upload_file error: {e}")
+            return {}
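Neither new module is activated automatically. A minimal wiring sketch, assuming the pattern Chainlit documented around this release (assigning chainlit.data._data_layer directly); the DSN, bucket name, and credentials are placeholders:

# app.py -- illustrative wiring, with placeholder names and credentials.
import chainlit as cl
import chainlit.data as cl_data
from chainlit.data.sql_alchemy import SQLAlchemyDataLayer
from chainlit.data.storage_clients import S3StorageClient

# S3StorageClient picks up AWS credentials from the environment via boto3.
storage_client = S3StorageClient(bucket="my-chainlit-elements")

cl_data._data_layer = SQLAlchemyDataLayer(
    conninfo="postgresql+asyncpg://user:pass@localhost:5432/chainlit",
    storage_provider=storage_client,
)

# create_step/create_element require an authenticated user in the session
# context, so some auth callback is needed; this check is demo-only.
@cl.password_auth_callback
def auth(username: str, password: str):
    if (username, password) == ("admin", "admin"):
        return cl.User(identifier="admin")
    return None

@cl.on_message
async def on_message(message: cl.Message):
    await cl.Message(content=f"Received: {message.content}").send()

AzureStorageClient can be swapped in the same way by passing account_url, container, and credential instead of a bucket.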
chainlit/emitter.py
CHANGED
@@ -175,10 +175,14 @@ class ChainlitEmitter(BaseChainlitEmitter):
         else:
             user_id = None
         try:
+            tags = (
+                [self.session.chat_profile] if self.session.chat_profile else None
+            )
             await data_layer.update_thread(
                 thread_id=self.session.thread_id,
                 name=interaction,
                 user_id=user_id,
+                tags=tags,
             )
         except Exception as e:
             logger.error(f"Error updating thread: {e}")
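The emitter change tags each persisted thread with the session's chat profile, when one is selected. A profile only exists if the app declares some; a small sketch with illustrative profile names:

import chainlit as cl

@cl.set_chat_profiles
async def chat_profiles():
    # The name of whichever profile the user selects is what ends up
    # in the thread's tags via update_thread above.
    return [
        cl.ChatProfile(name="GPT-3.5", markdown_description="Faster, cheaper answers."),
        cl.ChatProfile(name="GPT-4", markdown_description="Slower, higher quality."),
    ]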