chainlit 2.0.0__py3-none-any.whl → 2.0.dev0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.


This version of chainlit might be problematic.

Files changed (98)
  1. chainlit/__init__.py +57 -56
  2. chainlit/action.py +10 -12
  3. chainlit/{auth/__init__.py → auth.py} +34 -26
  4. chainlit/cache.py +6 -4
  5. chainlit/callbacks.py +7 -52
  6. chainlit/chat_context.py +2 -2
  7. chainlit/chat_settings.py +1 -3
  8. chainlit/cli/__init__.py +2 -15
  9. chainlit/config.py +70 -41
  10. chainlit/context.py +9 -8
  11. chainlit/copilot/dist/index.js +874 -8533
  12. chainlit/data/__init__.py +8 -96
  13. chainlit/data/acl.py +2 -3
  14. chainlit/data/base.py +15 -1
  15. chainlit/data/dynamodb.py +4 -7
  16. chainlit/data/literalai.py +6 -4
  17. chainlit/data/sql_alchemy.py +9 -10
  18. chainlit/data/{storage_clients/azure.py → storage_clients.py} +33 -2
  19. chainlit/discord/__init__.py +4 -4
  20. chainlit/discord/app.py +1 -2
  21. chainlit/element.py +9 -41
  22. chainlit/emitter.py +21 -17
  23. chainlit/frontend/dist/assets/DailyMotion-b4b7af47.js +1 -0
  24. chainlit/frontend/dist/assets/Facebook-572972a0.js +1 -0
  25. chainlit/frontend/dist/assets/FilePlayer-85c69ca8.js +1 -0
  26. chainlit/frontend/dist/assets/Kaltura-dfc24672.js +1 -0
  27. chainlit/frontend/dist/assets/Mixcloud-705011f4.js +1 -0
  28. chainlit/frontend/dist/assets/Mux-4201a9e6.js +1 -0
  29. chainlit/frontend/dist/assets/Preview-23ba40a6.js +1 -0
  30. chainlit/frontend/dist/assets/SoundCloud-1a582d51.js +1 -0
  31. chainlit/frontend/dist/assets/Streamable-5017c4ba.js +1 -0
  32. chainlit/frontend/dist/assets/Twitch-bb2de2fa.js +1 -0
  33. chainlit/frontend/dist/assets/Vidyard-54e269b1.js +1 -0
  34. chainlit/frontend/dist/assets/Vimeo-d92c37dd.js +1 -0
  35. chainlit/frontend/dist/assets/Wistia-25a1363b.js +1 -0
  36. chainlit/frontend/dist/assets/YouTube-616e8cb7.js +1 -0
  37. chainlit/frontend/dist/assets/index-aaf974a9.css +1 -0
  38. chainlit/frontend/dist/assets/index-f5df2072.js +1027 -0
  39. chainlit/frontend/dist/assets/{react-plotly-BpxUS-ab.js → react-plotly-f0315f86.js} +94 -94
  40. chainlit/frontend/dist/index.html +3 -2
  41. chainlit/haystack/callbacks.py +4 -5
  42. chainlit/input_widget.py +4 -6
  43. chainlit/langchain/callbacks.py +47 -56
  44. chainlit/langflow/__init__.py +0 -1
  45. chainlit/llama_index/callbacks.py +7 -7
  46. chainlit/message.py +10 -8
  47. chainlit/mistralai/__init__.py +2 -3
  48. chainlit/oauth_providers.py +12 -113
  49. chainlit/openai/__init__.py +7 -6
  50. chainlit/secret.py +1 -1
  51. chainlit/server.py +181 -491
  52. chainlit/session.py +5 -7
  53. chainlit/slack/__init__.py +3 -3
  54. chainlit/slack/app.py +2 -3
  55. chainlit/socket.py +103 -78
  56. chainlit/step.py +29 -21
  57. chainlit/sync.py +1 -2
  58. chainlit/teams/__init__.py +3 -3
  59. chainlit/teams/app.py +0 -1
  60. chainlit/types.py +4 -20
  61. chainlit/user.py +1 -2
  62. chainlit/utils.py +2 -3
  63. chainlit/version.py +2 -3
  64. {chainlit-2.0.0.dist-info → chainlit-2.0.dev0.dist-info}/METADATA +39 -27
  65. chainlit-2.0.dev0.dist-info/RECORD +96 -0
  66. chainlit/auth/cookie.py +0 -123
  67. chainlit/auth/jwt.py +0 -37
  68. chainlit/data/chainlit_data_layer.py +0 -584
  69. chainlit/data/storage_clients/__init__.py +0 -0
  70. chainlit/data/storage_clients/azure_blob.py +0 -80
  71. chainlit/data/storage_clients/base.py +0 -22
  72. chainlit/data/storage_clients/gcs.py +0 -78
  73. chainlit/data/storage_clients/s3.py +0 -49
  74. chainlit/frontend/dist/assets/DailyMotion-DgRzV5GZ.js +0 -1
  75. chainlit/frontend/dist/assets/Dataframe-DVgwSMU2.js +0 -22
  76. chainlit/frontend/dist/assets/Facebook-C0vx6HWv.js +0 -1
  77. chainlit/frontend/dist/assets/FilePlayer-CdhzeHPP.js +0 -1
  78. chainlit/frontend/dist/assets/Kaltura-5iVmeUct.js +0 -1
  79. chainlit/frontend/dist/assets/Mixcloud-C2zi77Ex.js +0 -1
  80. chainlit/frontend/dist/assets/Mux-Vkebogdf.js +0 -1
  81. chainlit/frontend/dist/assets/Preview-DwY_sEIl.js +0 -1
  82. chainlit/frontend/dist/assets/SoundCloud-CREBXAWo.js +0 -1
  83. chainlit/frontend/dist/assets/Streamable-B5Lu25uy.js +0 -1
  84. chainlit/frontend/dist/assets/Twitch-y9iKCcM1.js +0 -1
  85. chainlit/frontend/dist/assets/Vidyard-ClYvcuEu.js +0 -1
  86. chainlit/frontend/dist/assets/Vimeo-D6HvM2jt.js +0 -1
  87. chainlit/frontend/dist/assets/Wistia-Cu4zZ2Ci.js +0 -1
  88. chainlit/frontend/dist/assets/YouTube-D10tR6CJ.js +0 -1
  89. chainlit/frontend/dist/assets/index-CI4qFOt5.js +0 -8665
  90. chainlit/frontend/dist/assets/index-CrrqM0nZ.css +0 -1
  91. chainlit/translations/nl-NL.json +0 -229
  92. chainlit-2.0.0.dist-info/RECORD +0 -106
  93. /chainlit/copilot/dist/assets/{logo_dark-IkGJ_IwC.svg → logo_dark-2a3cf740.svg} +0 -0
  94. /chainlit/copilot/dist/assets/{logo_light-Bb_IPh6r.svg → logo_light-b078e7bc.svg} +0 -0
  95. /chainlit/frontend/dist/assets/{logo_dark-IkGJ_IwC.svg → logo_dark-2a3cf740.svg} +0 -0
  96. /chainlit/frontend/dist/assets/{logo_light-Bb_IPh6r.svg → logo_light-b078e7bc.svg} +0 -0
  97. {chainlit-2.0.0.dist-info → chainlit-2.0.dev0.dist-info}/WHEEL +0 -0
  98. {chainlit-2.0.0.dist-info → chainlit-2.0.dev0.dist-info}/entry_points.txt +0 -0
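
The file list above can be reproduced locally by comparing the contents of the two wheels, which are ordinary zip archives. The sketch below assumes both wheels have already been downloaded next to the script (the exact filenames are assumptions); it only lists files added or removed, not line-level changes.

import zipfile

OLD_WHEEL = "chainlit-2.0.0-py3-none-any.whl"     # assumed local path
NEW_WHEEL = "chainlit-2.0.dev0-py3-none-any.whl"  # assumed local path


def wheel_files(path: str) -> set:
    # A wheel is a zip archive; namelist() returns every packaged file path.
    with zipfile.ZipFile(path) as wheel:
        return set(wheel.namelist())


old_files = wheel_files(OLD_WHEEL)
new_files = wheel_files(NEW_WHEEL)

# Files present only in 2.0.0 correspond to the "+0 -N" removals listed above.
for removed in sorted(old_files - new_files):
    print("only in 2.0.0:  ", removed)
for added in sorted(new_files - old_files):
    print("only in 2.0.dev0:", added)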
chainlit/data/chainlit_data_layer.py +0 -584
@@ -1,584 +0,0 @@
- import json
- import uuid
- from datetime import datetime
- from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
-
- import aiofiles
- import asyncpg # type: ignore
-
- from chainlit.data.base import BaseDataLayer
- from chainlit.data.storage_clients.base import BaseStorageClient
- from chainlit.data.utils import queue_until_user_message
- from chainlit.element import ElementDict
- from chainlit.logger import logger
- from chainlit.step import StepDict
- from chainlit.types import (
- Feedback,
- PageInfo,
- PaginatedResponse,
- Pagination,
- ThreadDict,
- ThreadFilter,
- )
- from chainlit.user import PersistedUser, User
-
- if TYPE_CHECKING:
- from chainlit.element import Element, ElementDict
- from chainlit.step import StepDict
-
- ISO_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
-
-
- class ChainlitDataLayer(BaseDataLayer):
- def __init__(
- self,
- database_url: str,
- storage_client: Optional[BaseStorageClient] = None,
- show_logger: bool = False,
- ):
- self.database_url = database_url
- self.pool: Optional[asyncpg.Pool] = None
- self.storage_client = storage_client
- self.show_logger = show_logger
-
- async def connect(self):
- if not self.pool:
- self.pool = await asyncpg.create_pool(self.database_url)
-
- async def get_current_timestamp(self) -> datetime:
- return datetime.now()
-
- async def execute_query(
- self, query: str, params: Union[Dict, None] = None
- ) -> List[Dict[str, Any]]:
- if not self.pool:
- await self.connect()
-
- async with self.pool.acquire() as connection: # type: ignore
- try:
- if params:
- records = await connection.fetch(query, *params.values())
- else:
- records = await connection.fetch(query)
- return [dict(record) for record in records]
- except Exception as e:
- logger.error(f"Database error: {e!s}")
- raise
-
- async def get_user(self, identifier: str) -> Optional[PersistedUser]:
- query = """
- SELECT * FROM "User"
- WHERE identifier = $1
- """
- result = await self.execute_query(query, {"identifier": identifier})
- if not result or len(result) == 0:
- return None
- row = result[0]
-
- return PersistedUser(
- id=str(row.get("id")),
- identifier=str(row.get("identifier")),
- createdAt=row.get("createdAt").isoformat(), # type: ignore
- metadata=json.loads(row.get("metadata", "{}")),
- )
-
- async def create_user(self, user: User) -> Optional[PersistedUser]:
- query = """
- INSERT INTO "User" (id, identifier, metadata, "createdAt", "updatedAt")
- VALUES ($1, $2, $3, $4, $5)
- ON CONFLICT (identifier) DO UPDATE
- SET metadata = $3
- RETURNING *
- """
- now = await self.get_current_timestamp()
- params = {
- "id": str(uuid.uuid4()),
- "identifier": user.identifier,
- "metadata": json.dumps(user.metadata),
- "created_at": now,
- "updated_at": now,
- }
- result = await self.execute_query(query, params)
- row = result[0]
-
- return PersistedUser(
- id=str(row.get("id")),
- identifier=str(row.get("identifier")),
- createdAt=row.get("createdAt").isoformat(), # type: ignore
- metadata=json.loads(row.get("metadata", "{}")),
- )
-
- async def delete_feedback(self, feedback_id: str) -> bool:
- query = """
- DELETE FROM "Feedback" WHERE id = $1
- """
- await self.execute_query(query, {"feedback_id": feedback_id})
- return True
-
- async def upsert_feedback(self, feedback: Feedback) -> str:
- query = """
- INSERT INTO "Feedback" (id, "stepId", name, value, comment)
- VALUES ($1, $2, $3, $4, $5)
- ON CONFLICT (id) DO UPDATE
- SET value = $4, comment = $5
- RETURNING id
- """
- feedback_id = feedback.id or str(uuid.uuid4())
- params = {
- "id": feedback_id,
- "step_id": feedback.forId,
- "name": "user_feedback",
- "value": float(feedback.value),
- "comment": feedback.comment,
- }
- results = await self.execute_query(query, params)
- return str(results[0]["id"])
-
- @queue_until_user_message()
- async def create_element(self, element: "Element"):
- if not self.storage_client:
- logger.warn(
- "Data Layer: create_element error. No cloud storage configured!"
- )
- return
-
- if not element.for_id:
- return
-
- if element.thread_id:
- query = 'SELECT id FROM "Thread" WHERE id = $1'
- results = await self.execute_query(query, {"thread_id": element.thread_id})
- if not results:
- await self.update_thread(thread_id=element.thread_id)
-
- if element.for_id:
- query = 'SELECT id FROM "Step" WHERE id = $1'
- results = await self.execute_query(query, {"step_id": element.for_id})
- if not results:
- await self.create_step(
- {
- "id": element.for_id,
- "metadata": {},
- "type": "run",
- "start_time": await self.get_current_timestamp(),
- "end_time": await self.get_current_timestamp(),
- }
- )
- content: Optional[Union[bytes, str]] = None
-
- if element.path:
- async with aiofiles.open(element.path, "rb") as f:
- content = await f.read()
- elif element.content:
- content = element.content
- elif not element.url:
- raise ValueError("Element url, path or content must be provided")
-
- if element.thread_id:
- path = f"threads/{element.thread_id}/files/{element.name}"
- else:
- path = f"files/{element.name}"
-
- if content is not None:
- await self.storage_client.upload_file(
- object_key=path,
- data=content,
- mime=element.mime or "application/octet-stream",
- overwrite=True,
- )
-
- query = """
- INSERT INTO "Element" (
- id, "threadId", "stepId", metadata, mime, name, "objectKey", url,
- "chainlitKey", display, size, language, page, props
- ) VALUES (
- $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14
- )
- """
- params = {
- "id": element.id,
- "thread_id": element.thread_id,
- "step_id": element.for_id,
- "metadata": json.dumps(
- {
- "size": element.size,
- "language": element.language,
- "display": element.display,
- "type": element.type,
- "page": getattr(element, "page", None),
- }
- ),
- "mime": element.mime,
- "name": element.name,
- "object_key": path,
- "url": element.url,
- "chainlit_key": element.chainlit_key,
- "display": element.display,
- "size": element.size,
- "language": element.language,
- "page": getattr(element, "page", None),
- "props": json.dumps(getattr(element, "props", {})),
- }
- await self.execute_query(query, params)
-
- async def get_element(
- self, thread_id: str, element_id: str
- ) -> Optional[ElementDict]:
- query = """
- SELECT * FROM "Element"
- WHERE id = $1 AND "threadId" = $2
- """
- results = await self.execute_query(
- query, {"element_id": element_id, "thread_id": thread_id}
- )
-
- if not results:
- return None
-
- row = results[0]
- metadata = json.loads(row.get("metadata", "{}"))
-
- return ElementDict(
- id=str(row["id"]),
- threadId=str(row["threadId"]),
- type=metadata.get("type", "file"),
- url=str(row["url"]),
- name=str(row["name"]),
- mime=str(row["mime"]),
- objectKey=str(row["objectKey"]),
- forId=str(row["stepId"]),
- chainlitKey=row.get("chainlitKey"),
- display=row["display"],
- size=row["size"],
- language=row["language"],
- page=row["page"],
- autoPlay=row.get("autoPlay"),
- playerConfig=row.get("playerConfig"),
- props=json.loads(row.get("props", "{}")),
- )
-
- @queue_until_user_message()
- async def delete_element(self, element_id: str, thread_id: Optional[str] = None):
- query = """
- DELETE FROM "Element"
- WHERE id = $1
- """
- params = {"element_id": element_id}
-
- if thread_id:
- query += ' AND "threadId" = $2'
- params["thread_id"] = thread_id
-
- await self.execute_query(query, params)
-
- @queue_until_user_message()
- async def create_step(self, step_dict: StepDict):
- if step_dict.get("threadId"):
- thread_query = 'SELECT id FROM "Thread" WHERE id = $1'
- thread_results = await self.execute_query(
- thread_query, {"thread_id": step_dict["threadId"]}
- )
- if not thread_results:
- await self.update_thread(thread_id=step_dict["threadId"])
-
- if step_dict.get("parentId"):
- parent_query = 'SELECT id FROM "Step" WHERE id = $1'
- parent_results = await self.execute_query(
- parent_query, {"parent_id": step_dict["parentId"]}
- )
- if not parent_results:
- await self.create_step(
- {
- "id": step_dict["parentId"],
- "metadata": {},
- "type": "run",
- "createdAt": step_dict.get("createdAt"),
- }
- )
-
- query = """
- INSERT INTO "Step" (
- id, "threadId", "parentId", input, metadata, name, output,
- type, "startTime", "endTime", "showInput", "isError"
- ) VALUES (
- $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12
- )
- ON CONFLICT (id) DO UPDATE SET
- "parentId" = COALESCE(EXCLUDED."parentId", "Step"."parentId"),
- input = COALESCE(EXCLUDED.input, "Step".input),
- metadata = CASE
- WHEN EXCLUDED.metadata <> '{}' THEN EXCLUDED.metadata
- ELSE "Step".metadata
- END,
- name = COALESCE(EXCLUDED.name, "Step".name),
- output = COALESCE(EXCLUDED.output, "Step".output),
- type = CASE
- WHEN EXCLUDED.type = 'run' THEN "Step".type
- ELSE EXCLUDED.type
- END,
- "threadId" = COALESCE(EXCLUDED."threadId", "Step"."threadId"),
- "endTime" = COALESCE(EXCLUDED."endTime", "Step"."endTime"),
- "startTime" = LEAST(EXCLUDED."startTime", "Step"."startTime"),
- "showInput" = COALESCE(EXCLUDED."showInput", "Step"."showInput"),
- "isError" = COALESCE(EXCLUDED."isError", "Step"."isError")
- """
-
- timestamp = await self.get_current_timestamp()
- created_at = step_dict.get("createdAt")
- if created_at:
- timestamp = datetime.strptime(created_at, ISO_FORMAT)
-
- params = {
- "id": step_dict["id"],
- "thread_id": step_dict.get("threadId"),
- "parent_id": step_dict.get("parentId"),
- "input": step_dict.get("input"),
- "metadata": json.dumps(step_dict.get("metadata", {})),
- "name": step_dict.get("name"),
- "output": step_dict.get("output"),
- "type": step_dict["type"],
- "start_time": timestamp,
- "end_time": timestamp,
- "show_input": step_dict.get("showInput", "json"),
- "is_error": step_dict.get("isError", False),
- }
- await self.execute_query(query, params)
-
- @queue_until_user_message()
- async def update_step(self, step_dict: StepDict):
- await self.create_step(step_dict)
-
- @queue_until_user_message()
- async def delete_step(self, step_id: str):
- # Delete associated elements and feedbacks first
- await self.execute_query(
- 'DELETE FROM "Element" WHERE "stepId" = $1', {"step_id": step_id}
- )
- await self.execute_query(
- 'DELETE FROM "Feedback" WHERE "stepId" = $1', {"step_id": step_id}
- )
- # Delete the step
- await self.execute_query(
- 'DELETE FROM "Step" WHERE id = $1', {"step_id": step_id}
- )
-
- async def get_thread_author(self, thread_id: str) -> str:
- query = """
- SELECT u.identifier
- FROM "Thread" t
- JOIN "User" u ON t."userId" = u.id
- WHERE t.id = $1
- """
- results = await self.execute_query(query, {"thread_id": thread_id})
- if not results:
- raise ValueError(f"Thread {thread_id} not found")
- return results[0]["identifier"]
-
- async def delete_thread(self, thread_id: str):
- await self.execute_query(
- 'DELETE FROM "Thread" WHERE id = $1', {"thread_id": thread_id}
- )
-
- async def list_threads(
- self, pagination: Pagination, filters: ThreadFilter
- ) -> PaginatedResponse[ThreadDict]:
- query = """
- SELECT
- t.*,
- u.identifier as user_identifier,
- (SELECT COUNT(*) FROM "Thread" WHERE "userId" = t."userId") as total
- FROM "Thread" t
- LEFT JOIN "User" u ON t."userId" = u.id
- WHERE t."deletedAt" IS NULL
- """
- params: Dict[str, Any] = {}
- param_count = 1
-
- if filters.search:
- query += f" AND t.name ILIKE ${param_count}"
- params["name"] = f"%{filters.search}%"
- param_count += 1
-
- if filters.userId:
- query += f' AND t."userId" = ${param_count}'
- params["user_id"] = filters.userId
- param_count += 1
-
- if pagination.cursor:
- query += f' AND t."createdAt" < (SELECT "createdAt" FROM "Thread" WHERE id = ${param_count})'
- params["cursor"] = pagination.cursor
- param_count += 1
-
- query += f' ORDER BY t."createdAt" DESC LIMIT ${param_count}'
- params["limit"] = pagination.first + 1
-
- results = await self.execute_query(query, params)
- threads = results
-
- has_next_page = len(threads) > pagination.first
- if has_next_page:
- threads = threads[:-1]
-
- thread_dicts = []
- for thread in threads:
- thread_dict = ThreadDict(
- id=str(thread["id"]),
- createdAt=thread["createdAt"].isoformat(),
- name=thread["name"],
- userId=str(thread["userId"]) if thread["userId"] else None,
- userIdentifier=thread["user_identifier"],
- metadata=json.loads(thread["metadata"]),
- steps=[],
- elements=[],
- tags=[],
- )
- thread_dicts.append(thread_dict)
-
- return PaginatedResponse(
- pageInfo=PageInfo(
- hasNextPage=has_next_page,
- startCursor=thread_dicts[0]["id"] if thread_dicts else None,
- endCursor=thread_dicts[-1]["id"] if thread_dicts else None,
- ),
- data=thread_dicts,
- )
-
- async def get_thread(self, thread_id: str) -> Optional[ThreadDict]:
- query = """
- SELECT t.*, u.identifier as user_identifier
- FROM "Thread" t
- LEFT JOIN "User" u ON t."userId" = u.id
- WHERE t.id = $1 AND t."deletedAt" IS NULL
- """
- results = await self.execute_query(query, {"thread_id": thread_id})
-
- if not results:
- return None
-
- thread = results[0]
-
- # Get steps
- steps_query = """
- SELECT * FROM "Step"
- WHERE "threadId" = $1
- ORDER BY "startTime"
- """
- steps_results = await self.execute_query(steps_query, {"thread_id": thread_id})
-
- # Get elements
- elements_query = """
- SELECT * FROM "Element"
- WHERE "threadId" = $1
- """
- elements_results = await self.execute_query(
- elements_query, {"thread_id": thread_id}
- )
-
- if self.storage_client is not None:
- for elem in elements_results:
- if not elem["url"] and elem["objectKey"]:
- elem["url"] = await self.storage_client.get_read_url(
- object_key=elem["objectKey"],
- )
-
- return ThreadDict(
- id=str(thread["id"]),
- createdAt=thread["createdAt"].isoformat(),
- name=thread["name"],
- userId=str(thread["userId"]) if thread["userId"] else None,
- userIdentifier=thread["user_identifier"],
- metadata=json.loads(thread["metadata"]),
- steps=[self._convert_step_row_to_dict(step) for step in steps_results],
- elements=[
- self._convert_element_row_to_dict(elem) for elem in elements_results
- ],
- tags=[],
- )
-
- async def update_thread(
- self,
- thread_id: str,
- name: Optional[str] = None,
- user_id: Optional[str] = None,
- metadata: Optional[Dict] = None,
- tags: Optional[List[str]] = None,
- ):
- if self.show_logger:
- logger.info(f"asyncpg: update_thread, thread_id={thread_id}")
-
- data = {
- "id": thread_id,
- "name": (
- name
- if name is not None
- else (metadata.get("name") if metadata and "name" in metadata else None)
- ),
- "userId": user_id,
- "tags": tags,
- "metadata": json.dumps(metadata or {}),
- }
-
- # Remove None values
- data = {k: v for k, v in data.items() if v is not None}
-
- # Build the query dynamically based on available fields
- columns = [f'"{k}"' for k in data.keys()]
- placeholders = [f"${i+1}" for i in range(len(data))]
- values = list(data.values())
-
- update_sets = [f'"{k}" = EXCLUDED."{k}"' for k in data.keys() if k != "id"]
-
- query = f"""
- INSERT INTO "Thread" ({", ".join(columns)})
- VALUES ({", ".join(placeholders)})
- ON CONFLICT (id) DO UPDATE
- SET {", ".join(update_sets)};
- """
-
- await self.execute_query(query, {str(i + 1): v for i, v in enumerate(values)})
-
- def _convert_step_row_to_dict(self, row: Dict) -> StepDict:
- return StepDict(
- id=str(row["id"]),
- threadId=str(row["threadId"]) if row.get("threadId") else "",
- parentId=str(row["parentId"]) if row.get("parentId") else None,
- name=str(row.get("name")),
- type=row["type"],
- input=row.get("input", {}),
- output=row.get("output", {}),
- metadata=json.loads(row.get("metadata", "{}")),
- createdAt=row["createdAt"].isoformat() if row.get("createdAt") else None,
- start=row["startTime"].isoformat() if row.get("startTime") else None,
- showInput=row.get("showInput"),
- isError=row.get("isError"),
- end=row["endTime"].isoformat() if row.get("endTime") else None,
- )
-
- def _convert_element_row_to_dict(self, row: Dict) -> ElementDict:
- metadata = json.loads(row.get("metadata", "{}"))
- return ElementDict(
- id=str(row["id"]),
- threadId=str(row["threadId"]) if row.get("threadId") else None,
- type=metadata.get("type", "file"),
- url=row["url"],
- name=row["name"],
- mime=row["mime"],
- objectKey=row["objectKey"],
- forId=str(row["stepId"]),
- chainlitKey=row.get("chainlitKey"),
- display=row["display"],
- size=row["size"],
- language=row["language"],
- page=row["page"],
- autoPlay=row.get("autoPlay"),
- playerConfig=row.get("playerConfig"),
- props=json.loads(row.get("props") or "{}"),
- )
-
- async def build_debug_url(self) -> str:
- return ""
-
- async def cleanup(self):
- """Cleanup database connections"""
- if self.pool:
- await self.pool.close()
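
The removed chainlit/data/chainlit_data_layer.py defines an asyncpg-backed data layer: execute_query() creates the connection pool lazily, every public method maps onto a parameterized SQL statement against the User, Thread, Step, Element and Feedback tables, and an optional storage client handles element uploads. The following is a hedged usage sketch based only on the constructor and methods shown above, so it applies to chainlit 2.0.0 where this module still ships; the connection string and user identifier are placeholder assumptions.

import asyncio

from chainlit.data.chainlit_data_layer import ChainlitDataLayer


async def main() -> None:
    data_layer = ChainlitDataLayer(
        database_url="postgresql://user:password@localhost:5432/chainlit",  # assumed DSN
        storage_client=None,  # without cloud storage, create_element() only logs a warning
        show_logger=True,
    )
    # execute_query() calls connect() on first use, so the pool is created lazily.
    user = await data_layer.get_user("alice")  # "alice" is an example identifier
    print(user)
    await data_layer.cleanup()  # closes the asyncpg pool


asyncio.run(main())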
chainlit/data/storage_clients/azure_blob.py +0 -80
@@ -1,80 +0,0 @@
- from datetime import datetime, timedelta
- from typing import Any, Dict, Union
-
- from azure.storage.blob import BlobSasPermissions, ContentSettings, generate_blob_sas
- from azure.storage.blob.aio import BlobServiceClient as AsyncBlobServiceClient
-
- from chainlit.data.storage_clients.base import EXPIRY_TIME, BaseStorageClient
- from chainlit.logger import logger
-
-
- class AzureBlobStorageClient(BaseStorageClient):
- def __init__(self, container_name: str, storage_account: str, storage_key: str):
- self.container_name = container_name
- self.storage_account = storage_account
- self.storage_key = storage_key
- connection_string = (
- f"DefaultEndpointsProtocol=https;"
- f"AccountName={storage_account};"
- f"AccountKey={storage_key};"
- f"EndpointSuffix=core.windows.net"
- )
- self.service_client = AsyncBlobServiceClient.from_connection_string(
- connection_string
- )
- logger.info("AzureBlobStorageClient initialized")
-
- async def get_read_url(self, object_key: str) -> str:
- if not self.storage_key:
- raise Exception("Not using Azure Storage")
-
- sas_permissions = BlobSasPermissions(read=True)
- start_time = datetime.now()
- expiry_time = start_time + timedelta(seconds=EXPIRY_TIME)
-
- sas_token = generate_blob_sas(
- account_name=self.storage_account,
- container_name=self.container_name,
- blob_name=object_key,
- account_key=self.storage_key,
- permission=sas_permissions,
- start=start_time,
- expiry=expiry_time,
- )
-
- return f"https://{self.storage_account}.blob.core.windows.net/{self.container_name}/{object_key}?{sas_token}"
-
- async def upload_file(
- self,
- object_key: str,
- data: Union[bytes, str],
- mime: str = "application/octet-stream",
- overwrite: bool = True,
- ) -> Dict[str, Any]:
- try:
- container_client = self.service_client.get_container_client(
- self.container_name
- )
- blob_client = container_client.get_blob_client(object_key)
-
- if isinstance(data, str):
- data = data.encode("utf-8")
-
- content_settings = ContentSettings(content_type=mime)
-
- await blob_client.upload_blob(
- data, overwrite=overwrite, content_settings=content_settings
- )
-
- properties = await blob_client.get_blob_properties()
-
- return {
- "path": object_key,
- "size": properties.size,
- "last_modified": properties.last_modified,
- "etag": properties.etag,
- "content_type": properties.content_settings.content_type,
- }
-
- except Exception as e:
- raise Exception(f"Failed to upload file to Azure Blob Storage: {e!s}")
chainlit/data/storage_clients/base.py +0 -22
@@ -1,22 +0,0 @@
- from abc import ABC, abstractmethod
- from typing import Any, Dict, Union
-
- EXPIRY_TIME = 3600
-
-
- class BaseStorageClient(ABC):
- """Base class for non-text data persistence like Azure Data Lake, S3, Google Storage, etc."""
-
- @abstractmethod
- async def upload_file(
- self,
- object_key: str,
- data: Union[bytes, str],
- mime: str = "application/octet-stream",
- overwrite: bool = True,
- ) -> Dict[str, Any]:
- pass
-
- @abstractmethod
- async def get_read_url(self, object_key: str) -> str:
- pass
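
The removed base.py defines the two-method contract (upload_file and get_read_url, plus the shared EXPIRY_TIME constant) that the Azure, GCS and S3 clients in the file list implement. As an illustration of that contract, here is a minimal, hypothetical local-filesystem client; the ./uploads directory and the /files/ URL scheme are assumptions, not part of chainlit.

from pathlib import Path
from typing import Any, Dict, Union

from chainlit.data.storage_clients.base import BaseStorageClient


class LocalStorageClient(BaseStorageClient):
    """Hypothetical storage client that keeps element files on the local disk."""

    def __init__(self, root: str = "./uploads"):
        self.root = Path(root)
        self.root.mkdir(parents=True, exist_ok=True)

    async def upload_file(
        self,
        object_key: str,
        data: Union[bytes, str],
        mime: str = "application/octet-stream",
        overwrite: bool = True,
    ) -> Dict[str, Any]:
        target = self.root / object_key
        if target.exists() and not overwrite:
            raise FileExistsError(object_key)
        target.parent.mkdir(parents=True, exist_ok=True)
        payload = data.encode("utf-8") if isinstance(data, str) else data
        target.write_bytes(payload)
        return {"path": object_key, "size": len(payload), "content_type": mime}

    async def get_read_url(self, object_key: str) -> str:
        # Assumed URL scheme; the ABC only requires that some readable URL is returned.
        return f"/files/{object_key}"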