chainlit 2.7.0__py3-none-any.whl → 2.7.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of chainlit might be problematic.
Files changed (85)
  1. {chainlit-2.7.0.dist-info → chainlit-2.7.1.dist-info}/METADATA +1 -1
  2. chainlit-2.7.1.dist-info/RECORD +4 -0
  3. chainlit/__init__.py +0 -207
  4. chainlit/__main__.py +0 -4
  5. chainlit/_utils.py +0 -8
  6. chainlit/action.py +0 -33
  7. chainlit/auth/__init__.py +0 -95
  8. chainlit/auth/cookie.py +0 -197
  9. chainlit/auth/jwt.py +0 -42
  10. chainlit/cache.py +0 -45
  11. chainlit/callbacks.py +0 -433
  12. chainlit/chat_context.py +0 -64
  13. chainlit/chat_settings.py +0 -34
  14. chainlit/cli/__init__.py +0 -235
  15. chainlit/config.py +0 -621
  16. chainlit/context.py +0 -112
  17. chainlit/data/__init__.py +0 -111
  18. chainlit/data/acl.py +0 -19
  19. chainlit/data/base.py +0 -107
  20. chainlit/data/chainlit_data_layer.py +0 -687
  21. chainlit/data/dynamodb.py +0 -616
  22. chainlit/data/literalai.py +0 -501
  23. chainlit/data/sql_alchemy.py +0 -741
  24. chainlit/data/storage_clients/__init__.py +0 -0
  25. chainlit/data/storage_clients/azure.py +0 -84
  26. chainlit/data/storage_clients/azure_blob.py +0 -94
  27. chainlit/data/storage_clients/base.py +0 -28
  28. chainlit/data/storage_clients/gcs.py +0 -101
  29. chainlit/data/storage_clients/s3.py +0 -88
  30. chainlit/data/utils.py +0 -29
  31. chainlit/discord/__init__.py +0 -6
  32. chainlit/discord/app.py +0 -364
  33. chainlit/element.py +0 -454
  34. chainlit/emitter.py +0 -450
  35. chainlit/hello.py +0 -12
  36. chainlit/input_widget.py +0 -182
  37. chainlit/langchain/__init__.py +0 -6
  38. chainlit/langchain/callbacks.py +0 -682
  39. chainlit/langflow/__init__.py +0 -25
  40. chainlit/llama_index/__init__.py +0 -6
  41. chainlit/llama_index/callbacks.py +0 -206
  42. chainlit/logger.py +0 -16
  43. chainlit/markdown.py +0 -57
  44. chainlit/mcp.py +0 -99
  45. chainlit/message.py +0 -619
  46. chainlit/mistralai/__init__.py +0 -50
  47. chainlit/oauth_providers.py +0 -835
  48. chainlit/openai/__init__.py +0 -53
  49. chainlit/py.typed +0 -0
  50. chainlit/secret.py +0 -9
  51. chainlit/semantic_kernel/__init__.py +0 -111
  52. chainlit/server.py +0 -1616
  53. chainlit/session.py +0 -304
  54. chainlit/sidebar.py +0 -55
  55. chainlit/slack/__init__.py +0 -6
  56. chainlit/slack/app.py +0 -427
  57. chainlit/socket.py +0 -381
  58. chainlit/step.py +0 -490
  59. chainlit/sync.py +0 -43
  60. chainlit/teams/__init__.py +0 -6
  61. chainlit/teams/app.py +0 -348
  62. chainlit/translations/bn.json +0 -214
  63. chainlit/translations/el-GR.json +0 -214
  64. chainlit/translations/en-US.json +0 -214
  65. chainlit/translations/fr-FR.json +0 -214
  66. chainlit/translations/gu.json +0 -214
  67. chainlit/translations/he-IL.json +0 -214
  68. chainlit/translations/hi.json +0 -214
  69. chainlit/translations/ja.json +0 -214
  70. chainlit/translations/kn.json +0 -214
  71. chainlit/translations/ml.json +0 -214
  72. chainlit/translations/mr.json +0 -214
  73. chainlit/translations/nl.json +0 -214
  74. chainlit/translations/ta.json +0 -214
  75. chainlit/translations/te.json +0 -214
  76. chainlit/translations/zh-CN.json +0 -214
  77. chainlit/translations.py +0 -60
  78. chainlit/types.py +0 -334
  79. chainlit/user.py +0 -43
  80. chainlit/user_session.py +0 -153
  81. chainlit/utils.py +0 -173
  82. chainlit/version.py +0 -8
  83. chainlit-2.7.0.dist-info/RECORD +0 -84
  84. {chainlit-2.7.0.dist-info → chainlit-2.7.1.dist-info}/WHEEL +0 -0
  85. {chainlit-2.7.0.dist-info → chainlit-2.7.1.dist-info}/entry_points.txt +0 -0
chainlit/data/chainlit_data_layer.py
@@ -1,687 +0,0 @@
-import asyncio
-import atexit
-import json
-import signal
-import uuid
-from datetime import datetime
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
-
-import aiofiles
-import asyncpg  # type: ignore
-
-from chainlit.data.base import BaseDataLayer
-from chainlit.data.storage_clients.base import BaseStorageClient
-from chainlit.data.storage_clients.gcs import GCSStorageClient
-from chainlit.data.utils import queue_until_user_message
-from chainlit.element import ElementDict
-from chainlit.logger import logger
-from chainlit.step import StepDict
-from chainlit.types import (
-    Feedback,
-    FeedbackDict,
-    PageInfo,
-    PaginatedResponse,
-    Pagination,
-    ThreadDict,
-    ThreadFilter,
-)
-from chainlit.user import PersistedUser, User
-
-if TYPE_CHECKING:
-    from chainlit.element import Element, ElementDict
-    from chainlit.step import StepDict
-
-ISO_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
-
-
-class ChainlitDataLayer(BaseDataLayer):
-    def __init__(
-        self,
-        database_url: str,
-        storage_client: Optional[BaseStorageClient] = None,
-        show_logger: bool = False,
-    ):
-        self.database_url = database_url
-        self.pool: Optional[asyncpg.Pool] = None
-        self.storage_client = storage_client
-        self.show_logger = show_logger
-
-        # Register cleanup handlers for application termination
-        atexit.register(self._sync_cleanup)
-        for sig in (signal.SIGINT, signal.SIGTERM):
-            signal.signal(sig, self._signal_handler)
-
-    async def connect(self):
-        if not self.pool:
-            self.pool = await asyncpg.create_pool(self.database_url)
-
-    async def get_current_timestamp(self) -> datetime:
-        return datetime.now()
-
-    async def execute_query(
-        self, query: str, params: Union[Dict, None] = None
-    ) -> List[Dict[str, Any]]:
-        if not self.pool:
-            await self.connect()
-
-        try:
-            async with self.pool.acquire() as connection:  # type: ignore
-                try:
-                    if params:
-                        records = await connection.fetch(query, *params.values())
-                    else:
-                        records = await connection.fetch(query)
-                    return [dict(record) for record in records]
-                except Exception as e:
-                    logger.error(f"Database error: {e!s}")
-                    raise
-        except (
-            asyncpg.exceptions.ConnectionDoesNotExistError,
-            asyncpg.exceptions.InterfaceError,
-        ) as e:
-            # Handle connection issues by cleaning up and rethrowing
-            logger.error(f"Connection error: {e!s}, cleaning up pool")
-            await self.cleanup()
-            self.pool = None
-            raise
-
-    async def get_user(self, identifier: str) -> Optional[PersistedUser]:
-        query = """
-        SELECT * FROM "User"
-        WHERE identifier = $1
-        """
-        result = await self.execute_query(query, {"identifier": identifier})
-        if not result or len(result) == 0:
-            return None
-        row = result[0]
-
-        return PersistedUser(
-            id=str(row.get("id")),
-            identifier=str(row.get("identifier")),
-            createdAt=row.get("createdAt").isoformat(),  # type: ignore
-            metadata=json.loads(row.get("metadata", "{}")),
-        )
-
-    async def create_user(self, user: User) -> Optional[PersistedUser]:
-        query = """
-        INSERT INTO "User" (id, identifier, metadata, "createdAt", "updatedAt")
-        VALUES ($1, $2, $3, $4, $5)
-        ON CONFLICT (identifier) DO UPDATE
-        SET metadata = $3
-        RETURNING *
-        """
-        now = await self.get_current_timestamp()
-        params = {
-            "id": str(uuid.uuid4()),
-            "identifier": user.identifier,
-            "metadata": json.dumps(user.metadata),
-            "created_at": now,
-            "updated_at": now,
-        }
-        result = await self.execute_query(query, params)
-        row = result[0]
-
-        return PersistedUser(
-            id=str(row.get("id")),
-            identifier=str(row.get("identifier")),
-            createdAt=row.get("createdAt").isoformat(),  # type: ignore
-            metadata=json.loads(row.get("metadata", "{}")),
-        )
-
-    async def delete_feedback(self, feedback_id: str) -> bool:
-        query = """
-        DELETE FROM "Feedback" WHERE id = $1
-        """
-        await self.execute_query(query, {"feedback_id": feedback_id})
-        return True
-
-    async def upsert_feedback(self, feedback: Feedback) -> str:
-        query = """
-        INSERT INTO "Feedback" (id, "stepId", name, value, comment)
-        VALUES ($1, $2, $3, $4, $5)
-        ON CONFLICT (id) DO UPDATE
-        SET value = $4, comment = $5
-        RETURNING id
-        """
-        feedback_id = feedback.id or str(uuid.uuid4())
-        params = {
-            "id": feedback_id,
-            "step_id": feedback.forId,
-            "name": "user_feedback",
-            "value": float(feedback.value),
-            "comment": feedback.comment,
-        }
-        results = await self.execute_query(query, params)
-        return str(results[0]["id"])
-
-    @queue_until_user_message()
-    async def create_element(self, element: "Element"):
-        if not self.storage_client:
-            logger.warning(
-                "Data Layer: create_element error. No cloud storage configured!"
-            )
-            return
-
-        if not element.for_id:
-            return
-
-        if element.thread_id:
-            query = 'SELECT id FROM "Thread" WHERE id = $1'
-            results = await self.execute_query(query, {"thread_id": element.thread_id})
-            if not results:
-                await self.update_thread(thread_id=element.thread_id)
-
-        if element.for_id:
-            query = 'SELECT id FROM "Step" WHERE id = $1'
-            results = await self.execute_query(query, {"step_id": element.for_id})
-            if not results:
-                await self.create_step(
-                    {
-                        "id": element.for_id,
-                        "metadata": {},
-                        "type": "run",
-                        "start_time": await self.get_current_timestamp(),
-                        "end_time": await self.get_current_timestamp(),
-                    }
-                )
-        content: Optional[Union[bytes, str]] = None
-
-        if element.path:
-            async with aiofiles.open(element.path, "rb") as f:
-                content = await f.read()
-        elif element.content:
-            content = element.content
-        elif not element.url:
-            raise ValueError("Element url, path or content must be provided")
-
-        if element.thread_id:
-            path = f"threads/{element.thread_id}/files/{element.id}"
-        else:
-            path = f"files/{element.id}"
-
-        if content is not None:
-            content_disposition = (
-                f'attachment; filename="{element.name}"'
-                if not isinstance(self.storage_client, GCSStorageClient)
-                else None
-            )
-            await self.storage_client.upload_file(
-                object_key=path,
-                data=content,
-                mime=element.mime or "application/octet-stream",
-                overwrite=True,
-                content_disposition=content_disposition,
-            )
-
-        query = """
-        INSERT INTO "Element" (
-            id, "threadId", "stepId", metadata, mime, name, "objectKey", url,
-            "chainlitKey", display, size, language, page, props
-        ) VALUES (
-            $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14
-        )
-        ON CONFLICT (id) DO UPDATE SET
-            props = EXCLUDED.props
-        """
-        params = {
-            "id": element.id,
-            "thread_id": element.thread_id,
-            "step_id": element.for_id,
-            "metadata": json.dumps(
-                {
-                    "size": element.size,
-                    "language": element.language,
-                    "display": element.display,
-                    "type": element.type,
-                    "page": getattr(element, "page", None),
-                }
-            ),
-            "mime": element.mime,
-            "name": element.name,
-            "object_key": path,
-            "url": element.url,
-            "chainlit_key": element.chainlit_key,
-            "display": element.display,
-            "size": element.size,
-            "language": element.language,
-            "page": getattr(element, "page", None),
-            "props": json.dumps(getattr(element, "props", {})),
-        }
-        await self.execute_query(query, params)
-
-    async def get_element(
-        self, thread_id: str, element_id: str
-    ) -> Optional[ElementDict]:
-        query = """
-        SELECT * FROM "Element"
-        WHERE id = $1 AND "threadId" = $2
-        """
-        results = await self.execute_query(
-            query, {"element_id": element_id, "thread_id": thread_id}
-        )
-
-        if not results:
-            return None
-
-        row = results[0]
-        metadata = json.loads(row.get("metadata", "{}"))
-
-        return ElementDict(
-            id=str(row["id"]),
-            threadId=str(row["threadId"]),
-            type=metadata.get("type", "file"),
-            url=str(row["url"]),
-            name=str(row["name"]),
-            mime=str(row["mime"]),
-            objectKey=str(row["objectKey"]),
-            forId=str(row["stepId"]),
-            chainlitKey=row.get("chainlitKey"),
-            display=row["display"],
-            size=row["size"],
-            language=row["language"],
-            page=row["page"],
-            autoPlay=row.get("autoPlay"),
-            playerConfig=row.get("playerConfig"),
-            props=json.loads(row.get("props", "{}")),
-        )
-
-    @queue_until_user_message()
-    async def delete_element(self, element_id: str, thread_id: Optional[str] = None):
-        query = """
-        SELECT * FROM "Element"
-        WHERE id = $1
-        """
-        elements = await self.execute_query(query, {"id": element_id})
-
-        if self.storage_client is not None and len(elements) > 0:
-            if elements[0]["objectKey"]:
-                await self.storage_client.delete_file(
-                    object_key=elements[0]["objectKey"]
-                )
-        query = """
-        DELETE FROM "Element"
-        WHERE id = $1
-        """
-        params = {"id": element_id}
-
-        if thread_id:
-            query += ' AND "threadId" = $2'
-            params["thread_id"] = thread_id
-
-        await self.execute_query(query, params)
-
-    @queue_until_user_message()
-    async def create_step(self, step_dict: StepDict):
-        if step_dict.get("threadId"):
-            thread_query = 'SELECT id FROM "Thread" WHERE id = $1'
-            thread_results = await self.execute_query(
-                thread_query, {"thread_id": step_dict["threadId"]}
-            )
-            if not thread_results:
-                await self.update_thread(thread_id=step_dict["threadId"])
-
-        if step_dict.get("parentId"):
-            parent_query = 'SELECT id FROM "Step" WHERE id = $1'
-            parent_results = await self.execute_query(
-                parent_query, {"parent_id": step_dict["parentId"]}
-            )
-            if not parent_results:
-                await self.create_step(
-                    {
-                        "id": step_dict["parentId"],
-                        "metadata": {},
-                        "type": "run",
-                        "createdAt": step_dict.get("createdAt"),
-                    }
-                )
-
-        query = """
-        INSERT INTO "Step" (
-            id, "threadId", "parentId", input, metadata, name, output,
-            type, "startTime", "endTime", "showInput", "isError"
-        ) VALUES (
-            $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12
-        )
-        ON CONFLICT (id) DO UPDATE SET
-            "parentId" = COALESCE(EXCLUDED."parentId", "Step"."parentId"),
-            input = COALESCE(EXCLUDED.input, "Step".input),
-            metadata = CASE
-                WHEN EXCLUDED.metadata <> '{}' THEN EXCLUDED.metadata
-                ELSE "Step".metadata
-            END,
-            name = COALESCE(EXCLUDED.name, "Step".name),
-            output = COALESCE(EXCLUDED.output, "Step".output),
-            type = CASE
-                WHEN EXCLUDED.type = 'run' THEN "Step".type
-                ELSE EXCLUDED.type
-            END,
-            "threadId" = COALESCE(EXCLUDED."threadId", "Step"."threadId"),
-            "endTime" = COALESCE(EXCLUDED."endTime", "Step"."endTime"),
-            "startTime" = LEAST(EXCLUDED."startTime", "Step"."startTime"),
-            "showInput" = COALESCE(EXCLUDED."showInput", "Step"."showInput"),
-            "isError" = COALESCE(EXCLUDED."isError", "Step"."isError")
-        """
-
-        timestamp = await self.get_current_timestamp()
-        created_at = step_dict.get("createdAt")
-        if created_at:
-            timestamp = datetime.strptime(created_at, ISO_FORMAT)
-
-        params = {
-            "id": step_dict["id"],
-            "thread_id": step_dict.get("threadId"),
-            "parent_id": step_dict.get("parentId"),
-            "input": step_dict.get("input"),
-            "metadata": json.dumps(step_dict.get("metadata", {})),
-            "name": step_dict.get("name"),
-            "output": step_dict.get("output"),
-            "type": step_dict["type"],
-            "start_time": timestamp,
-            "end_time": timestamp,
-            "show_input": str(step_dict.get("showInput", "json")),
-            "is_error": step_dict.get("isError", False),
-        }
-        await self.execute_query(query, params)
-
-    @queue_until_user_message()
-    async def update_step(self, step_dict: StepDict):
-        await self.create_step(step_dict)
-
-    @queue_until_user_message()
-    async def delete_step(self, step_id: str):
-        # Delete associated elements and feedbacks first
-        await self.execute_query(
-            'DELETE FROM "Element" WHERE "stepId" = $1', {"step_id": step_id}
-        )
-        await self.execute_query(
-            'DELETE FROM "Feedback" WHERE "stepId" = $1', {"step_id": step_id}
-        )
-        # Delete the step
-        await self.execute_query(
-            'DELETE FROM "Step" WHERE id = $1', {"step_id": step_id}
-        )
-
-    async def get_thread_author(self, thread_id: str) -> str:
-        query = """
-        SELECT u.identifier
-        FROM "Thread" t
-        JOIN "User" u ON t."userId" = u.id
-        WHERE t.id = $1
-        """
-        results = await self.execute_query(query, {"thread_id": thread_id})
-        if not results:
-            raise ValueError(f"Thread {thread_id} not found")
-        return results[0]["identifier"]
-
-    async def delete_thread(self, thread_id: str):
-        elements_query = """
-        SELECT * FROM "Element"
-        WHERE "threadId" = $1
-        """
-        elements_results = await self.execute_query(
-            elements_query, {"thread_id": thread_id}
-        )
-
-        if self.storage_client is not None:
-            for elem in elements_results:
-                if elem["objectKey"]:
-                    await self.storage_client.delete_file(object_key=elem["objectKey"])
-
-        await self.execute_query(
-            'DELETE FROM "Thread" WHERE id = $1', {"thread_id": thread_id}
-        )
-
-    async def list_threads(
-        self, pagination: Pagination, filters: ThreadFilter
-    ) -> PaginatedResponse[ThreadDict]:
-        query = """
-        SELECT
-            t.*,
-            u.identifier as user_identifier,
-            (SELECT COUNT(*) FROM "Thread" WHERE "userId" = t."userId") as total
-        FROM "Thread" t
-        LEFT JOIN "User" u ON t."userId" = u.id
-        WHERE t."deletedAt" IS NULL
-        """
-        params: Dict[str, Any] = {}
-        param_count = 1
-
-        if filters.search:
-            query += f" AND t.name ILIKE ${param_count}"
-            params["name"] = f"%{filters.search}%"
-            param_count += 1
-
-        if filters.userId:
-            query += f' AND t."userId" = ${param_count}'
-            params["user_id"] = filters.userId
-            param_count += 1
-
-        if pagination.cursor:
-            query += f' AND t."updatedAt" < (SELECT "updatedAt" FROM "Thread" WHERE id = ${param_count})'
-            params["cursor"] = pagination.cursor
-            param_count += 1
-
-        query += f' ORDER BY t."updatedAt" DESC LIMIT ${param_count}'
-        params["limit"] = pagination.first + 1
-
-        results = await self.execute_query(query, params)
-        threads = results
-
-        has_next_page = len(threads) > pagination.first
-        if has_next_page:
-            threads = threads[:-1]
-
-        thread_dicts = []
-        for thread in threads:
-            thread_dict = ThreadDict(
-                id=str(thread["id"]),
-                createdAt=thread["createdAt"].isoformat(),
-                name=thread["name"],
-                userId=str(thread["userId"]) if thread["userId"] else None,
-                userIdentifier=thread["user_identifier"],
-                metadata=json.loads(thread["metadata"]),
-                steps=[],
-                elements=[],
-                tags=[],
-            )
-            thread_dicts.append(thread_dict)
-
-        return PaginatedResponse(
-            pageInfo=PageInfo(
-                hasNextPage=has_next_page,
-                startCursor=thread_dicts[0]["id"] if thread_dicts else None,
-                endCursor=thread_dicts[-1]["id"] if thread_dicts else None,
-            ),
-            data=thread_dicts,
-        )
-
-    async def get_thread(self, thread_id: str) -> Optional[ThreadDict]:
-        query = """
-        SELECT t.*, u.identifier as user_identifier
-        FROM "Thread" t
-        LEFT JOIN "User" u ON t."userId" = u.id
-        WHERE t.id = $1 AND t."deletedAt" IS NULL
-        """
-        results = await self.execute_query(query, {"thread_id": thread_id})
-
-        if not results:
-            return None
-
-        thread = results[0]
-
-        # Get steps and related feedback
-        steps_query = """
-        SELECT s.*,
-            f.id feedback_id,
-            f.value feedback_value,
-            f."comment" feedback_comment
-        FROM "Step" s left join "Feedback" f on s.id = f."stepId"
-        WHERE s."threadId" = $1
-        ORDER BY "startTime"
-        """
-        steps_results = await self.execute_query(steps_query, {"thread_id": thread_id})
-
-        # Get elements
-        elements_query = """
-        SELECT * FROM "Element"
-        WHERE "threadId" = $1
-        """
-        elements_results = await self.execute_query(
-            elements_query, {"thread_id": thread_id}
-        )
-
-        if self.storage_client is not None:
-            for elem in elements_results:
-                if not elem["url"] and elem["objectKey"]:
-                    elem["url"] = await self.storage_client.get_read_url(
-                        object_key=elem["objectKey"],
-                    )
-
-        return ThreadDict(
-            id=str(thread["id"]),
-            createdAt=thread["createdAt"].isoformat(),
-            name=thread["name"],
-            userId=str(thread["userId"]) if thread["userId"] else None,
-            userIdentifier=thread["user_identifier"],
-            metadata=json.loads(thread["metadata"]),
-            steps=[self._convert_step_row_to_dict(step) for step in steps_results],
-            elements=[
-                self._convert_element_row_to_dict(elem) for elem in elements_results
-            ],
-            tags=[],
-        )
-
-    async def update_thread(
-        self,
-        thread_id: str,
-        name: Optional[str] = None,
-        user_id: Optional[str] = None,
-        metadata: Optional[Dict] = None,
-        tags: Optional[List[str]] = None,
-    ):
-        if self.show_logger:
-            logger.info(f"asyncpg: update_thread, thread_id={thread_id}")
-
-        thread_name = truncate(
-            name
-            if name is not None
-            else (metadata.get("name") if metadata and "name" in metadata else None)
-        )
-
-        data = {
-            "id": thread_id,
-            "name": thread_name,
-            "userId": user_id,
-            "tags": tags,
-            "metadata": json.dumps(metadata or {}),
-            "updatedAt": datetime.now(),
-        }
-
-        # Remove None values
-        data = {k: v for k, v in data.items() if v is not None}
-
-        # Build the query dynamically based on available fields
-        columns = [f'"{k}"' for k in data.keys()]
-        placeholders = [f"${i + 1}" for i in range(len(data))]
-        values = list(data.values())
-
-        update_sets = [f'"{k}" = EXCLUDED."{k}"' for k in data.keys() if k != "id"]
-
-        if update_sets:
-            query = f"""
-            INSERT INTO "Thread" ({", ".join(columns)})
-            VALUES ({", ".join(placeholders)})
-            ON CONFLICT (id) DO UPDATE
-            SET {", ".join(update_sets)};
-            """
-        else:
-            query = f"""
-            INSERT INTO "Thread" ({", ".join(columns)})
-            VALUES ({", ".join(placeholders)})
-            ON CONFLICT (id) DO NOTHING
-            """
-
-        await self.execute_query(query, {str(i + 1): v for i, v in enumerate(values)})
-
-    def _extract_feedback_dict_from_step_row(self, row: Dict) -> Optional[FeedbackDict]:
-        if row["feedback_id"] is not None:
-            return FeedbackDict(
-                forId=row["id"],
-                id=row["feedback_id"],
-                value=row["feedback_value"],
-                comment=row["feedback_comment"],
-            )
-        return None
-
-    def _convert_step_row_to_dict(self, row: Dict) -> StepDict:
-        return StepDict(
-            id=str(row["id"]),
-            threadId=str(row["threadId"]) if row.get("threadId") else "",
-            parentId=str(row["parentId"]) if row.get("parentId") else None,
-            name=str(row.get("name")),
-            type=row["type"],
-            input=row.get("input", {}),
-            output=row.get("output", {}),
-            metadata=json.loads(row.get("metadata", "{}")),
-            createdAt=row["createdAt"].isoformat() if row.get("createdAt") else None,
-            start=row["startTime"].isoformat() if row.get("startTime") else None,
-            showInput=row.get("showInput"),
-            isError=row.get("isError"),
-            end=row["endTime"].isoformat() if row.get("endTime") else None,
-            feedback=self._extract_feedback_dict_from_step_row(row),
-        )
-
-    def _convert_element_row_to_dict(self, row: Dict) -> ElementDict:
-        metadata = json.loads(row.get("metadata", "{}"))
-        return ElementDict(
-            id=str(row["id"]),
-            threadId=str(row["threadId"]) if row.get("threadId") else None,
-            type=metadata.get("type", "file"),
-            url=row["url"],
-            name=row["name"],
-            mime=row["mime"],
-            objectKey=row["objectKey"],
-            forId=str(row["stepId"]),
-            chainlitKey=row.get("chainlitKey"),
-            display=row["display"],
-            size=row["size"],
-            language=row["language"],
-            page=row["page"],
-            autoPlay=row.get("autoPlay"),
-            playerConfig=row.get("playerConfig"),
-            props=json.loads(row.get("props") or "{}"),
-        )
-
-    async def build_debug_url(self) -> str:
-        return ""
-
-    async def cleanup(self):
-        """Cleanup database connections"""
-        if self.pool:
-            await self.pool.close()
-
-    def _sync_cleanup(self):
-        """Cleanup database connections in a synchronous context."""
-        if self.pool and not self.pool.is_closing():
-            loop = asyncio.get_event_loop()
-            if loop.is_running():
-                loop.create_task(self.cleanup())
-            else:
-                try:
-                    cleanup_loop = asyncio.new_event_loop()
-                    asyncio.set_event_loop(cleanup_loop)
-                    cleanup_loop.run_until_complete(self.cleanup())
-                    cleanup_loop.close()
-                except Exception as e:
-                    logger.error(f"Error during sync cleanup: {e}")
-
-    def _signal_handler(self, sig, frame):
-        """Handle signals for graceful shutdown."""
-        logger.info(f"Received signal {sig}, cleaning up connection pool.")
-        self._sync_cleanup()
-        # Re-raise the signal after cleanup
-        signal.default_int_handler(sig, frame)
-
-
-def truncate(text: Optional[str], max_length: int = 255) -> Optional[str]:
-    return None if text is None else text[:max_length]
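
For orientation, below is a minimal, hypothetical sketch of how a data layer like the ChainlitDataLayer shown in the hunk above can be registered in a Chainlit app. It is not part of the diff: the @cl.data_layer hook and the S3StorageClient constructor argument are assumptions based on the file list above (chainlit/callbacks.py and chainlit/data/storage_clients/s3.py), and the connection string and bucket name are placeholders to replace with real configuration.

# Hypothetical usage sketch (not part of the released package diff).
# Assumes the @cl.data_layer registration hook and the S3 storage client
# listed in the files above; DSN and bucket name are placeholders.
import chainlit as cl
from chainlit.data.chainlit_data_layer import ChainlitDataLayer
from chainlit.data.storage_clients.s3 import S3StorageClient


@cl.data_layer
def get_data_layer():
    # Uploaded elements go to object storage; threads, steps, users and
    # feedback are persisted in Postgres via asyncpg.
    storage = S3StorageClient(bucket="my-chainlit-elements")
    return ChainlitDataLayer(
        database_url="postgresql://user:password@localhost:5432/chainlit",
        storage_client=storage,
        show_logger=True,
    )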