chainlit 1.0.506 → 1.1.0 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of chainlit has been flagged as potentially problematic.

@@ -22,7 +22,7 @@
     <script>
       const global = globalThis;
     </script>
-    <script type="module" crossorigin src="/assets/index-d4233b49.js"></script>
+    <script type="module" crossorigin src="/assets/index-0a52365d.js"></script>
     <link rel="stylesheet" href="/assets/index-d088547c.css">
   </head>
   <body>
@@ -70,7 +70,7 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
     ) -> str:
         """Run when an event starts and return id of event."""
         self._restore_context()
-
+
         step_type: StepType = "undefined"
         if event_type == CBEventType.RETRIEVE:
             step_type = "retrieval"
@@ -104,7 +104,6 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
         """Run when an event ends."""
         step = self.steps.get(event_id, None)
 
-
         if payload is None or step is None:
             return
 
@@ -117,11 +116,13 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
             source_nodes = getattr(response, "source_nodes", None)
             if source_nodes:
                 source_refs = ", ".join(
-                    [f"Source {idx}" for idx, _ in enumerate(source_nodes)])
+                    [f"Source {idx}" for idx, _ in enumerate(source_nodes)]
+                )
                 step.elements = [
                     Text(
                         name=f"Source {idx}",
                         content=source.text or "Empty node",
+                        display="side",
                     )
                     for idx, source in enumerate(source_nodes)
                 ]
@@ -137,6 +138,7 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
                 step.elements = [
                     Text(
                         name=f"Source {idx}",
+                        display="side",
                         content=source.node.get_text() or "Empty node",
                     )
                     for idx, source in enumerate(sources)
@@ -173,7 +175,7 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
             token_count = self.total_llm_token_count or None
             raw_response = response.raw if response else None
             model = raw_response.get("model", None) if raw_response else None
-
+
             if messages and isinstance(response, ChatResponse):
                 msg: ChatMessage = response.message
                 step.generation = ChatGeneration(
@@ -198,7 +200,7 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
         else:
             step.output = payload
         self.context.loop.create_task(step.update())
-
+
         self.steps.pop(event_id, None)
 
     def _noop(self, *args, **kwargs):
@@ -206,4 +208,3 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
 
     start_trace = _noop
     end_trace = _noop
-
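The two hunks above that add `display="side"` attach each retrieved source to the step as a side-panel Text element. A minimal sketch of the same pattern with Chainlit's public element API (the handler and source strings are made up; only `cl.Text` and its `display` option are taken from the diff):

    import chainlit as cl

    @cl.on_message
    async def handle(message: cl.Message):
        # Stand-in for retrieved chunks; in the callback handler these come
        # from the LlamaIndex response's source_nodes.
        sources = ["first retrieved chunk", "second retrieved chunk"]

        elements = [
            cl.Text(name=f"Source {idx}", content=text or "Empty node", display="side")
            for idx, text in enumerate(sources)
        ]
        source_refs = ", ".join(f"Source {idx}" for idx in range(len(sources)))

        # Mentioning the element names in the message content makes them
        # clickable next to the answer in the UI.
        await cl.Message(
            content=f"Answer to '{message.content}' based on: {source_refs}",
            elements=elements,
        ).send()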
chainlit/message.py CHANGED
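The hunks below change `send()` to return the `Message` object itself instead of its id (the docstring still reads "Return the ID of the message"). A minimal usage sketch of the new return value, assuming the public `chainlit` import path:

    import chainlit as cl

    @cl.on_message
    async def main(message: cl.Message):
        # In 1.0.x, send() returned the message id as a string.
        # From this release it returns the Message instance, so read .id from it.
        msg = await cl.Message(content="Hello!").send()
        print(msg.id)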
@@ -166,7 +166,7 @@ class MessageBase(ABC):
         step_dict = await self._create()
         await context.emitter.send_step(step_dict)
 
-        return self.id
+        return self
 
     async def stream_token(self, token: str, is_sequence=False):
         """
@@ -251,7 +251,7 @@ class Message(MessageBase):
 
         super().__post_init__()
 
-    async def send(self) -> str:
+    async def send(self):
         """
         Send the message to the UI and persist it in the cloud if a project ID is configured.
         Return the ID of the message.
@@ -268,7 +268,7 @@ class Message(MessageBase):
         # Run all tasks concurrently
         await asyncio.gather(*tasks)
 
-        return self.id
+        return self
 
     async def update(self):
         """
chainlit/server.py CHANGED
@@ -119,16 +119,27 @@ async def lifespan(app: FastAPI):
 
     watch_task = asyncio.create_task(watch_files_for_changes())
 
+    discord_task = None
+
+    if discord_bot_token := os.environ.get("DISCORD_BOT_TOKEN"):
+        from chainlit.discord.app import client
+
+        discord_task = asyncio.create_task(client.start(discord_bot_token))
+
     try:
         yield
     finally:
-        if watch_task:
-            try:
+        try:
+            if watch_task:
                 stop_event.set()
                 watch_task.cancel()
                 await watch_task
-            except asyncio.exceptions.CancelledError:
-                pass
+
+            if discord_task:
+                discord_task.cancel()
+                await discord_task
+        except asyncio.exceptions.CancelledError:
+            pass
 
     if FILES_DIRECTORY.is_dir():
         shutil.rmtree(FILES_DIRECTORY)
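The reworked `finally` block above shares a single `CancelledError` guard between the file watcher and the optional Discord client. A standalone sketch of that shutdown pattern (the helper name is illustrative, not from the package):

    import asyncio
    from typing import Optional

    async def shutdown(*tasks: Optional[asyncio.Task]) -> None:
        # Cancel each background task and await it; only CancelledError is
        # swallowed, so real errors raised during cleanup still propagate.
        try:
            for task in tasks:
                if task:
                    task.cancel()
                    await task
        except asyncio.CancelledError:
            pass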
@@ -195,6 +206,18 @@ socket = SocketManager(
 )
 
 
+# -------------------------------------------------------------------------------
+# SLACK HANDLER
+# -------------------------------------------------------------------------------
+
+if os.environ.get("SLACK_BOT_TOKEN") and os.environ.get("SLACK_SIGNING_SECRET"):
+    from chainlit.slack.app import slack_app_handler
+
+    @app.post("/slack/events")
+    async def endpoint(req: Request):
+        return await slack_app_handler.handle(req)
+
+
 # -------------------------------------------------------------------------------
 # HTTP HANDLERS
 # -------------------------------------------------------------------------------
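The `project_settings` hunk below switches the audio feature on automatically whenever an audio-chunk callback is registered. A minimal sketch of registering such a callback, assuming the `@cl.on_audio_chunk` decorator and `cl.AudioChunk` payload shipped with this release:

    import chainlit as cl

    @cl.on_audio_chunk
    async def on_audio_chunk(chunk: cl.AudioChunk):
        # Registering this handler sets config.code.on_audio_chunk, which the
        # /project/settings endpoint below uses to enable the audio feature.
        print(chunk.mimeType, len(chunk.data))  # field names assumed for this release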
@@ -536,6 +559,10 @@ async def project_settings(
     chat_profiles = await config.code.set_chat_profiles(current_user)
     if chat_profiles:
         profiles = [p.to_dict() for p in chat_profiles]
+
+    if config.code.on_audio_chunk:
+        config.features.audio.enabled = True
+
     return JSONResponse(
         content={
             "ui": config.ui.to_dict(),
chainlit/session.py CHANGED
@@ -1,3 +1,4 @@
+import asyncio
 import json
 import mimetypes
 import shutil
@@ -23,7 +24,7 @@ if TYPE_CHECKING:
     from chainlit.types import FileDict, FileReference
     from chainlit.user import PersistedUser, User
 
-ClientType = Literal["app", "copilot", "teams", "slack"]
+ClientType = Literal["webapp", "copilot", "teams", "slack", "discord"]
 
 
 class JSONEncoderIgnoreNonSerializable(json.JSONEncoder):
@@ -34,17 +35,28 @@ class JSONEncoderIgnoreNonSerializable(json.JSONEncoder):
             return None
 
 
-def clean_metadata(metadata: Dict):
-    return json.loads(
+
+def clean_metadata(metadata: Dict, max_size: int = 1048576):
+    cleaned_metadata = json.loads(
         json.dumps(metadata, cls=JSONEncoderIgnoreNonSerializable, ensure_ascii=False)
     )
 
+    metadata_size = len(json.dumps(cleaned_metadata).encode('utf-8'))
+    if metadata_size > max_size:
+        # Redact the metadata if it exceeds the maximum size
+        cleaned_metadata = {
+            'message': f'Metadata size exceeds the limit of {max_size} bytes. Redacted.'
+        }
+
+    return cleaned_metadata
+
 
 class BaseSession:
     """Base object."""
 
     thread_id_to_resume: Optional[str] = None
     client_type: ClientType
+    current_task: Optional[asyncio.Task] = None
 
     def __init__(
         self,
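`clean_metadata` now caps serialized metadata at 1 MiB (1048576 bytes) and replaces anything larger with a redaction notice. A quick sketch of the observable behaviour, calling the function shown above:

    from chainlit.session import clean_metadata

    # Small metadata survives the JSON round-trip unchanged.
    print(clean_metadata({"chat_profile": "default"}))
    # {'chat_profile': 'default'}

    # Anything whose serialized size exceeds max_size (default 1 MiB) is replaced.
    print(clean_metadata({"blob": "x" * 2_000_000}))
    # {'message': 'Metadata size exceeds the limit of 1048576 bytes. Redacted.'}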
@@ -63,6 +75,8 @@ class BaseSession:
         root_message: Optional["Message"] = None,
         # Chat profile selected before the session was created
         chat_profile: Optional[str] = None,
+        # Origin of the request
+        http_referer: Optional[str] = None,
     ):
         if thread_id:
             self.thread_id_to_resume = thread_id
@@ -74,19 +88,68 @@ class BaseSession:
         self.has_first_interaction = False
         self.user_env = user_env or {}
         self.chat_profile = chat_profile
+        self.http_referer = http_referer
+
+        self.files = {}  # type: Dict[str, "FileDict"]
 
         self.id = id
 
         self.chat_settings: Dict[str, Any] = {}
 
+    @property
+    def files_dir(self):
+        from chainlit.config import FILES_DIRECTORY
+
+        return FILES_DIRECTORY / self.id
+
     async def persist_file(
         self,
         name: str,
         mime: str,
         path: Optional[str] = None,
         content: Optional[Union[bytes, str]] = None,
-    ):
-        return None
+    ) -> "FileReference":
+        if not path and not content:
+            raise ValueError(
+                "Either path or content must be provided to persist a file"
+            )
+
+        self.files_dir.mkdir(exist_ok=True)
+
+        file_id = str(uuid.uuid4())
+
+        file_path = self.files_dir / file_id
+
+        file_extension = mimetypes.guess_extension(mime)
+
+        if file_extension:
+            file_path = file_path.with_suffix(file_extension)
+
+        if path:
+            # Copy the file from the given path
+            async with aiofiles.open(path, "rb") as src, aiofiles.open(
+                file_path, "wb"
+            ) as dst:
+                await dst.write(await src.read())
+        elif content:
+            # Write the provided content to the file
+            async with aiofiles.open(file_path, "wb") as buffer:
+                if isinstance(content, str):
+                    content = content.encode("utf-8")
+                await buffer.write(content)
+
+        # Get the file size
+        file_size = file_path.stat().st_size
+        # Store the file content in memory
+        self.files[file_id] = {
+            "id": file_id,
+            "path": file_path,
+            "name": name,
+            "type": mime,
+            "size": file_size,
+        }
+
+        return {"id": file_id}
 
     def to_persistable(self) -> Dict:
         from chainlit.user_session import user_sessions
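`persist_file` is now implemented on `BaseSession` itself (it used to be a stub returning None there), so any session type can persist files, not only websocket sessions. A minimal sketch of calling it, with a hypothetical entry point that receives a live session:

    from chainlit.session import BaseSession

    async def save_transcript(session: BaseSession) -> dict:
        ref = await session.persist_file(
            name="notes.txt", mime="text/plain", content="hello world"
        )
        # The file is written under FILES_DIRECTORY/<session id>/ and tracked
        # in session.files; only a {"id": ...} reference is returned.
        return ref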
@@ -94,6 +157,8 @@ class BaseSession:
         user_session = user_sessions.get(self.id) or {}  # type: Dict
         user_session["chat_settings"] = self.chat_settings
         user_session["chat_profile"] = self.chat_profile
+        user_session["http_referer"] = self.http_referer
+        user_session["client_type"] = self.client_type
         metadata = clean_metadata(user_session)
         return metadata
 
@@ -115,7 +180,8 @@ class HTTPSession(BaseSession):
         user_env: Optional[Dict[str, str]] = None,
         # Last message at the root of the chat
         root_message: Optional["Message"] = None,
-        # User specific environment variables. Empty if no user environment variables are required.
+        # Origin of the request
+        http_referer: Optional[str] = None,
     ):
         super().__init__(
             id=id,
@@ -125,8 +191,14 @@ class HTTPSession(BaseSession):
             client_type=client_type,
             user_env=user_env,
             root_message=root_message,
+            http_referer=http_referer,
         )
 
+    def delete(self):
+        """Delete the session."""
+        if self.files_dir.is_dir():
+            shutil.rmtree(self.files_dir)
+
 
 class WebsocketSession(BaseSession):
     """Internal web socket session object.
@@ -140,6 +212,8 @@ class WebsocketSession(BaseSession):
     socket id for convenience.
     """
 
+    to_clear: bool = False
+
     def __init__(
         self,
         # Id from the session cookie
@@ -165,6 +239,8 @@ class WebsocketSession(BaseSession):
         chat_profile: Optional[str] = None,
         # Languages of the user's browser
         languages: Optional[str] = None,
+        # Origin of the request
+        http_referer: Optional[str] = None,
     ):
         super().__init__(
             id=id,
@@ -175,77 +251,22 @@ class WebsocketSession(BaseSession):
             client_type=client_type,
             root_message=root_message,
             chat_profile=chat_profile,
+            http_referer=http_referer,
         )
 
         self.socket_id = socket_id
         self.emit_call = emit_call
         self.emit = emit
 
-        self.should_stop = False
         self.restored = False
 
         self.thread_queues = {}  # type: Dict[str, Deque[Callable]]
-        self.files = {}  # type: Dict[str, "FileDict"]
 
         ws_sessions_id[self.id] = self
        ws_sessions_sid[socket_id] = self
 
         self.languages = languages
 
-    @property
-    def files_dir(self):
-        from chainlit.config import FILES_DIRECTORY
-
-        return FILES_DIRECTORY / self.id
-
-    async def persist_file(
-        self,
-        name: str,
-        mime: str,
-        path: Optional[str] = None,
-        content: Optional[Union[bytes, str]] = None,
-    ) -> "FileReference":
-        if not path and not content:
-            raise ValueError(
-                "Either path or content must be provided to persist a file"
-            )
-
-        self.files_dir.mkdir(exist_ok=True)
-
-        file_id = str(uuid.uuid4())
-
-        file_path = self.files_dir / file_id
-
-        file_extension = mimetypes.guess_extension(mime)
-        if file_extension:
-            file_path = file_path.with_suffix(file_extension)
-
-        if path:
-            # Copy the file from the given path
-            async with aiofiles.open(path, "rb") as src, aiofiles.open(
-                file_path, "wb"
-            ) as dst:
-                await dst.write(await src.read())
-        elif content:
-            # Write the provided content to the file
-            async with aiofiles.open(file_path, "wb") as buffer:
-                if isinstance(content, str):
-                    content = content.encode("utf-8")
-                await buffer.write(content)
-
-        # Get the file size
-        file_size = file_path.stat().st_size
-        # Store the file content in memory
-        self.files[file_id] = {
-            "id": file_id,
-            "path": file_path,
-            "name": name,
-            "type": mime,
-            "size": file_size,
-        }
-
-        return {"id": file_id}
-
     def restore(self, new_socket_id: str):
         """Associate a new socket id to the session."""
         ws_sessions_sid.pop(self.socket_id, None)
@@ -0,0 +1,6 @@
+try:
+    import slack_bolt
+except ModuleNotFoundError:
+    raise ValueError(
+        "The slack_bolt package is required to integrate Chainlit with a Slack app. Run `pip install slack_bolt --upgrade`"
+    )