chainlit 0.4.1__py3-none-any.whl → 0.4.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of chainlit might be problematic. Click here for more details.

chainlit/server.py CHANGED
@@ -6,11 +6,13 @@ mimetypes.add_type("text/css", ".css")
6
6
  import os
7
7
  import json
8
8
  import webbrowser
9
+ from pathlib import Path
10
+
9
11
 
10
12
  from contextlib import asynccontextmanager
11
13
  from watchfiles import awatch
12
14
 
13
- from fastapi import FastAPI
15
+ from fastapi import FastAPI, Request
14
16
  from fastapi.responses import (
15
17
  HTMLResponse,
16
18
  JSONResponse,
@@ -21,17 +23,25 @@ from fastapi_socketio import SocketManager
21
23
  from starlette.middleware.cors import CORSMiddleware
22
24
  import asyncio
23
25
 
26
+ from chainlit.context import emitter_var, loop_var
24
27
  from chainlit.config import config, load_module, reload_config, DEFAULT_HOST
25
28
  from chainlit.session import Session, sessions
26
29
  from chainlit.user_session import user_sessions
27
- from chainlit.client import CloudClient
30
+ from chainlit.client.cloud import CloudClient
31
+ from chainlit.client.local import LocalClient
32
+ from chainlit.client.utils import get_client
28
33
  from chainlit.emitter import ChainlitEmitter
29
34
  from chainlit.markdown import get_markdown_str
30
35
  from chainlit.action import Action
31
36
  from chainlit.message import Message, ErrorMessage
32
37
  from chainlit.telemetry import trace_event
33
38
  from chainlit.logger import logger
34
- from chainlit.types import CompletionRequest
39
+ from chainlit.types import (
40
+ CompletionRequest,
41
+ UpdateFeedbackRequest,
42
+ GetConversationsRequest,
43
+ DeleteConversationRequest,
44
+ )
35
45
 
36
46
 
37
47
  @asynccontextmanager
@@ -39,18 +49,25 @@ async def lifespan(app: FastAPI):
39
49
  host = config.run.host
40
50
  port = config.run.port
41
51
 
42
- if not config.run.headless:
43
- if host == DEFAULT_HOST:
44
- url = f"http://localhost:{port}"
45
- else:
46
- url = f"http://{host}:{port}"
52
+ if host == DEFAULT_HOST:
53
+ url = f"http://localhost:{port}"
54
+ else:
55
+ url = f"http://{host}:{port}"
47
56
 
48
- logger.info(f"Your app is available at {url}")
57
+ logger.info(f"Your app is available at {url}")
49
58
 
59
+ if not config.run.headless:
50
60
  # Add a delay before opening the browser
51
61
  await asyncio.sleep(1)
52
62
  webbrowser.open(url)
53
63
 
64
+ if config.project.database == "local":
65
+ from prisma import Client, register
66
+
67
+ client = Client()
68
+ register(client)
69
+ await client.connect()
70
+
54
71
  watch_task = None
55
72
  stop_event = asyncio.Event()
56
73
 
@@ -92,6 +109,8 @@ async def lifespan(app: FastAPI):
92
109
  try:
93
110
  yield
94
111
  finally:
112
+ if config.project.database == "local":
113
+ await client.disconnect()
95
114
  if watch_task:
96
115
  try:
97
116
  stop_event.set()
@@ -206,6 +225,80 @@ async def project_settings():
206
225
  )
207
226
 
208
227
 
228
+ @app.put("/message/feedback")
229
+ async def update_feedback(request: Request, update: UpdateFeedbackRequest):
230
+ """Update the human feedback for a particular message."""
231
+
232
+ client = await get_client(request)
233
+ await client.set_human_feedback(
234
+ message_id=update.messageId, feedback=update.feedback
235
+ )
236
+ return JSONResponse(content={"success": True})
237
+
238
+
239
+ @app.get("/project/members")
240
+ async def get_project_members(request: Request):
241
+ """Get all the members of a project."""
242
+
243
+ client = await get_client(request)
244
+ res = await client.get_project_members()
245
+ return JSONResponse(content=res)
246
+
247
+
248
+ @app.get("/project/role")
249
+ async def get_member_role(request: Request):
250
+ """Get the role of a member."""
251
+
252
+ client = await get_client(request)
253
+ res = await client.get_member_role()
254
+ return PlainTextResponse(content=res)
255
+
256
+
257
+ @app.post("/project/conversations")
258
+ async def get_project_conversations(request: Request, payload: GetConversationsRequest):
259
+ """Get the conversations page by page."""
260
+
261
+ client = await get_client(request)
262
+ res = await client.get_conversations(payload.pagination, payload.filter)
263
+ return JSONResponse(content=res.to_dict())
264
+
265
+
266
+ @app.get("/project/conversation/{conversation_id}")
267
+ async def get_conversation(request: Request, conversation_id: str):
268
+ """Get a specific conversation."""
269
+
270
+ client = await get_client(request)
271
+ res = await client.get_conversation(int(conversation_id))
272
+ return JSONResponse(content=res)
273
+
274
+
275
+ @app.get("/project/conversation/{conversation_id}/element/{element_id}")
276
+ async def get_conversation(request: Request, conversation_id: str, element_id: str):
277
+ """Get a specific conversation."""
278
+
279
+ client = await get_client(request)
280
+ res = await client.get_element(int(conversation_id), int(element_id))
281
+ return JSONResponse(content=res)
282
+
283
+
284
+ @app.delete("/project/conversation")
285
+ async def delete_conversation(request: Request, payload: DeleteConversationRequest):
286
+ """Delete a conversation."""
287
+
288
+ client = await get_client(request)
289
+ await client.delete_conversation(conversation_id=payload.conversationId)
290
+ return JSONResponse(content={"success": True})
291
+
292
+
293
+ @app.get("/files/{filename:path}")
294
+ async def serve_file(filename: str):
295
+ file_path = Path(config.project.local_fs_path) / filename
296
+ if file_path.is_file():
297
+ return FileResponse(file_path)
298
+ else:
299
+ return {"error": "File not found"}
300
+
301
+
209
302
  @app.get("/{path:path}")
210
303
  async def serve(path: str):
211
304
  """Serve the UI."""
@@ -236,7 +329,7 @@ def need_session(id: str):
236
329
  async def connect(sid, environ):
237
330
  user_env = environ.get("HTTP_USER_ENV")
238
331
  authorization = environ.get("HTTP_AUTHORIZATION")
239
- cloud_client = None
332
+ client = None
240
333
 
241
334
  # Check authorization
242
335
  if not config.project.public and not authorization:
@@ -244,17 +337,22 @@ async def connect(sid, environ):
244
337
  trace_event("no_access_token")
245
338
  logger.error("Connection refused: No access token provided")
246
339
  return False
247
- elif authorization and config.project.id:
340
+ elif authorization and config.project.id and config.project.database == "cloud":
248
341
  # Create the cloud client
249
- cloud_client = CloudClient(
342
+ client = CloudClient(
250
343
  project_id=config.project.id,
251
- session_id=sid,
252
344
  access_token=authorization,
253
345
  )
254
- is_project_member = await cloud_client.is_project_member()
346
+ is_project_member = await client.is_project_member()
255
347
  if not is_project_member:
256
348
  logger.error("Connection refused: You are not a member of this project")
257
349
  return False
350
+ elif config.project.database == "local":
351
+ client = LocalClient()
352
+ elif config.project.database == "custom":
353
+ if not config.code.client_factory:
354
+ raise ValueError("Client factory not provided")
355
+ client = await config.code.client_factory()
258
356
 
259
357
  # Check user env
260
358
  if config.project.user_env:
@@ -293,9 +391,8 @@ async def connect(sid, environ):
293
391
  "id": sid,
294
392
  "emit": emit_fn,
295
393
  "ask_user": ask_user_fn,
296
- "client": cloud_client,
394
+ "client": client,
297
395
  "user_env": user_env,
298
- "running_sync": False,
299
396
  "should_stop": False,
300
397
  } # type: Session
301
398
 
@@ -308,15 +405,21 @@ async def connect(sid, environ):
308
405
  @socket.on("connection_successful")
309
406
  async def connection_successful(sid):
310
407
  session = need_session(sid)
311
- __chainlit_emitter__ = ChainlitEmitter(session)
408
+ emitter_var.set(ChainlitEmitter(session))
409
+ loop_var.set(asyncio.get_event_loop())
410
+
411
+ if config.code.on_chat_start:
412
+ """Call the on_chat_start function provided by the developer."""
413
+ await config.code.on_chat_start()
414
+
312
415
  if config.code.lc_factory:
313
416
  """Instantiate the langchain agent and store it in the session."""
314
- agent = await config.code.lc_factory(__chainlit_emitter__=__chainlit_emitter__)
417
+ agent = await config.code.lc_factory()
315
418
  session["agent"] = agent
316
419
 
317
- if config.code.on_chat_start:
318
- """Call the on_chat_start function provided by the developer."""
319
- await config.code.on_chat_start(__chainlit_emitter__=__chainlit_emitter__)
420
+ if config.code.llama_index_factory:
421
+ llama_instance = await config.code.llama_index_factory()
422
+ session["llama_instance"] = llama_instance
320
423
 
321
424
 
322
425
  @socket.on("disconnect")
@@ -336,7 +439,8 @@ async def stop(sid):
336
439
  trace_event("stop_task")
337
440
  session = sessions[sid]
338
441
 
339
- __chainlit_emitter__ = ChainlitEmitter(session)
442
+ emitter_var.set(ChainlitEmitter(session))
443
+ loop_var.set(asyncio.get_event_loop())
340
444
 
341
445
  await Message(author="System", content="Task stopped by the user.").send()
342
446
 
@@ -350,8 +454,11 @@ async def process_message(session: Session, author: str, input_str: str):
350
454
  """Process a message from the user."""
351
455
 
352
456
  try:
353
- __chainlit_emitter__ = ChainlitEmitter(session)
354
- await __chainlit_emitter__.task_start()
457
+ emitter = ChainlitEmitter(session)
458
+ emitter_var.set(emitter)
459
+ loop_var.set(asyncio.get_event_loop())
460
+
461
+ await emitter.task_start()
355
462
 
356
463
  if session["client"]:
357
464
  # If cloud is enabled, persist the message
@@ -364,6 +471,8 @@ async def process_message(session: Session, author: str, input_str: str):
364
471
  )
365
472
 
366
473
  langchain_agent = session.get("agent")
474
+ llama_instance = session.get("llama_instance")
475
+
367
476
  if langchain_agent:
368
477
  from chainlit.lc.agent import run_langchain_agent
369
478
 
@@ -373,7 +482,6 @@ async def process_message(session: Session, author: str, input_str: str):
373
482
  await config.code.lc_run(
374
483
  langchain_agent,
375
484
  input_str,
376
- __chainlit_emitter__=__chainlit_emitter__,
377
485
  )
378
486
  return
379
487
  else:
@@ -384,9 +492,7 @@ async def process_message(session: Session, author: str, input_str: str):
384
492
 
385
493
  if config.code.lc_postprocess:
386
494
  # If the developer provided a custom postprocess function, use it
387
- await config.code.lc_postprocess(
388
- raw_res, __chainlit_emitter__=__chainlit_emitter__
389
- )
495
+ await config.code.lc_postprocess(raw_res)
390
496
  return
391
497
  elif output_key is not None:
392
498
  # Use the output key if provided
@@ -397,18 +503,23 @@ async def process_message(session: Session, author: str, input_str: str):
397
503
  # Finally, send the response to the user
398
504
  await Message(author=config.ui.name, content=res).send()
399
505
 
506
+ elif llama_instance:
507
+ from chainlit.llama_index.run import run_llama
508
+
509
+ await run_llama(llama_instance, input_str)
510
+
400
511
  elif config.code.on_message:
401
512
  # If no langchain agent is available, call the on_message function provided by the developer
402
- await config.code.on_message(
403
- input_str, __chainlit_emitter__=__chainlit_emitter__
404
- )
513
+ await config.code.on_message(input_str)
405
514
  except InterruptedError:
406
515
  pass
407
516
  except Exception as e:
408
517
  logger.exception(e)
409
- await ErrorMessage(author="Error", content=str(e)).send()
518
+ await ErrorMessage(
519
+ author="Error", content=str(e) or e.__class__.__name__
520
+ ).send()
410
521
  finally:
411
- await __chainlit_emitter__.task_end()
522
+ await emitter.task_end()
412
523
 
413
524
 
414
525
  @socket.on("ui_message")
@@ -423,11 +534,10 @@ async def message(sid, data):
423
534
  await process_message(session, author, input_str)
424
535
 
425
536
 
426
- async def process_action(session: Session, action: Action):
427
- __chainlit_emitter__ = ChainlitEmitter(session)
537
+ async def process_action(action: Action):
428
538
  callback = config.code.action_callbacks.get(action.name)
429
539
  if callback:
430
- await callback(action, __chainlit_emitter__=__chainlit_emitter__)
540
+ await callback(action)
431
541
  else:
432
542
  logger.warning("No callback found for action %s", action.name)
433
543
 
@@ -436,8 +546,9 @@ async def process_action(session: Session, action: Action):
436
546
  async def call_action(sid, action):
437
547
  """Handle an action call from the UI."""
438
548
  session = need_session(sid)
549
+ emitter_var.set(ChainlitEmitter(session))
550
+ loop_var.set(asyncio.get_event_loop())
439
551
 
440
- __chainlit_emitter__ = ChainlitEmitter(session)
441
552
  action = Action(**action)
442
553
 
443
- await process_action(session, action)
554
+ await process_action(action)
chainlit/session.py CHANGED
@@ -1,5 +1,5 @@
1
1
  from typing import Dict, TypedDict, Optional, Callable, Any, Union
2
- from chainlit.client import BaseClient
2
+ from chainlit.client.base import BaseClient
3
3
  from chainlit.types import AskResponse
4
4
 
5
5
 
@@ -16,8 +16,8 @@ class Session(TypedDict):
16
16
  user_env: Dict[str, str]
17
17
  # Optional langchain agent
18
18
  agent: Any
19
- # If the session is currently running a sync task
20
- running_sync: bool
19
+ # Optional llama instance
20
+ llama_instance: Any
21
21
  # Whether the current task should be stopped
22
22
  should_stop: bool
23
23
  # Optional client to persist messages and files
chainlit/sync.py CHANGED
@@ -1,37 +1,30 @@
1
- from typing import Any, Callable
1
+ import sys
2
+ from typing import Any, TypeVar, Coroutine
2
3
 
4
+ if sys.version_info >= (3, 10):
5
+ from typing import ParamSpec
6
+ else:
7
+ from typing_extensions import ParamSpec
8
+
9
+ import threading
3
10
  import asyncio
4
11
  from syncer import sync
5
12
  from asyncer import asyncify
6
13
 
7
- from chainlit.emitter import get_emitter
8
-
9
-
10
- def make_async(function: Callable):
11
- emitter = get_emitter()
12
- if not emitter:
13
- raise RuntimeError(
14
- "Emitter not found, please call make_async in a Chainlit context."
15
- )
14
+ from chainlit.context import get_loop
16
15
 
17
- def wrapper(*args, **kwargs):
18
- emitter.session["running_sync"] = True
19
- __chainlit_emitter__ = emitter
20
- res = function(*args, **kwargs)
21
- emitter.session["running_sync"] = False
22
- return res
23
16
 
24
- return asyncify(wrapper, cancellable=True)
17
+ make_async = asyncify
25
18
 
19
+ T_Retval = TypeVar("T_Retval")
20
+ T_ParamSpec = ParamSpec("T_ParamSpec")
21
+ T = TypeVar("T")
26
22
 
27
- def run_sync(co: Any):
28
- try:
29
- loop = asyncio.get_event_loop()
30
- except RuntimeError as e:
31
- if "There is no current event loop" in str(e):
32
- loop = None
33
23
 
34
- if loop is None or not loop.is_running():
35
- loop = asyncio.new_event_loop()
36
- asyncio.set_event_loop(loop)
37
- return sync(co)
24
+ def run_sync(co: Coroutine[Any, Any, T_Retval]) -> T_Retval:
25
+ if threading.current_thread() == threading.main_thread():
26
+ return sync(co)
27
+ else:
28
+ loop = get_loop()
29
+ result = asyncio.run_coroutine_threadsafe(co, loop=loop)
30
+ return result.result()
chainlit/types.py CHANGED
@@ -1,4 +1,4 @@
1
- from typing import List, TypedDict, Optional, Literal, Dict, Union
1
+ from typing import List, Any, TypedDict, Optional, Literal, Dict, Union
2
2
  from pydantic import BaseModel
3
3
  from pydantic.dataclasses import dataclass
4
4
  from dataclasses_json import dataclass_json
@@ -66,3 +66,28 @@ class CompletionRequest(BaseModel):
66
66
  prompt: str
67
67
  userEnv: Dict[str, str]
68
68
  settings: LLMSettings
69
+
70
+
71
+ class UpdateFeedbackRequest(BaseModel):
72
+ messageId: int
73
+ feedback: int
74
+
75
+
76
+ class DeleteConversationRequest(BaseModel):
77
+ conversationId: int
78
+
79
+
80
+ class Pagination(BaseModel):
81
+ first: int
82
+ cursor: Any
83
+
84
+
85
+ class ConversationFilter(BaseModel):
86
+ feedback: Optional[Literal[-1, 0, 1]]
87
+ authorEmail: Optional[str]
88
+ search: Optional[str]
89
+
90
+
91
+ class GetConversationsRequest(BaseModel):
92
+ pagination: Pagination
93
+ filter: ConversationFilter
chainlit/user_session.py CHANGED
@@ -1,5 +1,5 @@
1
1
  from typing import Dict
2
- from chainlit.emitter import get_emitter
2
+ from chainlit.context import get_emitter
3
3
 
4
4
  user_sessions: Dict[str, Dict] = {}
5
5
 
chainlit/utils.py ADDED
@@ -0,0 +1,51 @@
1
+ from typing import Callable
2
+ import inspect
3
+
4
+ from chainlit.context import get_emitter
5
+ from chainlit.logger import logger
6
+ from chainlit.message import ErrorMessage
7
+
8
+
9
+ def wrap_user_function(user_function: Callable, with_task=False) -> Callable:
10
+ """
11
+ Wraps a user-defined function to accept arguments as a dictionary.
12
+
13
+ Args:
14
+ user_function (Callable): The user-defined function to wrap.
15
+
16
+ Returns:
17
+ Callable: The wrapped function.
18
+ """
19
+
20
+ async def wrapper(*args):
21
+ # Get the parameter names of the user-defined function
22
+ user_function_params = list(inspect.signature(user_function).parameters.keys())
23
+
24
+ # Create a dictionary of parameter names and their corresponding values from *args
25
+ params_values = {
26
+ param_name: arg for param_name, arg in zip(user_function_params, args)
27
+ }
28
+
29
+ emitter = get_emitter()
30
+
31
+ if with_task:
32
+ await emitter.task_start()
33
+
34
+ try:
35
+ # Call the user-defined function with the arguments
36
+ if inspect.iscoroutinefunction(user_function):
37
+ return await user_function(**params_values)
38
+ else:
39
+ return user_function(**params_values)
40
+ except InterruptedError:
41
+ pass
42
+ except Exception as e:
43
+ logger.exception(e)
44
+ await ErrorMessage(
45
+ content=str(e) or e.__class__.__name__, author="Error"
46
+ ).send()
47
+ finally:
48
+ if with_task:
49
+ await emitter.task_end()
50
+
51
+ return wrapper
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: chainlit
3
- Version: 0.4.1
3
+ Version: 0.4.3
4
4
  Summary: A faster way to build chatbot UIs.
5
5
  Home-page: https://github.com/Chainlit/chainlit
6
6
  License: Apache-2.0 license
@@ -19,10 +19,11 @@ Requires-Dist: asyncer (>=0.0.2,<0.0.3)
19
19
  Requires-Dist: auth0-python (>=4.1.1,<5.0.0)
20
20
  Requires-Dist: click (>=8.1.3,<9.0.0)
21
21
  Requires-Dist: dataclasses_json (>=0.5.7,<0.6.0)
22
- Requires-Dist: fastapi (>=0.96.0,<0.97.0)
22
+ Requires-Dist: fastapi (>=0.97.0,<0.98.0)
23
23
  Requires-Dist: fastapi-socketio (>=0.0.10,<0.0.11)
24
24
  Requires-Dist: nest-asyncio (>=1.5.6,<2.0.0)
25
25
  Requires-Dist: openai (>=0.27.7,<0.28.0)
26
+ Requires-Dist: prisma (>=0.9.0,<0.10.0)
26
27
  Requires-Dist: pydantic (>=1.10.8,<2.0.0)
27
28
  Requires-Dist: python-dotenv (>=1.0.0,<2.0.0)
28
29
  Requires-Dist: python-graphql-client (>=0.4.3,<0.5.0)
@@ -92,7 +93,10 @@ $ chainlit run demo.py -w
92
93
 
93
94
  ### 🔗 With LangChain
94
95
 
95
- Checkout our plug and play [integration](https://docs.chainlit.io/langchain) with LangChain!
96
+ Check out our plug-and-play [integration](https://docs.chainlit.io/langchain) with LangChain!
97
+
98
+ ### 📚 More Examples - Cookbook
99
+ You can find various examples of Chainlit apps [here](https://github.com/Chainlit/cookbook) that leverage tools and services such as OpenAI, Anthropic, LangChain, LlamaIndex, ChromaDB, Pinecone and more.
96
100
 
97
101
  ## 🛣 Roadmap
98
102
  - [ ] New UI elements (spreadsheet, video, carousel...)
@@ -0,0 +1,49 @@
1
+ chainlit/__init__.py,sha256=KJjPLSjatqlNVjWDpYy6XzHWKZeIjH3cJ0Q3WhKDp_A,4078
2
+ chainlit/__main__.py,sha256=7Vg3w3T3qDuz4KDu5lQhLH6lQ3cYdume7gHH7Z1V97U,87
3
+ chainlit/action.py,sha256=dk-ymZXZPQlHjh77ivwmdzhAp3QN7ihPZVVVhoh2jMc,1209
4
+ chainlit/cache.py,sha256=HFieoVnhsJf36AT7nRL8wcEJ1jTgeTLju23WW2-vgp4,1343
5
+ chainlit/cli/__init__.py,sha256=DMeygqozPGYgMcM9oRPXRV1RVdhorCe0W-sOyTmjhCU,4888
6
+ chainlit/cli/auth.py,sha256=G437UK4BvLA4wWPz_KPDwN6_6dnchMtHWr4tqu5XN-M,4093
7
+ chainlit/cli/deploy.py,sha256=RyIzpjS3yuVjQpllDO2PzXb6VZAtTcOyuAfF75WKVJE,2594
8
+ chainlit/cli/mock.py,sha256=hfcd62F0Bw6Bc2sj56gBYrxhujeCbQ2BzU8KZyTqG0Q,67012
9
+ chainlit/cli/utils.py,sha256=Pn6ANnw9GgdUduiMphnb8RiGUPRbzDafkCHM2rOPBj4,716
10
+ chainlit/client/base.py,sha256=HtVS6mGm_Ik6W08nLdUPtdVxzxK6Au1K6QKz-HdBz2Q,3284
11
+ chainlit/client/cloud.py,sha256=OEUVVnvJK0ofQRoOkjxNniuyPtxZfVLqYXi8_0o6fS0,14109
12
+ chainlit/client/local.py,sha256=JTSBHVArt7aA-rkuFJrGvlKUGin-PO2VXoWd_AQUko0,7324
13
+ chainlit/client/utils.py,sha256=YARrA5XMpOpKhNjvIpve5H5CH2YekNP46aEYFvBjXlk,682
14
+ chainlit/config.py,sha256=zd-vakyf06v-93g1JJ4aaHGJcQ3O-VoMJU71HqUIMUY,8493
15
+ chainlit/context.py,sha256=m639AAk-bt7DWTStuW8s9FotohVonuW7veg5UwiKdKA,709
16
+ chainlit/db/__init__.py,sha256=kFk5a6SoDrq1U6X4EjvJLd-3YsuVV4cl9r8BFHUExlM,982
17
+ chainlit/db/prisma/schema.prisma,sha256=ZvhefSK_9D2Vck9SORaG9o-jQr6_BRpKNo-2YTlmgmg,1373
18
+ chainlit/element.py,sha256=15nfoeQYF-hkOC8eFWmUWUsLT-vhyyacmG3GkOE_QDk,6317
19
+ chainlit/emitter.py,sha256=SVgD6f9evY2IzanZ_ZHLd3qK_keDO20E_2fqv1Ka5JI,4142
20
+ chainlit/frontend/dist/assets/index-37b5009c.js,sha256=rm7EbF7bDLQgCTyTHTVj9qQ9bjyAis7ZOzjU8yd-PHE,614554
21
+ chainlit/frontend/dist/assets/index-51393291.js,sha256=asduyAfPksSjLfrXf7gdIuy03MbpTCMfgBeEj2J_4ys,1334161
22
+ chainlit/frontend/dist/assets/index-f93cc942.css,sha256=-TzJQmhWdpIg94Jv25YHMi9h-jSQmeERggrrlmYIflg,5697
23
+ chainlit/frontend/dist/assets/logo_dark-bc7401f6.svg,sha256=vHQB9g-n5OqOmuH3Fduuc7ZMg0EmMsGyO9cEnYwLbHg,8889
24
+ chainlit/frontend/dist/assets/logo_light-f19fc2ea.svg,sha256=8Z_C6t-0V9QL9ldmLjaLfp2REcGDuaTeNynj6-6muNI,8891
25
+ chainlit/frontend/dist/favicon.svg,sha256=0Cy8x28obT5eWW3nxZRhsEvu6_zMqrqbg0y6hT3D0Q0,6455
26
+ chainlit/frontend/dist/index.html,sha256=7CUGF4qj4bXEsryZy4prJbz4scyBPQlt5RQioKKAJQE,793
27
+ chainlit/hello.py,sha256=bqKP00i0FKHnZ9fdyVW1a2xDAA1g7IWT0EVzUNky7rA,417
28
+ chainlit/langflow/__init__.py,sha256=spuypuoC0mVDVsxUbPJk2dKOfAtDv3IePuSe2a_L23U,2339
29
+ chainlit/lc/__init__.py,sha256=mEtyfyGNDieIysKsWOrlvUvqVCqVnhYZqZprlZ1CBso,3281
30
+ chainlit/lc/agent.py,sha256=tSR6TxZlMh91rmh95h2b-6bSGugak9lNpwaioDfiz1c,1140
31
+ chainlit/lc/callbacks.py,sha256=wZLpWKnQCmYBk_LPFvL25U5KdXV50UuKzDtRrPjkUWs,12788
32
+ chainlit/llama_index/__init__.py,sha256=X_Ydtrw62YzOQ1Dpe0BHXnX3akz5HbGrM5NNXZczNnQ,953
33
+ chainlit/llama_index/callbacks.py,sha256=8WiHAxirhTXzGFk2rKBRQ9u_wKRg_yHntuRh6yGA_mo,2994
34
+ chainlit/llama_index/run.py,sha256=NCYoMr-PJKjCENYuBJvE8W4MnH3VKvonxBmEMfQdZ78,1230
35
+ chainlit/logger.py,sha256=VFl6D0UbY7PH7gCl0HBXM_vfk7lK_bMEqiUxQBq_Vnw,374
36
+ chainlit/markdown.py,sha256=JRhaKb0BB1G2S1XEaGj95DH75y7jqWdeIPx4u0oDaxw,1623
37
+ chainlit/message.py,sha256=zCagR3TEhdzf39p4LEcloNaSYpVttwBiMdRtNmPTeo4,12216
38
+ chainlit/server.py,sha256=8oGPuRQV9QrrhGdEuSQmBnX_it_XkgyZ-BM2go__wQQ,17625
39
+ chainlit/session.py,sha256=V1cKSi1REHy3zJSk1szLPL59MuGuwjg_HECfqD7VdsU,866
40
+ chainlit/sync.py,sha256=g5BC-DA34BMhhB16fZ4QwKTda4oC2MYoAnLS-WoKRRI,688
41
+ chainlit/telemetry.py,sha256=yaRnmjjwLPbUa_wLQ--sp1VKPAKIHgTQP-MZo_W8AM8,2358
42
+ chainlit/types.py,sha256=py6vLmm_w43v7X_NNBvWtfi5rUc9owGNPdBO7L8Pto4,1981
43
+ chainlit/user_session.py,sha256=U4F1e9DBUhFoXvEdReRTq7Bu0Str4qsvSgAOeMobUzA,1213
44
+ chainlit/utils.py,sha256=2yoBX7W6vNxnIefHvrbspXggVwGrtYzAO9xJtTJT61M,1561
45
+ chainlit/version.py,sha256=iosXhlXclBwBqlADFKEilxAC2wWKbtuBKi87AmPi7s8,196
46
+ chainlit-0.4.3.dist-info/METADATA,sha256=aeKV9Cp0pRbcS0XeQykUVQTKYKLivr2QuvewIl7TQAE,4325
47
+ chainlit-0.4.3.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
48
+ chainlit-0.4.3.dist-info/entry_points.txt,sha256=FrkqdjrFl8juSnvBndniyX7XuKojmUwO4ghRh-CFMQc,45
49
+ chainlit-0.4.3.dist-info/RECORD,,