chainlit 0.4.2__py3-none-any.whl → 0.4.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of chainlit might be problematic. Click here for more details.

@@ -0,0 +1,99 @@
1
+ from typing import Any, Dict, List, Optional
2
+
3
+
4
+ from llama_index.callbacks.base import BaseCallbackHandler
5
+ from llama_index.callbacks.schema import CBEventType, EventPayload
6
+
7
+
8
+ from chainlit.message import Message
9
+ from chainlit.element import Text
10
+ from chainlit.sync import run_sync
11
+
12
+
13
+ DEFAULT_IGNORE = [
14
+ CBEventType.CHUNKING,
15
+ CBEventType.SYNTHESIZE,
16
+ CBEventType.EMBEDDING,
17
+ CBEventType.NODE_PARSING,
18
+ CBEventType.QUERY,
19
+ CBEventType.TREE,
20
+ ]
21
+
22
+
23
class LlamaIndexCallbackHandler(BaseCallbackHandler):
    """Callback handler that mirrors LlamaIndex events into the Chainlit UI."""

    def __init__(
        self,
        event_starts_to_ignore: List[CBEventType] = DEFAULT_IGNORE,
        event_ends_to_ignore: List[CBEventType] = DEFAULT_IGNORE,
    ) -> None:
        """Initialize the base callback handler.

        Args:
            event_starts_to_ignore: Event types whose start is not reported.
            event_ends_to_ignore: Event types whose end is not reported.
        """
        # Stored as tuples so sharing the module-level DEFAULT_IGNORE list as a
        # default argument cannot lead to accidental mutation.
        self.event_starts_to_ignore = tuple(event_starts_to_ignore)
        self.event_ends_to_ignore = tuple(event_ends_to_ignore)

    def on_event_start(
        self,
        event_type: CBEventType,
        payload: Optional[Dict[str, Any]] = None,
        event_id: str = "",
        **kwargs: Any,
    ) -> str:
        """Run when an event starts and return id of event.

        Sends an empty indented message so the UI shows a placeholder/loader
        for the event that just started.
        """
        run_sync(
            Message(
                author=event_type,
                indent=1,
                content="",
            ).send()
        )
        return ""

    def on_event_end(
        self,
        event_type: CBEventType,
        payload: Optional[Dict[str, Any]] = None,
        event_id: str = "",
        **kwargs: Any,
    ) -> None:
        """Run when an event ends.

        Reports retrieved source nodes for RETRIEVE events and the model
        response (with its prompt) for LLM events.
        """
        # payload is Optional; without it there is nothing to report.
        if payload is None:
            return

        if event_type == CBEventType.RETRIEVE:
            sources = payload.get(EventPayload.NODES)
            if sources:
                elements = [
                    Text(name=f"Source {idx}", content=source.node.get_text())
                    for idx, source in enumerate(sources)
                ]
                # BUG FIX: was '"\, ".join(...)', which inserted a literal
                # backslash between the source references.
                source_refs = ", ".join(
                    [f"Source {idx}" for idx, _ in enumerate(sources)]
                )
                content = f"Retrieved the following sources: {source_refs}"

                run_sync(
                    Message(
                        content=content, author=event_type, elements=elements, indent=1
                    ).send()
                )

        if event_type == CBEventType.LLM:
            run_sync(
                Message(
                    content=payload.get(EventPayload.RESPONSE, ""),
                    author=event_type,
                    indent=1,
                    prompt=payload.get(EventPayload.PROMPT),
                ).send()
            )

    def start_trace(self, trace_id: Optional[str] = None) -> None:
        """Run when an overall trace is launched. Intentionally a no-op."""
        pass

    def end_trace(
        self,
        trace_id: Optional[str] = None,
        trace_map: Optional[Dict[str, List[str]]] = None,
    ) -> None:
        """Run when an overall trace is exited. Intentionally a no-op."""
        pass
@@ -0,0 +1,34 @@
1
+ from typing import Union
2
+ from llama_index.response.schema import Response, StreamingResponse
3
+ from llama_index.chat_engine.types import BaseChatEngine
4
+ from llama_index.indices.query.base import BaseQueryEngine
5
+
6
+ from chainlit.message import Message
7
+ from chainlit.sync import make_async
8
+
9
+
10
async def run_llama(instance: Union[BaseChatEngine, BaseQueryEngine], input_str: str):
    """Run a LlamaIndex query/chat engine on `input_str` and relay the answer to the UI."""
    # Trick to display the loader in the UI until the first token is streamed
    await Message(content="").send()

    reply = Message(content="")

    # Dispatch on the engine kind; any other instance type is unsupported.
    if isinstance(instance, BaseQueryEngine):
        response = await make_async(instance.query)(input_str)
    elif isinstance(instance, BaseChatEngine):
        response = await make_async(instance.chat)(input_str)
    else:
        raise NotImplementedError

    if isinstance(response, Response):
        # Non-streaming engine: send the full answer at once.
        reply.content = str(response)
        await reply.send()
    elif isinstance(response, StreamingResponse):
        # Streaming engine: forward tokens as they arrive, then finalize
        # the message with the full response text when available.
        for token in response.response_gen:
            await reply.stream_token(token=token)

        if response.response_txt:
            reply.content = response.response_txt

        await reply.send()
chainlit/message.py CHANGED
@@ -247,10 +247,10 @@ class ErrorMessage(MessageBase):
247
247
 
248
248
 
249
249
class AskMessageBase(MessageBase):
    """Base for ask-style messages: removing one also clears the pending ask prompt."""

    async def remove(self):
        # Only clear the pending ask when the underlying message was
        # actually removed.
        if not await super().remove():
            return
        await self.emitter.clear_ask()
254
254
 
255
255
 
256
256
  class AskUserMessage(AskMessageBase):
chainlit/server.py CHANGED
@@ -408,14 +408,18 @@ async def connection_successful(sid):
408
408
  emitter_var.set(ChainlitEmitter(session))
409
409
  loop_var.set(asyncio.get_event_loop())
410
410
 
411
+ if config.code.on_chat_start:
412
+ """Call the on_chat_start function provided by the developer."""
413
+ await config.code.on_chat_start()
414
+
411
415
  if config.code.lc_factory:
412
416
  """Instantiate the langchain agent and store it in the session."""
413
417
  agent = await config.code.lc_factory()
414
418
  session["agent"] = agent
415
419
 
416
- if config.code.on_chat_start:
417
- """Call the on_chat_start function provided by the developer."""
418
- await config.code.on_chat_start()
420
+ if config.code.llama_index_factory:
421
+ llama_instance = await config.code.llama_index_factory()
422
+ session["llama_instance"] = llama_instance
419
423
 
420
424
 
421
425
  @socket.on("disconnect")
@@ -467,6 +471,8 @@ async def process_message(session: Session, author: str, input_str: str):
467
471
  )
468
472
 
469
473
  langchain_agent = session.get("agent")
474
+ llama_instance = session.get("llama_instance")
475
+
470
476
  if langchain_agent:
471
477
  from chainlit.lc.agent import run_langchain_agent
472
478
 
@@ -497,6 +503,11 @@ async def process_message(session: Session, author: str, input_str: str):
497
503
  # Finally, send the response to the user
498
504
  await Message(author=config.ui.name, content=res).send()
499
505
 
506
+ elif llama_instance:
507
+ from chainlit.llama_index.run import run_llama
508
+
509
+ await run_llama(llama_instance, input_str)
510
+
500
511
  elif config.code.on_message:
501
512
  # If no langchain agent is available, call the on_message function provided by the developer
502
513
  await config.code.on_message(input_str)
chainlit/session.py CHANGED
@@ -16,6 +16,8 @@ class Session(TypedDict):
16
16
  user_env: Dict[str, str]
17
17
  # Optional langchain agent
18
18
  agent: Any
19
+ # Optional llama instance
20
+ llama_instance: Any
19
21
  # Whether the current task should be stopped
20
22
  should_stop: bool
21
23
  # Optional client to persist messages and files
chainlit/sync.py CHANGED
@@ -6,7 +6,9 @@ if sys.version_info >= (3, 10):
6
6
  else:
7
7
  from typing_extensions import ParamSpec
8
8
 
9
+ import threading
9
10
  import asyncio
11
+ from syncer import sync
10
12
  from asyncer import asyncify
11
13
 
12
14
  from chainlit.context import get_loop
@@ -20,6 +22,9 @@ T = TypeVar("T")
20
22
 
21
23
 
22
24
def run_sync(co: Coroutine[Any, Any, T_Retval]) -> T_Retval:
    """Run the coroutine `co` to completion from synchronous code and return its result.

    On the main thread the coroutine is executed directly via `sync` (which
    drives the event loop in place); from any other thread it is scheduled
    thread-safely onto the session's event loop and this call blocks until
    the result is available.
    """
    # `is` (identity) is the idiomatic check here: threading.main_thread()
    # always returns the same Thread object for the main thread.
    if threading.current_thread() is threading.main_thread():
        return sync(co)
    else:
        loop = get_loop()
        result = asyncio.run_coroutine_threadsafe(co, loop=loop)
        # Future.result() blocks the calling (non-main) thread until done.
        return result.result()
chainlit/utils.py ADDED
@@ -0,0 +1,51 @@
1
+ from typing import Callable
2
+ import inspect
3
+
4
+ from chainlit.context import get_emitter
5
+ from chainlit.logger import logger
6
+ from chainlit.message import ErrorMessage
7
+
8
+
9
def wrap_user_function(user_function: Callable, with_task=False) -> Callable:
    """
    Wraps a user-defined function to accept arguments as a dictionary.

    Positional arguments passed to the wrapper are matched, in order, onto the
    parameter names of `user_function`. Exceptions raised by the user function
    are logged and surfaced to the UI as an ErrorMessage instead of propagating.

    Args:
        user_function (Callable): The user-defined function to wrap.
        with_task (bool): Whether to emit task start/end events around the call.

    Returns:
        Callable: The wrapped async function.
    """
    # Local import keeps this module's top-level imports unchanged.
    from functools import wraps

    # The signature is invariant per wrapped function: compute the parameter
    # names once at wrap time instead of on every call.
    user_function_params = list(inspect.signature(user_function).parameters.keys())

    # functools.wraps preserves the user function's name/docstring on the
    # wrapper, keeping logs and debugging output meaningful.
    @wraps(user_function)
    async def wrapper(*args):
        # Create a dictionary of parameter names and their corresponding values from *args
        params_values = dict(zip(user_function_params, args))

        emitter = get_emitter()

        if with_task:
            await emitter.task_start()

        try:
            # Call the user-defined function, awaiting it when it is async.
            if inspect.iscoroutinefunction(user_function):
                return await user_function(**params_values)
            else:
                return user_function(**params_values)
        except InterruptedError:
            # Deliberate stop requested by the user; not an error.
            pass
        except Exception as e:
            logger.exception(e)
            await ErrorMessage(
                content=str(e) or e.__class__.__name__, author="Error"
            ).send()
        finally:
            if with_task:
                await emitter.task_end()

    return wrapper
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: chainlit
3
- Version: 0.4.2
3
+ Version: 0.4.3
4
4
  Summary: A faster way to build chatbot UIs.
5
5
  Home-page: https://github.com/Chainlit/chainlit
6
6
  License: Apache-2.0 license
@@ -19,8 +19,9 @@ Requires-Dist: asyncer (>=0.0.2,<0.0.3)
19
19
  Requires-Dist: auth0-python (>=4.1.1,<5.0.0)
20
20
  Requires-Dist: click (>=8.1.3,<9.0.0)
21
21
  Requires-Dist: dataclasses_json (>=0.5.7,<0.6.0)
22
- Requires-Dist: fastapi (>=0.96.0,<0.97.0)
22
+ Requires-Dist: fastapi (>=0.97.0,<0.98.0)
23
23
  Requires-Dist: fastapi-socketio (>=0.0.10,<0.0.11)
24
+ Requires-Dist: nest-asyncio (>=1.5.6,<2.0.0)
24
25
  Requires-Dist: openai (>=0.27.7,<0.28.0)
25
26
  Requires-Dist: prisma (>=0.9.0,<0.10.0)
26
27
  Requires-Dist: pydantic (>=1.10.8,<2.0.0)
@@ -1,44 +1,49 @@
1
- chainlit/__init__.py,sha256=0PqkxriXIHa1UsTGf_CWiHHQjC7Pc3tmIPT5yIJxOUQ,7956
1
+ chainlit/__init__.py,sha256=KJjPLSjatqlNVjWDpYy6XzHWKZeIjH3cJ0Q3WhKDp_A,4078
2
2
  chainlit/__main__.py,sha256=7Vg3w3T3qDuz4KDu5lQhLH6lQ3cYdume7gHH7Z1V97U,87
3
3
  chainlit/action.py,sha256=dk-ymZXZPQlHjh77ivwmdzhAp3QN7ihPZVVVhoh2jMc,1209
4
4
  chainlit/cache.py,sha256=HFieoVnhsJf36AT7nRL8wcEJ1jTgeTLju23WW2-vgp4,1343
5
- chainlit/cli/__init__.py,sha256=OIlAc1Pe7JZ3owUXwT_UOmt1nVTqiGPY9UqlKXYUoI8,4535
5
+ chainlit/cli/__init__.py,sha256=DMeygqozPGYgMcM9oRPXRV1RVdhorCe0W-sOyTmjhCU,4888
6
6
  chainlit/cli/auth.py,sha256=G437UK4BvLA4wWPz_KPDwN6_6dnchMtHWr4tqu5XN-M,4093
7
7
  chainlit/cli/deploy.py,sha256=RyIzpjS3yuVjQpllDO2PzXb6VZAtTcOyuAfF75WKVJE,2594
8
- chainlit/cli/mock.py,sha256=tWvkbPzm6TeBkAu6b1ia9gL6iTj9OS3YLyxmreTJU6o,1147
8
+ chainlit/cli/mock.py,sha256=hfcd62F0Bw6Bc2sj56gBYrxhujeCbQ2BzU8KZyTqG0Q,67012
9
9
  chainlit/cli/utils.py,sha256=Pn6ANnw9GgdUduiMphnb8RiGUPRbzDafkCHM2rOPBj4,716
10
10
  chainlit/client/base.py,sha256=HtVS6mGm_Ik6W08nLdUPtdVxzxK6Au1K6QKz-HdBz2Q,3284
11
11
  chainlit/client/cloud.py,sha256=OEUVVnvJK0ofQRoOkjxNniuyPtxZfVLqYXi8_0o6fS0,14109
12
12
  chainlit/client/local.py,sha256=JTSBHVArt7aA-rkuFJrGvlKUGin-PO2VXoWd_AQUko0,7324
13
13
  chainlit/client/utils.py,sha256=YARrA5XMpOpKhNjvIpve5H5CH2YekNP46aEYFvBjXlk,682
14
- chainlit/config.py,sha256=67obJCmbFCuLNP-5N1D_TOFPFruPi1b-NISWn6zMS1g,8323
14
+ chainlit/config.py,sha256=zd-vakyf06v-93g1JJ4aaHGJcQ3O-VoMJU71HqUIMUY,8493
15
15
  chainlit/context.py,sha256=m639AAk-bt7DWTStuW8s9FotohVonuW7veg5UwiKdKA,709
16
16
  chainlit/db/__init__.py,sha256=kFk5a6SoDrq1U6X4EjvJLd-3YsuVV4cl9r8BFHUExlM,982
17
17
  chainlit/db/prisma/schema.prisma,sha256=ZvhefSK_9D2Vck9SORaG9o-jQr6_BRpKNo-2YTlmgmg,1373
18
18
  chainlit/element.py,sha256=15nfoeQYF-hkOC8eFWmUWUsLT-vhyyacmG3GkOE_QDk,6317
19
19
  chainlit/emitter.py,sha256=SVgD6f9evY2IzanZ_ZHLd3qK_keDO20E_2fqv1Ka5JI,4142
20
- chainlit/frontend/dist/assets/index-995e21ad.js,sha256=SUgCkhIZA7gX6f3_yjaDsiGimnIPTsUebIwZVIhtqkE,614554
20
+ chainlit/frontend/dist/assets/index-37b5009c.js,sha256=rm7EbF7bDLQgCTyTHTVj9qQ9bjyAis7ZOzjU8yd-PHE,614554
21
+ chainlit/frontend/dist/assets/index-51393291.js,sha256=asduyAfPksSjLfrXf7gdIuy03MbpTCMfgBeEj2J_4ys,1334161
21
22
  chainlit/frontend/dist/assets/index-f93cc942.css,sha256=-TzJQmhWdpIg94Jv25YHMi9h-jSQmeERggrrlmYIflg,5697
22
- chainlit/frontend/dist/assets/index-fb1e167a.js,sha256=m1BTWtvezgPLE4cYNGIwZV1YdQPy66msAITDfDnDUZ8,1334132
23
23
  chainlit/frontend/dist/assets/logo_dark-bc7401f6.svg,sha256=vHQB9g-n5OqOmuH3Fduuc7ZMg0EmMsGyO9cEnYwLbHg,8889
24
24
  chainlit/frontend/dist/assets/logo_light-f19fc2ea.svg,sha256=8Z_C6t-0V9QL9ldmLjaLfp2REcGDuaTeNynj6-6muNI,8891
25
25
  chainlit/frontend/dist/favicon.svg,sha256=0Cy8x28obT5eWW3nxZRhsEvu6_zMqrqbg0y6hT3D0Q0,6455
26
- chainlit/frontend/dist/index.html,sha256=BCPkjtPetaewRy4-rWXdMkjkziVH9yhGXxB6nCiiPFM,793
26
+ chainlit/frontend/dist/index.html,sha256=7CUGF4qj4bXEsryZy4prJbz4scyBPQlt5RQioKKAJQE,793
27
27
  chainlit/hello.py,sha256=bqKP00i0FKHnZ9fdyVW1a2xDAA1g7IWT0EVzUNky7rA,417
28
- chainlit/lc/__init__.py,sha256=AHceu8jfttx74FLLhr7TIk5ihOqFFqJde1M33Y_YEFA,292
29
- chainlit/lc/agent.py,sha256=aC1BkKGN0WXkRLoU4H7DxsK7VffAYb1pF-9HlZ60duE,1121
30
- chainlit/lc/callbacks.py,sha256=VcTV9HhuWy9WHmIiI4mSt-A-9y6Aq96bZV-_NK6f_Sw,12783
28
+ chainlit/langflow/__init__.py,sha256=spuypuoC0mVDVsxUbPJk2dKOfAtDv3IePuSe2a_L23U,2339
29
+ chainlit/lc/__init__.py,sha256=mEtyfyGNDieIysKsWOrlvUvqVCqVnhYZqZprlZ1CBso,3281
30
+ chainlit/lc/agent.py,sha256=tSR6TxZlMh91rmh95h2b-6bSGugak9lNpwaioDfiz1c,1140
31
+ chainlit/lc/callbacks.py,sha256=wZLpWKnQCmYBk_LPFvL25U5KdXV50UuKzDtRrPjkUWs,12788
32
+ chainlit/llama_index/__init__.py,sha256=X_Ydtrw62YzOQ1Dpe0BHXnX3akz5HbGrM5NNXZczNnQ,953
33
+ chainlit/llama_index/callbacks.py,sha256=8WiHAxirhTXzGFk2rKBRQ9u_wKRg_yHntuRh6yGA_mo,2994
34
+ chainlit/llama_index/run.py,sha256=NCYoMr-PJKjCENYuBJvE8W4MnH3VKvonxBmEMfQdZ78,1230
31
35
  chainlit/logger.py,sha256=VFl6D0UbY7PH7gCl0HBXM_vfk7lK_bMEqiUxQBq_Vnw,374
32
36
  chainlit/markdown.py,sha256=JRhaKb0BB1G2S1XEaGj95DH75y7jqWdeIPx4u0oDaxw,1623
33
- chainlit/message.py,sha256=dyc_IKmz_AJK5eXcDV5MGVW3rXlcaJkY8MKmR_Zldl8,12198
34
- chainlit/server.py,sha256=SV8x5eYcQ7BjWLTUd8GLUpKbrUAmjRtIQvS6WxHErDk,17267
35
- chainlit/session.py,sha256=jP3x0-pRLi-ozCOoxiwV5t-x0IXDh-HzDTZDum8YyJA,812
36
- chainlit/sync.py,sha256=AsC0wU577zD-1fuQfhEnv42SOfUR3sIpf3Vesuzzha4,539
37
+ chainlit/message.py,sha256=zCagR3TEhdzf39p4LEcloNaSYpVttwBiMdRtNmPTeo4,12216
38
+ chainlit/server.py,sha256=8oGPuRQV9QrrhGdEuSQmBnX_it_XkgyZ-BM2go__wQQ,17625
39
+ chainlit/session.py,sha256=V1cKSi1REHy3zJSk1szLPL59MuGuwjg_HECfqD7VdsU,866
40
+ chainlit/sync.py,sha256=g5BC-DA34BMhhB16fZ4QwKTda4oC2MYoAnLS-WoKRRI,688
37
41
  chainlit/telemetry.py,sha256=yaRnmjjwLPbUa_wLQ--sp1VKPAKIHgTQP-MZo_W8AM8,2358
38
42
  chainlit/types.py,sha256=py6vLmm_w43v7X_NNBvWtfi5rUc9owGNPdBO7L8Pto4,1981
39
43
  chainlit/user_session.py,sha256=U4F1e9DBUhFoXvEdReRTq7Bu0Str4qsvSgAOeMobUzA,1213
44
+ chainlit/utils.py,sha256=2yoBX7W6vNxnIefHvrbspXggVwGrtYzAO9xJtTJT61M,1561
40
45
  chainlit/version.py,sha256=iosXhlXclBwBqlADFKEilxAC2wWKbtuBKi87AmPi7s8,196
41
- chainlit-0.4.2.dist-info/METADATA,sha256=RjdADKuJNlzLcRMPoMkU9eHQkm3Yhi1aHSOz7EnpHVE,4280
42
- chainlit-0.4.2.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
43
- chainlit-0.4.2.dist-info/entry_points.txt,sha256=FrkqdjrFl8juSnvBndniyX7XuKojmUwO4ghRh-CFMQc,45
44
- chainlit-0.4.2.dist-info/RECORD,,
46
+ chainlit-0.4.3.dist-info/METADATA,sha256=aeKV9Cp0pRbcS0XeQykUVQTKYKLivr2QuvewIl7TQAE,4325
47
+ chainlit-0.4.3.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
48
+ chainlit-0.4.3.dist-info/entry_points.txt,sha256=FrkqdjrFl8juSnvBndniyX7XuKojmUwO4ghRh-CFMQc,45
49
+ chainlit-0.4.3.dist-info/RECORD,,