chainlit 0.4.2__py3-none-any.whl → 0.4.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of chainlit might be problematic. Click here for more details.

chainlit/__init__.py CHANGED
@@ -1,31 +1,42 @@
1
1
  from dotenv import load_dotenv
2
2
  from typing import Callable, Any, TYPE_CHECKING
3
- import inspect
4
3
  import os
5
4
  import asyncio
6
5
 
7
6
  if TYPE_CHECKING:
8
7
  from chainlit.client.base import BaseClient
9
8
 
10
- from chainlit.lc import LANGCHAIN_INSTALLED
9
+ from chainlit.lc import (
10
+ LANGCHAIN_INSTALLED,
11
+ langchain_factory,
12
+ langchain_postprocess,
13
+ langchain_run,
14
+ langchain_rename,
15
+ )
16
+ from chainlit.llama_index import LLAMA_INDEX_INSTALLED, llama_index_factory
17
+ from chainlit.langflow import langflow_factory
18
+ from chainlit.utils import wrap_user_function
11
19
  from chainlit.config import config
12
20
  from chainlit.telemetry import trace
13
21
  from chainlit.version import __version__
14
22
  from chainlit.logger import logger
15
23
  from chainlit.types import LLMSettings
16
- from chainlit.message import ErrorMessage
17
24
  from chainlit.action import Action
18
25
  from chainlit.element import Image, Text, Pdf, Avatar, Pyplot
19
26
  from chainlit.message import Message, ErrorMessage, AskUserMessage, AskFileMessage
20
27
  from chainlit.user_session import user_session
21
28
  from chainlit.sync import run_sync, make_async
22
29
  from chainlit.cache import cache
23
- from chainlit.context import get_emitter
24
30
 
25
31
  if LANGCHAIN_INSTALLED:
26
32
  from chainlit.lc.callbacks import (
27
- ChainlitCallbackHandler,
28
- AsyncChainlitCallbackHandler,
33
+ LangchainCallbackHandler,
34
+ AsyncLangchainCallbackHandler,
35
+ )
36
+
37
+ if LLAMA_INDEX_INSTALLED:
38
+ from chainlit.llama_index.callbacks import (
39
+ LlamaIndexCallbackHandler,
29
40
  )
30
41
 
31
42
 
@@ -35,98 +46,6 @@ if env_found:
35
46
  logger.info("Loaded .env file")
36
47
 
37
48
 
38
- def wrap_user_function(user_function: Callable, with_task=False) -> Callable:
39
- """
40
- Wraps a user-defined function to accept arguments as a dictionary.
41
-
42
- Args:
43
- user_function (Callable): The user-defined function to wrap.
44
-
45
- Returns:
46
- Callable: The wrapped function.
47
- """
48
-
49
- async def wrapper(*args):
50
- # Get the parameter names of the user-defined function
51
- user_function_params = list(inspect.signature(user_function).parameters.keys())
52
-
53
- # Create a dictionary of parameter names and their corresponding values from *args
54
- params_values = {
55
- param_name: arg for param_name, arg in zip(user_function_params, args)
56
- }
57
-
58
- emitter = get_emitter()
59
-
60
- if with_task:
61
- await emitter.task_start()
62
-
63
- try:
64
- # Call the user-defined function with the arguments
65
- if inspect.iscoroutinefunction(user_function):
66
- return await user_function(**params_values)
67
- else:
68
- return user_function(**params_values)
69
- except InterruptedError:
70
- pass
71
- except Exception as e:
72
- logger.exception(e)
73
- await ErrorMessage(
74
- content=str(e) or e.__class__.__name__, author="Error"
75
- ).send()
76
- finally:
77
- if with_task:
78
- await emitter.task_end()
79
-
80
- return wrapper
81
-
82
-
83
- @trace
84
- def langchain_factory(use_async: bool) -> Callable:
85
- """
86
- Plug and play decorator for the LangChain library.
87
- The decorated function should instantiate a new LangChain instance (Chain, Agent...).
88
- One instance per user session is created and cached.
89
- The per user instance is called every time a new message is received.
90
-
91
- Args:
92
- use_async bool: Whether to call the agent asynchronously or not. Defaults to False.
93
-
94
- Returns:
95
- Callable[[], Any]: The decorated factory function.
96
- """
97
-
98
- # Check if the factory is called with the correct parameter
99
- if type(use_async) != bool:
100
- error_message = "langchain_factory use_async parameter is required"
101
- raise ValueError(error_message)
102
-
103
- def decorator(func: Callable) -> Callable:
104
- config.code.lc_factory = wrap_user_function(func, with_task=True)
105
- return func
106
-
107
- config.code.lc_agent_is_async = use_async
108
-
109
- return decorator
110
-
111
-
112
- @trace
113
- def langchain_postprocess(func: Callable[[Any], str]) -> Callable:
114
- """
115
- Useful to post process the response of a LangChain object instantiated with @langchain_factory.
116
- The decorated function takes the raw output of the LangChain object as input.
117
- The response will NOT be automatically sent to the UI, you need to send a Message.
118
-
119
- Args:
120
- func (Callable[[Any], str]): The post-processing function to apply after generating a response. Takes the response as parameter.
121
-
122
- Returns:
123
- Callable[[Any], str]: The decorated post-processing function.
124
- """
125
-
126
- config.code.lc_postprocess = wrap_user_function(func)
127
- return func
128
-
129
-
130
49
  @trace
131
50
  def on_message(func: Callable) -> Callable:
132
51
  """
@@ -144,38 +63,6 @@ def on_message(func: Callable) -> Callable:
144
63
  return func
145
64
 
146
65
 
147
- @trace
148
- def langchain_run(func: Callable[[Any, str], str]) -> Callable:
149
- """
150
- Useful to override the default behavior of the LangChain object instantiated with @langchain_factory.
151
- Use when your agent run method has custom parameters.
152
- Takes the LangChain agent and the user input as parameters.
153
- The response will NOT be automatically sent to the UI, you need to send a Message.
154
- Args:
155
- func (Callable[[Any, str], str]): The function to be called when a new message is received. Takes the agent and user input as parameters and returns the output string.
156
-
157
- Returns:
158
- Callable[[Any, str], Any]: The decorated function.
159
- """
160
- config.code.lc_run = wrap_user_function(func)
161
- return func
162
-
163
-
164
- @trace
165
- def langchain_rename(func: Callable[[str], str]) -> Callable[[str], str]:
166
- """
167
- Useful to rename the LangChain tools/chains used in the agent and display more friendly author names in the UI.
168
- Args:
169
- func (Callable[[str], str]): The function to be called to rename a tool/chain. Takes the original tool/chain name as parameter.
170
-
171
- Returns:
172
- Callable[[Any, str], Any]: The decorated function.
173
- """
174
-
175
- config.code.lc_rename = wrap_user_function(func)
176
- return func
177
-
178
-
179
66
  @trace
180
67
  def on_chat_start(func: Callable) -> Callable:
181
68
  """
@@ -262,12 +149,15 @@ __all__ = [
262
149
  "langchain_postprocess",
263
150
  "langchain_run",
264
151
  "langchain_rename",
152
+ "llama_index_factory",
153
+ "langflow_factory",
265
154
  "on_chat_start",
266
155
  "on_stop",
267
156
  "action_callback",
268
157
  "sleep",
269
- "ChainlitCallbackHandler",
270
- "AsyncChainlitCallbackHandler",
158
+ "LangchainCallbackHandler",
159
+ "AsyncLangchainCallbackHandler",
160
+ "LlamaIndexCallbackHandler",
271
161
  "client_factory",
272
162
  "run_sync",
273
163
  "make_async",
chainlit/cli/__init__.py CHANGED
@@ -2,6 +2,10 @@ import click
2
2
  import os
3
3
  import sys
4
4
  import uvicorn
5
+ import asyncio
6
+ import nest_asyncio
7
+
8
+ nest_asyncio.apply()
5
9
 
6
10
  from chainlit.config import (
7
11
  config,
@@ -52,7 +56,15 @@ def run_chainlit(target: str):
52
56
 
53
57
  log_level = "debug" if config.run.debug else "error"
54
58
 
55
- uvicorn.run(app, host=host, port=port, log_level=log_level)
59
+ # Start the server
60
+ async def start():
61
+ config = uvicorn.Config(app, host=host, port=port, log_level=log_level)
62
+ server = uvicorn.Server(config)
63
+ await server.serve()
64
+
65
+ # Run the asyncio event loop instead of uvloop to enable re-entrance
66
+ asyncio.run(start())
67
+ # uvicorn.run(app, host=host, port=port, log_level=log_level)
56
68
 
57
69
 
58
70
  # Define the "run" command for Chainlit CLI