chainlit 0.4.1__py3-none-any.whl → 0.4.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of chainlit might be problematic. Click here for more details.

chainlit/__init__.py CHANGED
@@ -1,17 +1,26 @@
1
1
  from dotenv import load_dotenv
2
- from typing import Callable, Any
3
- import inspect
2
+ from typing import Callable, Any, TYPE_CHECKING
4
3
  import os
5
4
  import asyncio
6
5
 
7
- from chainlit.lc import LANGCHAIN_INSTALLED
6
+ if TYPE_CHECKING:
7
+ from chainlit.client.base import BaseClient
8
+
9
+ from chainlit.lc import (
10
+ LANGCHAIN_INSTALLED,
11
+ langchain_factory,
12
+ langchain_postprocess,
13
+ langchain_run,
14
+ langchain_rename,
15
+ )
16
+ from chainlit.llama_index import LLAMA_INDEX_INSTALLED, llama_index_factory
17
+ from chainlit.langflow import langflow_factory
18
+ from chainlit.utils import wrap_user_function
8
19
  from chainlit.config import config
9
20
  from chainlit.telemetry import trace
10
21
  from chainlit.version import __version__
11
22
  from chainlit.logger import logger
12
- from chainlit.emitter import ChainlitEmitter
13
23
  from chainlit.types import LLMSettings
14
- from chainlit.message import ErrorMessage
15
24
  from chainlit.action import Action
16
25
  from chainlit.element import Image, Text, Pdf, Avatar, Pyplot
17
26
  from chainlit.message import Message, ErrorMessage, AskUserMessage, AskFileMessage
@@ -21,8 +30,13 @@ from chainlit.cache import cache
21
30
 
22
31
  if LANGCHAIN_INSTALLED:
23
32
  from chainlit.lc.callbacks import (
24
- ChainlitCallbackHandler,
25
- AsyncChainlitCallbackHandler,
33
+ LangchainCallbackHandler,
34
+ AsyncLangchainCallbackHandler,
35
+ )
36
+
37
+ if LLAMA_INDEX_INSTALLED:
38
+ from chainlit.llama_index.callbacks import (
39
+ LlamaIndexCallbackHandler,
26
40
  )
27
41
 
28
42
 
@@ -32,94 +46,6 @@ if env_found:
32
46
  logger.info("Loaded .env file")
33
47
 
34
48
 
35
- def wrap_user_function(user_function: Callable, with_task=False) -> Callable:
36
- """
37
- Wraps a user-defined function to accept arguments as a dictionary.
38
-
39
- Args:
40
- user_function (Callable): The user-defined function to wrap.
41
-
42
- Returns:
43
- Callable: The wrapped function.
44
- """
45
-
46
- async def wrapper(*args, __chainlit_emitter__: ChainlitEmitter):
47
- # Get the parameter names of the user-defined function
48
- user_function_params = list(inspect.signature(user_function).parameters.keys())
49
-
50
- # Create a dictionary of parameter names and their corresponding values from *args
51
- params_values = {
52
- param_name: arg for param_name, arg in zip(user_function_params, args)
53
- }
54
-
55
- if with_task:
56
- await __chainlit_emitter__.task_start()
57
-
58
- try:
59
- # Call the user-defined function with the arguments
60
- if inspect.iscoroutinefunction(user_function):
61
- return await user_function(**params_values)
62
- else:
63
- return user_function(**params_values)
64
- except InterruptedError:
65
- pass
66
- except Exception as e:
67
- logger.exception(e)
68
- await ErrorMessage(content=str(e), author="Error").send()
69
- finally:
70
- if with_task:
71
- await __chainlit_emitter__.task_end()
72
-
73
- return wrapper
74
-
75
-
76
- @trace
77
- def langchain_factory(use_async: bool) -> Callable:
78
- """
79
- Plug and play decorator for the LangChain library.
80
- The decorated function should instantiate a new LangChain instance (Chain, Agent...).
81
- One instance per user session is created and cached.
82
- The per user instance is called every time a new message is received.
83
-
84
- Args:
85
- use_async bool: Whether to call the agent asynchronously or not. Defaults to False.
86
-
87
- Returns:
88
- Callable[[], Any]: The decorated factory function.
89
- """
90
-
91
- # Check if the factory is called with the correct parameter
92
- if type(use_async) != bool:
93
- error_message = "langchain_factory use_async parameter is required"
94
- raise ValueError(error_message)
95
-
96
- def decorator(func: Callable) -> Callable:
97
- config.code.lc_factory = wrap_user_function(func, with_task=True)
98
- return func
99
-
100
- config.code.lc_agent_is_async = use_async
101
-
102
- return decorator
103
-
104
-
105
- @trace
106
- def langchain_postprocess(func: Callable[[Any], str]) -> Callable:
107
- """
108
- Useful to post process the response a LangChain object instantiated with @langchain_factory.
109
- The decorated function takes the raw output of the LangChain object as input.
110
- The response will NOT be automatically sent to the UI, you need to send a Message.
111
-
112
- Args:
113
- func (Callable[[Any], str]): The post-processing function to apply after generating a response. Takes the response as parameter.
114
-
115
- Returns:
116
- Callable[[Any], str]: The decorated post-processing function.
117
- """
118
-
119
- config.code.lc_postprocess = wrap_user_function(func)
120
- return func
121
-
122
-
123
49
  @trace
124
50
  def on_message(func: Callable) -> Callable:
125
51
  """
@@ -137,38 +63,6 @@ def on_message(func: Callable) -> Callable:
137
63
  return func
138
64
 
139
65
 
140
- @trace
141
- def langchain_run(func: Callable[[Any, str], str]) -> Callable:
142
- """
143
- Useful to override the default behavior of the LangChain object instantiated with @langchain_factory.
144
- Use when your agent run method has custom parameters.
145
- Takes the LangChain agent and the user input as parameters.
146
- The response will NOT be automatically sent to the UI, you need to send a Message.
147
- Args:
148
- func (Callable[[Any, str], str]): The function to be called when a new message is received. Takes the agent and user input as parameters and returns the output string.
149
-
150
- Returns:
151
- Callable[[Any, str], Any]: The decorated function.
152
- """
153
- config.code.lc_run = wrap_user_function(func)
154
- return func
155
-
156
-
157
- @trace
158
- def langchain_rename(func: Callable[[str], str]) -> Callable[[str], str]:
159
- """
160
- Useful to rename the LangChain tools/chains used in the agent and display more friendly author names in the UI.
161
- Args:
162
- func (Callable[[str], str]): The function to be called to rename a tool/chain. Takes the original tool/chain name as parameter.
163
-
164
- Returns:
165
- Callable[[Any, str], Any]: The decorated function.
166
- """
167
-
168
- config.code.lc_rename = wrap_user_function(func)
169
- return func
170
-
171
-
172
66
  @trace
173
67
  def on_chat_start(func: Callable) -> Callable:
174
68
  """
@@ -206,7 +100,7 @@ def action_callback(name: str) -> Callable:
206
100
  Callback to call when an action is clicked in the UI.
207
101
 
208
102
  Args:
209
- func (Callable[[Action], Any]): The action callback to exexute. First parameter is the action.
103
+ func (Callable[[Action], Any]): The action callback to execute. First parameter is the action.
210
104
  """
211
105
 
212
106
  def decorator(func: Callable[[Action], Any]):
@@ -216,6 +110,19 @@ def action_callback(name: str) -> Callable:
216
110
  return decorator
217
111
 
218
112
 
113
+ @trace
114
+ def client_factory(func: Callable[[], "BaseClient"]) -> Callable[[], "BaseClient"]:
115
+ """
116
+ Callback used to initialize the custom client.
117
+
118
+ Args:
119
+ func (Callable[[], BaseClient]): The factory function to execute. Takes no parameters and returns the custom client.
120
+ """
121
+
122
+ config.code.client_factory = func
123
+ return func
124
+
125
+
219
126
  def sleep(duration: int):
220
127
  """
221
128
  Sleep for a given duration.
@@ -242,12 +149,16 @@ __all__ = [
242
149
  "langchain_postprocess",
243
150
  "langchain_run",
244
151
  "langchain_rename",
152
+ "llama_index_factory",
153
+ "langflow_factory",
245
154
  "on_chat_start",
246
155
  "on_stop",
247
156
  "action_callback",
248
157
  "sleep",
249
- "ChainlitCallbackHandler",
250
- "AsyncChainlitCallbackHandler",
158
+ "LangchainCallbackHandler",
159
+ "AsyncLangchainCallbackHandler",
160
+ "LlamaIndexCallbackHandler",
161
+ "client_factory",
251
162
  "run_sync",
252
163
  "make_async",
253
164
  "cache",
chainlit/action.py CHANGED
@@ -1,7 +1,7 @@
1
1
  from pydantic.dataclasses import dataclass
2
2
  from dataclasses_json import dataclass_json
3
3
 
4
- from chainlit.emitter import get_emit_fn
4
+ from chainlit.context import get_emitter
5
5
  from chainlit.telemetry import trace_event
6
6
 
7
7
 
@@ -21,9 +21,7 @@ class Action:
21
21
 
22
22
  def __post_init__(self) -> None:
23
23
  trace_event(f"init {self.__class__.__name__}")
24
- self.emit = get_emit_fn()
25
- if not self.emit:
26
- raise RuntimeError("Action should be instantiated in a Chainlit context")
24
+ self.emit = get_emitter().emit
27
25
 
28
26
  async def send(self, for_id: str):
29
27
  trace_event(f"send {self.__class__.__name__}")
chainlit/cli/__init__.py CHANGED
@@ -21,6 +21,7 @@ from chainlit.cli.deploy import deploy
21
21
  from chainlit.cli.utils import check_file
22
22
  from chainlit.telemetry import trace_event
23
23
  from chainlit.cache import init_lc_cache
24
+ from chainlit.db import init_local_db, migrate_local_db
24
25
  from chainlit.logger import logger
25
26
  from chainlit.server import app
26
27
 
@@ -50,6 +51,9 @@ def run_chainlit(target: str):
50
51
  # Initialize the LangChain cache if installed and enabled
51
52
  init_lc_cache()
52
53
 
54
+ # Initialize the local database if configured to use it
55
+ init_local_db()
56
+
53
57
  log_level = "debug" if config.run.debug else "error"
54
58
 
55
59
  # Start the server
@@ -60,26 +64,69 @@ def run_chainlit(target: str):
60
64
 
61
65
  # Run the asyncio event loop instead of uvloop to enable re entrance
62
66
  asyncio.run(start())
63
- # uvicorn.run(app, host=host, port=port)
67
+ # uvicorn.run(app, host=host, port=port, log_level=log_level)
64
68
 
65
69
 
66
70
  # Define the "run" command for Chainlit CLI
67
71
  @cli.command("run")
68
72
  @click.argument("target", required=True, envvar="RUN_TARGET")
69
- @click.option("-w", "--watch", default=False, is_flag=True, envvar="WATCH")
70
- @click.option("-h", "--headless", default=False, is_flag=True, envvar="HEADLESS")
71
- @click.option("-d", "--debug", default=False, is_flag=True, envvar="DEBUG")
72
- @click.option("-c", "--ci", default=False, is_flag=True, envvar="CI")
73
- @click.option("--no-cache", default=False, is_flag=True, envvar="NO_CACHE")
74
- @click.option("--host")
75
- @click.option("--port")
76
- def chainlit_run(target, watch, headless, debug, ci, no_cache, host, port):
73
+ @click.option(
74
+ "-w",
75
+ "--watch",
76
+ default=False,
77
+ is_flag=True,
78
+ envvar="WATCH",
79
+ help="Reload the app when the module changes",
80
+ )
81
+ @click.option(
82
+ "-h",
83
+ "--headless",
84
+ default=False,
85
+ is_flag=True,
86
+ envvar="HEADLESS",
87
+ help="Will prevent to auto open the app in the browser",
88
+ )
89
+ @click.option(
90
+ "-d",
91
+ "--debug",
92
+ default=False,
93
+ is_flag=True,
94
+ envvar="DEBUG",
95
+ help="Set the log level to debug",
96
+ )
97
+ @click.option(
98
+ "-c",
99
+ "--ci",
100
+ default=False,
101
+ is_flag=True,
102
+ envvar="CI",
103
+ help="Flag to run in CI mode",
104
+ )
105
+ @click.option(
106
+ "--no-cache",
107
+ default=False,
108
+ is_flag=True,
109
+ envvar="NO_CACHE",
110
+ help="Useful to disable third parties cache, such as langchain.",
111
+ )
112
+ @click.option(
113
+ "--db",
114
+ type=click.Choice(["cloud", "local"]),
115
+ help="Useful to control database mode when running CI.",
116
+ )
117
+ @click.option("--host", help="Specify a different host to run the server on")
118
+ @click.option("--port", help="Specify a different port to run the server on")
119
+ def chainlit_run(target, watch, headless, debug, ci, no_cache, db, host, port):
77
120
  if host:
78
121
  os.environ["CHAINLIT_HOST"] = host
79
122
  if port:
80
123
  os.environ["CHAINLIT_PORT"] = port
81
124
  if ci:
82
125
  logger.info("Running in CI mode")
126
+
127
+ if db:
128
+ config.project.database = db
129
+
83
130
  config.project.enable_telemetry = False
84
131
  no_cache = True
85
132
  from chainlit.cli.mock import mock_openai
@@ -131,6 +178,14 @@ def chainlit_logout(args=None, **kwargs):
131
178
  sys.exit(0)
132
179
 
133
180
 
181
+ @cli.command("migrate")
182
+ @click.argument("args", nargs=-1)
183
+ def chainlit_migrate(args=None, **kwargs):
184
+ trace_event("chainlit migrate")
185
+ migrate_local_db()
186
+ sys.exit(0)
187
+
188
+
134
189
  @cli.command("init")
135
190
  @click.argument("args", nargs=-1)
136
191
  def chainlit_init(args=None, **kwargs):