chainlit 0.7.700__py3-none-any.whl → 1.0.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- chainlit/__init__.py +32 -23
- chainlit/auth.py +9 -10
- chainlit/cli/__init__.py +1 -2
- chainlit/config.py +13 -12
- chainlit/context.py +7 -3
- chainlit/data/__init__.py +375 -9
- chainlit/data/acl.py +6 -5
- chainlit/element.py +86 -123
- chainlit/emitter.py +117 -50
- chainlit/frontend/dist/assets/{index-71698725.js → index-6aee009a.js} +118 -292
- chainlit/frontend/dist/assets/{react-plotly-2c0acdf0.js → react-plotly-2f07c02a.js} +1 -1
- chainlit/frontend/dist/index.html +1 -1
- chainlit/haystack/callbacks.py +45 -43
- chainlit/hello.py +1 -1
- chainlit/langchain/callbacks.py +132 -120
- chainlit/llama_index/callbacks.py +68 -48
- chainlit/message.py +179 -207
- chainlit/oauth_providers.py +39 -34
- chainlit/playground/provider.py +44 -30
- chainlit/playground/providers/anthropic.py +4 -4
- chainlit/playground/providers/huggingface.py +2 -2
- chainlit/playground/providers/langchain.py +8 -10
- chainlit/playground/providers/openai.py +19 -13
- chainlit/server.py +155 -99
- chainlit/session.py +109 -40
- chainlit/socket.py +47 -36
- chainlit/step.py +393 -0
- chainlit/types.py +78 -21
- chainlit/user.py +32 -0
- chainlit/user_session.py +1 -5
- {chainlit-0.7.700.dist-info → chainlit-1.0.0rc1.dist-info}/METADATA +12 -31
- chainlit-1.0.0rc1.dist-info/RECORD +60 -0
- chainlit/client/base.py +0 -169
- chainlit/client/cloud.py +0 -502
- chainlit/prompt.py +0 -40
- chainlit-0.7.700.dist-info/RECORD +0 -61
- {chainlit-0.7.700.dist-info → chainlit-1.0.0rc1.dist-info}/WHEEL +0 -0
- {chainlit-0.7.700.dist-info → chainlit-1.0.0rc1.dist-info}/entry_points.txt +0 -0
chainlit/socket.py
CHANGED
@@ -6,7 +6,7 @@ from chainlit.action import Action
 from chainlit.auth import get_current_user, require_login
 from chainlit.config import config
 from chainlit.context import init_ws_context
-from chainlit.data import
+from chainlit.data import get_data_layer
 from chainlit.logger import logger
 from chainlit.message import ErrorMessage, Message
 from chainlit.server import socket
@@ -27,39 +27,33 @@ def restore_existing_session(sid, session_id, emit_fn, ask_user_fn):
     return False


-async def persist_user_session(
-    if
-
-
-        await chainlit_client.update_conversation_metadata(
-            conversation_id=conversation_id, metadata=metadata
-        )
+async def persist_user_session(thread_id: str, metadata: Dict):
+    if data_layer := get_data_layer():
+        await data_layer.update_thread(thread_id=thread_id, metadata=metadata)


-async def
-
+async def resume_thread(session: WebsocketSession):
+    data_layer = get_data_layer()
+    if not data_layer or not session.user or not session.thread_id_to_resume:
+        return
+    thread = await data_layer.get_thread(thread_id=session.thread_id_to_resume)
+    if not thread:
         return

-
-
-    )
-
-    author = (
-        conversation["appUser"].get("username") if conversation["appUser"] else None
-    )
-    user_is_author = author == session.user.username
+    author = thread.get("user").get("identifier") if thread["user"] else None
+    user_is_author = author == session.user.identifier

-    if
-        metadata =
+    if user_is_author:
+        metadata = thread["metadata"] or {}
         user_sessions[session.id] = metadata.copy()
         if chat_profile := metadata.get("chat_profile"):
             session.chat_profile = chat_profile
         if chat_settings := metadata.get("chat_settings"):
             session.chat_settings = chat_settings

-        trace_event("
+        trace_event("thread_resumed")

-        return
+        return thread


 def load_user_env(user_env):
@@ -128,9 +122,8 @@ async def connect(sid, environ, auth):
         user=user,
         token=token,
         chat_profile=environ.get("HTTP_X_CHAINLIT_CHAT_PROFILE"),
-
+        thread_id=environ.get("HTTP_X_CHAINLIT_THREAD_ID"),
     )
-
     trace_event("connection_successful")
     return True

@@ -142,13 +135,13 @@ async def connection_successful(sid):
     if context.session.restored:
         return

-    if context.session.
-
-        if
+    if context.session.thread_id_to_resume and config.code.on_chat_resume:
+        thread = await resume_thread(context.session)
+        if thread:
             context.session.has_user_message = True
             await context.emitter.clear_ask()
-            await
-            await
+            await context.emitter.resume_thread(thread)
+            await config.code.on_chat_resume(thread)
             return

     if config.code.on_chat_start:
@@ -174,13 +167,14 @@ async def clean_session(sid):
 @socket.on("disconnect")
 async def disconnect(sid):
     session = WebsocketSession.get(sid)
+    if session:
+        init_ws_context(session)

     if config.code.on_chat_end and session:
-        init_ws_context(session)
         await config.code.on_chat_end()

-    if session and session.
-        await persist_user_session(session.
+    if session and session.thread_id and session.has_user_message:
+        await persist_user_session(session.thread_id, session.to_persistable())

     async def disconnect_on_timeout(sid):
         await asyncio.sleep(config.project.session_timeout)
@@ -200,7 +194,9 @@ async def stop(sid):
         trace_event("stop_task")

         init_ws_context(session)
-        await Message(
+        await Message(
+            author="System", content="Task stopped by the user.", disable_feedback=True
+        ).send()

         session.should_stop = True

@@ -240,7 +236,8 @@ async def message(sid, payload: UIMessagePayload):
 async def process_action(action: Action):
     callback = config.code.action_callbacks.get(action.name)
     if callback:
-        await callback(action)
+        res = await callback(action)
+        return res
     else:
         logger.warning("No callback found for action %s", action.name)

@@ -248,11 +245,25 @@ async def process_action(action: Action):
 @socket.on("action_call")
 async def call_action(sid, action):
     """Handle an action call from the UI."""
-    init_ws_context(sid)
+    context = init_ws_context(sid)

     action = Action(**action)

-
+    try:
+        res = await process_action(action)
+        await context.emitter.send_action_response(
+            id=action.id, status=True, response=res if isinstance(res, str) else None
+        )
+
+    except InterruptedError:
+        await context.emitter.send_action_response(
+            id=action.id, status=False, response="Action interrupted by the user"
+        )
+    except Exception as e:
+        logger.exception(e)
+        await context.emitter.send_action_response(
+            id=action.id, status=False, response="An error occured"
+        )


 @socket.on("chat_settings_change")
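The reworked call_action handler now captures the callback's return value and relays it to the UI through emitter.send_action_response. A minimal sketch of an app-side callback that benefits from this, assuming @cl.action_callback still registers handlers into config.code.action_callbacks as in 0.7.x (the decorator itself is not part of this diff):

import chainlit as cl

@cl.action_callback("approve")
async def on_approve(action: cl.Action):
    # A string returned here is forwarded to the UI as the action response;
    # per the handler above, non-string results only produce a success status.
    return f"Approved action {action.name}"
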
chainlit/step.py
ADDED
@@ -0,0 +1,393 @@
+import asyncio
+import inspect
+import json
+import uuid
+from datetime import datetime
+from functools import wraps
+from typing import Callable, Dict, List, Optional, TypedDict, Union
+
+from chainlit.config import config
+from chainlit.context import context
+from chainlit.data import get_data_layer
+from chainlit.element import Element
+from chainlit.logger import logger
+from chainlit.telemetry import trace_event
+from chainlit.types import FeedbackDict
+from chainlit_client import BaseGeneration
+from chainlit_client.step import StepType, TrueStepType
+
+
+class StepDict(TypedDict, total=False):
+    name: str
+    type: StepType
+    id: str
+    threadId: str
+    parentId: Optional[str]
+    disableFeedback: bool
+    streaming: bool
+    waitForAnswer: Optional[bool]
+    isError: Optional[bool]
+    metadata: Dict
+    input: str
+    output: str
+    createdAt: Optional[str]
+    start: Optional[str]
+    end: Optional[str]
+    generation: Optional[Dict]
+    showInput: Optional[Union[bool, str]]
+    language: Optional[str]
+    indent: Optional[int]
+    feedback: Optional[FeedbackDict]
+
+
+def step(
+    original_function: Optional[Callable] = None,
+    *,
+    name: Optional[str] = "",
+    type: TrueStepType = "undefined",
+    id: Optional[str] = None,
+    disable_feedback: bool = True,
+    root: bool = False,
+    language: Optional[str] = None,
+    show_input: Union[bool, str] = False,
+):
+    """Step decorator for async and sync functions."""
+
+    def wrapper(func: Callable):
+        nonlocal name
+        if not name:
+            name = func.__name__
+
+        # Handle async decorator
+
+        if inspect.iscoroutinefunction(func):
+
+            @wraps(func)
+            async def async_wrapper(*args, **kwargs):
+                async with Step(
+                    type=type,
+                    name=name,
+                    id=id,
+                    disable_feedback=disable_feedback,
+                    root=root,
+                    language=language,
+                    show_input=show_input,
+                ) as step:
+                    try:
+                        step.input = {"args": args, "kwargs": kwargs}
+                    except:
+                        pass
+                    result = await func(*args, **kwargs)
+                    try:
+                        if result and not step.output:
+                            step.output = result
+                    except:
+                        pass
+                    return result
+
+            return async_wrapper
+        else:
+            # Handle sync decorator
+            @wraps(func)
+            def sync_wrapper(*args, **kwargs):
+                with Step(
+                    type=type,
+                    name=name,
+                    id=id,
+                    disable_feedback=disable_feedback,
+                    root=root,
+                    language=language,
+                    show_input=show_input,
+                ) as step:
+                    try:
+                        step.input = {"args": args, "kwargs": kwargs}
+                    except:
+                        pass
+                    result = func(*args, **kwargs)
+                    try:
+                        if result and not step.output:
+                            step.output = result
+                    except:
+                        pass
+                    return result
+
+            return sync_wrapper
+
+    func = original_function
+    if not func:
+        return wrapper
+    else:
+        return wrapper(func)
+
+
+class Step:
+    # Constructor
+    name: str
+    type: TrueStepType
+    id: str
+    parent_id: Optional[str]
+    disable_feedback: bool
+
+    streaming: bool
+    persisted: bool
+
+    root: bool
+    show_input: Union[bool, str]
+
+    is_error: Optional[bool]
+    metadata: Dict
+    thread_id: str
+    created_at: Union[str, None]
+    start: Union[str, None]
+    end: Union[str, None]
+    generation: Optional[BaseGeneration]
+    language: Optional[str]
+    elements: Optional[List[Element]]
+    fail_on_persist_error: bool
+
+    def __init__(
+        self,
+        name: Optional[str] = config.ui.name,
+        type: TrueStepType = "undefined",
+        id: Optional[str] = None,
+        parent_id: Optional[str] = None,
+        elements: Optional[List[Element]] = None,
+        disable_feedback: bool = True,
+        root: bool = False,
+        language: Optional[str] = None,
+        show_input: Union[bool, str] = False,
+    ):
+        trace_event(f"init {self.__class__.__name__} {type}")
+        self._input = ""
+        self._output = ""
+        self.thread_id = context.session.thread_id
+        self.name = name or ""
+        self.type = type
+        self.id = id or str(uuid.uuid4())
+        self.disable_feedback = disable_feedback
+        self.metadata = {}
+        self.is_error = False
+        self.show_input = show_input
+        self.parent_id = parent_id
+        self.root = root
+
+        self.language = language
+        self.generation = None
+        self.elements = elements or []
+
+        self.created_at = datetime.utcnow().isoformat()
+        self.start = None
+        self.end = None
+
+        self.streaming = False
+        self.persisted = False
+        self.fail_on_persist_error = False
+
+    def _process_content(self, content, set_language=False):
+        if content is None:
+            return ""
+        if isinstance(content, dict):
+            try:
+                processed_content = json.dumps(content, indent=4, ensure_ascii=False)
+                if set_language:
+                    self.language = "json"
+            except TypeError:
+                processed_content = str(content)
+                if set_language:
+                    self.language = "text"
+        elif isinstance(content, str):
+            processed_content = content
+        else:
+            processed_content = str(content)
+            if set_language:
+                self.language = "text"
+        return processed_content
+
+    @property
+    def input(self):
+        return self._input
+
+    @input.setter
+    def input(self, content: Union[Dict, str]):
+        self._input = self._process_content(content, set_language=False)
+
+    @property
+    def output(self):
+        return self._output
+
+    @output.setter
+    def output(self, content: Union[Dict, str]):
+        self._output = self._process_content(content, set_language=True)
+
+    def to_dict(self) -> StepDict:
+        _dict: StepDict = {
+            "name": self.name,
+            "type": self.type,
+            "id": self.id,
+            "threadId": self.thread_id,
+            "parentId": self.parent_id,
+            "disableFeedback": self.disable_feedback,
+            "streaming": self.streaming,
+            "metadata": self.metadata,
+            "input": self.input,
+            "isError": self.is_error,
+            "output": self.output,
+            "createdAt": self.created_at,
+            "start": self.start,
+            "end": self.end,
+            "language": self.language,
+            "showInput": self.show_input,
+            "generation": self.generation.to_dict() if self.generation else None,
+        }
+        return _dict
+
+    async def update(self):
+        """
+        Update a step already sent to the UI.
+        """
+        trace_event("update_step")
+
+        if self.streaming:
+            self.streaming = False
+
+        step_dict = self.to_dict()
+        data_layer = get_data_layer()
+
+        if data_layer:
+            try:
+                asyncio.create_task(data_layer.update_step(step_dict))
+            except Exception as e:
+                if self.fail_on_persist_error:
+                    raise e
+                logger.error(f"Failed to persist step update: {str(e)}")
+
+        tasks = [el.send(for_id=self.id) for el in self.elements]
+        await asyncio.gather(*tasks)
+
+        if not config.features.prompt_playground and "generation" in step_dict:
+            step_dict.pop("generation", None)
+
+        await context.emitter.update_step(step_dict)
+
+        return True
+
+    async def remove(self):
+        """
+        Remove a step already sent to the UI.
+        """
+        trace_event("remove_step")
+
+        step_dict = self.to_dict()
+        data_layer = get_data_layer()
+
+        if data_layer:
+            try:
+                asyncio.create_task(data_layer.delete_step(self.id))
+            except Exception as e:
+                if self.fail_on_persist_error:
+                    raise e
+                logger.error(f"Failed to persist step deletion: {str(e)}")
+
+        await context.emitter.delete_step(step_dict)
+
+        return True
+
+    async def send(self):
+        if self.persisted:
+            return
+
+        if config.code.author_rename:
+            self.name = await config.code.author_rename(self.name)
+
+        if self.streaming:
+            self.streaming = False
+
+        step_dict = self.to_dict()
+
+        data_layer = get_data_layer()
+
+        if data_layer:
+            try:
+                asyncio.create_task(data_layer.create_step(step_dict))
+                self.persisted = True
+            except Exception as e:
+                if self.fail_on_persist_error:
+                    raise e
+                logger.error(f"Failed to persist step creation: {str(e)}")
+
+        tasks = [el.send(for_id=self.id) for el in self.elements]
+        await asyncio.gather(*tasks)
+
+        if not config.features.prompt_playground and "generation" in step_dict:
+            step_dict.pop("generation", None)
+
+        await context.emitter.send_step(step_dict)
+
+        return self.id
+
+    async def stream_token(self, token: str, is_sequence=False):
+        """
+        Sends a token to the UI.
+        Once all tokens have been streamed, call .send() to end the stream and persist the step if persistence is enabled.
+        """
+
+        if not self.streaming:
+            self.streaming = True
+            step_dict = self.to_dict()
+            await context.emitter.stream_start(step_dict)
+
+        if is_sequence:
+            self.output = token
+        else:
+            self.output += token
+
+        assert self.id
+        await context.emitter.send_token(
+            id=self.id, token=token, is_sequence=is_sequence
+        )
+
+    # Handle parameter less decorator
+    def __call__(self, func):
+        return step(
+            original_function=func,
+            type=self.type,
+            name=self.name,
+            id=self.id,
+            parent_id=self.parent_id,
+            thread_id=self.thread_id,
+            disable_feedback=self.disable_feedback,
+        )
+
+    # Handle Context Manager Protocol
+    async def __aenter__(self):
+        self.start = datetime.utcnow().isoformat()
+        if not self.parent_id and not self.root:
+            if current_step := context.current_step:
+                self.parent_id = current_step.id
+            elif context.session.root_message:
+                self.parent_id = context.session.root_message.id
+        context.session.active_steps.append(self)
+        await self.send()
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        self.end = datetime.utcnow().isoformat()
+        context.session.active_steps.pop()
+        await self.update()
+
+    def __enter__(self):
+        self.start = datetime.utcnow().isoformat()
+        if not self.parent_id and not self.root:
+            if current_step := context.current_step:
+                self.parent_id = current_step.id
+            elif context.session.root_message:
+                self.parent_id = context.session.root_message.id
+        context.session.active_steps.append(self)
+
+        asyncio.create_task(self.send())
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.end = datetime.utcnow().isoformat()
+        context.session.active_steps.pop()
+        asyncio.create_task(self.update())
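Taken together, the new module gives application code two entry points: the step decorator (which records the wrapped function's arguments as input and its return value as output) and the Step context manager (which sends the step on enter and updates it with start/end timestamps on exit). A rough usage sketch, assuming step and Step are re-exported at the package level by the chainlit/__init__.py changes listed above (the export list itself is not shown in this diff):

import chainlit as cl


@cl.step(type="tool", name="lookup")
async def lookup(query: str):
    # The decorator stores {"args": ..., "kwargs": ...} as the step input
    # and uses the return value as the step output.
    return f"results for {query!r}"


@cl.on_message
async def main(message: cl.Message):
    async with cl.Step(name="reasoning", type="tool") as step:
        step.input = message.content
        await step.stream_token("thinking ")  # first token switches the step to streaming
        step.output = await lookup(message.content)
    # __aexit__ records the end timestamp and pushes the update to the UI.
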
chainlit/types.py
CHANGED
@@ -1,9 +1,12 @@
 from enum import Enum
-from typing import Dict, List, Literal, Optional, TypedDict, Union
+from typing import TYPE_CHECKING, Dict, List, Literal, Optional, TypedDict, Union

-
-from chainlit.element import
-from chainlit.
+if TYPE_CHECKING:
+    from chainlit.element import ElementDict
+    from chainlit.user import UserDict
+    from chainlit.step import StepDict
+
+from chainlit_client import ChatGeneration, CompletionGeneration
 from dataclasses_json import DataClassJsonMixin
 from pydantic import BaseModel
 from pydantic.dataclasses import dataclass
@@ -13,6 +16,27 @@ InputWidgetType = Literal[
 ]


+class ThreadDict(TypedDict):
+    id: str
+    createdAt: str
+    user: Optional["UserDict"]
+    tags: Optional[List[str]]
+    metadata: Optional[Dict]
+    steps: List["StepDict"]
+    elements: Optional[List["ElementDict"]]
+
+
+class Pagination(BaseModel):
+    first: int
+    cursor: Optional[str] = None
+
+
+class ThreadFilter(BaseModel):
+    feedback: Optional[Literal[-1, 0, 1]] = None
+    userIdentifier: Optional[str] = None
+    search: Optional[str] = None
+
+
 @dataclass
 class FileSpec(DataClassJsonMixin):
     accept: Union[List[str], Dict[str, List[str]]]
@@ -43,23 +67,30 @@ class AskActionSpec(ActionSpec, AskSpec, DataClassJsonMixin):
     """Specification for asking the user an action"""


-class
-
-
+class FileReference(TypedDict):
+    id: str
+
+
+class FileDict(TypedDict):
+    id: str
+    name: str
+    path: str
+    size: int
+    type: str


 class UIMessagePayload(TypedDict):
-    message:
-
+    message: "StepDict"
+    fileReferences: Optional[List[FileReference]]


 @dataclass
 class AskFileResponse:
+    id: str
     name: str
     path: str
     size: int
     type: str
-    content: bytes


 class AskActionResponse(TypedDict):
@@ -72,24 +103,28 @@ class AskActionResponse(TypedDict):
     collapsed: bool


-class
-
+class GenerationRequest(BaseModel):
+    chatGeneration: Optional[ChatGeneration] = None
+    completionGeneration: Optional[CompletionGeneration] = None
     userEnv: Dict[str, str]

+    @property
+    def generation(self):
+        if self.chatGeneration:
+            return self.chatGeneration
+        return self.completionGeneration

-
-
-    feedback: Literal[-1, 0, 1]
-    feedbackComment: Optional[str] = None
+    def is_chat(self):
+        return self.chatGeneration is not None


-class
-
+class DeleteThreadRequest(BaseModel):
+    threadId: str


-class
+class GetThreadsRequest(BaseModel):
     pagination: Pagination
-    filter:
+    filter: ThreadFilter


 class Theme(str, Enum):
@@ -99,8 +134,30 @@ class Theme(str, Enum):

 @dataclass
 class ChatProfile(DataClassJsonMixin):
-    """Specification for a chat profile that can be chosen by the user at the
+    """Specification for a chat profile that can be chosen by the user at the thread start."""

     name: str
     markdown_description: str
     icon: Optional[str] = None
+
+
+FeedbackStrategy = Literal["BINARY"]
+
+
+class FeedbackDict(TypedDict):
+    value: Literal[-1, 0, 1]
+    strategy: FeedbackStrategy
+    comment: Optional[str]
+
+
+@dataclass
+class Feedback:
+    forId: str
+    value: Literal[-1, 0, 1]
+    strategy: FeedbackStrategy = "BINARY"
+    id: Optional[str] = None
+    comment: Optional[str] = None
+
+
+class UpdateFeedbackRequest(BaseModel):
+    feedback: Feedback