chainlit 1.0.500__py3-none-any.whl → 1.0.502__py3-none-any.whl

This diff shows the changes between publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.


This version of chainlit might be problematic. Click here for more details.

@@ -22,7 +22,7 @@
22
22
  <script>
23
23
  const global = globalThis;
24
24
  </script>
25
- <script type="module" crossorigin src="/assets/index-dc38064c.js"></script>
25
+ <script type="module" crossorigin src="/assets/index-e306c2e5.js"></script>
26
26
  <link rel="stylesheet" href="/assets/index-d088547c.css">
27
27
  </head>
28
28
  <body>
@@ -14,7 +14,6 @@ DEFAULT_IGNORE = [
14
14
  CBEventType.SYNTHESIZE,
15
15
  CBEventType.EMBEDDING,
16
16
  CBEventType.NODE_PARSING,
17
- CBEventType.QUERY,
18
17
  CBEventType.TREE,
19
18
  ]
20
19
 
@@ -71,9 +70,12 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
71
70
  ) -> str:
72
71
  """Run when an event starts and return id of event."""
73
72
  self._restore_context()
73
+
74
74
  step_type: StepType = "undefined"
75
75
  if event_type == CBEventType.RETRIEVE:
76
76
  step_type = "retrieval"
77
+ elif event_type == CBEventType.QUERY:
78
+ step_type = "retrieval"
77
79
  elif event_type == CBEventType.LLM:
78
80
  step_type = "llm"
79
81
  else:
@@ -84,7 +86,7 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
84
86
  type=step_type,
85
87
  parent_id=self._get_parent_id(parent_id),
86
88
  id=event_id,
87
- disable_feedback=False,
89
+ disable_feedback=True,
88
90
  )
89
91
  self.steps[event_id] = step
90
92
  step.start = utc_now()
@@ -102,6 +104,7 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
102
104
  """Run when an event ends."""
103
105
  step = self.steps.get(event_id, None)
104
106
 
107
+
105
108
  if payload is None or step is None:
106
109
  return
107
110
 
@@ -109,10 +112,26 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
109
112
 
110
113
  step.end = utc_now()
111
114
 
112
- if event_type == CBEventType.RETRIEVE:
115
+ if event_type == CBEventType.QUERY:
116
+ response = payload.get(EventPayload.RESPONSE)
117
+ source_nodes = getattr(response, "source_nodes", None)
118
+ if source_nodes:
119
+ source_refs = ", ".join(
120
+ [f"Source {idx}" for idx, _ in enumerate(source_nodes)])
121
+ step.elements = [
122
+ Text(
123
+ name=f"Source {idx}",
124
+ content=source.text or "Empty node",
125
+ )
126
+ for idx, source in enumerate(source_nodes)
127
+ ]
128
+ step.output = f"Retrieved the following sources: {source_refs}"
129
+ self.context.loop.create_task(step.update())
130
+
131
+ elif event_type == CBEventType.RETRIEVE:
113
132
  sources = payload.get(EventPayload.NODES)
114
133
  if sources:
115
- source_refs = "\, ".join(
134
+ source_refs = ", ".join(
116
135
  [f"Source {idx}" for idx, _ in enumerate(sources)]
117
136
  )
118
137
  step.elements = [
@@ -125,7 +144,7 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
125
144
  step.output = f"Retrieved the following sources: {source_refs}"
126
145
  self.context.loop.create_task(step.update())
127
146
 
128
- if event_type == CBEventType.LLM:
147
+ elif event_type == CBEventType.LLM:
129
148
  formatted_messages = payload.get(
130
149
  EventPayload.MESSAGES
131
150
  ) # type: Optional[List[ChatMessage]]
@@ -152,10 +171,13 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
152
171
  step.output = content
153
172
 
154
173
  token_count = self.total_llm_token_count or None
155
-
174
+ raw_response = response.raw if response else None
175
+ model = raw_response.get("model", None) if raw_response else None
176
+
156
177
  if messages and isinstance(response, ChatResponse):
157
178
  msg: ChatMessage = response.message
158
179
  step.generation = ChatGeneration(
180
+ model=model,
159
181
  messages=messages,
160
182
  message_completion=GenerationMessage(
161
183
  role=msg.role.value, # type: ignore
@@ -165,6 +187,7 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
165
187
  )
166
188
  elif formatted_prompt:
167
189
  step.generation = CompletionGeneration(
190
+ model=model,
168
191
  prompt=formatted_prompt,
169
192
  completion=content,
170
193
  token_count=token_count,
@@ -172,6 +195,11 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
172
195
 
173
196
  self.context.loop.create_task(step.update())
174
197
 
198
+ else:
199
+ step.output = payload.get
200
+ self.context.loop.create_task(step.update())
201
+ return
202
+
175
203
  self.steps.pop(event_id, None)
176
204
 
177
205
  def _noop(self, *args, **kwargs):
@@ -179,3 +207,4 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
179
207
 
180
208
  start_trace = _noop
181
209
  end_trace = _noop
210
+
chainlit/message.py CHANGED
@@ -39,6 +39,8 @@ class MessageBase(ABC):
39
39
  persisted = False
40
40
  is_error = False
41
41
  language: Optional[str] = None
42
+ metadata: Optional[Dict] = None
43
+ tags: Optional[List[str]] = None
42
44
  wait_for_answer = False
43
45
  indent: Optional[int] = None
44
46
  generation: Optional[BaseGeneration] = None
@@ -83,6 +85,8 @@ class MessageBase(ABC):
83
85
  "waitForAnswer": self.wait_for_answer,
84
86
  "indent": self.indent,
85
87
  "generation": self.generation.to_dict() if self.generation else None,
88
+ "metadata": self.metadata or {},
89
+ "tags": self.tags,
86
90
  }
87
91
 
88
92
  return _dict
@@ -209,6 +213,8 @@ class Message(MessageBase):
209
213
  disable_feedback: bool = False,
210
214
  type: MessageStepType = "assistant_message",
211
215
  generation: Optional[BaseGeneration] = None,
216
+ metadata: Optional[Dict] = None,
217
+ tags: Optional[List[str]] = None,
212
218
  id: Optional[str] = None,
213
219
  created_at: Union[str, None] = None,
214
220
  ):
@@ -234,6 +240,9 @@ class Message(MessageBase):
234
240
  if created_at:
235
241
  self.created_at = created_at
236
242
 
243
+ self.metadata = metadata
244
+ self.tags = tags
245
+
237
246
  self.author = author
238
247
  self.type = type
239
248
  self.actions = actions if actions is not None else []
@@ -1,8 +1,7 @@
1
1
  from typing import Union
2
-
2
+ import asyncio
3
3
  from chainlit.context import get_context
4
4
  from chainlit.step import Step
5
- from chainlit.sync import run_sync
6
5
  from chainlit.utils import check_module_version
7
6
  from literalai import ChatGeneration, CompletionGeneration
8
7
  from literalai.helper import timestamp_utc
@@ -57,6 +56,6 @@ def instrument_openai():
57
56
  def on_new_generation_sync(
58
57
  generation: Union["ChatGeneration", "CompletionGeneration"], timing
59
58
  ):
60
- run_sync(on_new_generation(generation, timing))
61
-
59
+ asyncio.create_task(on_new_generation(generation, timing))
60
+
62
61
  instrument_openai(None, on_new_generation_sync)
@@ -4,9 +4,7 @@ from chainlit.playground.provider import BaseProvider
4
4
  from chainlit.playground.providers import (
5
5
  Anthropic,
6
6
  AzureChatOpenAI,
7
- AzureOpenAI,
8
7
  ChatOpenAI,
9
- OpenAI,
10
8
  ChatVertexAI,
11
9
  GenerationVertexAI,
12
10
  Gemini,
@@ -14,9 +12,7 @@ from chainlit.playground.providers import (
14
12
 
15
13
  providers = {
16
14
  AzureChatOpenAI.id: AzureChatOpenAI,
17
- AzureOpenAI.id: AzureOpenAI,
18
15
  ChatOpenAI.id: ChatOpenAI,
19
- OpenAI.id: OpenAI,
20
16
  Anthropic.id: Anthropic,
21
17
  ChatVertexAI.id: ChatVertexAI,
22
18
  GenerationVertexAI.id: GenerationVertexAI,
@@ -2,9 +2,7 @@ from .anthropic import Anthropic
2
2
  from .huggingface import HFFlanT5
3
3
  from .openai import (
4
4
  AzureChatOpenAI,
5
- AzureOpenAI,
6
5
  ChatOpenAI,
7
- OpenAI,
8
6
  )
9
7
  from .vertexai import (
10
8
  ChatVertexAI,
@@ -168,8 +168,8 @@ class ChatOpenAIProvider(BaseProvider):
168
168
 
169
169
  async def create_event_stream():
170
170
  message = response.choices[0].message
171
- if function_call := message.function_call:
172
- yield stringify_function_call(function_call)
171
+ if tool_calls := message.tool_calls:
172
+ yield json.dumps([tc.model_dump() for tc in tool_calls], indent=4, ensure_ascii=False)
173
173
  else:
174
174
  yield message.content or ""
175
175
 
@@ -335,8 +335,8 @@ class AzureChatOpenAIProvider(BaseProvider):
335
335
 
336
336
  async def create_event_stream():
337
337
  message = response.choices[0].message
338
- if function_call := message.function_call:
339
- yield stringify_function_call(function_call)
338
+ if tool_calls := message.tool_calls:
339
+ yield json.dumps([tc.model_dump() for tc in tool_calls], indent=4, ensure_ascii=False)
340
340
  else:
341
341
  yield message.content or ""
342
342
 
@@ -374,21 +374,7 @@ ChatOpenAI = ChatOpenAIProvider(
374
374
  is_chat=True,
375
375
  )
376
376
 
377
- OpenAI = OpenAIProvider(
378
- id="openai",
379
- name="OpenAI",
380
- env_vars=openai_env_vars,
381
- inputs=[
382
- Select(
383
- id="model",
384
- label="Model",
385
- values=["text-davinci-003", "text-davinci-002"],
386
- initial_value="text-davinci-003",
387
- ),
388
- *openai_common_inputs,
389
- ],
390
- is_chat=False,
391
- )
377
+
392
378
 
393
379
 
394
380
  AzureChatOpenAI = AzureChatOpenAIProvider(
@@ -398,11 +384,3 @@ AzureChatOpenAI = AzureChatOpenAIProvider(
398
384
  inputs=openai_common_inputs,
399
385
  is_chat=True,
400
386
  )
401
-
402
- AzureOpenAI = AzureOpenAIProvider(
403
- id="azure",
404
- name="AzureOpenAI",
405
- env_vars=azure_openai_env_vars,
406
- inputs=openai_common_inputs,
407
- is_chat=False,
408
- )
chainlit/server.py CHANGED
@@ -36,12 +36,12 @@ from chainlit.markdown import get_markdown_str
36
36
  from chainlit.playground.config import get_llm_providers
37
37
  from chainlit.telemetry import trace_event
38
38
  from chainlit.types import (
39
+ DeleteFeedbackRequest,
39
40
  DeleteThreadRequest,
40
41
  GenerationRequest,
41
42
  GetThreadsRequest,
42
43
  Theme,
43
44
  UpdateFeedbackRequest,
44
- DeleteFeedbackRequest,
45
45
  )
46
46
  from chainlit.user import PersistedUser, User
47
47
  from fastapi import (
@@ -194,6 +194,7 @@ socket = SocketManager(
194
194
  app,
195
195
  cors_allowed_origins=[],
196
196
  async_mode="asgi",
197
+ socketio_path="/ws/socket.io",
197
198
  )
198
199
 
199
200
 
@@ -569,6 +570,7 @@ async def update_feedback(
569
570
 
570
571
  return JSONResponse(content={"success": True, "feedbackId": feedback_id})
571
572
 
573
+
572
574
  @app.delete("/feedback")
573
575
  async def delete_feedback(
574
576
  request: Request,
@@ -782,7 +784,7 @@ async def get_logo(theme: Optional[Theme] = Query(Theme.light)):
782
784
  return FileResponse(logo_path, media_type=media_type)
783
785
 
784
786
 
785
- @app.head('/')
787
+ @app.head("/")
786
788
  def status_check():
787
789
  return {"message": "Site is operational"}
788
790
 
chainlit/socket.py CHANGED
@@ -276,6 +276,9 @@ async def call_action(sid, action):
276
276
  action = Action(**action)
277
277
 
278
278
  try:
279
+ if not context.session.has_first_interaction:
280
+ context.session.has_first_interaction = True
281
+ asyncio.create_task(context.emitter.init_thread(action.name))
279
282
  res = await process_action(action)
280
283
  await context.emitter.send_action_response(
281
284
  id=action.id, status=True, response=res if isinstance(res, str) else None
chainlit/step.py CHANGED
@@ -29,6 +29,7 @@ class StepDict(TypedDict, total=False):
29
29
  waitForAnswer: Optional[bool]
30
30
  isError: Optional[bool]
31
31
  metadata: Dict
32
+ tags: Optional[List[str]]
32
33
  input: str
33
34
  output: str
34
35
  createdAt: Optional[str]
@@ -47,6 +48,7 @@ def step(
47
48
  name: Optional[str] = "",
48
49
  type: TrueStepType = "undefined",
49
50
  id: Optional[str] = None,
51
+ tags: Optional[List[str]] = None,
50
52
  disable_feedback: bool = True,
51
53
  root: bool = False,
52
54
  language: Optional[str] = None,
@@ -71,6 +73,7 @@ def step(
71
73
  id=id,
72
74
  disable_feedback=disable_feedback,
73
75
  root=root,
76
+ tags=tags,
74
77
  language=language,
75
78
  show_input=show_input,
76
79
  ) as step:
@@ -97,6 +100,7 @@ def step(
97
100
  id=id,
98
101
  disable_feedback=disable_feedback,
99
102
  root=root,
103
+ tags=tags,
100
104
  language=language,
101
105
  show_input=show_input,
102
106
  ) as step:
@@ -137,6 +141,7 @@ class Step:
137
141
 
138
142
  is_error: Optional[bool]
139
143
  metadata: Dict
144
+ tags: Optional[List[str]]
140
145
  thread_id: str
141
146
  created_at: Union[str, None]
142
147
  start: Union[str, None]
@@ -153,6 +158,8 @@ class Step:
153
158
  id: Optional[str] = None,
154
159
  parent_id: Optional[str] = None,
155
160
  elements: Optional[List[Element]] = None,
161
+ metadata: Optional[Dict] = None,
162
+ tags: Optional[List[str]] = None,
156
163
  disable_feedback: bool = True,
157
164
  root: bool = False,
158
165
  language: Optional[str] = None,
@@ -167,7 +174,8 @@ class Step:
167
174
  self.type = type
168
175
  self.id = id or str(uuid.uuid4())
169
176
  self.disable_feedback = disable_feedback
170
- self.metadata = {}
177
+ self.metadata = metadata or {}
178
+ self.tags = tags
171
179
  self.is_error = False
172
180
  self.show_input = show_input
173
181
  self.parent_id = parent_id
@@ -231,6 +239,7 @@ class Step:
231
239
  "disableFeedback": self.disable_feedback,
232
240
  "streaming": self.streaming,
233
241
  "metadata": self.metadata,
242
+ "tags": self.tags,
234
243
  "input": self.input,
235
244
  "isError": self.is_error,
236
245
  "output": self.output,
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: chainlit
3
- Version: 1.0.500
3
+ Version: 1.0.502
4
4
  Summary: Build Conversational AI.
5
5
  Home-page: https://github.com/Chainlit/chainlit
6
6
  License: Apache-2.0 license
@@ -17,12 +17,12 @@ Requires-Dist: aiofiles (>=23.1.0,<24.0.0)
17
17
  Requires-Dist: asyncer (>=0.0.2,<0.0.3)
18
18
  Requires-Dist: click (>=8.1.3,<9.0.0)
19
19
  Requires-Dist: dataclasses_json (>=0.5.7,<0.6.0)
20
- Requires-Dist: fastapi (>=0.100)
20
+ Requires-Dist: fastapi (>=0.110.1,<0.111.0)
21
21
  Requires-Dist: fastapi-socketio (>=0.0.10,<0.0.11)
22
22
  Requires-Dist: filetype (>=1.2.0,<2.0.0)
23
23
  Requires-Dist: httpx (>=0.23.0)
24
24
  Requires-Dist: lazify (>=0.4.0,<0.5.0)
25
- Requires-Dist: literalai (==0.0.401)
25
+ Requires-Dist: literalai (==0.0.500)
26
26
  Requires-Dist: nest-asyncio (>=1.5.6,<2.0.0)
27
27
  Requires-Dist: packaging (>=23.1,<24.0)
28
28
  Requires-Dist: pydantic (>=1,<3)
@@ -30,7 +30,7 @@ Requires-Dist: pyjwt (>=2.8.0,<3.0.0)
30
30
  Requires-Dist: python-dotenv (>=1.0.0,<2.0.0)
31
31
  Requires-Dist: python-graphql-client (>=0.4.3,<0.5.0)
32
32
  Requires-Dist: python-multipart (>=0.0.9,<0.0.10)
33
- Requires-Dist: starlette (<0.33.0)
33
+ Requires-Dist: starlette (>=0.37.2,<0.38.0)
34
34
  Requires-Dist: syncer (>=2.0.3,<3.0.0)
35
35
  Requires-Dist: tomli (>=2.0.1,<3.0.0)
36
36
  Requires-Dist: uptrace (>=1.22.0,<2.0.0)
@@ -10,18 +10,18 @@ chainlit/config.py,sha256=5vGp8rPIMZhO58cd1mNn2O5lZF2lcBbpq5rGDMIUJVk,15066
10
10
  chainlit/context.py,sha256=CecWdRuRCTr4jfXlOiU3Mh41j3B-p40c1jC7mhToVzk,2476
11
11
  chainlit/copilot/dist/assets/logo_dark-2a3cf740.svg,sha256=Kjz3QMh-oh-ag4YatjU0YCPqGF7F8nHh8VUQoJIs01E,8887
12
12
  chainlit/copilot/dist/assets/logo_light-b078e7bc.svg,sha256=sHjnvEq1rfqh3bcexJNYUY7WEDdTQZq3aKZYpi4w4ck,8889
13
- chainlit/copilot/dist/index.js,sha256=HGdbTp3gVoiWDEp2cr0Nz8eyk-c56xYXWCFSecU_YSk,7015581
14
- chainlit/data/__init__.py,sha256=BWsi8tldVQ79qr5n9-mP-bYGulKwUJiNrqAYGJdSmj8,15404
13
+ chainlit/copilot/dist/index.js,sha256=5fpVkoFSGS8HybSjPvjWJBLSjGKCN1uFuMtufy78ZfQ,7015695
14
+ chainlit/data/__init__.py,sha256=BpLeWSEMOsMN9JuyKxc4LSzXviNt68RDBayu_c7ccV4,15588
15
15
  chainlit/data/acl.py,sha256=hx7Othkx12EitonyZD4iFIRVHwxBmBY2TKdwjPuZMSo,461
16
16
  chainlit/element.py,sha256=K5-yxiO2E0ZMRARKcXCNPnxsDKeLcBsXiZ5L-CGNp0A,10162
17
17
  chainlit/emitter.py,sha256=QOB4HNCOwL5x1EVBwuKceiGDtcml8Nr9TsvpSFCntoU,12178
18
18
  chainlit/frontend/dist/assets/index-d088547c.css,sha256=0IhUfCm_IY1kjvlTR2edW1qKXAFDya3LZ6mnZnP6ovk,6605
19
- chainlit/frontend/dist/assets/index-dc38064c.js,sha256=oyyYC-wCrkXT_dAoSXsc_dpXOxvng0DdHDKsDo2atBQ,3072827
19
+ chainlit/frontend/dist/assets/index-e306c2e5.js,sha256=xr7IyXFh4_dNdnVp9tJxtyLXysHW_1DVpyK8BD0JDXM,3072941
20
20
  chainlit/frontend/dist/assets/logo_dark-2a3cf740.svg,sha256=Kjz3QMh-oh-ag4YatjU0YCPqGF7F8nHh8VUQoJIs01E,8887
21
21
  chainlit/frontend/dist/assets/logo_light-b078e7bc.svg,sha256=sHjnvEq1rfqh3bcexJNYUY7WEDdTQZq3aKZYpi4w4ck,8889
22
- chainlit/frontend/dist/assets/react-plotly-df285644.js,sha256=ea_GeVcXMEJmHtknexk9Ygb2forKcELqJIPxlODqUSg,3763471
22
+ chainlit/frontend/dist/assets/react-plotly-cc656f1c.js,sha256=8qygrxBoKy-nEkZ6r1iYUVgDPsc5F9FIHR03uQSe35E,3763471
23
23
  chainlit/frontend/dist/favicon.svg,sha256=0Cy8x28obT5eWW3nxZRhsEvu6_zMqrqbg0y6hT3D0Q0,6455
24
- chainlit/frontend/dist/index.html,sha256=Uu9t060GQN9bA_SMjcYhkmJQHr84Eo0JwFx4y0NSNcA,1005
24
+ chainlit/frontend/dist/index.html,sha256=xvVRWHyL7YkrPGKt7-UO8Qzso0AHsR1rCfA3rXKNPeU,1005
25
25
  chainlit/haystack/__init__.py,sha256=uZ77YiPy-qleSTi3dQCDO9HE6S6F6GpJWmh7jO4cxXA,217
26
26
  chainlit/haystack/callbacks.py,sha256=tItLc6OmskPeDEJH2Qjtg7KgAgIy1TuYQYHTZm9cr3U,5209
27
27
  chainlit/hello.py,sha256=LwENQWo5s5r8nNDn4iKSV77vX60Ky5r_qGjQhyi7qlY,416
@@ -30,39 +30,37 @@ chainlit/langchain/__init__.py,sha256=zErMw0_3ufSGeF9ye7X0ZX3wDat4mTOx97T40ePDO2
30
30
  chainlit/langchain/callbacks.py,sha256=bABLMuLx0h-It0zfB9tqcSeCAu_-uxMLgm2gPIGb4VY,20484
31
31
  chainlit/langflow/__init__.py,sha256=wxhxdsl1yxdsRyNTgZticxFF_8VFtJJ4OdIy3tnEIyM,817
32
32
  chainlit/llama_index/__init__.py,sha256=weRoIWCaRBGvA1LczCEfsqhWsltQSVlhtRnTovtdo8w,227
33
- chainlit/llama_index/callbacks.py,sha256=K2PpqA2w9UBVwq2R1DAzPBMMUBHDjhUvsQBagjZexZI,6078
33
+ chainlit/llama_index/callbacks.py,sha256=-qCTM6GlFr1VYFMeorGxVX6iHHJYQ4XreTe7qw5w9bc,7225
34
34
  chainlit/logger.py,sha256=wTwRSZsLfXwWy6U4351IgWAm4KCMThgxm9EZpjGUEr4,373
35
35
  chainlit/markdown.py,sha256=VUpqW7MqgjiPIQYHU4funwqC4GmHZBu_TGZTjTI4B0k,2025
36
- chainlit/message.py,sha256=UvH5zX6D_sdMqmxJ3X7HU0Pb7Y9lL01mH8e_-EWpzB0,17683
36
+ chainlit/message.py,sha256=O9Qtw_5cDYA3TQVGyfbGwEeLLjpOoRIFe9LOedAkF_c,17974
37
37
  chainlit/oauth_providers.py,sha256=WiKUFpNp0RRN5Vq6LHCR9V-9td_1YEn2yD8iGu8atvY,17459
38
- chainlit/openai/__init__.py,sha256=ASiUNEvsFaDJeXGjZHbLTHhSyjGFvjhT0TRPu0_HpT4,2002
38
+ chainlit/openai/__init__.py,sha256=wYxag9R0LjUw4K6_XxV_7_6C5Q-hKZANEP6My8goGBs,1993
39
39
  chainlit/playground/__init__.py,sha256=igNRcBgqLKPTjOQtTNhhGNJFmZn-Dl1fHRQzQSjDGTQ,80
40
- chainlit/playground/config.py,sha256=_T0ELFRfBVUIOlaNvp3Af3X7wfMEQjxZPh26X_6mXYY,1033
40
+ chainlit/playground/config.py,sha256=XtTXYqFfv0JnygZcywurXyY62v7zukVwxxpnYiZIQRs,948
41
41
  chainlit/playground/provider.py,sha256=tU805uWOX6Tgh8rMoqVWdWqVq0nyylkrI8l2KZv2biw,3858
42
- chainlit/playground/providers/__init__.py,sha256=DxVp9BbQUdAPstpDTg2koNUTMS3tJa84iPTt_gQoHGM,236
42
+ chainlit/playground/providers/__init__.py,sha256=-Tx93fy4N3knLVF0fssaJb7e_aSYk8hXAaW9NsvZ0lI,207
43
43
  chainlit/playground/providers/anthropic.py,sha256=M9I0-RpKAmEJzGnH4-XW7n8yvNtr9phlPGnjr_32lfs,3495
44
44
  chainlit/playground/providers/huggingface.py,sha256=AmBmIzvfqBjswEI40jifb0OrMQkTk5rXCkGX7nMJ-bk,2130
45
45
  chainlit/playground/providers/langchain.py,sha256=8_gfZr3iEHhyFiR1X1bo-yfxZWjJkHaw_76MvWLZTYc,3103
46
- chainlit/playground/providers/openai.py,sha256=_Gnzfk9UHrDS83y3_OdhLWO13E800Z0RrIUsUv00syo,12397
46
+ chainlit/playground/providers/openai.py,sha256=9aDSgXVW3sW-gaybBBWMIE8cJPyk9ZuGvBmWwrddcMM,11956
47
47
  chainlit/playground/providers/vertexai.py,sha256=zKy501f-MHnLrvuRzN50FqgB3xoHzfQFTVbw83Nsj20,5084
48
48
  chainlit/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
49
49
  chainlit/secret.py,sha256=cQvIFGTQ7r2heC8EOGdgifSZZYqslh-qQxhUhKhD8vU,295
50
- chainlit/server.py,sha256=TYfcZWP8J6aPVXh3hiI3n5iCgtcxIElCTQ7nuexmtWw,23680
50
+ chainlit/server.py,sha256=5HY4W6-iUYriEfgndYMcilTHTow1YsdrDewiGD0dWcY,23716
51
51
  chainlit/session.py,sha256=AP9xhIM0HuvlOrPgcWR6sg161rSmZZ-iDPvJF0f6pZg,8844
52
- chainlit/socket.py,sha256=1-w2uRyGzDNK3v-0Lk6O9eRIJoVjPkyG285pAGePx5E,9810
53
- chainlit/step.py,sha256=UJI_V07Ai6Kf-JD_s6bHIdtEMWdKusGZt6PaAaAcn_w,12791
52
+ chainlit/socket.py,sha256=xcLxqZls_nrUrFHseF_AaYghtwmOCoTv7l6QPr5mwoY,9995
53
+ chainlit/step.py,sha256=JdXVqG73d9kNtHJjLhmfo1mqkCYtgqfF3jm08uGaCMs,13102
54
54
  chainlit/sync.py,sha256=G1n-7-3WgXsN8y1bJkEyws_YwmHZIyDZoZUwhprigag,1235
55
55
  chainlit/telemetry.py,sha256=Rk4dnZv0OnGOgV4kD-VHdhgl4i7i3ypqhSE_R-LZceM,3060
56
- chainlit/translations/de.json,sha256=FM3x5Lwy-mW0rmTPqqYvVmLTzus5Wazp4WSjqHbBtEE,6252
57
56
  chainlit/translations/en-US.json,sha256=uUuS4hlNoYSlDp0DZGTAlPZxwLfsP4Jiu4ckrfr-fI0,7835
58
- chainlit/translations/pt-BR.json,sha256=ua3_BILdU72WwereH0PX1Ua1lxTqNGxGxGjEzQhZ5AU,4614
59
57
  chainlit/translations.py,sha256=WG_r7HzxBYns-zk9tVvoGdoofv71okTZx8k1RlcoTIg,2034
60
58
  chainlit/types.py,sha256=0DVObo9zKnT4hG9Diqp0rwBf0RFtu22TJCNoViOY3hE,3375
61
59
  chainlit/user.py,sha256=Cw4uGz0ffivWFszv8W__EHwkvTHQ3Lj9hqpRCPxFujo,619
62
60
  chainlit/user_session.py,sha256=nyPx8vSICP8BhpPcW5h9vbHVf9ixj39SrkvJBUI_6zs,1368
63
61
  chainlit/utils.py,sha256=3HzhfZ4XJhBIe9sJ_3Lxv3lMH4mFXsi6nLBGqm8Gtdw,2571
64
62
  chainlit/version.py,sha256=iosXhlXclBwBqlADFKEilxAC2wWKbtuBKi87AmPi7s8,196
65
- chainlit-1.0.500.dist-info/METADATA,sha256=SNk49TQRf2sZEIjfvhD0TkOiSwrGZZbgmgOlQjDI-p4,5540
66
- chainlit-1.0.500.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
67
- chainlit-1.0.500.dist-info/entry_points.txt,sha256=FrkqdjrFl8juSnvBndniyX7XuKojmUwO4ghRh-CFMQc,45
68
- chainlit-1.0.500.dist-info/RECORD,,
63
+ chainlit-1.0.502.dist-info/METADATA,sha256=x_a6epCKzziT_oxQYdiVcH4Gcv4jBN4gxIlqjo_KK5k,5560
64
+ chainlit-1.0.502.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
65
+ chainlit-1.0.502.dist-info/entry_points.txt,sha256=FrkqdjrFl8juSnvBndniyX7XuKojmUwO4ghRh-CFMQc,45
66
+ chainlit-1.0.502.dist-info/RECORD,,
@@ -1,161 +0,0 @@
1
- {
2
- "components": {
3
- "atoms": {
4
- "buttons": {
5
- "userButton": {
6
- "menu": {
7
- "settings": "Einstellungen",
8
- "settingsKey": "S",
9
- "APIKeys": "API-Schl\u00fcssel",
10
- "logout": "Abmelden"
11
- }
12
- }
13
- }
14
- },
15
- "molecules": {
16
- "newChatButton": {
17
- "newChat": "Neuer Chat"
18
- },
19
- "tasklist": {
20
- "TaskList": {
21
- "title": "\ud83d\uddd2\ufe0f Aufgabenliste",
22
- "loading": "L\u00e4dt...",
23
- "error": "Ein Fehler ist aufgetreten"
24
- }
25
- },
26
- "attachments": {
27
- "cancelUpload": "Upload abbrechen",
28
- "removeAttachment": "Anhang entfernen"
29
- },
30
- "newChatDialog": {
31
- "createNewChat": "Neuen Chat erstellen?",
32
- "clearChat": "Dies wird die aktuellen Nachrichten l\u00f6schen und einen neuen Chat starten.",
33
- "cancel": "Abbrechen",
34
- "confirm": "Best\u00e4tigen"
35
- },
36
- "settingsModal": {
37
- "settings": "Einstellungen",
38
- "expandMessages": "Nachrichten ausklappen",
39
- "hideChainOfThought": "Zwischenschritte verbergen",
40
- "darkMode": "Dunkelmodus"
41
- }
42
- },
43
- "organisms": {
44
- "chat": {
45
- "history": {
46
- "index": {
47
- "showHistory": "Zeige Chatverlauf",
48
- "lastInputs": "Letzte Eingaben",
49
- "noInputs": "Leer...",
50
- "loading": "L\u00e4dt..."
51
- }
52
- },
53
- "inputBox": {
54
- "input": {
55
- "placeholder": "Nachricht eingeben..."
56
- },
57
- "speechButton": {
58
- "start": "Aufnahme starten",
59
- "stop": "Aufnahme stoppen"
60
- },
61
- "SubmitButton": {
62
- "sendMessage": "Nachricht senden",
63
- "stopTask": "Aufgabe stoppen"
64
- },
65
- "UploadButton": {
66
- "attachFiles": "Dateien anh\u00e4ngen"
67
- },
68
- "waterMark": {
69
- "text": "Erstellt mit"
70
- }
71
- },
72
- "Messages": {
73
- "index": {
74
- "running": "L\u00e4uft",
75
- "executedSuccessfully": "erfolgreich ausgef\u00fchrt",
76
- "failed": "fehlgeschlagen",
77
- "feedbackUpdated": "Feedback aktualisiert",
78
- "updating": "Aktualisiert"
79
- }
80
- },
81
- "dropScreen": {
82
- "dropYourFilesHere": "Ziehe deine Dateien hierher"
83
- },
84
- "index": {
85
- "failedToUpload": "Upload fehlgeschlagen",
86
- "cancelledUploadOf": "Upload abgebrochen von",
87
- "couldNotReachServer": "Konnte den Server nicht erreichen",
88
- "continuingChat": "Vorherigen Chat fortsetzen"
89
- },
90
- "settings": {
91
- "settingsPanel": "Einstellungsfenster",
92
- "reset": "Zur\u00fccksetzen",
93
- "cancel": "Abbrechen",
94
- "confirm": "Best\u00e4tigen"
95
- }
96
- },
97
- "threadHistory": {
98
- "sidebar": {
99
- "filters": {
100
- "FeedbackSelect": {
101
- "feedbackAll": "Feedback: Alle",
102
- "feedbackPositive": "Feedback: Positiv",
103
- "feedbackNegative": "Feedback: Negativ"
104
- },
105
- "SearchBar": {
106
- "search": "Suche"
107
- }
108
- },
109
- "DeleteThreadButton": {
110
- "confirmMessage": "Dies wird den Thread sowie seine Nachrichten und Elemente l\u00f6schen.",
111
- "cancel": "Abbrechen",
112
- "confirm": "Best\u00e4tigen",
113
- "deletingChat": "Chat wird gel\u00f6scht",
114
- "chatDeleted": "Chat gel\u00f6scht"
115
- },
116
- "index": {
117
- "pastChats": "Vergangene Chats"
118
- },
119
- "ThreadList": {
120
- "empty": "Leer...",
121
- "today": "Heute",
122
- "yesterday": "Gestern",
123
- "previous7days": "Vor 7 Tagen",
124
- "previous30days": "Vor 30 Tagen"
125
- },
126
- "TriggerButton": {
127
- "closeSidebar": "Seitenleiste schlie\u00dfen",
128
- "openSidebar": "Seitenleiste \u00f6ffnen"
129
- }
130
- },
131
- "Thread": {
132
- "backToChat": "Zur\u00fcck zum Chat",
133
- "chatCreatedOn": "Dieser Chat wurde erstellt am"
134
- }
135
- },
136
- "header": {
137
- "chat": "Chat",
138
- "readme": "Liesmich"
139
- }
140
- }
141
- },
142
- "hooks": {
143
- "useLLMProviders": {
144
- "failedToFetchProviders": "Anbieter konnten nicht geladen werden:"
145
- }
146
- },
147
- "pages": {
148
- "Design": {},
149
- "Env": {
150
- "savedSuccessfully": "Erfolgreich gespeichert",
151
- "requiredApiKeys": "Ben\u00f6tigte API-Schl\u00fcssel",
152
- "requiredApiKeysInfo": "Um diese App zu nutzen, werden die folgenden API-Schl\u00fcssel ben\u00f6tigt. Die Schl\u00fcssel werden im lokalen Speicher Ihres Ger\u00e4ts gespeichert."
153
- },
154
- "Page": {
155
- "notPartOfProject": "Sie sind nicht Teil dieses Projekts."
156
- },
157
- "ResumeButton": {
158
- "resumeChat": "Chat fortsetzen"
159
- }
160
- }
161
- }