chainlit 1.0.401__py3-none-any.whl → 1.0.501__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of chainlit may be problematic.

@@ -22,7 +22,7 @@
   <script>
     const global = globalThis;
   </script>
-  <script type="module" crossorigin src="/assets/index-9711593e.js"></script>
+  <script type="module" crossorigin src="/assets/index-e306c2e5.js"></script>
   <link rel="stylesheet" href="/assets/index-d088547c.css">
 </head>
 <body>
@@ -533,7 +533,7 @@ class LangchainTracer(BaseTracer, GenerationHelper, FinalStreamHelper):
                         break
 
             current_step.language = "json"
-            current_step.output = json.dumps(message_completion)
+            current_step.output = json.dumps(message_completion, indent=4, ensure_ascii=False)
         else:
             completion_start = self.completion_generations[str(run.id)]
             completion = generation.get("text", "")
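For reference, the only behavioural change in this hunk is how the completion payload is serialized; a standalone illustration (the payload dict below is made up):

```python
import json

# Hypothetical message_completion payload, for illustration only.
message_completion = {"role": "assistant", "content": "Bonjour, ça va ?"}

# Old behaviour: compact output with non-ASCII escaped.
print(json.dumps(message_completion))

# New behaviour: pretty-printed, non-ASCII characters kept as-is.
print(json.dumps(message_completion, indent=4, ensure_ascii=False))
```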
@@ -14,7 +14,6 @@ DEFAULT_IGNORE = [
     CBEventType.SYNTHESIZE,
     CBEventType.EMBEDDING,
     CBEventType.NODE_PARSING,
-    CBEventType.QUERY,
     CBEventType.TREE,
 ]
 
@@ -71,9 +70,12 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
     ) -> str:
         """Run when an event starts and return id of event."""
         self._restore_context()
+
         step_type: StepType = "undefined"
         if event_type == CBEventType.RETRIEVE:
             step_type = "retrieval"
+        elif event_type == CBEventType.QUERY:
+            step_type = "retrieval"
         elif event_type == CBEventType.LLM:
             step_type = "llm"
         else:
@@ -84,7 +86,7 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
             type=step_type,
             parent_id=self._get_parent_id(parent_id),
             id=event_id,
-            disable_feedback=False,
+            disable_feedback=True,
         )
         self.steps[event_id] = step
         step.start = utc_now()
@@ -102,6 +104,7 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
         """Run when an event ends."""
         step = self.steps.get(event_id, None)
 
+
         if payload is None or step is None:
             return
 
@@ -109,10 +112,26 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
 
         step.end = utc_now()
 
-        if event_type == CBEventType.RETRIEVE:
+        if event_type == CBEventType.QUERY:
+            response = payload.get(EventPayload.RESPONSE)
+            source_nodes = getattr(response, "source_nodes", None)
+            if source_nodes:
+                source_refs = ", ".join(
+                    [f"Source {idx}" for idx, _ in enumerate(source_nodes)])
+                step.elements = [
+                    Text(
+                        name=f"Source {idx}",
+                        content=source.text or "Empty node",
+                    )
+                    for idx, source in enumerate(source_nodes)
+                ]
+            step.output = f"Retrieved the following sources: {source_refs}"
+            self.context.loop.create_task(step.update())
+
+        elif event_type == CBEventType.RETRIEVE:
             sources = payload.get(EventPayload.NODES)
             if sources:
-                source_refs = "\, ".join(
+                source_refs = ", ".join(
                     [f"Source {idx}" for idx, _ in enumerate(sources)]
                 )
                 step.elements = [
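With QUERY no longer ignored and mapped to a retrieval step, the handler now renders query sources the same way it renders retrieval sources. A minimal wiring sketch (import paths assume llama-index >= 0.10; the handler name comes from chainlit's public API):

```python
import chainlit as cl
from llama_index.core import Settings
from llama_index.core.callbacks import CallbackManager

@cl.on_chat_start
async def start():
    # Route llama_index instrumentation, including the newly handled QUERY
    # events, into Chainlit steps in the UI.
    Settings.callback_manager = CallbackManager([cl.LlamaIndexCallbackHandler()])
```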
@@ -125,7 +144,7 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
             step.output = f"Retrieved the following sources: {source_refs}"
             self.context.loop.create_task(step.update())
 
-        if event_type == CBEventType.LLM:
+        elif event_type == CBEventType.LLM:
             formatted_messages = payload.get(
                 EventPayload.MESSAGES
             )  # type: Optional[List[ChatMessage]]
@@ -152,10 +171,13 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
             step.output = content
 
             token_count = self.total_llm_token_count or None
-
+            raw_response = response.raw if response else None
+            model = raw_response.get("model", None) if raw_response else None
+
             if messages and isinstance(response, ChatResponse):
                 msg: ChatMessage = response.message
                 step.generation = ChatGeneration(
+                    model=model,
                     messages=messages,
                     message_completion=GenerationMessage(
                         role=msg.role.value,  # type: ignore
@@ -165,6 +187,7 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
                 )
             elif formatted_prompt:
                 step.generation = CompletionGeneration(
+                    model=model,
                     prompt=formatted_prompt,
                     completion=content,
                     token_count=token_count,
@@ -172,6 +195,11 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
 
             self.context.loop.create_task(step.update())
 
+        else:
+            step.output = payload.get
+            self.context.loop.create_task(step.update())
+            return
+
         self.steps.pop(event_id, None)
 
     def _noop(self, *args, **kwargs):
@@ -179,3 +207,4 @@ class LlamaIndexCallbackHandler(TokenCountingHandler):
 
     start_trace = _noop
     end_trace = _noop
+
chainlit/markdown.py CHANGED
@@ -30,9 +30,19 @@ def init_markdown(root: str):
         logger.info(f"Created default chainlit markdown file at {chainlit_md_file}")
 
 
-def get_markdown_str(root: str):
+def get_markdown_str(root: str, language: str):
     """Get the chainlit.md file as a string."""
-    chainlit_md_path = os.path.join(root, "chainlit.md")
+    translated_chainlit_md_path = os.path.join(root, f"chainlit_{language}.md")
+    default_chainlit_md_path = os.path.join(root, "chainlit.md")
+
+    if os.path.exists(translated_chainlit_md_path):
+        chainlit_md_path = translated_chainlit_md_path
+    else:
+        chainlit_md_path = default_chainlit_md_path
+        logger.warning(
+            f"Translated markdown file for {language} not found. Defaulting to chainlit.md."
+        )
+
     if os.path.exists(chainlit_md_path):
         with open(chainlit_md_path, "r", encoding="utf-8") as f:
             chainlit_md = f.read()
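The new signature first looks for a language-specific file and only then falls back to the default one. A small sketch of the resolution order (the `pt-BR` value is just an example):

```python
import os

root = "."           # project root, for illustration
language = "pt-BR"   # hypothetical UI language

# Mirrors the lookup above: prefer chainlit_<language>.md, else chainlit.md.
translated = os.path.join(root, f"chainlit_{language}.md")
default = os.path.join(root, "chainlit.md")
path = translated if os.path.exists(translated) else default
print(path)
```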
chainlit/message.py CHANGED
@@ -39,6 +39,8 @@ class MessageBase(ABC):
     persisted = False
     is_error = False
     language: Optional[str] = None
+    metadata: Optional[Dict] = None
+    tags: Optional[List[str]] = None
     wait_for_answer = False
     indent: Optional[int] = None
     generation: Optional[BaseGeneration] = None
@@ -83,6 +85,8 @@ class MessageBase(ABC):
             "waitForAnswer": self.wait_for_answer,
             "indent": self.indent,
             "generation": self.generation.to_dict() if self.generation else None,
+            "metadata": self.metadata or {},
+            "tags": self.tags,
         }
 
         return _dict
@@ -209,6 +213,8 @@ class Message(MessageBase):
         disable_feedback: bool = False,
         type: MessageStepType = "assistant_message",
         generation: Optional[BaseGeneration] = None,
+        metadata: Optional[Dict] = None,
+        tags: Optional[List[str]] = None,
         id: Optional[str] = None,
         created_at: Union[str, None] = None,
     ):
@@ -234,6 +240,9 @@ class Message(MessageBase):
         if created_at:
             self.created_at = created_at
 
+        self.metadata = metadata
+        self.tags = tags
+
         self.author = author
         self.type = type
         self.actions = actions if actions is not None else []
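With the new keyword arguments, callers can attach arbitrary metadata and tags to a message, and both fields are included in the serialized dict shown earlier. A hedged usage sketch (field values are illustrative):

```python
import chainlit as cl

@cl.on_message
async def on_message(message: cl.Message):
    await cl.Message(
        content="Hello!",
        metadata={"source": "faq", "score": 0.87},  # illustrative metadata
        tags=["greeting", "demo"],                  # illustrative tags
    ).send()
```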
@@ -542,7 +551,7 @@ class AskActionMessage(AskMessageBase):
         if res is None:
             self.content = "Timed out: no action was taken"
         else:
-            self.content = f'**Selected action:** {res["label"]}'
+            self.content = f'**Selected:** {res["label"]}'
 
         self.wait_for_answer = False
 
@@ -1,8 +1,7 @@
 from typing import Union
-
+import asyncio
 from chainlit.context import get_context
 from chainlit.step import Step
-from chainlit.sync import run_sync
 from chainlit.utils import check_module_version
 from literalai import ChatGeneration, CompletionGeneration
 from literalai.helper import timestamp_utc
@@ -57,6 +56,6 @@ def instrument_openai():
     def on_new_generation_sync(
         generation: Union["ChatGeneration", "CompletionGeneration"], timing
     ):
-        run_sync(on_new_generation(generation, timing))
-
+        asyncio.create_task(on_new_generation(generation, timing))
+
     instrument_openai(None, on_new_generation_sync)
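The switch from `run_sync` to `asyncio.create_task` makes the callback fire-and-forget instead of blocking, but it also means the callback must be invoked while an event loop is running. A generic illustration of that behaviour (names are placeholders, not the chainlit internals):

```python
import asyncio

async def persist(generation, timing):
    await asyncio.sleep(0)  # stand-in for the real persistence coroutine

def on_new_generation_sync(generation, timing):
    # Schedules the coroutine and returns immediately; raises RuntimeError
    # if no event loop is running in the calling thread.
    asyncio.create_task(persist(generation, timing))

async def main():
    on_new_generation_sync({"model": "gpt-4"}, timing=0.2)
    await asyncio.sleep(0)  # give the scheduled task a chance to run

asyncio.run(main())
```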
@@ -4,9 +4,7 @@ from chainlit.playground.provider import BaseProvider
 from chainlit.playground.providers import (
     Anthropic,
     AzureChatOpenAI,
-    AzureOpenAI,
     ChatOpenAI,
-    OpenAI,
     ChatVertexAI,
     GenerationVertexAI,
     Gemini,
@@ -14,9 +12,7 @@ from chainlit.playground.providers import (
 
 providers = {
     AzureChatOpenAI.id: AzureChatOpenAI,
-    AzureOpenAI.id: AzureOpenAI,
     ChatOpenAI.id: ChatOpenAI,
-    OpenAI.id: OpenAI,
     Anthropic.id: Anthropic,
     ChatVertexAI.id: ChatVertexAI,
     GenerationVertexAI.id: GenerationVertexAI,
@@ -2,9 +2,7 @@ from .anthropic import Anthropic
 from .huggingface import HFFlanT5
 from .openai import (
     AzureChatOpenAI,
-    AzureOpenAI,
     ChatOpenAI,
-    OpenAI,
 )
 from .vertexai import (
     ChatVertexAI,
@@ -168,8 +168,8 @@ class ChatOpenAIProvider(BaseProvider):
 
         async def create_event_stream():
             message = response.choices[0].message
-            if function_call := message.function_call:
-                yield stringify_function_call(function_call)
+            if tool_calls := message.tool_calls:
+                yield json.dumps([tc.model_dump() for tc in tool_calls], indent=4, ensure_ascii=False)
             else:
                 yield message.content or ""
 
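Both the ChatOpenAI provider above and the AzureChatOpenAI provider in the next hunk now serialize `tool_calls` instead of the deprecated `function_call`. A rough illustration of the JSON the playground yields, built with the openai >= 1.x SDK types (the id and arguments are made up):

```python
import json

from openai.types.chat import ChatCompletionMessageToolCall

tool_calls = [
    ChatCompletionMessageToolCall.model_validate(
        {
            "id": "call_abc123",  # fabricated id, for illustration only
            "type": "function",
            "function": {"name": "get_weather", "arguments": '{"city": "Paris"}'},
        }
    )
]
print(json.dumps([tc.model_dump() for tc in tool_calls], indent=4, ensure_ascii=False))
```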
@@ -335,8 +335,8 @@ class AzureChatOpenAIProvider(BaseProvider):
 
         async def create_event_stream():
             message = response.choices[0].message
-            if function_call := message.function_call:
-                yield stringify_function_call(function_call)
+            if tool_calls := message.tool_calls:
+                yield json.dumps([tc.model_dump() for tc in tool_calls], indent=4, ensure_ascii=False)
             else:
                 yield message.content or ""
 
@@ -374,21 +374,7 @@ ChatOpenAI = ChatOpenAIProvider(
     is_chat=True,
 )
 
-OpenAI = OpenAIProvider(
-    id="openai",
-    name="OpenAI",
-    env_vars=openai_env_vars,
-    inputs=[
-        Select(
-            id="model",
-            label="Model",
-            values=["text-davinci-003", "text-davinci-002"],
-            initial_value="text-davinci-003",
-        ),
-        *openai_common_inputs,
-    ],
-    is_chat=False,
-)
+
 
 
 AzureChatOpenAI = AzureChatOpenAIProvider(
@@ -398,11 +384,3 @@ AzureChatOpenAI = AzureChatOpenAIProvider(
     inputs=openai_common_inputs,
     is_chat=True,
 )
-
-AzureOpenAI = AzureOpenAIProvider(
-    id="azure",
-    name="AzureOpenAI",
-    env_vars=azure_openai_env_vars,
-    inputs=openai_common_inputs,
-    is_chat=False,
-)
chainlit/server.py CHANGED
@@ -36,6 +36,7 @@ from chainlit.markdown import get_markdown_str
 from chainlit.playground.config import get_llm_providers
 from chainlit.telemetry import trace_event
 from chainlit.types import (
+    DeleteFeedbackRequest,
     DeleteThreadRequest,
     GenerationRequest,
     GetThreadsRequest,
@@ -53,6 +54,7 @@ from fastapi import (
     UploadFile,
     status,
 )
+from fastapi.middleware.gzip import GZipMiddleware
 from fastapi.responses import FileResponse, HTMLResponse, JSONResponse, RedirectResponse
 from fastapi.security import OAuth2PasswordRequestForm
 from fastapi.staticfiles import StaticFiles
@@ -139,7 +141,12 @@ async def lifespan(app: FastAPI):
 def get_build_dir(local_target: str, packaged_target: str):
     local_build_dir = os.path.join(PACKAGE_ROOT, local_target, "dist")
     packaged_build_dir = os.path.join(BACKEND_ROOT, packaged_target, "dist")
-    if os.path.exists(local_build_dir):
+
+    if config.ui.custom_build and os.path.exists(
+        os.path.join(APP_ROOT, config.ui.custom_build, packaged_target, "dist")
+    ):
+        return os.path.join(APP_ROOT, config.ui.custom_build, packaged_target, "dist")
+    elif os.path.exists(local_build_dir):
         return local_build_dir
     elif os.path.exists(packaged_build_dir):
         return packaged_build_dir
@@ -181,10 +188,13 @@ app.add_middleware(
     allow_headers=["*"],
 )
 
+app.add_middleware(GZipMiddleware)
+
 socket = SocketManager(
     app,
     cors_allowed_origins=[],
     async_mode="asgi",
+    socketio_path="/ws/socket.io",
 )
 
 
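Because the Socket.IO endpoint is now mounted under `/ws/socket.io`, any custom client has to use the matching path. A connection sketch assuming the python-socketio package and a locally running app (authentication headers, if required, are omitted):

```python
import socketio  # the python-socketio client package

sio = socketio.Client()
sio.connect(
    "http://localhost:8000",        # illustrative URL
    socketio_path="/ws/socket.io",  # must match the new server-side mount
)
```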
@@ -498,6 +508,22 @@ async def get_providers(
     return JSONResponse(content={"providers": providers})
 
 
+@app.get("/project/translations")
+async def project_translations(
+    language: str = Query(default="en-US", description="Language code"),
+):
+    """Return project translations."""
+
+    # Load translation based on the provided language
+    translation = config.load_translation(language)
+
+    return JSONResponse(
+        content={
+            "translation": translation,
+        }
+    )
+
+
 @app.get("/project/settings")
 async def project_settings(
     current_user: Annotated[Union[User, PersistedUser], Depends(get_current_user)],
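Translations now come from this dedicated endpoint rather than from `/project/settings`. A hedged client-side sketch using httpx against a locally running app:

```python
import httpx

resp = httpx.get(
    "http://localhost:8000/project/translations",  # illustrative base URL
    params={"language": "fr-FR"},                  # defaults to en-US if omitted
)
translation = resp.json()["translation"]
```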
@@ -505,8 +531,8 @@ async def project_settings(
 ):
     """Return project settings. This is called by the UI before the establishing the websocket connection."""
 
-    # Load translation based on the provided language
-    translation = config.load_translation(language)
+    # Load the markdown file based on the provided language
+    markdown = get_markdown_str(config.root, language)
 
     profiles = []
     if config.code.set_chat_profiles:
@@ -520,9 +546,8 @@ async def project_settings(
             "userEnv": config.project.user_env,
             "dataPersistence": get_data_layer() is not None,
             "threadResumable": bool(config.code.on_chat_resume),
-            "markdown": get_markdown_str(config.root),
+            "markdown": markdown,
             "chatProfiles": profiles,
-            "translation": translation,
         }
     )
 
@@ -546,6 +571,25 @@ async def update_feedback(
     return JSONResponse(content={"success": True, "feedbackId": feedback_id})
 
 
+@app.delete("/feedback")
+async def delete_feedback(
+    request: Request,
+    payload: DeleteFeedbackRequest,
+    current_user: Annotated[Union[User, PersistedUser], Depends(get_current_user)],
+):
+    """Delete a feedback."""
+
+    data_layer = get_data_layer()
+
+    if not data_layer:
+        raise HTTPException(status_code=400, detail="Data persistence is not enabled")
+
+    feedback_id = payload.feedbackId
+
+    await data_layer.delete_feedback(feedback_id)
+    return JSONResponse(content={"success": True})
+
+
 @app.post("/project/threads")
 async def get_user_threads(
     request: Request,
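The new route expects the feedback id in a JSON body, so a client must issue a DELETE with a payload. A sketch with httpx (authentication via the `get_current_user` dependency is omitted, and the id is a placeholder):

```python
import httpx

resp = httpx.request(
    "DELETE",
    "http://localhost:8000/feedback",      # illustrative base URL
    json={"feedbackId": "<feedback-id>"},  # placeholder id
)
print(resp.json())  # {'success': True} when persistence is enabled
```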
@@ -560,7 +604,10 @@ async def get_user_threads(
     if not data_layer:
         raise HTTPException(status_code=400, detail="Data persistence is not enabled")
 
-    payload.filter.userIdentifier = current_user.identifier
+    if not isinstance(current_user, PersistedUser):
+        raise HTTPException(status_code=400, detail="User not persisted")
+
+    payload.filter.userId = current_user.id
 
     res = await data_layer.list_threads(payload.pagination, payload.filter)
     return JSONResponse(content=res.to_dict())
@@ -737,6 +784,11 @@ async def get_logo(theme: Optional[Theme] = Query(Theme.light)):
     return FileResponse(logo_path, media_type=media_type)
 
 
+@app.head("/")
+def status_check():
+    return {"message": "Site is operational"}
+
+
 def register_wildcard_route_handler():
     @app.get("/{path:path}")
     async def serve(request: Request, path: str):
chainlit/socket.py CHANGED
@@ -42,7 +42,7 @@ async def resume_thread(session: WebsocketSession):
     if not thread:
         return
 
-    author = thread.get("user").get("identifier") if thread["user"] else None
+    author = thread.get("userIdentifier")
     user_is_author = author == session.user.identifier
 
     if user_is_author:
chainlit/step.py CHANGED
@@ -29,6 +29,7 @@ class StepDict(TypedDict, total=False):
     waitForAnswer: Optional[bool]
     isError: Optional[bool]
     metadata: Dict
+    tags: Optional[List[str]]
     input: str
     output: str
     createdAt: Optional[str]
@@ -47,6 +48,7 @@ def step(
     name: Optional[str] = "",
     type: TrueStepType = "undefined",
     id: Optional[str] = None,
+    tags: Optional[List[str]] = None,
    disable_feedback: bool = True,
    root: bool = False,
    language: Optional[str] = None,
@@ -71,6 +73,7 @@ def step(
                 id=id,
                 disable_feedback=disable_feedback,
                 root=root,
+                tags=tags,
                 language=language,
                 show_input=show_input,
             ) as step:
@@ -97,6 +100,7 @@ def step(
                 id=id,
                 disable_feedback=disable_feedback,
                 root=root,
+                tags=tags,
                 language=language,
                 show_input=show_input,
             ) as step:
@@ -137,6 +141,7 @@ class Step:
 
     is_error: Optional[bool]
     metadata: Dict
+    tags: Optional[List[str]]
     thread_id: str
     created_at: Union[str, None]
     start: Union[str, None]
@@ -153,6 +158,8 @@ class Step:
         id: Optional[str] = None,
         parent_id: Optional[str] = None,
         elements: Optional[List[Element]] = None,
+        metadata: Optional[Dict] = None,
+        tags: Optional[List[str]] = None,
         disable_feedback: bool = True,
         root: bool = False,
         language: Optional[str] = None,
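Tags are now accepted by both the `@cl.step` decorator and the `Step` constructor, and metadata by the constructor. A hedged usage sketch (names and values are illustrative):

```python
import chainlit as cl

@cl.step(name="lookup", type="tool", tags=["db", "demo"])
async def lookup(query: str):
    return f"results for {query}"

@cl.on_message
async def on_message(message: cl.Message):
    await lookup(message.content)
    async with cl.Step(
        name="manual-step",
        type="undefined",
        metadata={"attempt": 1},  # stored on the step, serialized as "metadata"
        tags=["manual"],          # serialized as "tags"
    ) as step:
        step.output = "done"
```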
@@ -167,7 +174,8 @@ class Step:
         self.type = type
         self.id = id or str(uuid.uuid4())
         self.disable_feedback = disable_feedback
-        self.metadata = {}
+        self.metadata = metadata or {}
+        self.tags = tags
         self.is_error = False
         self.show_input = show_input
         self.parent_id = parent_id
@@ -194,13 +202,13 @@ class Step:
                 if set_language:
                     self.language = "json"
             except TypeError:
-                processed_content = str(content)
+                processed_content = str(content).replace("\\n", "\n")
                 if set_language:
                     self.language = "text"
         elif isinstance(content, str):
             processed_content = content
         else:
-            processed_content = str(content)
+            processed_content = str(content).replace("\\n", "\n")
             if set_language:
                 self.language = "text"
         return processed_content
@@ -231,6 +239,7 @@ class Step:
             "disableFeedback": self.disable_feedback,
             "streaming": self.streaming,
             "metadata": self.metadata,
+            "tags": self.tags,
             "input": self.input,
             "isError": self.is_error,
             "output": self.output,
@@ -34,15 +34,87 @@
         "confirm": "Confirm"
       },
       "settingsModal": {
+        "settings": "Settings",
         "expandMessages": "Expand Messages",
         "hideChainOfThought": "Hide Chain of Thought",
         "darkMode": "Dark Mode"
+      },
+      "detailsButton": {
+        "using": "Using",
+        "running": "Running",
+        "took_one": "Took {{count}} step",
+        "took_other": "Took {{count}} steps"
+      },
+      "auth": {
+        "authLogin": {
+          "title": "Login to access the app.",
+          "form": {
+            "email": "Email address",
+            "password": "Password",
+            "noAccount": "Don't have an account?",
+            "alreadyHaveAccount": "Already have an account?",
+            "signup": "Sign Up",
+            "signin": "Sign In",
+            "or": "OR",
+            "continue": "Continue",
+            "forgotPassword": "Forgot password?",
+            "passwordMustContain": "Your password must contain:",
+            "emailRequired": "email is a required field",
+            "passwordRequired": "password is a required field"
+          },
+          "error": {
+            "default": "Unable to sign in.",
+            "signin": "Try signing in with a different account.",
+            "oauthsignin": "Try signing in with a different account.",
+            "redirect_uri_mismatch": "The redirect URI is not matching the oauth app configuration.",
+            "oauthcallbackerror": "Try signing in with a different account.",
+            "oauthcreateaccount": "Try signing in with a different account.",
+            "emailcreateaccount": "Try signing in with a different account.",
+            "callback": "Try signing in with a different account.",
+            "oauthaccountnotlinked": "To confirm your identity, sign in with the same account you used originally.",
+            "emailsignin": "The e-mail could not be sent.",
+            "emailverify": "Please verify your email, a new email has been sent.",
+            "credentialssignin": "Sign in failed. Check the details you provided are correct.",
+            "sessionrequired": "Please sign in to access this page."
+          }
+        },
+        "authVerifyEmail": {
+          "almostThere": "You're almost there! We've sent an email to ",
+          "verifyEmailLink": "Please click on the link in that email to complete your signup.",
+          "didNotReceive": "Can't find the email?",
+          "resendEmail": "Resend email",
+          "goBack": "Go Back",
+          "emailSent": "Email sent successfully.",
+          "verifyEmail": "Verify your email address"
+        },
+        "providerButton": {
+          "continue": "Continue with {{provider}}",
+          "signup": "Sign up with {{provider}}"
+        },
+        "authResetPassword": {
+          "newPasswordRequired": "New password is a required field",
+          "passwordsMustMatch": "Passwords must match",
+          "confirmPasswordRequired": "Confirm password is a required field",
+          "newPassword": "New password",
+          "confirmPassword": "Confirm password",
+          "resetPassword": "Reset Password"
+        },
+        "authForgotPassword": {
+          "email": "Email address",
+          "emailRequired": "email is a required field",
+          "emailSent": "Please check the email address {{email}} for instructions to reset your password.",
+          "enterEmail": "Enter your email address and we will send you instructions to reset your password.",
+          "resendEmail": "Resend email",
+          "continue": "Continue",
+          "goBack": "Go Back"
+        }
       }
     },
     "organisms": {
       "chat": {
         "history": {
           "index": {
+            "showHistory": "Show history",
             "lastInputs": "Last Inputs",
             "noInputs": "Such empty...",
             "loading": "Loading..."
@@ -115,7 +187,11 @@
           "pastChats": "Past Chats"
         },
         "ThreadList": {
-          "empty": "Empty..."
+          "empty": "Empty...",
+          "today": "Today",
+          "yesterday": "Yesterday",
+          "previous7days": "Previous 7 days",
+          "previous30days": "Previous 30 days"
         },
         "TriggerButton": {
           "closeSidebar": "Close sidebar",