letta-nightly 0.6.24.dev20250212104045__py3-none-any.whl → 0.6.25.dev20250213104102__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of letta-nightly might be problematic.

letta/schemas/agent.py CHANGED
@@ -43,7 +43,6 @@ class AgentState(OrmMetadataBase, validate_assignment=True):
         system (str): The system prompt used by the agent.
         llm_config (LLMConfig): The LLM configuration used by the agent.
         embedding_config (EmbeddingConfig): The embedding configuration used by the agent.
-
     """
 
     __id_prefix__ = "agent"
@@ -85,6 +84,12 @@ class AgentState(OrmMetadataBase, validate_assignment=True):
     template_id: Optional[str] = Field(None, description="The id of the template the agent belongs to.")
     base_template_id: Optional[str] = Field(None, description="The base template id of the agent.")
 
+    # An advanced configuration that makes it so this agent does not remember any previous messages
+    message_buffer_autoclear: bool = Field(
+        False,
+        description="If set to True, the agent will not remember previous messages (though the agent will still retain state via core memory blocks and archival/recall memory). Not recommended unless you have an advanced use case.",
+    )
+
     def get_agent_env_vars_as_dict(self) -> Dict[str, str]:
         # Get environment variables for this agent specifically
         per_agent_env_vars = {}
@@ -146,6 +151,10 @@ class CreateAgent(BaseModel, validate_assignment=True): #
     project_id: Optional[str] = Field(None, description="The id of the project the agent belongs to.")
     template_id: Optional[str] = Field(None, description="The id of the template the agent belongs to.")
     base_template_id: Optional[str] = Field(None, description="The base template id of the agent.")
+    message_buffer_autoclear: bool = Field(
+        False,
+        description="If set to True, the agent will not remember previous messages (though the agent will still retain state via core memory blocks and archival/recall memory). Not recommended unless you have an advanced use case.",
+    )
 
     @field_validator("name")
     @classmethod
@@ -216,6 +225,10 @@ class UpdateAgent(BaseModel):
     project_id: Optional[str] = Field(None, description="The id of the project the agent belongs to.")
     template_id: Optional[str] = Field(None, description="The id of the template the agent belongs to.")
     base_template_id: Optional[str] = Field(None, description="The base template id of the agent.")
+    message_buffer_autoclear: Optional[bool] = Field(
+        None,
+        description="If set to True, the agent will not remember previous messages (though the agent will still retain state via core memory blocks and archival/recall memory). Not recommended unless you have an advanced use case.",
+    )
 
     class Config:
         extra = "ignore"  # Ignores extra fields
@@ -26,6 +26,7 @@ class EmbeddingConfig(BaseModel):
         "bedrock",
         "cohere",
         "google_ai",
+        "google_vertex",
         "azure",
         "groq",
         "ollama",
@@ -25,6 +25,7 @@ class LLMConfig(BaseModel):
         "anthropic",
         "cohere",
         "google_ai",
+        "google_vertex",
         "azure",
         "groq",
         "ollama",
letta/schemas/message.py CHANGED
@@ -570,19 +570,12 @@ class Message(BaseMessage):
                 "role": "user",
             }
 
-            # Optional field, do not include if null
-            if self.name is not None:
-                anthropic_message["name"] = self.name
-
         elif self.role == "user":
             assert all([v is not None for v in [self.text, self.role]]), vars(self)
             anthropic_message = {
                 "content": self.text,
                 "role": self.role,
             }
-            # Optional field, do not include if null
-            if self.name is not None:
-                anthropic_message["name"] = self.name
 
         elif self.role == "assistant":
             assert self.tool_calls is not None or self.text is not None
@@ -624,10 +617,6 @@ class Message(BaseMessage):
             # TODO support multi-modal
             anthropic_message["content"] = content
 
-            # Optional fields, do not include if null
-            if self.name is not None:
-                anthropic_message["name"] = self.name
-
         elif self.role == "tool":
             # NOTE: Anthropic uses role "user" for "tool" responses
             assert all([v is not None for v in [self.role, self.tool_call_id]]), vars(self)
@@ -327,7 +327,7 @@ class LMStudioOpenAIProvider(OpenAIProvider):
                 embedding_endpoint_type="openai",
                 embedding_endpoint=self.base_url,
                 embedding_dim=context_window_size,
-                embedding_chunk_size=300,
+                embedding_chunk_size=300,  # NOTE: max is 2048
                 handle=self.get_handle(model_name),
             ),
         )
@@ -737,6 +737,45 @@ class GoogleAIProvider(Provider):
         return google_ai_get_model_context_window(self.base_url, self.api_key, model_name)
 
 
+class GoogleVertexProvider(Provider):
+    name: str = "google_vertex"
+    google_cloud_project: str = Field(..., description="GCP project ID for the Google Vertex API.")
+    google_cloud_location: str = Field(..., description="GCP region for the Google Vertex API.")
+
+    def list_llm_models(self) -> List[LLMConfig]:
+        from letta.llm_api.google_constants import GOOGLE_MODEL_TO_CONTEXT_LENGTH
+
+        configs = []
+        for model, context_length in GOOGLE_MODEL_TO_CONTEXT_LENGTH.items():
+            configs.append(
+                LLMConfig(
+                    model=model,
+                    model_endpoint_type="google_vertex",
+                    model_endpoint=f"https://{self.google_cloud_location}-aiplatform.googleapis.com/v1/projects/{self.google_cloud_project}/locations/{self.google_cloud_location}",
+                    context_window=context_length,
+                    handle=self.get_handle(model),
+                )
+            )
+        return configs
+
+    def list_embedding_models(self) -> List[EmbeddingConfig]:
+        from letta.llm_api.google_constants import GOOGLE_EMBEDING_MODEL_TO_DIM
+
+        configs = []
+        for model, dim in GOOGLE_EMBEDING_MODEL_TO_DIM.items():
+            configs.append(
+                EmbeddingConfig(
+                    embedding_model=model,
+                    embedding_endpoint_type="google_vertex",
+                    embedding_endpoint=f"https://{self.google_cloud_location}-aiplatform.googleapis.com/v1/projects/{self.google_cloud_project}/locations/{self.google_cloud_location}",
+                    embedding_dim=dim,
+                    embedding_chunk_size=300,  # NOTE: max is 2048
+                    handle=self.get_handle(model, is_embedding=True),
+                )
+            )
+        return configs
+
+
 class AzureProvider(Provider):
     name: str = "azure"
     latest_api_version: str = "2024-09-01-preview"  # https://learn.microsoft.com/en-us/azure/ai-services/openai/api-version-deprecation
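The Vertex endpoint above is assembled from the configured project and location; evaluating the same f-string with hypothetical values shows the resulting URL:

    google_cloud_project = "my-gcp-project"   # hypothetical
    google_cloud_location = "us-central1"     # hypothetical
    endpoint = (
        f"https://{google_cloud_location}-aiplatform.googleapis.com/v1"
        f"/projects/{google_cloud_project}/locations/{google_cloud_location}"
    )
    # -> https://us-central1-aiplatform.googleapis.com/v1/projects/my-gcp-project/locations/us-central1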
@@ -792,8 +831,8 @@ class AzureProvider(Provider):
                 embedding_endpoint=model_endpoint,
                 embedding_dim=768,
                 embedding_chunk_size=300,  # NOTE: max is 2048
-                handle=self.get_handle(model_name, is_embedding=True),
-            )
+                handle=self.get_handle(model_name),
+            ),
         )
         return configs
 
@@ -124,6 +124,10 @@ def upsert_tool(
         # Log the error and raise a conflict exception
         print(f"Unique constraint violation occurred: {e}")
         raise HTTPException(status_code=409, detail=str(e))
+    except LettaToolCreateError as e:
+        # HTTP 400 == Bad Request
+        print(f"Error occurred during tool upsert: {e}")
+        raise HTTPException(status_code=400, detail=str(e))
     except Exception as e:
         # Catch other unexpected errors and raise an internal server error
         print(f"Unexpected error occurred: {e}")
@@ -140,8 +144,17 @@ def modify_tool(
     """
     Update an existing tool
     """
-    actor = server.user_manager.get_user_or_default(user_id=user_id)
-    return server.tool_manager.update_tool_by_id(tool_id=tool_id, tool_update=request, actor=actor)
+    try:
+        actor = server.user_manager.get_user_or_default(user_id=user_id)
+        return server.tool_manager.update_tool_by_id(tool_id=tool_id, tool_update=request, actor=actor)
+    except LettaToolCreateError as e:
+        # HTTP 400 == Bad Request
+        print(f"Error occurred during tool update: {e}")
+        raise HTTPException(status_code=400, detail=str(e))
+    except Exception as e:
+        # Catch other unexpected errors and raise an internal server error
+        print(f"Unexpected error occurred: {e}")
+        raise HTTPException(status_code=500, detail=f"An unexpected error occurred: {str(e)}")
 
 
 @router.post("/add-base-tools", response_model=List[Tool], operation_id="add_base_tools")
letta/server/server.py CHANGED
@@ -47,6 +47,7 @@ from letta.schemas.providers import (
     AnthropicProvider,
     AzureProvider,
     GoogleAIProvider,
+    GoogleVertexProvider,
     GroqProvider,
     LettaProvider,
     LMStudioOpenAIProvider,
@@ -352,6 +353,13 @@ class SyncServer(Server):
                     api_key=model_settings.gemini_api_key,
                 )
             )
+        if model_settings.google_cloud_location and model_settings.google_cloud_project:
+            self._enabled_providers.append(
+                GoogleVertexProvider(
+                    google_cloud_project=model_settings.google_cloud_project,
+                    google_cloud_location=model_settings.google_cloud_location,
+                )
+            )
         if model_settings.azure_api_key and model_settings.azure_base_url:
             assert model_settings.azure_api_version, "AZURE_API_VERSION is required"
             self._enabled_providers.append(
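The provider is only appended when both settings are present; since ModelSettings is a pydantic settings class (see the letta/settings.py hunk further down for the new fields), these values are assumed to be supplied via the environment. A hedged sketch:

    import os

    # Assumed to map onto ModelSettings.google_cloud_project / .google_cloud_location
    # through pydantic BaseSettings environment loading; the values are hypothetical.
    os.environ["GOOGLE_CLOUD_PROJECT"] = "my-gcp-project"
    os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
    # With both set, SyncServer appends a GoogleVertexProvider at startup (see the hunk above).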
@@ -875,14 +883,12 @@ class SyncServer(Server):
         # TODO: Thread actor directly through this function, since the top level caller most likely already retrieved the user
 
         actor = self.user_manager.get_user_or_default(user_id=user_id)
-        start_date = self.message_manager.get_message_by_id(after, actor=actor).created_at if after else None
-        end_date = self.message_manager.get_message_by_id(before, actor=actor).created_at if before else None
 
         records = self.message_manager.list_messages_for_agent(
             agent_id=agent_id,
             actor=actor,
-            start_date=start_date,
-            end_date=end_date,
+            after=after,
+            before=before,
             limit=limit,
             ascending=not reverse,
         )
@@ -123,6 +123,7 @@ class AgentManager:
             project_id=agent_create.project_id,
             template_id=agent_create.template_id,
             base_template_id=agent_create.base_template_id,
+            message_buffer_autoclear=agent_create.message_buffer_autoclear,
         )
 
         # If there are provided environment variables, add them in
@@ -185,6 +186,7 @@ class AgentManager:
         project_id: Optional[str] = None,
         template_id: Optional[str] = None,
         base_template_id: Optional[str] = None,
+        message_buffer_autoclear: bool = False,
     ) -> PydanticAgentState:
         """Create a new agent."""
         with self.session_maker() as session:
@@ -202,6 +204,7 @@ class AgentManager:
                 "project_id": project_id,
                 "template_id": template_id,
                 "base_template_id": base_template_id,
+                "message_buffer_autoclear": message_buffer_autoclear,
             }
 
             # Create the new agent using SqlalchemyBase.create
@@ -263,6 +266,7 @@ class AgentManager:
             "project_id",
             "template_id",
             "base_template_id",
+            "message_buffer_autoclear",
         }
         for field in scalar_fields:
             value = getattr(agent_update, field, None)
@@ -494,6 +498,7 @@ class AgentManager:
     @enforce_types
     def trim_all_in_context_messages_except_system(self, agent_id: str, actor: PydanticUser) -> PydanticAgentState:
         message_ids = self.get_agent_by_id(agent_id=agent_id, actor=actor).message_ids
+        # TODO: How do we know this?
         new_messages = [message_ids[0]]  # 0 is system message
         return self._set_in_context_messages(agent_id=agent_id, message_ids=new_messages, actor=actor)
 
@@ -1,6 +1,8 @@
-from datetime import datetime
-from typing import Dict, List, Optional
+from typing import List, Optional
 
+from sqlalchemy import and_, or_
+
+from letta.orm.agent import Agent as AgentModel
 from letta.orm.errors import NoResultFound
 from letta.orm.message import Message as MessageModel
 from letta.schemas.enums import MessageRole
@@ -127,44 +129,21 @@ class MessageManager:
     def list_user_messages_for_agent(
         self,
         agent_id: str,
-        actor: Optional[PydanticUser] = None,
-        before: Optional[str] = None,
+        actor: PydanticUser,
         after: Optional[str] = None,
-        start_date: Optional[datetime] = None,
-        end_date: Optional[datetime] = None,
-        limit: Optional[int] = 50,
-        filters: Optional[Dict] = None,
+        before: Optional[str] = None,
         query_text: Optional[str] = None,
+        limit: Optional[int] = 50,
         ascending: bool = True,
     ) -> List[PydanticMessage]:
-        """List user messages with flexible filtering and pagination options.
-
-        Args:
-            before: Cursor-based pagination - return records before this ID (exclusive)
-            after: Cursor-based pagination - return records after this ID (exclusive)
-            start_date: Filter records created after this date
-            end_date: Filter records created before this date
-            limit: Maximum number of records to return
-            filters: Additional filters to apply
-            query_text: Optional text to search for in message content
-
-        Returns:
-            List[PydanticMessage] - List of messages matching the criteria
-        """
-        message_filters = {"role": "user"}
-        if filters:
-            message_filters.update(filters)
-
         return self.list_messages_for_agent(
             agent_id=agent_id,
             actor=actor,
-            before=before,
             after=after,
-            start_date=start_date,
-            end_date=end_date,
-            limit=limit,
-            filters=message_filters,
+            before=before,
             query_text=query_text,
+            role=MessageRole.user,
+            limit=limit,
             ascending=ascending,
         )
 
@@ -172,48 +151,94 @@ class MessageManager:
     def list_messages_for_agent(
         self,
         agent_id: str,
-        actor: Optional[PydanticUser] = None,
-        before: Optional[str] = None,
+        actor: PydanticUser,
         after: Optional[str] = None,
-        start_date: Optional[datetime] = None,
-        end_date: Optional[datetime] = None,
-        limit: Optional[int] = 50,
-        filters: Optional[Dict] = None,
+        before: Optional[str] = None,
         query_text: Optional[str] = None,
+        role: Optional[MessageRole] = None,  # New parameter for filtering by role
+        limit: Optional[int] = 50,
         ascending: bool = True,
     ) -> List[PydanticMessage]:
-        """List messages with flexible filtering and pagination options.
+        """
+        Most performant query to list messages for an agent by directly querying the Message table.
+
+        This function filters by the agent_id (leveraging the index on messages.agent_id)
+        and applies efficient pagination using (created_at, id) as the cursor.
+        If query_text is provided, it will filter messages whose text content partially matches the query.
+        If role is provided, it will filter messages by the specified role.
 
         Args:
-            before: Cursor-based pagination - return records before this ID (exclusive)
-            after: Cursor-based pagination - return records after this ID (exclusive)
-            start_date: Filter records created after this date
-            end_date: Filter records created before this date
-            limit: Maximum number of records to return
-            filters: Additional filters to apply
-            query_text: Optional text to search for in message content
+            agent_id: The ID of the agent whose messages are queried.
+            actor: The user performing the action (used for permission checks).
+            after: A message ID; if provided, only messages *after* this message (per sort order) are returned.
+            before: A message ID; if provided, only messages *before* this message are returned.
+            query_text: Optional string to partially match the message text content.
+            role: Optional MessageRole to filter messages by role.
+            limit: Maximum number of messages to return.
+            ascending: If True, sort by (created_at, id) ascending; if False, sort descending.
 
         Returns:
-            List[PydanticMessage] - List of messages matching the criteria
+            List[PydanticMessage]: A list of messages (converted via .to_pydantic()).
+
+        Raises:
+            NoResultFound: If the provided after/before message IDs do not exist.
         """
         with self.session_maker() as session:
-            # Start with base filters
-            message_filters = {"agent_id": agent_id}
-            if actor:
-                message_filters.update({"organization_id": actor.organization_id})
-            if filters:
-                message_filters.update(filters)
-
-            results = MessageModel.list(
-                db_session=session,
-                before=before,
-                after=after,
-                start_date=start_date,
-                end_date=end_date,
-                limit=limit,
-                query_text=query_text,
-                ascending=ascending,
-                **message_filters,
-            )
+            # Permission check: raise if the agent doesn't exist or actor is not allowed.
+            AgentModel.read(db_session=session, identifier=agent_id, actor=actor)
+
+            # Build a query that directly filters the Message table by agent_id.
+            query = session.query(MessageModel).filter(MessageModel.agent_id == agent_id)
+
+            # If query_text is provided, filter messages by partial match on text.
+            if query_text:
+                query = query.filter(MessageModel.text.ilike(f"%{query_text}%"))
+
+            # If role is provided, filter messages by role.
+            if role:
+                query = query.filter(MessageModel.role == role.value)  # Enum.value ensures comparison is against the string value
+
+            # Apply 'after' pagination if specified.
+            if after:
+                after_ref = session.query(MessageModel.created_at, MessageModel.id).filter(MessageModel.id == after).limit(1).one_or_none()
+                if not after_ref:
+                    raise NoResultFound(f"No message found with id '{after}' for agent '{agent_id}'.")
+                query = query.filter(
+                    or_(
+                        MessageModel.created_at > after_ref.created_at,
+                        and_(
+                            MessageModel.created_at == after_ref.created_at,
+                            MessageModel.id > after_ref.id,
+                        ),
+                    )
+                )
+
+            # Apply 'before' pagination if specified.
+            if before:
+                before_ref = (
+                    session.query(MessageModel.created_at, MessageModel.id).filter(MessageModel.id == before).limit(1).one_or_none()
+                )
+                if not before_ref:
+                    raise NoResultFound(f"No message found with id '{before}' for agent '{agent_id}'.")
+                query = query.filter(
+                    or_(
+                        MessageModel.created_at < before_ref.created_at,
+                        and_(
+                            MessageModel.created_at == before_ref.created_at,
+                            MessageModel.id < before_ref.id,
+                        ),
+                    )
+                )
+
+            # Apply ordering based on the ascending flag.
+            if ascending:
+                query = query.order_by(MessageModel.created_at.asc(), MessageModel.id.asc())
+            else:
+                query = query.order_by(MessageModel.created_at.desc(), MessageModel.id.desc())
+
+            # Limit the number of results.
+            query = query.limit(limit)
 
+            # Execute and convert each Message to its Pydantic representation.
+            results = query.all()
             return [msg.to_pydantic() for msg in results]
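The (created_at, id) pair used above is a standard keyset-pagination cursor: ties on created_at are broken by id, so the ordering is total and successive pages neither skip nor repeat rows. A self-contained sketch of the same "after" predicate in plain Python (the data here is illustrative, not from the diff):

    from datetime import datetime, timedelta

    t0 = datetime(2025, 2, 13)
    rows = [(t0, "msg-a"), (t0, "msg-b"), (t0 + timedelta(seconds=1), "msg-c")]

    def after_cursor(rows, cursor):
        # Keep rows strictly after the cursor under (created_at, id) ordering.
        c_created, c_id = cursor
        return [r for r in rows if r[0] > c_created or (r[0] == c_created and r[1] > c_id)]

    print(after_cursor(rows, (t0, "msg-a")))  # keeps "msg-b" (same timestamp, larger id) and "msg-c"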
letta/settings.py CHANGED
@@ -86,6 +86,11 @@ class ModelSettings(BaseSettings):
     # google ai
     gemini_api_key: Optional[str] = None
     gemini_base_url: str = "https://generativelanguage.googleapis.com/"
+
+    # google vertex
+    google_cloud_project: Optional[str] = None
+    google_cloud_location: Optional[str] = None
+
     # together
     together_api_key: Optional[str] = None
 
@@ -151,6 +156,9 @@ class Settings(BaseSettings):
     multi_agent_send_message_timeout: int = 20 * 60
     multi_agent_concurrent_sends: int = 15
 
+    # telemetry logging
+    verbose_telemetry_logging: bool = False
+
     @property
     def letta_pg_uri(self) -> str:
         if self.pg_uri:
letta/utils.py CHANGED
@@ -16,6 +16,7 @@ import uuid
 from contextlib import contextmanager
 from datetime import datetime, timedelta, timezone
 from functools import wraps
+from logging import Logger
 from typing import Any, Coroutine, List, Union, _GenericAlias, get_args, get_origin, get_type_hints
 from urllib.parse import urljoin, urlparse
 
@@ -1150,3 +1151,19 @@ def run_async_task(coro: Coroutine[Any, Any, Any]) -> Any:
     except RuntimeError:
         # If no event loop is running, create a new one
         return asyncio.run(coro)
+
+
+def log_telemetry(logger: Logger, event: str, **kwargs):
+    """
+    Logs telemetry events with a timestamp.
+
+    :param logger: A logger
+    :param event: A string describing the event.
+    :param kwargs: Additional key-value pairs for logging metadata.
+    """
+    from letta.settings import settings
+
+    if settings.verbose_telemetry_logging:
+        timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S,%f UTC")  # More readable timestamp
+        extra_data = " | ".join(f"{key}={value}" for key, value in kwargs.items() if value is not None)
+        logger.info(f"[{timestamp}] EVENT: {event} | {extra_data}")
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: letta-nightly
-Version: 0.6.24.dev20250212104045
+Version: 0.6.25.dev20250213104102
 Summary: Create LLM agents with long-term memory and custom tools
 License: Apache License
 Author: Letta Team
@@ -16,6 +16,7 @@ Provides-Extra: bedrock
 Provides-Extra: cloud-tool-sandbox
 Provides-Extra: dev
 Provides-Extra: external-tools
+Provides-Extra: google
 Provides-Extra: postgres
 Provides-Extra: qdrant
 Provides-Extra: server
@@ -36,6 +37,7 @@ Requires-Dist: docx2txt (>=0.8,<0.9)
 Requires-Dist: e2b-code-interpreter (>=1.0.3,<2.0.0) ; extra == "cloud-tool-sandbox"
 Requires-Dist: faker (>=36.1.0,<37.0.0)
 Requires-Dist: fastapi (>=0.115.6,<0.116.0) ; extra == "server" or extra == "all"
+Requires-Dist: google-genai (>=1.1.0,<2.0.0) ; extra == "google"
 Requires-Dist: grpcio (>=1.68.1,<2.0.0)
 Requires-Dist: grpcio-tools (>=1.68.1,<2.0.0)
 Requires-Dist: html2text (>=2020.1.16,<2021.0.0)
@@ -82,7 +84,6 @@ Requires-Dist: sqlmodel (>=0.0.16,<0.0.17)
 Requires-Dist: tqdm (>=4.66.1,<5.0.0)
 Requires-Dist: typer (>=0.12,<1.0)
 Requires-Dist: uvicorn (>=0.24.0.post1,<0.25.0) ; extra == "server" or extra == "all"
-Requires-Dist: websockets (>=12.0,<13.0) ; extra == "server" or extra == "all"
 Requires-Dist: wikipedia (>=1.4.0,<2.0.0) ; extra == "external-tools" or extra == "tests" or extra == "all"
 Description-Content-Type: text/markdown