letta-nightly 0.11.4.dev20250826104242__py3-none-any.whl → 0.11.6.dev20250827050912__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. letta/__init__.py +1 -1
  2. letta/agent.py +9 -3
  3. letta/agents/base_agent.py +2 -2
  4. letta/agents/letta_agent.py +56 -45
  5. letta/agents/voice_agent.py +2 -2
  6. letta/data_sources/redis_client.py +146 -1
  7. letta/errors.py +4 -0
  8. letta/functions/function_sets/files.py +2 -2
  9. letta/functions/mcp_client/types.py +30 -6
  10. letta/functions/schema_generator.py +46 -1
  11. letta/functions/schema_validator.py +17 -2
  12. letta/functions/types.py +1 -1
  13. letta/helpers/tool_execution_helper.py +0 -2
  14. letta/llm_api/anthropic_client.py +27 -5
  15. letta/llm_api/deepseek_client.py +97 -0
  16. letta/llm_api/groq_client.py +79 -0
  17. letta/llm_api/helpers.py +0 -1
  18. letta/llm_api/llm_api_tools.py +2 -113
  19. letta/llm_api/llm_client.py +21 -0
  20. letta/llm_api/llm_client_base.py +11 -9
  21. letta/llm_api/openai_client.py +3 -0
  22. letta/llm_api/xai_client.py +85 -0
  23. letta/prompts/prompt_generator.py +190 -0
  24. letta/schemas/agent_file.py +17 -2
  25. letta/schemas/file.py +24 -1
  26. letta/schemas/job.py +2 -0
  27. letta/schemas/letta_message.py +2 -0
  28. letta/schemas/letta_request.py +22 -0
  29. letta/schemas/message.py +10 -1
  30. letta/schemas/providers/bedrock.py +1 -0
  31. letta/schemas/response_format.py +2 -2
  32. letta/server/generate_openapi_schema.sh +4 -4
  33. letta/server/rest_api/redis_stream_manager.py +300 -0
  34. letta/server/rest_api/routers/v1/agents.py +129 -7
  35. letta/server/rest_api/routers/v1/folders.py +15 -5
  36. letta/server/rest_api/routers/v1/runs.py +101 -11
  37. letta/server/rest_api/routers/v1/sources.py +21 -53
  38. letta/server/rest_api/routers/v1/telemetry.py +14 -4
  39. letta/server/rest_api/routers/v1/tools.py +2 -2
  40. letta/server/rest_api/streaming_response.py +3 -24
  41. letta/server/server.py +0 -1
  42. letta/services/agent_manager.py +2 -2
  43. letta/services/agent_serialization_manager.py +129 -32
  44. letta/services/file_manager.py +111 -6
  45. letta/services/file_processor/file_processor.py +5 -2
  46. letta/services/files_agents_manager.py +60 -0
  47. letta/services/helpers/agent_manager_helper.py +6 -207
  48. letta/services/helpers/tool_parser_helper.py +6 -3
  49. letta/services/llm_batch_manager.py +1 -1
  50. letta/services/mcp/base_client.py +7 -1
  51. letta/services/mcp/sse_client.py +7 -2
  52. letta/services/mcp/stdio_client.py +5 -0
  53. letta/services/mcp/streamable_http_client.py +11 -2
  54. letta/services/mcp_manager.py +31 -30
  55. letta/services/source_manager.py +26 -1
  56. letta/services/summarizer/summarizer.py +21 -10
  57. letta/services/tool_executor/files_tool_executor.py +13 -9
  58. letta/services/tool_executor/mcp_tool_executor.py +3 -0
  59. letta/services/tool_executor/tool_execution_manager.py +13 -0
  60. letta/services/tool_executor/tool_execution_sandbox.py +0 -1
  61. letta/services/tool_manager.py +43 -20
  62. letta/services/tool_sandbox/local_sandbox.py +0 -2
  63. letta/settings.py +1 -0
  64. letta/utils.py +37 -0
  65. {letta_nightly-0.11.4.dev20250826104242.dist-info → letta_nightly-0.11.6.dev20250827050912.dist-info}/METADATA +116 -102
  66. {letta_nightly-0.11.4.dev20250826104242.dist-info → letta_nightly-0.11.6.dev20250827050912.dist-info}/RECORD +128 -127
  67. {letta_nightly-0.11.4.dev20250826104242.dist-info → letta_nightly-0.11.6.dev20250827050912.dist-info}/WHEEL +1 -1
  68. letta_nightly-0.11.6.dev20250827050912.dist-info/entry_points.txt +2 -0
  69. letta/functions/mcp_client/__init__.py +0 -0
  70. letta/functions/mcp_client/base_client.py +0 -156
  71. letta/functions/mcp_client/sse_client.py +0 -51
  72. letta/functions/mcp_client/stdio_client.py +0 -109
  73. letta_nightly-0.11.4.dev20250826104242.dist-info/entry_points.txt +0 -3
  74. {letta_nightly-0.11.4.dev20250826104242.dist-info → letta_nightly-0.11.6.dev20250827050912.dist-info/licenses}/LICENSE +0 -0
letta/services/tool_executor/files_tool_executor.py CHANGED
@@ -151,16 +151,16 @@ class LettaFileToolExecutor(ToolExecutor):
             offset = file_request.offset
             length = file_request.length

-            # Convert 1-indexed offset/length to 0-indexed start/end for LineChunker
+            # Use 0-indexed offset/length directly for LineChunker
             start, end = None, None
             if offset is not None or length is not None:
-                if offset is not None and offset < 1:
-                    raise ValueError(f"Offset for file {file_name} must be >= 1 (1-indexed), got {offset}")
+                if offset is not None and offset < 0:
+                    raise ValueError(f"Offset for file {file_name} must be >= 0 (0-indexed), got {offset}")
                 if length is not None and length < 1:
                     raise ValueError(f"Length for file {file_name} must be >= 1, got {length}")

-                # Convert to 0-indexed for LineChunker
-                start = (offset - 1) if offset is not None else None
+                # Use offset directly as it's already 0-indexed
+                start = offset if offset is not None else None
                 if start is not None and length is not None:
                     end = start + length
                 else:
@@ -193,7 +193,7 @@ class LettaFileToolExecutor(ToolExecutor):
                 visible_content=visible_content,
                 max_files_open=agent_state.max_files_open,
                 start_line=start + 1 if start is not None else None,  # convert to 1-indexed for user display
-                end_line=end if end is not None else None,  # end is already exclusive in slicing, so this is correct
+                end_line=end if end is not None else None,  # end is already exclusive, shows as 1-indexed inclusive
             )

             opened_files.append(file_name)
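
For orientation, here is a minimal sketch of the new 0-indexed contract and how it maps to the 1-indexed range shown to users; `resolve_range` is an illustrative helper under these assumptions, not a Letta API:

```python
# Sketch of the 0-indexed offset/length semantics used above (illustrative only).
def resolve_range(offset: int | None, length: int | None) -> tuple[int | None, int | None]:
    """Map a request's offset/length to a [start, end) slice for a line chunker."""
    if offset is not None and offset < 0:
        raise ValueError(f"Offset must be >= 0 (0-indexed), got {offset}")
    if length is not None and length < 1:
        raise ValueError(f"Length must be >= 1, got {length}")
    start = offset  # already 0-indexed, so no conversion is needed
    end = start + length if (start is not None and length is not None) else None
    return start, end


start, end = resolve_range(offset=0, length=50)
print(start, end)                  # 0 50 -> slice lines[0:50]
print(f"lines {start + 1}-{end}")  # shown to the user as "lines 1-50" (1-indexed, inclusive)
```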
@@ -220,10 +220,14 @@ class LettaFileToolExecutor(ToolExecutor):
        for req in file_requests:
            previous_info = format_previous_range(req.file_name)
            if req.offset is not None and req.length is not None:
-                end_line = req.offset + req.length - 1
-                file_summaries.append(f"{req.file_name} (lines {req.offset}-{end_line}){previous_info}")
+                # Display as 1-indexed for user readability: (offset+1) to (offset+length)
+                start_line = req.offset + 1
+                end_line = req.offset + req.length
+                file_summaries.append(f"{req.file_name} (lines {start_line}-{end_line}){previous_info}")
            elif req.offset is not None:
-                file_summaries.append(f"{req.file_name} (lines {req.offset}-end){previous_info}")
+                # Display as 1-indexed
+                start_line = req.offset + 1
+                file_summaries.append(f"{req.file_name} (lines {start_line}-end){previous_info}")
            else:
                file_summaries.append(f"{req.file_name}{previous_info}")

letta/services/tool_executor/mcp_tool_executor.py CHANGED
@@ -37,8 +37,10 @@ class ExternalMCPToolExecutor(ToolExecutor):
        # TODO: may need to have better client connection management

        environment_variables = {}
+        agent_id = None
        if agent_state:
            environment_variables = agent_state.get_agent_env_vars_as_dict()
+            agent_id = agent_state.id

        function_response, success = await mcp_manager.execute_mcp_server_tool(
            mcp_server_name=mcp_server_name,
@@ -46,6 +48,7 @@ class ExternalMCPToolExecutor(ToolExecutor):
            tool_args=function_args,
            environment_variables=environment_variables,
            actor=actor,
+            agent_id=agent_id,
        )

        return ToolExecutionResult(
letta/services/tool_executor/tool_execution_manager.py CHANGED
@@ -1,3 +1,4 @@
+import asyncio
 import traceback
 from typing import Any, Dict, Optional, Type

@@ -129,6 +130,18 @@ class ToolExecutionManager:
                result.func_return = FUNCTION_RETURN_VALUE_TRUNCATED(return_str, len(return_str), tool.return_char_limit)
            return result

+        except asyncio.CancelledError as e:
+            self.logger.error(f"Aysnc cancellation error executing tool {function_name}: {str(e)}")
+            error_message = get_friendly_error_msg(
+                function_name=function_name,
+                exception_name=type(e).__name__,
+                exception_message=str(e),
+            )
+            return ToolExecutionResult(
+                status="error",
+                func_return=error_message,
+                stderr=[traceback.format_exc()],
+            )
        except Exception as e:
            status = "error"
            self.logger.error(f"Error executing tool {function_name}: {str(e)}")
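
The dedicated `except asyncio.CancelledError` branch matters because `CancelledError` derives from `BaseException` (since Python 3.8), so the existing `except Exception` handler never sees a cancelled tool task. A self-contained sketch of that behavior; `tool_call` is a stand-in, not Letta code:

```python
import asyncio


async def tool_call():
    try:
        await asyncio.sleep(10)  # stand-in for the real tool coroutine
        return "ok"
    except asyncio.CancelledError:
        # Without this branch the cancellation would bypass `except Exception`
        # entirely, because CancelledError is a BaseException since Python 3.8.
        return "cancelled -> reported as an error-style result"
    except Exception as e:
        return f"other failure: {e}"


async def main():
    task = asyncio.create_task(tool_call())
    await asyncio.sleep(0.1)
    task.cancel()
    print(await task)  # prints the cancellation branch's message


asyncio.run(main())
```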
letta/services/tool_executor/tool_execution_sandbox.py CHANGED
@@ -249,7 +249,6 @@ class ToolExecutionSandbox:
            logger.error(f"Executing tool {self.tool_name} has an unexpected error: {e}")
            raise e

-    @trace_method
    def run_local_dir_sandbox_directly(
        self,
        sbx_config: SandboxConfig,
letta/services/tool_manager.py CHANGED
@@ -184,7 +184,9 @@ class ToolManager:

    @enforce_types
    @trace_method
-    async def bulk_upsert_tools_async(self, pydantic_tools: List[PydanticTool], actor: PydanticUser) -> List[PydanticTool]:
+    async def bulk_upsert_tools_async(
+        self, pydantic_tools: List[PydanticTool], actor: PydanticUser, override_existing_tools: bool = True
+    ) -> List[PydanticTool]:
        """
        Bulk create or update multiple tools in a single database transaction.

@@ -227,10 +229,10 @@ class ToolManager:
        if settings.letta_pg_uri_no_default:
            # use optimized postgresql bulk upsert
            async with db_registry.async_session() as session:
-                return await self._bulk_upsert_postgresql(session, pydantic_tools, actor)
+                return await self._bulk_upsert_postgresql(session, pydantic_tools, actor, override_existing_tools)
        else:
            # fallback to individual upserts for sqlite
-            return await self._upsert_tools_individually(pydantic_tools, actor)
+            return await self._upsert_tools_individually(pydantic_tools, actor, override_existing_tools)

    @enforce_types
    @trace_method
@@ -784,8 +786,10 @@ class ToolManager:
            return await self._upsert_tools_individually(tool_data_list, actor)

    @trace_method
-    async def _bulk_upsert_postgresql(self, session, tool_data_list: List[PydanticTool], actor: PydanticUser) -> List[PydanticTool]:
-        """hyper-optimized postgresql bulk upsert using on_conflict_do_update."""
+    async def _bulk_upsert_postgresql(
+        self, session, tool_data_list: List[PydanticTool], actor: PydanticUser, override_existing_tools: bool = True
+    ) -> List[PydanticTool]:
+        """hyper-optimized postgresql bulk upsert using on_conflict_do_update or on_conflict_do_nothing."""
        from sqlalchemy import func, select
        from sqlalchemy.dialects.postgresql import insert

@@ -809,32 +813,51 @@ class ToolManager:
        # use postgresql's native bulk upsert
        stmt = insert(table).values(insert_data)

-        # on conflict, update all columns except id, created_at, and _created_by_id
-        excluded = stmt.excluded
-        update_dict = {}
-        for col in table.columns:
-            if col.name not in ("id", "created_at", "_created_by_id"):
-                if col.name == "updated_at":
-                    update_dict[col.name] = func.now()
-                else:
-                    update_dict[col.name] = excluded[col.name]
-
-        upsert_stmt = stmt.on_conflict_do_update(index_elements=["name", "organization_id"], set_=update_dict)
+        if override_existing_tools:
+            # on conflict, update all columns except id, created_at, and _created_by_id
+            excluded = stmt.excluded
+            update_dict = {}
+            for col in table.columns:
+                if col.name not in ("id", "created_at", "_created_by_id"):
+                    if col.name == "updated_at":
+                        update_dict[col.name] = func.now()
+                    else:
+                        update_dict[col.name] = excluded[col.name]
+
+            upsert_stmt = stmt.on_conflict_do_update(index_elements=["name", "organization_id"], set_=update_dict)
+        else:
+            # on conflict, do nothing (skip existing tools)
+            upsert_stmt = stmt.on_conflict_do_nothing(index_elements=["name", "organization_id"])

        await session.execute(upsert_stmt)
        await session.commit()

-        # fetch results
+        # fetch results (includes both inserted and skipped tools)
        tool_names = [tool.name for tool in tool_data_list]
        result_query = select(ToolModel).where(ToolModel.name.in_(tool_names), ToolModel.organization_id == actor.organization_id)
        result = await session.execute(result_query)
        return [tool.to_pydantic() for tool in result.scalars()]

    @trace_method
-    async def _upsert_tools_individually(self, tool_data_list: List[PydanticTool], actor: PydanticUser) -> List[PydanticTool]:
+    async def _upsert_tools_individually(
+        self, tool_data_list: List[PydanticTool], actor: PydanticUser, override_existing_tools: bool = True
+    ) -> List[PydanticTool]:
        """fallback to individual upserts for sqlite (original approach)."""
        tools = []
        for tool in tool_data_list:
-            upserted_tool = await self.create_or_update_tool_async(tool, actor)
-            tools.append(upserted_tool)
+            if override_existing_tools:
+                # update existing tools if they exist
+                upserted_tool = await self.create_or_update_tool_async(tool, actor)
+                tools.append(upserted_tool)
+            else:
+                # skip existing tools, only create new ones
+                existing_tool_id = await self.get_tool_id_by_name_async(tool_name=tool.name, actor=actor)
+                if existing_tool_id:
+                    # tool exists, fetch and return it without updating
+                    existing_tool = await self.get_tool_by_id_async(existing_tool_id, actor=actor)
+                    tools.append(existing_tool)
+                else:
+                    # tool doesn't exist, create it
+                    created_tool = await self.create_tool_async(tool, actor=actor)
+                    tools.append(created_tool)
        return tools
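
The two paths selected by `override_existing_tools` correspond to PostgreSQL's two conflict clauses. A generic SQLAlchemy sketch using a toy `tools` table rather than Letta's `ToolModel`:

```python
from sqlalchemy import Column, MetaData, String, Table
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import insert

metadata = MetaData()
tools = Table(
    "tools",
    metadata,
    Column("name", String, primary_key=True),
    Column("organization_id", String, primary_key=True),
    Column("description", String),
)

stmt = insert(tools).values([{"name": "web_search", "organization_id": "org-1", "description": "v2"}])

# override_existing_tools=True -> rewrite the conflicting row with the incoming values
update_stmt = stmt.on_conflict_do_update(
    index_elements=["name", "organization_id"],
    set_={"description": stmt.excluded.description},
)

# override_existing_tools=False -> keep the existing row untouched
skip_stmt = stmt.on_conflict_do_nothing(index_elements=["name", "organization_id"])

print(update_stmt.compile(dialect=postgresql.dialect()))
print(skip_stmt.compile(dialect=postgresql.dialect()))
```

Either way, the method then re-selects by name and organization, so callers get back the full tool list whether rows were updated or skipped.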
letta/services/tool_sandbox/local_sandbox.py CHANGED
@@ -155,7 +155,6 @@ class AsyncToolSandboxLocal(AsyncToolSandboxBase):
        if not settings.debug:
            await asyncio.to_thread(os.remove, temp_file_path)

-    @trace_method
    async def _prepare_venv(self, local_configs, venv_path: str, env: Dict[str, str]):
        """
        Prepare virtual environment asynchronously (in a background thread).
@@ -179,7 +178,6 @@ class AsyncToolSandboxLocal(AsyncToolSandboxBase):
        )
        log_event(name="finish install_pip_requirements_for_sandbox", attributes={"local_configs": local_configs.model_dump_json()})

-    @trace_method
    async def _execute_tool_subprocess(
        self, sbx_config, python_executable: str, temp_file_path: str, env: Dict[str, str], cwd: str
    ) -> ToolExecutionResult:
letta/settings.py CHANGED
@@ -252,6 +252,7 @@ class Settings(BaseSettings):
    track_errored_messages: bool = Field(default=True, description="Enable tracking for errored messages")
    track_stop_reason: bool = Field(default=True, description="Enable tracking stop reason on steps.")
    track_agent_run: bool = Field(default=True, description="Enable tracking agent run with cancellation support")
+    track_provider_trace: bool = Field(default=True, description="Enable tracking raw llm request and response at each step")

    # FastAPI Application Settings
    uvicorn_workers: int = 1
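
Like the other `track_*` flags, `track_provider_trace` is a pydantic-settings field and can be toggled from the environment. A generic sketch; the `letta_` env prefix is an assumption here, not confirmed by this diff:

```python
import os

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict


class DemoSettings(BaseSettings):
    # assumed prefix for illustration; Letta's real Settings class defines its own model_config
    model_config = SettingsConfigDict(env_prefix="letta_")

    track_provider_trace: bool = Field(default=True, description="Enable tracking raw llm request and response at each step")


os.environ["LETTA_TRACK_PROVIDER_TRACE"] = "false"
print(DemoSettings().track_provider_trace)  # False
```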
letta/utils.py CHANGED
@@ -1103,6 +1103,43 @@ def safe_create_task(coro, logger: Logger, label: str = "background task"):
    return asyncio.create_task(wrapper())


+def safe_create_file_processing_task(coro, file_metadata, server, actor, logger: Logger, label: str = "file processing task"):
+    """
+    Create a task for file processing that updates file status on failure.
+
+    This is a specialized version of safe_create_task that ensures file
+    status is properly updated to ERROR with a meaningful message if the
+    task fails.
+
+    Args:
+        coro: The coroutine to execute
+        file_metadata: FileMetadata object being processed
+        server: Server instance with file_manager
+        actor: User performing the operation
+        logger: Logger instance for error logging
+        label: Description of the task for logging
+    """
+    from letta.schemas.enums import FileProcessingStatus
+
+    async def wrapper():
+        try:
+            await coro
+        except Exception as e:
+            logger.exception(f"{label} failed for file {file_metadata.file_name} with {type(e).__name__}: {e}")
+            # update file status to ERROR with a meaningful message
+            try:
+                await server.file_manager.update_file_status(
+                    file_id=file_metadata.id,
+                    actor=actor,
+                    processing_status=FileProcessingStatus.ERROR,
+                    error_message=f"Processing failed: {str(e)}" if str(e) else f"Processing failed: {type(e).__name__}",
+                )
+            except Exception as update_error:
+                logger.error(f"Failed to update file status to ERROR for {file_metadata.id}: {update_error}")
+
+    return asyncio.create_task(wrapper())
+
+
 class CancellationSignal:
    """
    A signal that can be checked for cancellation during streaming operations.
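
A rough usage sketch of the new helper with stubbed-out server and file objects. Only `safe_create_file_processing_task` comes from `letta.utils`; `StubFileManager`, `failing_import`, and the rest are stand-ins, and the helper's internal `FileProcessingStatus` import means the letta package must be installed to run this:

```python
import asyncio
import logging
from types import SimpleNamespace

from letta.utils import safe_create_file_processing_task


class StubFileManager:
    async def update_file_status(self, file_id, actor, processing_status, error_message):
        print(f"{file_id} -> {processing_status}: {error_message}")


async def failing_import():
    raise RuntimeError("parser crashed")  # simulate a file-processing failure


async def main():
    file_metadata = SimpleNamespace(id="file-123", file_name="report.pdf")
    server = SimpleNamespace(file_manager=StubFileManager())
    task = safe_create_file_processing_task(
        failing_import(), file_metadata, server, actor=None, logger=logging.getLogger("demo")
    )
    await task  # the wrapper logs, marks the file as ERROR, and does not re-raise


asyncio.run(main())
```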
{letta_nightly-0.11.4.dev20250826104242.dist-info → letta_nightly-0.11.6.dev20250827050912.dist-info}/METADATA CHANGED
@@ -1,117 +1,132 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: letta-nightly
-Version: 0.11.4.dev20250826104242
+Version: 0.11.6.dev20250827050912
 Summary: Create LLM agents with long-term memory and custom tools
+Author-email: Letta Team <contact@letta.com>
 License: Apache License
-Author: Letta Team
-Author-email: contact@letta.com
-Requires-Python: >=3.11,<3.14
-Classifier: License :: Other/Proprietary License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Provides-Extra: all
+License-File: LICENSE
+Requires-Python: <3.14,>=3.11
+Requires-Dist: aiomultiprocess>=0.9.1
+Requires-Dist: alembic>=1.13.3
+Requires-Dist: anthropic>=0.49.0
+Requires-Dist: apscheduler>=3.11.0
+Requires-Dist: black[jupyter]>=24.2.0
+Requires-Dist: brotli>=1.1.0
+Requires-Dist: certifi>=2025.6.15
+Requires-Dist: colorama>=0.4.6
+Requires-Dist: composio-core>=0.7.7
+Requires-Dist: datamodel-code-generator[http]>=0.25.0
+Requires-Dist: demjson3>=3.0.6
+Requires-Dist: docstring-parser<0.17,>=0.16
+Requires-Dist: faker>=36.1.0
+Requires-Dist: firecrawl-py<3.0.0,>=2.8.0
+Requires-Dist: grpcio-tools>=1.68.1
+Requires-Dist: grpcio>=1.68.1
+Requires-Dist: html2text>=2020.1.16
+Requires-Dist: httpx-sse>=0.4.0
+Requires-Dist: httpx>=0.28.0
+Requires-Dist: jinja2>=3.1.5
+Requires-Dist: letta-client>=0.1.277
+Requires-Dist: llama-index-embeddings-openai>=0.3.1
+Requires-Dist: llama-index>=0.12.2
+Requires-Dist: markitdown[docx,pdf,pptx]>=0.1.2
+Requires-Dist: marshmallow-sqlalchemy>=1.4.1
+Requires-Dist: matplotlib>=3.10.1
+Requires-Dist: mcp[cli]>=1.9.4
+Requires-Dist: mistralai>=1.8.1
+Requires-Dist: nltk>=3.8.1
+Requires-Dist: numpy>=2.1.0
+Requires-Dist: openai>=1.99.9
+Requires-Dist: opentelemetry-api==1.30.0
+Requires-Dist: opentelemetry-exporter-otlp==1.30.0
+Requires-Dist: opentelemetry-instrumentation-requests==0.51b0
+Requires-Dist: opentelemetry-instrumentation-sqlalchemy==0.51b0
+Requires-Dist: opentelemetry-sdk==1.30.0
+Requires-Dist: orjson>=3.11.1
+Requires-Dist: pathvalidate>=3.2.1
+Requires-Dist: prettytable>=3.9.0
+Requires-Dist: pydantic-settings>=2.2.1
+Requires-Dist: pydantic>=2.10.6
+Requires-Dist: pyhumps>=3.8.0
+Requires-Dist: python-box>=7.1.1
+Requires-Dist: python-multipart>=0.0.19
+Requires-Dist: pytz>=2023.3.post1
+Requires-Dist: pyyaml>=6.0.1
+Requires-Dist: questionary>=2.0.1
+Requires-Dist: rich>=13.9.4
+Requires-Dist: sentry-sdk[fastapi]==2.19.1
+Requires-Dist: setuptools>=70
+Requires-Dist: sqlalchemy-json>=0.7.0
+Requires-Dist: sqlalchemy-utils>=0.41.2
+Requires-Dist: sqlalchemy[asyncio]>=2.0.41
+Requires-Dist: sqlmodel>=0.0.16
+Requires-Dist: structlog>=25.4.0
+Requires-Dist: tavily-python>=0.7.2
+Requires-Dist: tqdm>=4.66.1
+Requires-Dist: typer>=0.15.2
 Provides-Extra: bedrock
+Requires-Dist: aioboto3>=14.3.0; extra == 'bedrock'
+Requires-Dist: boto3>=1.36.24; extra == 'bedrock'
 Provides-Extra: cloud-tool-sandbox
+Requires-Dist: e2b-code-interpreter>=1.0.3; extra == 'cloud-tool-sandbox'
 Provides-Extra: desktop
+Requires-Dist: aiosqlite>=0.21.0; extra == 'desktop'
+Requires-Dist: docker>=7.1.0; extra == 'desktop'
+Requires-Dist: fastapi>=0.115.6; extra == 'desktop'
+Requires-Dist: langchain-community>=0.3.7; extra == 'desktop'
+Requires-Dist: langchain>=0.3.7; extra == 'desktop'
+Requires-Dist: locust>=2.31.5; extra == 'desktop'
+Requires-Dist: pgvector>=0.2.3; extra == 'desktop'
+Requires-Dist: sqlite-vec>=0.1.7a2; extra == 'desktop'
+Requires-Dist: uvicorn>=0.24.0.post1; extra == 'desktop'
+Requires-Dist: websockets; extra == 'desktop'
+Requires-Dist: wikipedia>=1.4.0; extra == 'desktop'
 Provides-Extra: dev
+Requires-Dist: autoflake>=2.3.0; extra == 'dev'
+Requires-Dist: black[jupyter]>=24.4.2; extra == 'dev'
+Requires-Dist: ipdb>=0.13.13; extra == 'dev'
+Requires-Dist: ipykernel>=6.29.5; extra == 'dev'
+Requires-Dist: isort>=5.13.2; extra == 'dev'
+Requires-Dist: pexpect>=4.9.0; extra == 'dev'
+Requires-Dist: pre-commit>=3.5.0; extra == 'dev'
+Requires-Dist: pyright>=1.1.347; extra == 'dev'
+Requires-Dist: pytest; extra == 'dev'
+Requires-Dist: pytest-asyncio>=0.24.0; extra == 'dev'
+Requires-Dist: pytest-json-report>=1.5.0; extra == 'dev'
+Requires-Dist: pytest-mock>=3.14.0; extra == 'dev'
+Requires-Dist: pytest-order>=1.2.0; extra == 'dev'
 Provides-Extra: experimental
+Requires-Dist: google-cloud-profiler>=4.1.0; extra == 'experimental'
+Requires-Dist: granian[reload,uvloop]>=2.3.2; extra == 'experimental'
+Requires-Dist: uvloop>=0.21.0; extra == 'experimental'
 Provides-Extra: external-tools
+Requires-Dist: docker>=7.1.0; extra == 'external-tools'
+Requires-Dist: firecrawl-py<3.0.0,>=2.8.0; extra == 'external-tools'
+Requires-Dist: langchain-community>=0.3.7; extra == 'external-tools'
+Requires-Dist: langchain>=0.3.7; extra == 'external-tools'
+Requires-Dist: turbopuffer>=0.5.17; extra == 'external-tools'
+Requires-Dist: wikipedia>=1.4.0; extra == 'external-tools'
 Provides-Extra: google
+Requires-Dist: google-genai>=1.15.0; extra == 'google'
+Provides-Extra: modal
+Requires-Dist: modal>=1.1.0; extra == 'modal'
 Provides-Extra: pinecone
+Requires-Dist: pinecone[asyncio]>=7.3.0; extra == 'pinecone'
 Provides-Extra: postgres
+Requires-Dist: asyncpg>=0.30.0; extra == 'postgres'
+Requires-Dist: pg8000>=1.30.3; extra == 'postgres'
+Requires-Dist: pgvector>=0.2.3; extra == 'postgres'
+Requires-Dist: psycopg2-binary>=2.9.10; extra == 'postgres'
+Requires-Dist: psycopg2>=2.9.10; extra == 'postgres'
 Provides-Extra: redis
+Requires-Dist: redis>=6.2.0; extra == 'redis'
 Provides-Extra: server
-Provides-Extra: tests
-Requires-Dist: aioboto3 (>=14.3.0,<15.0.0) ; extra == "bedrock"
-Requires-Dist: aiomultiprocess (>=0.9.1,<0.10.0)
-Requires-Dist: alembic (>=1.13.3,<2.0.0)
-Requires-Dist: anthropic (>=0.49.0,<0.50.0)
-Requires-Dist: apscheduler (>=3.11.0,<4.0.0)
-Requires-Dist: asyncpg (>=0.30.0,<0.31.0) ; extra == "postgres"
-Requires-Dist: autoflake (>=2.3.0,<3.0.0) ; extra == "dev" or extra == "all"
-Requires-Dist: black[jupyter] (>=24.2.0,<25.0.0) ; extra == "dev" or extra == "all"
-Requires-Dist: boto3 (>=1.36.24,<2.0.0) ; extra == "bedrock"
-Requires-Dist: brotli (>=1.1.0,<2.0.0)
-Requires-Dist: certifi (>=2025.6.15,<2026.0.0)
-Requires-Dist: colorama (>=0.4.6,<0.5.0)
-Requires-Dist: composio-core (>=0.7.7,<0.8.0)
-Requires-Dist: datamodel-code-generator[http] (>=0.25.0,<0.26.0)
-Requires-Dist: demjson3 (>=3.0.6,<4.0.0)
-Requires-Dist: docker (>=7.1.0,<8.0.0) ; extra == "external-tools" or extra == "desktop" or extra == "all"
-Requires-Dist: docstring-parser (>=0.16,<0.17)
-Requires-Dist: e2b-code-interpreter (>=1.0.3,<2.0.0) ; extra == "cloud-tool-sandbox"
-Requires-Dist: faker (>=36.1.0,<37.0.0)
-Requires-Dist: fastapi (>=0.115.6,<0.116.0) ; extra == "server" or extra == "desktop" or extra == "all"
-Requires-Dist: firecrawl-py (>=2.8.0,<3.0.0) ; extra == "external-tools"
-Requires-Dist: google-cloud-profiler (>=4.1.0,<5.0.0) ; extra == "experimental" or extra == "all"
-Requires-Dist: google-genai (>=1.15.0,<2.0.0) ; extra == "google"
-Requires-Dist: granian[reload] (>=2.3.2,<3.0.0) ; extra == "experimental" or extra == "all"
-Requires-Dist: grpcio (>=1.68.1,<2.0.0)
-Requires-Dist: grpcio-tools (>=1.68.1,<2.0.0)
-Requires-Dist: html2text (>=2020.1.16,<2021.0.0)
-Requires-Dist: httpx (>=0.28.0,<0.29.0)
-Requires-Dist: httpx-sse (>=0.4.0,<0.5.0)
-Requires-Dist: isort (>=5.13.2,<6.0.0) ; extra == "dev" or extra == "all"
-Requires-Dist: jinja2 (>=3.1.5,<4.0.0)
-Requires-Dist: langchain (>=0.3.7,<0.4.0) ; extra == "external-tools" or extra == "desktop" or extra == "all"
-Requires-Dist: langchain-community (>=0.3.7,<0.4.0) ; extra == "external-tools" or extra == "desktop" or extra == "all"
-Requires-Dist: letta_client (>=0.1.220,<0.2.0)
-Requires-Dist: llama-index (>=0.12.2,<0.13.0)
-Requires-Dist: llama-index-embeddings-openai (>=0.3.1,<0.4.0)
-Requires-Dist: locust (>=2.31.5,<3.0.0) ; extra == "dev" or extra == "desktop" or extra == "all"
-Requires-Dist: markitdown[docx,pdf,pptx] (>=0.1.2,<0.2.0)
-Requires-Dist: marshmallow-sqlalchemy (>=1.4.1,<2.0.0)
-Requires-Dist: matplotlib (>=3.10.1,<4.0.0)
-Requires-Dist: mcp[cli] (>=1.9.4,<2.0.0)
-Requires-Dist: mistralai (>=1.8.1,<2.0.0)
-Requires-Dist: modal (>=1.1.0,<2.0.0) ; extra == "cloud-tool-sandbox"
-Requires-Dist: nltk (>=3.8.1,<4.0.0)
-Requires-Dist: numpy (>=2.1.0,<3.0.0)
-Requires-Dist: openai (>=1.99.9,<2.0.0)
-Requires-Dist: opentelemetry-api (==1.30.0)
-Requires-Dist: opentelemetry-exporter-otlp (==1.30.0)
-Requires-Dist: opentelemetry-instrumentation-requests (==0.51b0)
-Requires-Dist: opentelemetry-instrumentation-sqlalchemy (==0.51b0)
-Requires-Dist: opentelemetry-sdk (==1.30.0)
-Requires-Dist: orjson (>=3.11.1,<4.0.0)
-Requires-Dist: pathvalidate (>=3.2.1,<4.0.0)
-Requires-Dist: pexpect (>=4.9.0,<5.0.0) ; extra == "dev" or extra == "all"
-Requires-Dist: pg8000 (>=1.30.3,<2.0.0) ; extra == "postgres" or extra == "all"
-Requires-Dist: pgvector (>=0.2.3,<0.3.0) ; extra == "postgres" or extra == "desktop" or extra == "all"
-Requires-Dist: pinecone[asyncio] (>=7.3.0,<8.0.0) ; extra == "pinecone" or extra == "all"
-Requires-Dist: pre-commit (>=3.5.0,<4.0.0) ; extra == "dev" or extra == "all"
-Requires-Dist: prettytable (>=3.9.0,<4.0.0)
-Requires-Dist: psycopg2 (>=2.9.10,<3.0.0) ; extra == "postgres" or extra == "all"
-Requires-Dist: psycopg2-binary (>=2.9.10,<3.0.0) ; extra == "postgres" or extra == "all"
-Requires-Dist: pydantic (>=2.10.6,<3.0.0)
-Requires-Dist: pydantic-settings (>=2.2.1,<3.0.0)
-Requires-Dist: pyhumps (>=3.8.0,<4.0.0)
-Requires-Dist: pyright (>=1.1.347,<2.0.0) ; extra == "dev" or extra == "desktop" or extra == "all"
-Requires-Dist: pytest-asyncio (>=0.24.0,<0.25.0) ; extra == "dev" or extra == "all"
-Requires-Dist: pytest-order (>=1.2.0,<2.0.0) ; extra == "dev" or extra == "all"
-Requires-Dist: python-box (>=7.1.1,<8.0.0)
-Requires-Dist: python-multipart (>=0.0.19,<0.0.20)
-Requires-Dist: pytz (>=2023.3.post1,<2024.0)
-Requires-Dist: pyyaml (>=6.0.1,<7.0.0)
-Requires-Dist: questionary (>=2.0.1,<3.0.0)
-Requires-Dist: redis (>=6.2.0,<7.0.0) ; extra == "redis" or extra == "all"
-Requires-Dist: rich (>=13.9.4,<14.0.0)
-Requires-Dist: sentry-sdk[fastapi] (==2.19.1)
-Requires-Dist: setuptools (>=70,<71)
-Requires-Dist: sqlalchemy-json (>=0.7.0,<0.8.0)
-Requires-Dist: sqlalchemy-utils (>=0.41.2,<0.42.0)
-Requires-Dist: sqlalchemy[asyncio] (>=2.0.41,<3.0.0)
-Requires-Dist: sqlite-vec (>=0.1.7a2,<0.2.0) ; extra == "desktop"
-Requires-Dist: sqlmodel (>=0.0.16,<0.0.17)
-Requires-Dist: structlog (>=25.4.0,<26.0.0)
-Requires-Dist: tavily-python (>=0.7.2,<0.8.0)
-Requires-Dist: tqdm (>=4.66.1,<5.0.0)
-Requires-Dist: turbopuffer (>=0.5.17,<0.6.0) ; extra == "all"
-Requires-Dist: typer (>=0.15.2,<0.16.0)
-Requires-Dist: uvicorn (>=0.24.0.post1,<0.25.0) ; extra == "server" or extra == "desktop" or extra == "all"
-Requires-Dist: uvloop (>=0.21.0,<0.22.0) ; (sys_platform != "win32") and (extra == "experimental" or extra == "all")
-Requires-Dist: wikipedia (>=1.4.0,<2.0.0) ; extra == "external-tools" or extra == "tests" or extra == "desktop" or extra == "all"
+Requires-Dist: fastapi>=0.115.6; extra == 'server'
+Requires-Dist: uvicorn>=0.24.0.post1; extra == 'server'
+Requires-Dist: websockets; extra == 'server'
+Provides-Extra: sqlite
+Requires-Dist: aiosqlite>=0.21.0; extra == 'sqlite'
+Requires-Dist: sqlite-vec>=0.1.7a2; extra == 'sqlite'
 Description-Content-Type: text/markdown

 <p align="center">
@@ -236,7 +251,7 @@ If your Letta server isn't running on `localhost` (for example, you deployed it

 > _"Do I need to install Docker to use Letta?"_

-No, you can install Letta using `pip` (via `pip install -U letta`), as well as from source (via `poetry install`). See instructions below.
+No, you can install Letta using `pip` (via `pip install -U letta`), as well as from source (via `uv sync`). See instructions below.

 > _"What's the difference between installing with `pip` vs `Docker`?"_

@@ -405,4 +420,3 @@ Letta is an open source project built by over a hundred contributors. There are
 ---

 ***Legal notices**: By using Letta and related Letta services (such as the Letta endpoint or hosted service), you are agreeing to our [privacy policy](https://www.letta.com/privacy-policy) and [terms of service](https://www.letta.com/terms-of-service).*
-