letta-nightly 0.4.1.dev20241008104105__py3-none-any.whl → 0.4.1.dev20241010104112__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (43)
  1. letta/agent.py +18 -2
  2. letta/agent_store/db.py +23 -7
  3. letta/cli/cli.py +2 -1
  4. letta/cli/cli_config.py +1 -1098
  5. letta/client/client.py +8 -1
  6. letta/client/utils.py +7 -2
  7. letta/credentials.py +2 -2
  8. letta/embeddings.py +3 -0
  9. letta/functions/schema_generator.py +1 -1
  10. letta/interface.py +6 -2
  11. letta/llm_api/anthropic.py +3 -24
  12. letta/llm_api/azure_openai.py +47 -98
  13. letta/llm_api/azure_openai_constants.py +10 -0
  14. letta/llm_api/google_ai.py +38 -63
  15. letta/llm_api/helpers.py +64 -2
  16. letta/llm_api/llm_api_tools.py +6 -15
  17. letta/llm_api/openai.py +6 -49
  18. letta/local_llm/constants.py +3 -0
  19. letta/main.py +1 -1
  20. letta/metadata.py +2 -0
  21. letta/providers.py +165 -31
  22. letta/schemas/agent.py +14 -0
  23. letta/schemas/llm_config.py +0 -3
  24. letta/schemas/openai/chat_completion_response.py +3 -0
  25. letta/schemas/tool.py +3 -3
  26. letta/server/rest_api/routers/openai/assistants/threads.py +5 -5
  27. letta/server/rest_api/routers/openai/chat_completions/chat_completions.py +2 -2
  28. letta/server/rest_api/routers/v1/agents.py +11 -11
  29. letta/server/rest_api/routers/v1/blocks.py +2 -2
  30. letta/server/rest_api/routers/v1/jobs.py +2 -2
  31. letta/server/rest_api/routers/v1/sources.py +12 -12
  32. letta/server/rest_api/routers/v1/tools.py +6 -6
  33. letta/server/server.py +26 -7
  34. letta/settings.py +3 -112
  35. letta/streaming_interface.py +8 -4
  36. {letta_nightly-0.4.1.dev20241008104105.dist-info → letta_nightly-0.4.1.dev20241010104112.dist-info}/METADATA +1 -1
  37. {letta_nightly-0.4.1.dev20241008104105.dist-info → letta_nightly-0.4.1.dev20241010104112.dist-info}/RECORD +40 -42
  38. letta/configs/anthropic.json +0 -13
  39. letta/configs/letta_hosted.json +0 -11
  40. letta/configs/openai.json +0 -12
  41. {letta_nightly-0.4.1.dev20241008104105.dist-info → letta_nightly-0.4.1.dev20241010104112.dist-info}/LICENSE +0 -0
  42. {letta_nightly-0.4.1.dev20241008104105.dist-info → letta_nightly-0.4.1.dev20241010104112.dist-info}/WHEEL +0 -0
  43. {letta_nightly-0.4.1.dev20241008104105.dist-info → letta_nightly-0.4.1.dev20241010104112.dist-info}/entry_points.txt +0 -0

letta/server/rest_api/routers/v1/agents.py CHANGED

@@ -40,7 +40,7 @@ router = APIRouter(prefix="/agents", tags=["agents"])
 @router.get("/", response_model=List[AgentState], operation_id="list_agents")
 def list_agents(
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     List all agents associated with a given user.
@@ -55,7 +55,7 @@ def list_agents(
 def create_agent(
     agent: CreateAgent = Body(...),
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Create a new agent with the specified configuration.
@@ -76,7 +76,7 @@ def update_agent(
     agent_id: str,
     update_agent: UpdateAgentState = Body(...),
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """Update an exsiting agent"""
     actor = server.get_user_or_default(user_id=user_id)
@@ -89,7 +89,7 @@ def update_agent(
 def get_agent_state(
     agent_id: str,
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Get the state of the agent.
@@ -107,7 +107,7 @@ def get_agent_state(
 def delete_agent(
     agent_id: str,
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Delete an agent.
@@ -159,7 +159,7 @@ def update_agent_memory(
     agent_id: str,
     request: Dict = Body(...),
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Update the core memory of a specific agent.
@@ -202,7 +202,7 @@ def get_agent_archival_memory(
     after: Optional[int] = Query(None, description="Unique ID of the memory to start the query range at."),
     before: Optional[int] = Query(None, description="Unique ID of the memory to end the query range at."),
     limit: Optional[int] = Query(None, description="How many results to include in the response."),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Retrieve the memories in an agent's archival memory store (paginated query).
@@ -227,7 +227,7 @@ def insert_agent_archival_memory(
     agent_id: str,
     request: CreateArchivalMemory = Body(...),
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Insert a memory into an agent's archival memory store.
@@ -245,7 +245,7 @@ def delete_agent_archival_memory(
     memory_id: str,
     # memory_id: str = Query(..., description="Unique ID of the memory to be deleted."),
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Delete a memory from an agent's archival memory store.
@@ -276,7 +276,7 @@ def get_agent_messages(
         DEFAULT_MESSAGE_TOOL_KWARG,
         description="[Only applicable if use_assistant_message is True] The name of the message argument in the designated message tool.",
     ),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Retrieve message history for an agent.
@@ -315,7 +315,7 @@ async def send_message(
     agent_id: str,
     server: SyncServer = Depends(get_letta_server),
     request: LettaRequest = Body(...),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Process a user message and return the agent's response.
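
Note on the repeated change above: each endpoint's user_id parameter moves from a bare str header to Optional[str] with an explicit alias="user_id", so FastAPI reads a header literally named "user_id" (rather than the hyphenated name it would otherwise derive) and tolerates its absence. A minimal client-side sketch, with a placeholder base URL and user id that are not taken from this diff:

import requests

# Hypothetical call against a locally running Letta server; adjust the URL to
# match your deployment. Omitting the header falls back to the server's
# default user.
resp = requests.get(
    "http://localhost:8283/v1/agents/",
    headers={"user_id": "user-00000000"},  # placeholder user id
)
print(resp.json())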

letta/server/rest_api/routers/v1/blocks.py CHANGED

@@ -19,7 +19,7 @@ def list_blocks(
     templates_only: bool = Query(True, description="Whether to include only templates"),
     name: Optional[str] = Query(None, description="Name of the block"),
     server: SyncServer = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     actor = server.get_user_or_default(user_id=user_id)

@@ -33,7 +33,7 @@ def list_blocks(
 def create_block(
     create_block: CreateBlock = Body(...),
     server: SyncServer = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     actor = server.get_user_or_default(user_id=user_id)


letta/server/rest_api/routers/v1/jobs.py CHANGED

@@ -13,7 +13,7 @@ router = APIRouter(prefix="/jobs", tags=["jobs"])
 def list_jobs(
     server: "SyncServer" = Depends(get_letta_server),
     source_id: Optional[str] = Query(None, description="Only list jobs associated with the source."),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     List all jobs.
@@ -34,7 +34,7 @@ def list_jobs(
 @router.get("/active", response_model=List[Job], operation_id="list_active_jobs")
 def list_active_jobs(
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     List all active jobs.

letta/server/rest_api/routers/v1/sources.py CHANGED

@@ -1,6 +1,6 @@
 import os
 import tempfile
-from typing import List
+from typing import List, Optional

 from fastapi import APIRouter, BackgroundTasks, Depends, Header, Query, UploadFile

@@ -21,7 +21,7 @@ router = APIRouter(prefix="/sources", tags=["sources"])
 def get_source(
     source_id: str,
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Get all sources
@@ -35,7 +35,7 @@ def get_source(
 def get_source_id_by_name(
     source_name: str,
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Get a source by name
@@ -49,7 +49,7 @@ def get_source_id_by_name(
 @router.get("/", response_model=List[Source], operation_id="list_sources")
 def list_sources(
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     List all data sources created by a user.
@@ -63,7 +63,7 @@ def list_sources(
 def create_source(
     source: SourceCreate,
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Create a new data source.
@@ -78,7 +78,7 @@ def update_source(
     source_id: str,
     source: SourceUpdate,
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Update the name or documentation of an existing data source.
@@ -94,7 +94,7 @@ def update_source(
 def delete_source(
     source_id: str,
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Delete a data source.
@@ -109,7 +109,7 @@ def attach_source_to_agent(
     source_id: str,
     agent_id: str = Query(..., description="The unique identifier of the agent to attach the source to."),
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Attach a data source to an existing agent.
@@ -127,7 +127,7 @@ def detach_source_from_agent(
     source_id: str,
     agent_id: str = Query(..., description="The unique identifier of the agent to detach the source from."),
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ) -> None:
     """
     Detach a data source from an existing agent.
@@ -143,7 +143,7 @@ def upload_file_to_source(
     source_id: str,
     background_tasks: BackgroundTasks,
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Upload a file to a data source.
@@ -176,7 +176,7 @@ def upload_file_to_source(
 def list_passages(
     source_id: str,
     server: SyncServer = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     List all passages associated with a data source.
@@ -190,7 +190,7 @@ def list_passages(
 def list_documents(
     source_id: str,
     server: "SyncServer" = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     List all documents associated with a data source.

letta/server/rest_api/routers/v1/tools.py CHANGED

@@ -1,4 +1,4 @@
-from typing import List
+from typing import List, Optional

 from fastapi import APIRouter, Body, Depends, Header, HTTPException

@@ -13,7 +13,7 @@ router = APIRouter(prefix="/tools", tags=["tools"])
 def delete_tool(
     tool_id: str,
     server: SyncServer = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Delete a tool by name
@@ -43,7 +43,7 @@ def get_tool(
 def get_tool_id(
     tool_name: str,
     server: SyncServer = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Get a tool ID by name
@@ -60,7 +60,7 @@ def get_tool_id(
 @router.get("/", response_model=List[Tool], operation_id="list_tools")
 def list_all_tools(
     server: SyncServer = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Get a list of all tools available to agents created by a user
@@ -78,7 +78,7 @@ def create_tool(
     tool: ToolCreate = Body(...),
     update: bool = False,
     server: SyncServer = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Create a new tool
@@ -98,7 +98,7 @@ def update_tool(
     tool_id: str,
     request: ToolUpdate = Body(...),
     server: SyncServer = Depends(get_letta_server),
-    user_id: str = Header(None),  # Extract user_id from header, default to None if not present
+    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):
     """
     Update an existing tool
letta/server/server.py CHANGED
@@ -47,11 +47,12 @@ from letta.providers import (
     AnthropicProvider,
     AzureProvider,
     GoogleAIProvider,
+    LettaProvider,
     OllamaProvider,
     OpenAIProvider,
     VLLMProvider,
 )
-from letta.schemas.agent import AgentState, CreateAgent, UpdateAgentState
+from letta.schemas.agent import AgentState, AgentType, CreateAgent, UpdateAgentState
 from letta.schemas.api_key import APIKey, APIKeyCreate
 from letta.schemas.block import (
     Block,
@@ -259,8 +260,8 @@ class SyncServer(Server):
         # add global default tools (for admin)
         self.add_default_tools(module_name="base")

-        # collect providers
-        self._enabled_providers = []
+        # collect providers (always has Letta as a default)
+        self._enabled_providers = [LettaProvider()]
         if model_settings.openai_api_key:
             self._enabled_providers.append(OpenAIProvider(api_key=model_settings.openai_api_key))
         if model_settings.anthropic_api_key:
@@ -272,7 +273,13 @@ class SyncServer(Server):
         if model_settings.gemini_api_key:
             self._enabled_providers.append(GoogleAIProvider(api_key=model_settings.gemini_api_key))
         if model_settings.azure_api_key and model_settings.azure_base_url:
-            self._enabled_providers.append(AzureProvider(api_key=model_settings.azure_api_key, base_url=model_settings.azure_base_url))
+            self._enabled_providers.append(
+                AzureProvider(
+                    api_key=model_settings.azure_api_key,
+                    base_url=model_settings.azure_base_url,
+                    api_version=model_settings.azure_api_version,
+                )
+            )

     def save_agents(self):
         """Saves all the agents that are in the in-memory object store"""
@@ -335,7 +342,10 @@ class SyncServer(Server):
         # Make sure the memory is a memory object
         assert isinstance(agent_state.memory, Memory)

-        letta_agent = Agent(agent_state=agent_state, interface=interface, tools=tool_objs)
+        if agent_state.agent_type == AgentType.memgpt_agent:
+            letta_agent = Agent(agent_state=agent_state, interface=interface, tools=tool_objs)
+        else:
+            raise NotImplementedError("Only base agents are supported as of right now!")

         # Add the agent to the in-memory store and return its reference
         logger.debug(f"Adding agent to the agent cache: user_id={user_id}, agent_id={agent_id}")
@@ -599,7 +609,7 @@ class SyncServer(Server):
         )

         # Run the agent state forward
-        usage = self._step(user_id=user_id, agent_id=agent_id, input_message=packaged_user_message, timestamp=timestamp)
+        usage = self._step(user_id=user_id, agent_id=agent_id, input_message=message, timestamp=timestamp)
         return usage

     def system_message(
@@ -787,6 +797,7 @@ class SyncServer(Server):
             name=request.name,
             user_id=user_id,
             tools=request.tools if request.tools else [],
+            agent_type=request.agent_type or AgentType.memgpt_agent,
             llm_config=llm_config,
             embedding_config=embedding_config,
             system=request.system,
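
Combined with the AgentType import and the load-path check earlier in this file, agent creation now records an agent_type and falls back to AgentType.memgpt_agent when the request leaves it unset; any other type currently raises NotImplementedError when the agent is loaded. A hedged sketch using only the request fields visible in this diff (other required fields, such as memory or model configs, are omitted and may be needed in practice):

from letta.schemas.agent import AgentType, CreateAgent

# agent_type is optional on the request; the server defaults it to
# AgentType.memgpt_agent as shown above.
request = CreateAgent(
    name="example-agent",               # placeholder name
    agent_type=AgentType.memgpt_agent,  # the only type the server will load for now
)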
@@ -1612,6 +1623,11 @@ class SyncServer(Server):
         agent = self._get_or_load_agent(agent_id=agent_id)
         archival_memory = agent.persistence_manager.archival_memory
         archival_memory.storage.delete({"source_id": source_id})
+
+        # delete agent-source mapping
+        self.ms.detach_source(agent_id=agent_id, source_id=source_id)
+
+        # return back source data
         return source

     def list_attached_sources(self, agent_id: str) -> List[Source]:
@@ -1921,7 +1937,10 @@ class SyncServer(Server):
         if user_id is None:
             return self.get_default_user()
         else:
-            return self.get_user(user_id=user_id)
+            try:
+                return self.get_user(user_id=user_id)
+            except ValueError:
+                raise HTTPException(status_code=404, detail=f"User with id {user_id} not found")

     def list_llm_models(self) -> List[LLMConfig]:
         """List available models"""
letta/settings.py CHANGED
@@ -13,8 +13,8 @@ class ModelSettings(BaseSettings):
     openai_api_key: Optional[str] = None
     # TODO: provide overriding BASE_URL?

-    # grok
-    grok_api_key: Optional[str] = None
+    # groq
+    groq_api_key: Optional[str] = None

     # anthropic
     anthropic_api_key: Optional[str] = None
@@ -25,6 +25,7 @@ class ModelSettings(BaseSettings):
     # azure
     azure_api_key: Optional[str] = None
     azure_base_url: Optional[str] = None
+    azure_api_version: Optional[str] = None

     # google ai
     gemini_api_key: Optional[str] = None
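
ModelSettings is a pydantic BaseSettings class, so the renamed groq_api_key and the new azure_api_version are ordinary settings fields; they are normally populated from the environment, though the exact variable names depend on configuration not shown in this diff. A sketch that constructs the settings directly with placeholder values:

from letta.settings import ModelSettings

# All values below are illustrative placeholders.
model_settings = ModelSettings(
    azure_api_key="azure-key-goes-here",
    azure_base_url="https://my-resource.openai.azure.com",
    azure_api_version="2023-05-15",  # new field in this release
    groq_api_key=None,               # renamed from grok_api_key
)
print(model_settings.azure_api_version)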
@@ -55,116 +56,6 @@ class Settings(BaseSettings):
     pg_port: Optional[int] = None
     pg_uri: Optional[str] = None  # option to specifiy full uri

-    ## llm configuration
-    # llm_endpoint: Optional[str] = None
-    # llm_endpoint_type: Optional[str] = None
-    # llm_model: Optional[str] = None
-    # llm_context_window: Optional[int] = None
-
-    ## embedding configuration
-    # embedding_endpoint: Optional[str] = None
-    # embedding_endpoint_type: Optional[str] = None
-    # embedding_dim: Optional[int] = None
-    # embedding_model: Optional[str] = None
-    # embedding_chunk_size: int = 300
-
-    # @property
-    # def llm_config(self):
-
-    # # try to get LLM config from settings
-    # if self.llm_endpoint and self.llm_endpoint_type and self.llm_model and self.llm_context_window:
-    # return LLMConfig(
-    # model=self.llm_model,
-    # model_endpoint_type=self.llm_endpoint_type,
-    # model_endpoint=self.llm_endpoint,
-    # model_wrapper=None,
-    # context_window=self.llm_context_window,
-    # )
-    # else:
-    # if not self.llm_endpoint:
-    # printd(f"No LETTA_LLM_ENDPOINT provided")
-    # if not self.llm_endpoint_type:
-    # printd(f"No LETTA_LLM_ENDPOINT_TYPE provided")
-    # if not self.llm_model:
-    # printd(f"No LETTA_LLM_MODEL provided")
-    # if not self.llm_context_window:
-    # printd(f"No LETTA_LLM_CONTEX_WINDOW provided")
-
-    # # quickstart options
-    # if self.llm_model:
-    # try:
-    # return LLMConfig.default_config(self.llm_model)
-    # except ValueError:
-    # pass
-
-    # # try to read from config file (last resort)
-    # from letta.config import LettaConfig
-
-    # if LettaConfig.exists():
-    # config = LettaConfig.load()
-    # llm_config = LLMConfig(
-    # model=config.default_llm_config.model,
-    # model_endpoint_type=config.default_llm_config.model_endpoint_type,
-    # model_endpoint=config.default_llm_config.model_endpoint,
-    # model_wrapper=config.default_llm_config.model_wrapper,
-    # context_window=config.default_llm_config.context_window,
-    # )
-    # return llm_config
-
-    # # check OpenAI API key
-    # if os.getenv("OPENAI_API_KEY"):
-    # return LLMConfig.default_config(self.llm_model if self.llm_model else "gpt-4")
-
-    # return LLMConfig.default_config("letta")
-
-    # @property
-    # def embedding_config(self):
-
-    # # try to get LLM config from settings
-    # if self.embedding_endpoint and self.embedding_endpoint_type and self.embedding_model and self.embedding_dim:
-    # return EmbeddingConfig(
-    # embedding_model=self.embedding_model,
-    # embedding_endpoint_type=self.embedding_endpoint_type,
-    # embedding_endpoint=self.embedding_endpoint,
-    # embedding_dim=self.embedding_dim,
-    # embedding_chunk_size=self.embedding_chunk_size,
-    # )
-    # else:
-    # if not self.embedding_endpoint:
-    # printd(f"No LETTA_EMBEDDING_ENDPOINT provided")
-    # if not self.embedding_endpoint_type:
-    # printd(f"No LETTA_EMBEDDING_ENDPOINT_TYPE provided")
-    # if not self.embedding_model:
-    # printd(f"No LETTA_EMBEDDING_MODEL provided")
-    # if not self.embedding_dim:
-    # printd(f"No LETTA_EMBEDDING_DIM provided")
-
-    # # TODO
-    # ## quickstart options
-    # # if self.embedding_model:
-    # # try:
-    # # return EmbeddingConfig.default_config(self.embedding_model)
-    # # except ValueError as e:
-    # # pass
-
-    # # try to read from config file (last resort)
-    # from letta.config import LettaConfig
-
-    # if LettaConfig.exists():
-    # config = LettaConfig.load()
-    # return EmbeddingConfig(
-    # embedding_model=config.default_embedding_config.embedding_model,
-    # embedding_endpoint_type=config.default_embedding_config.embedding_endpoint_type,
-    # embedding_endpoint=config.default_embedding_config.embedding_endpoint,
-    # embedding_dim=config.default_embedding_config.embedding_dim,
-    # embedding_chunk_size=config.default_embedding_config.embedding_chunk_size,
-    # )
-
-    # if os.getenv("OPENAI_API_KEY"):
-    # return EmbeddingConfig.default_config(self.embedding_model if self.embedding_model else "text-embedding-ada-002")
-
-    # return EmbeddingConfig.default_config("letta")
-
     @property
     def letta_pg_uri(self) -> str:
         if self.pg_uri:

letta/streaming_interface.py CHANGED

@@ -9,6 +9,10 @@ from rich.live import Live
 from rich.markup import escape

 from letta.interface import CLIInterface
+from letta.local_llm.constants import (
+    ASSISTANT_MESSAGE_CLI_SYMBOL,
+    INNER_THOUGHTS_CLI_SYMBOL,
+)
 from letta.schemas.message import Message
 from letta.schemas.openai.chat_completion_response import (
     ChatCompletionChunkResponse,
@@ -296,7 +300,7 @@ class StreamingRefreshCLIInterface(AgentRefreshStreamingInterface):
     def process_refresh(self, response: ChatCompletionResponse):
         """Process the response to rewrite the current output buffer."""
         if not response.choices:
-            self.update_output("💭 [italic]...[/italic]")
+            self.update_output(f"{INNER_THOUGHTS_CLI_SYMBOL} [italic]...[/italic]")
             return  # Early exit if there are no choices

         choice = response.choices[0]
@@ -304,7 +308,7 @@ class StreamingRefreshCLIInterface(AgentRefreshStreamingInterface):
         tool_calls = choice.message.tool_calls if choice.message.tool_calls else []

         if self.fancy:
-            message_string = f"💭 [italic]{inner_thoughts}[/italic]" if inner_thoughts else ""
+            message_string = f"{INNER_THOUGHTS_CLI_SYMBOL} [italic]{inner_thoughts}[/italic]" if inner_thoughts else ""
         else:
             message_string = "[inner thoughts] " + inner_thoughts if inner_thoughts else ""

@@ -326,7 +330,7 @@ class StreamingRefreshCLIInterface(AgentRefreshStreamingInterface):
                     message = function_args[len(prefix) :]
                 else:
                     message = function_args
-                message_string += f"🤖 [bold yellow]{message}[/bold yellow]"
+                message_string += f"{ASSISTANT_MESSAGE_CLI_SYMBOL} [bold yellow]{message}[/bold yellow]"
             else:
                 message_string += f"{function_name}({function_args})"

@@ -336,7 +340,7 @@ class StreamingRefreshCLIInterface(AgentRefreshStreamingInterface):
         if self.streaming:
             print()
             self.live.start()  # Start the Live display context and keep it running
-            self.update_output("💭 [italic]...[/italic]")
+            self.update_output(f"{INNER_THOUGHTS_CLI_SYMBOL} [italic]...[/italic]")

     def stream_end(self):
         if self.streaming:
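
The hard-coded 💭 and 🤖 glyphs above are replaced by constants imported from letta.local_llm.constants (which gains three lines in this release, not shown here). Their exact definitions are assumptions; presumably they look roughly like this sketch:

# Hypothetical reconstruction; values inferred from the strings they replace
# in streaming_interface.py above.
INNER_THOUGHTS_CLI_SYMBOL = "💭"
ASSISTANT_MESSAGE_CLI_SYMBOL = "🤖"

# Example of how the interface composes its status line with them.
inner_thoughts = "thinking about the user's request"
message_string = f"{INNER_THOUGHTS_CLI_SYMBOL} [italic]{inner_thoughts}[/italic]"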

{letta_nightly-0.4.1.dev20241008104105.dist-info → letta_nightly-0.4.1.dev20241010104112.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: letta-nightly
-Version: 0.4.1.dev20241008104105
+Version: 0.4.1.dev20241010104112
 Summary: Create LLM agents with long-term memory and custom tools
 License: Apache License
 Author: Letta Team