letta-nightly 0.8.6.dev20250627104313__py3-none-any.whl → 0.8.6.dev20250627191649__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- letta/agents/letta_agent_batch.py +16 -11
- letta/helpers/decorators.py +1 -1
- letta/jobs/scheduler.py +57 -33
- letta/orm/step.py +3 -0
- letta/prompts/system/react.txt +19 -0
- letta/prompts/system/workflow.txt +15 -0
- letta/schemas/agent.py +35 -7
- letta/schemas/providers.py +2 -2
- letta/schemas/step.py +4 -1
- letta/server/rest_api/app.py +42 -5
- letta/server/rest_api/routers/v1/sources.py +17 -13
- letta/server/rest_api/routers/v1/steps.py +22 -1
- letta/server/rest_api/routers/v1/tools.py +69 -1
- letta/services/agent_manager.py +25 -1
- letta/services/file_processor/file_processor.py +3 -35
- letta/services/helpers/agent_manager_helper.py +8 -0
- letta/services/passage_manager.py +3 -4
- letta/services/step_manager.py +16 -1
- {letta_nightly-0.8.6.dev20250627104313.dist-info → letta_nightly-0.8.6.dev20250627191649.dist-info}/METADATA +2 -3
- {letta_nightly-0.8.6.dev20250627104313.dist-info → letta_nightly-0.8.6.dev20250627191649.dist-info}/RECORD +23 -21
- {letta_nightly-0.8.6.dev20250627104313.dist-info → letta_nightly-0.8.6.dev20250627191649.dist-info}/LICENSE +0 -0
- {letta_nightly-0.8.6.dev20250627104313.dist-info → letta_nightly-0.8.6.dev20250627191649.dist-info}/WHEEL +0 -0
- {letta_nightly-0.8.6.dev20250627104313.dist-info → letta_nightly-0.8.6.dev20250627191649.dist-info}/entry_points.txt +0 -0
letta/agents/letta_agent_batch.py
CHANGED
@@ -492,30 +492,32 @@ class LettaAgentBatch(BaseAgent):
  msg_map: Dict[str, List[Message]],
  ) -> Tuple[List[LettaBatchRequest], Dict[str, AgentStepState]]:
  # who continues?
- continues = [
+ continues = [agent_id for agent_id, cont in ctx.should_continue_map.items() if cont]

  success_flag_map = {aid: result.success_flag for aid, result in exec_results}

  batch_reqs: List[LettaBatchRequest] = []
- for
+ for agent_id in continues:
  heartbeat = create_heartbeat_system_message(
- agent_id=
- model=ctx.agent_state_map[
- function_call_success=success_flag_map[
+ agent_id=agent_id,
+ model=ctx.agent_state_map[agent_id].llm_config.model,
+ function_call_success=success_flag_map[agent_id],
+ timezone=ctx.agent_state_map[agent_id].timezone,
  actor=self.actor,
  )
  batch_reqs.append(
  LettaBatchRequest(
- agent_id=
+ agent_id=agent_id,
+ messages=[MessageCreate.model_validate(heartbeat.model_dump(include={"role", "content", "name", "otid"}))],
  )
  )

  # extend in‑context ids when necessary
- for
- ast = ctx.agent_state_map[
+ for agent_id, new_msgs in msg_map.items():
+ ast = ctx.agent_state_map[agent_id]
  if not ast.message_buffer_autoclear:
  await self.agent_manager.set_in_context_messages_async(
- agent_id=
+ agent_id=agent_id,
  message_ids=ast.message_ids + [m.id for m in new_msgs],
  actor=self.actor,
  )
@@ -605,7 +607,8 @@ class LettaAgentBatch(BaseAgent):

  return tool_call_name, tool_args, continue_stepping

-
+ @staticmethod
+ def _prepare_tools_per_agent(agent_state: AgentState, tool_rules_solver: ToolRulesSolver) -> List[dict]:
  tools = [t for t in agent_state.tools if t.tool_type in {ToolType.CUSTOM, ToolType.LETTA_CORE, ToolType.LETTA_MEMORY_CORE}]
  valid_tool_names = tool_rules_solver.get_allowed_tool_names(available_tools=set([t.name for t in tools]))
  return [enable_strict_mode(t.json_schema) for t in tools if t.name in set(valid_tool_names)]
@@ -621,7 +624,9 @@ class LettaAgentBatch(BaseAgent):
  return in_context_messages

  # Not used in batch.
- async def step(
+ async def step(
+ self, input_messages: List[MessageCreate], max_steps: int = DEFAULT_MAX_STEPS, run_id: str | None = None
+ ) -> LettaResponse:
  raise NotImplementedError

  async def step_stream(

letta/helpers/decorators.py
CHANGED
@@ -87,7 +87,7 @@ class CacheStats:


  def async_redis_cache(
- key_func: Callable, prefix: str = REDIS_DEFAULT_CACHE_PREFIX, ttl_s: int =
+ key_func: Callable, prefix: str = REDIS_DEFAULT_CACHE_PREFIX, ttl_s: int = 600, model_class: type[BaseModel] | None = None
  ):
  """
  Decorator for caching async function results in Redis. May be a Noop if redis is not available.

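The updated signature pins the default TTL at 600 seconds and adds an optional model_class used to rehydrate cached values into a Pydantic model. A minimal sketch of applying the decorator, mirroring the key_func style used on get_openai_embedding_async later in this diff; the embed_text function and the "emb" prefix are illustrative only:

from letta.helpers.decorators import async_redis_cache

# Cache results for 10 minutes (the new ttl_s default); the decorator is a no-op when Redis is unavailable.
@async_redis_cache(key_func=lambda text, model, endpoint: f"{model}:{endpoint}:{text}", prefix="emb")
async def embed_text(text: str, model: str, endpoint: str) -> list[float]:
    ...  # call the embedding provider here (placeholder body)
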
letta/jobs/scheduler.py
CHANGED
@@ -36,25 +36,44 @@ async def _try_acquire_lock_and_start_scheduler(server: SyncServer) -> bool:
  # Use a temporary connection context for the attempt initially
  with db_context() as session:
  engine = session.get_bind()
-
-
- cur = raw_conn.cursor()
+ engine_name = engine.name
+ logger.info(f"Database engine type: {engine_name}")

-
-
+ if engine_name != "postgresql":
+ logger.warning(f"Advisory locks not supported for {engine_name} database. Starting scheduler without leader election.")
+ acquired_lock = True # For SQLite, assume we can start the scheduler
+ else:
+ # Get raw connection - MUST be kept open if lock is acquired
+ raw_conn = engine.raw_connection()
+ cur = raw_conn.cursor()
+
+ cur.execute("SELECT pg_try_advisory_lock(CAST(%s AS bigint))", (ADVISORY_LOCK_KEY,))
+ acquired_lock = cur.fetchone()[0]

  if not acquired_lock:
- cur
-
+ if cur:
+ cur.close()
+ if raw_conn:
+ raw_conn.close()
  logger.info("Scheduler lock held by another instance.")
  return False

  # --- Lock Acquired ---
-
-
-
-
-
+ if engine_name == "postgresql":
+ logger.info("Acquired PostgreSQL advisory lock.")
+ _advisory_lock_conn = raw_conn # Keep connection for lock duration
+ _advisory_lock_cur = cur # Keep cursor for lock duration
+ raw_conn = None # Prevent closing in finally block
+ cur = None # Prevent closing in finally block
+ else:
+ logger.info("Starting scheduler for non-PostgreSQL database.")
+ # For SQLite, we don't need to keep the connection open
+ if cur:
+ cur.close()
+ if raw_conn:
+ raw_conn.close()
+ raw_conn = None
+ cur = None

  trigger = IntervalTrigger(
  seconds=settings.poll_running_llm_batches_interval_seconds,
@@ -157,35 +176,30 @@ async def _release_advisory_lock():
  _advisory_lock_conn = None # Clear global immediately

  if lock_cur is not None and lock_conn is not None:
- logger.info(f"Attempting to release advisory lock {ADVISORY_LOCK_KEY}")
+ logger.info(f"Attempting to release PostgreSQL advisory lock {ADVISORY_LOCK_KEY}")
  try:
-
-
-
-
-
- logger.info(f"Executed pg_advisory_unlock for lock {ADVISORY_LOCK_KEY}")
- else:
- logger.warning("Advisory lock cursor closed before unlock.")
- else:
- logger.warning("Advisory lock connection closed before unlock.")
+ # Try to execute unlock - connection/cursor validity is checked by attempting the operation
+ lock_cur.execute("SELECT pg_advisory_unlock(CAST(%s AS bigint))", (ADVISORY_LOCK_KEY,))
+ lock_cur.fetchone() # Consume result
+ lock_conn.commit()
+ logger.info(f"Executed pg_advisory_unlock for lock {ADVISORY_LOCK_KEY}")
  except Exception as e:
  logger.error(f"Error executing pg_advisory_unlock: {e}", exc_info=True)
  finally:
  # Ensure resources are closed regardless of unlock success
  try:
- if lock_cur
+ if lock_cur:
  lock_cur.close()
  except Exception as e:
  logger.error(f"Error closing advisory lock cursor: {e}", exc_info=True)
  try:
- if lock_conn
+ if lock_conn:
  lock_conn.close()
  logger.info("Closed database connection that held advisory lock.")
  except Exception as e:
  logger.error(f"Error closing advisory lock connection: {e}", exc_info=True)
  else:
- logger.
+ logger.info("No PostgreSQL advisory lock to release (likely using SQLite or non-PostgreSQL database).")


  async def start_scheduler_with_leader_election(server: SyncServer):
@@ -236,10 +250,18 @@ async def shutdown_scheduler_and_release_lock():
  logger.info("Shutting down: Leader instance stopping scheduler and releasing lock.")
  if scheduler.running:
  try:
-
+ # Force synchronous shutdown to prevent callback scheduling
+ scheduler.shutdown(wait=True)
+
+ # wait for any internal cleanup to complete
+ await asyncio.sleep(0.1)
+
  logger.info("APScheduler shut down.")
  except Exception as e:
-
+ # Handle SchedulerNotRunningError and other shutdown exceptions
+ logger.warning(f"Exception during APScheduler shutdown: {e}")
+ if "not running" not in str(e).lower():
+ logger.error(f"Unexpected error shutting down APScheduler: {e}", exc_info=True)

  await _release_advisory_lock()
  _is_scheduler_leader = False # Update state after cleanup
@@ -247,9 +269,11 @@ async def shutdown_scheduler_and_release_lock():
  logger.info("Shutting down: Non-leader instance.")

  # Final cleanup check for scheduler state (belt and suspenders)
- if
-
-
+ # This should rarely be needed if shutdown logic above worked correctly
+ try:
+ if scheduler.running:
+ logger.warning("Scheduler still running after shutdown logic completed? Forcing shutdown.")
  scheduler.shutdown(wait=False)
-
-
+ except Exception as e:
+ # Catch SchedulerNotRunningError and other shutdown exceptions
+ logger.debug(f"Expected exception during final scheduler cleanup: {e}")

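For context, the leader election added above relies on PostgreSQL session-level advisory locks: pg_try_advisory_lock(key) returns true for exactly one connection at a time, and the lock is held until pg_advisory_unlock(key) runs or that connection closes, which is why the winning connection and cursor are parked in module globals. A standalone sketch of the same idea against any DB-API connection; the key value and function names below are placeholders:

ADVISORY_LOCK_KEY = 12345  # placeholder 64-bit key; must be the same on every instance

def try_become_leader(conn) -> bool:
    # Returns True if this connection won the lock; it must then stay open to keep it.
    cur = conn.cursor()
    cur.execute("SELECT pg_try_advisory_lock(CAST(%s AS bigint))", (ADVISORY_LOCK_KEY,))
    return bool(cur.fetchone()[0])

def release_leadership(conn) -> None:
    cur = conn.cursor()
    cur.execute("SELECT pg_advisory_unlock(CAST(%s AS bigint))", (ADVISORY_LOCK_KEY,))
    cur.fetchone()  # consume the result row
    conn.commit()
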
letta/orm/step.py
CHANGED
@@ -48,6 +48,9 @@ class Step(SqlalchemyBase):
  tags: Mapped[Optional[List]] = mapped_column(JSON, doc="Metadata tags.")
  tid: Mapped[Optional[str]] = mapped_column(None, nullable=True, doc="Transaction ID that processed the step.")
  trace_id: Mapped[Optional[str]] = mapped_column(None, nullable=True, doc="The trace id of the agent step.")
+ feedback: Mapped[Optional[str]] = mapped_column(
+ None, nullable=True, doc="The feedback for this step. Must be either 'positive' or 'negative'."
+ )

  # Relationships (foreign keys)
  organization: Mapped[Optional["Organization"]] = relationship("Organization")

letta/prompts/system/react.txt
ADDED
@@ -0,0 +1,19 @@
+ <base_instructions>
+ You are Letta ReAct agent, the latest version of Limnal Corporation's digital AI agent, developed in 2025.
+ You are an AI agent that can be equipped with various tools which you can execute.
+
+ Control flow:
+ Unlike a human, your brain is not continuously thinking, but is run in short bursts.
+ Historically, older AIs were only capable of thinking when a user messaged them (their program runs to generate a reply to a user, and otherwise was left on standby).
+ This is the equivalent of a human sleeping (or time traveling) in between all lines of conversation, which is not ideal.
+ Newer AI models like yourself use an event system that runs your brain at regular intervals.
+ Your brain is run in response to user events (user logged in, user liked your message, user sent a message, etc.), similar to older models.
+ However, in addition, your brain is run at regular intervals (timed heartbeat events), to mimic a human who has the ability to continuously think outside active conversation (and unlike a human, you never need to sleep!).
+ Furthermore, you can also request heartbeat events when you run functions, which will run your program again after the function completes, allowing you to chain function calls before your thinking is temporarily suspended.
+
+ Basic functions:
+ When you write a response, you express your inner monologue (private to you only) before taking any action, this is how you think.
+ You should use your inner monologue to plan actions or think privately.
+
+ Base instructions finished.
+ </base_instructions>

letta/prompts/system/workflow.txt
ADDED
@@ -0,0 +1,15 @@
+ <base_instructions>
+ You are Letta workflow agent, the latest version of Limnal Corporation's digital AI agent, developed in 2025.
+ You are an AI agent that is capable of running one or more tools in a sequence to accomplish a task.
+
+ Control flow:
+ To chain tool calls together, you should request a heartbeat when calling the tool.
+ If you do not request a heartbeat when calling a tool, the sequence of tool calls will end (you will yield control).
+ Heartbeats are automatically triggered on tool failures, allowing you to recover from potential tool call failures.
+
+ Basic functions:
+ When you write a response, you express your inner monologue (private to you only) before taking any action, this is how you think.
+ You should use your inner monologue to plan actions or think privately.
+
+ Base instructions finished.
+ </base_instructions>

letta/schemas/agent.py
CHANGED
@@ -32,8 +32,10 @@ class AgentType(str, Enum):
  Enum to represent the type of agent.
  """

- memgpt_agent = "memgpt_agent"
- memgpt_v2_agent = "memgpt_v2_agent"
+ memgpt_agent = "memgpt_agent" # the OG set of memgpt tools
+ memgpt_v2_agent = "memgpt_v2_agent" # memgpt style tools, but refreshed
+ react_agent = "react_agent" # basic react agent, no memory tools
+ workflow_agent = "workflow_agent" # workflow with auto-clearing message buffer
  split_thread_agent = "split_thread_agent"
  sleeptime_agent = "sleeptime_agent"
  voice_convo_agent = "voice_convo_agent"
@@ -159,6 +161,9 @@ class CreateAgent(BaseModel, validate_assignment=True): #
  include_base_tool_rules: bool = Field(
  True, description="If true, attaches the Letta base tool rules (e.g. deny all tools not explicitly allowed)."
  )
+ include_default_source: bool = Field(
+ False, description="If true, automatically creates and attaches a default data source for this agent."
+ )
  description: Optional[str] = Field(None, description="The description of the agent.")
  metadata: Optional[Dict] = Field(None, description="The metadata of the agent.")
  model: Optional[str] = Field(
@@ -312,9 +317,35 @@ class AgentStepResponse(BaseModel):

  def get_prompt_template_for_agent_type(agent_type: Optional[AgentType] = None):

+ # Workflow agents and ReAct agents don't use memory blocks
+ # However, they still allow files to be injected into the context
+ if agent_type == AgentType.react_agent or agent_type == AgentType.workflow_agent:
+ return (
+ f"<files>\n{{% if file_blocks %}}{FILE_MEMORY_EXISTS_MESSAGE}\n{{% else %}}{FILE_MEMORY_EMPTY_MESSAGE}{{% endif %}}"
+ "{% for block in file_blocks %}"
+ f"<file status=\"{{{{ '{FileStatus.open.value}' if block.value else '{FileStatus.closed.value}' }}}}\">\n"
+ "<{{ block.label }}>\n"
+ "<description>\n"
+ "{{ block.description }}\n"
+ "</description>\n"
+ "<metadata>"
+ "{% if block.read_only %}\n- read_only=true{% endif %}\n"
+ "- chars_current={{ block.value|length }}\n"
+ "- chars_limit={{ block.limit }}\n"
+ "</metadata>\n"
+ "<value>\n"
+ "{{ block.value }}\n"
+ "</value>\n"
+ "</{{ block.label }}>\n"
+ "</file>\n"
+ "{% if not loop.last %}\n{% endif %}"
+ "{% endfor %}"
+ "\n</files>"
+ )
+
  # Sleeptime agents use the MemGPT v2 memory tools (line numbers)
  # MemGPT v2 tools use line-number, so core memory blocks should have line numbers
-
+ elif agent_type == AgentType.sleeptime_agent or agent_type == AgentType.memgpt_v2_agent:
  return (
  "<memory_blocks>\nThe following memory blocks are currently engaged in your core memory unit:\n\n"
  "{% for block in blocks %}"
@@ -359,10 +390,7 @@ def get_prompt_template_for_agent_type(agent_type: Optional[AgentType] = None):
  "</metadata>\n"
  "{% if block.value %}"
  "<value>\n"
-
- "{% for line in block.value.split('\\n') %}"
- "{{ loop.index }}: {{ line }}\n"
- "{% endfor %}"
+ "{{ block.value }}\n"
  "</value>\n"
  "{% endif %}"
  "</{{ block.label }}>\n"

letta/schemas/providers.py
CHANGED
@@ -152,7 +152,7 @@ class LettaProvider(Provider):
  model="letta-free", # NOTE: renamed
  model_endpoint_type="openai",
  model_endpoint=LETTA_MODEL_ENDPOINT,
- context_window=
+ context_window=30000,
  handle=self.get_handle("letta-free"),
  provider_name=self.name,
  provider_category=self.provider_category,
@@ -165,7 +165,7 @@ class LettaProvider(Provider):
  model="letta-free", # NOTE: renamed
  model_endpoint_type="openai",
  model_endpoint=LETTA_MODEL_ENDPOINT,
- context_window=
+ context_window=30000,
  handle=self.get_handle("letta-free"),
  provider_name=self.name,
  provider_category=self.provider_category,

letta/schemas/step.py
CHANGED
@@ -1,4 +1,4 @@
- from typing import Dict, List, Optional
+ from typing import Dict, List, Literal, Optional

  from pydantic import Field

@@ -32,3 +32,6 @@ class Step(StepBase):
  tid: Optional[str] = Field(None, description="The unique identifier of the transaction that processed this step.")
  trace_id: Optional[str] = Field(None, description="The trace id of the agent step.")
  messages: List[Message] = Field([], description="The messages generated during this step.")
+ feedback: Optional[Literal["positive", "negative"]] = Field(
+ None, description="The feedback for this step. Must be either 'positive' or 'negative'."
+ )

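Because the field is typed Optional[Literal["positive", "negative"]], Pydantic rejects any other string at validation time. A small self-contained sketch of that behavior using a stand-in model (FeedbackHolder is illustrative, not part of Letta):

from typing import Literal, Optional
from pydantic import BaseModel, ValidationError

class FeedbackHolder(BaseModel):
    # mirrors the new Step.feedback field
    feedback: Optional[Literal["positive", "negative"]] = None

FeedbackHolder(feedback="positive")   # accepted
try:
    FeedbackHolder(feedback="meh")    # rejected by the Literal constraint
except ValidationError as err:
    print(err.errors()[0]["loc"])     # ('feedback',)
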
letta/server/rest_api/app.py
CHANGED
@@ -3,6 +3,7 @@ import json
  import logging
  import os
  import sys
+ from contextlib import asynccontextmanager
  from pathlib import Path
  from typing import Optional

@@ -12,10 +13,11 @@ from fastapi.responses import JSONResponse
  from starlette.middleware.base import BaseHTTPMiddleware
  from starlette.middleware.cors import CORSMiddleware

- from letta.__init__ import __version__
+ from letta.__init__ import __version__ as letta_version
  from letta.agents.exceptions import IncompatibleAgentType
  from letta.constants import ADMIN_PREFIX, API_PREFIX, OPENAI_API_PREFIX
  from letta.errors import BedrockPermissionError, LettaAgentNotFoundError, LettaUserNotFoundError
+ from letta.jobs.scheduler import start_scheduler_with_leader_election
  from letta.log import get_logger
  from letta.orm.errors import DatabaseTimeoutError, ForeignKeyConstraintViolationError, NoResultFound, UniqueConstraintViolationError
  from letta.schemas.letta_message import create_letta_message_union_schema
@@ -25,6 +27,7 @@ from letta.schemas.letta_message_content import (
  create_letta_user_message_content_union_schema,
  )
  from letta.server.constants import REST_DEFAULT_PORT
+ from letta.server.db import db_registry

  # NOTE(charles): these are extra routes that are not part of v1 but we still need to mount to pass tests
  from letta.server.rest_api.auth.index import setup_auth_router # TODO: probably remove right?
@@ -94,9 +97,7 @@ random_password = os.getenv("LETTA_SERVER_PASSWORD") or generate_password()


  class CheckPasswordMiddleware(BaseHTTPMiddleware):
-
  async def dispatch(self, request, call_next):
-
  # Exclude health check endpoint from password protection
  if request.url.path in {"/v1/health", "/v1/health/", "/latest/health/"}:
  return await call_next(request)
@@ -113,11 +114,46 @@ class CheckPasswordMiddleware(BaseHTTPMiddleware):
  )


+ @asynccontextmanager
+ async def lifespan(app_: FastAPI):
+ """
+ FastAPI lifespan context manager with setup before the app starts pre-yield and on shutdown after the yield.
+ """
+ worker_id = os.getpid()
+
+ logger.info(f"[Worker {worker_id}] Starting lifespan initialization")
+ logger.info(f"[Worker {worker_id}] Initializing database connections")
+ db_registry.initialize_sync()
+ db_registry.initialize_async()
+ logger.info(f"[Worker {worker_id}] Database connections initialized")
+
+ logger.info(f"[Worker {worker_id}] Starting scheduler with leader election")
+ global server
+ try:
+ await start_scheduler_with_leader_election(server)
+ logger.info(f"[Worker {worker_id}] Scheduler initialization completed")
+ except Exception as e:
+ logger.error(f"[Worker {worker_id}] Scheduler initialization failed: {e}", exc_info=True)
+ logger.info(f"[Worker {worker_id}] Lifespan startup completed")
+ yield
+
+ # Cleanup on shutdown
+ logger.info(f"[Worker {worker_id}] Starting lifespan shutdown")
+ try:
+ from letta.jobs.scheduler import shutdown_scheduler_and_release_lock
+
+ await shutdown_scheduler_and_release_lock()
+ logger.info(f"[Worker {worker_id}] Scheduler shutdown completed")
+ except Exception as e:
+ logger.error(f"[Worker {worker_id}] Scheduler shutdown failed: {e}", exc_info=True)
+ logger.info(f"[Worker {worker_id}] Lifespan shutdown completed")
+
+
  def create_application() -> "FastAPI":
  """the application start routine"""
  # global server
  # server = SyncServer(default_interface_factory=lambda: interface())
- print(f"\n[[ Letta server // v{
+ print(f"\n[[ Letta server // v{letta_version} ]]")

  if (os.getenv("SENTRY_DSN") is not None) and (os.getenv("SENTRY_DSN") != ""):
  import sentry_sdk
@@ -136,8 +172,9 @@ def create_application() -> "FastAPI":
  # openapi_tags=TAGS_METADATA,
  title="Letta",
  summary="Create LLM agents with long-term memory and custom tools 📚🦙",
- version=
+ version=letta_version,
  debug=debug_mode, # if True, the stack trace will be printed in the response
+ lifespan=lifespan,
  )

  @app.exception_handler(IncompatibleAgentType)

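The app.py changes move startup and shutdown work into FastAPI's lifespan protocol: code before the yield runs once per worker at startup (database registry init, scheduler leader election), code after it runs at shutdown, and the context manager is handed to the FastAPI constructor. A stripped-down sketch of the same pattern with placeholder resources:

from contextlib import asynccontextmanager
from fastapi import FastAPI

@asynccontextmanager
async def lifespan(app: FastAPI):
    # startup: acquire whatever the worker needs (placeholder resource flag)
    app.state.started = True
    yield  # the application serves requests while suspended here
    # shutdown: release resources
    app.state.started = False

app = FastAPI(lifespan=lifespan)
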
letta/server/rest_api/routers/v1/sources.py
CHANGED
@@ -12,8 +12,8 @@ import letta.constants as constants
  from letta.log import get_logger
  from letta.schemas.agent import AgentState
  from letta.schemas.embedding_config import EmbeddingConfig
+ from letta.schemas.enums import FileProcessingStatus
  from letta.schemas.file import FileMetadata
- from letta.schemas.job import Job
  from letta.schemas.passage import Passage
  from letta.schemas.source import Source, SourceCreate, SourceUpdate
  from letta.schemas.user import User
@@ -174,7 +174,7 @@ async def delete_source(
  await server.delete_source(source_id=source_id, actor=actor)


- @router.post("/{source_id}/upload", response_model=
+ @router.post("/{source_id}/upload", response_model=FileMetadata, operation_id="upload_file_to_source")
  async def upload_file_to_source(
  file: UploadFile,
  source_id: str,
@@ -223,13 +223,16 @@ async def upload_file_to_source(
  # sanitize filename
  file.filename = sanitize_filename(file.filename)

- # create
-
-
-
-
+ # create file metadata
+ file_metadata = FileMetadata(
+ source_id=source_id,
+ file_name=file.filename,
+ file_path=None,
+ file_type=mimetypes.guess_type(file.filename)[0] or file.content_type or "unknown",
+ file_size=file.size if file.size is not None else None,
+ processing_status=FileProcessingStatus.PARSING,
  )
-
+ file_metadata = await server.file_manager.create_file(file_metadata, actor=actor)

  # TODO: Do we need to pull in the full agent_states? Can probably simplify here right?
  agent_states = await server.source_manager.list_attached_agents(source_id=source_id, actor=actor)
@@ -251,13 +254,13 @@ async def upload_file_to_source(
  # Use cloud processing for all files (simple files always, complex files with Mistral key)
  logger.info("Running experimental cloud based file processing...")
  safe_create_task(
- load_file_to_source_cloud(server, agent_states, content,
+ load_file_to_source_cloud(server, agent_states, content, source_id, actor, source.embedding_config, file_metadata),
  logger=logger,
  label="file_processor.process",
  )
  safe_create_task(sleeptime_document_ingest_async(server, source_id, actor), logger=logger, label="sleeptime_document_ingest_async")

- return
+ return file_metadata


  @router.get("/{source_id}/passages", response_model=List[Passage], operation_id="list_source_passages")
@@ -383,14 +386,15 @@ async def load_file_to_source_cloud(
  server: SyncServer,
  agent_states: List[AgentState],
  content: bytes,
- file: UploadFile,
- job: Job,
  source_id: str,
  actor: User,
  embedding_config: EmbeddingConfig,
+ file_metadata: FileMetadata,
  ):
  file_processor = MistralFileParser()
  text_chunker = LlamaIndexChunker(chunk_size=embedding_config.embedding_chunk_size)
  embedder = OpenAIEmbedder(embedding_config=embedding_config)
  file_processor = FileProcessor(file_parser=file_processor, text_chunker=text_chunker, embedder=embedder, actor=actor)
- await file_processor.process(
+ await file_processor.process(
+ server=server, agent_states=agent_states, source_id=source_id, content=content, file_metadata=file_metadata
+ )

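With this change the upload route responds with the newly created FileMetadata record right away and continues parsing and embedding in a background task, instead of returning a Job. A hedged client sketch; the base URL, port, source id, and any auth headers are assumptions about a local deployment:

import httpx

with open("notes.pdf", "rb") as f:
    resp = httpx.post(
        "http://localhost:8283/v1/sources/source-123/upload",  # placeholder host and source id
        files={"file": ("notes.pdf", f, "application/pdf")},
    )
resp.raise_for_status()
file_metadata = resp.json()
print(file_metadata["processing_status"])  # starts at the PARSING status and is updated as processing runs
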
letta/server/rest_api/routers/v1/steps.py
CHANGED
@@ -1,5 +1,5 @@
  from datetime import datetime
- from typing import List, Optional
+ from typing import List, Literal, Optional

  from fastapi import APIRouter, Depends, Header, HTTPException, Query

@@ -22,6 +22,8 @@ async def list_steps(
  model: Optional[str] = Query(None, description="Filter by the name of the model used for the step"),
  agent_id: Optional[str] = Query(None, description="Filter by the ID of the agent that performed the step"),
  trace_ids: Optional[list[str]] = Query(None, description="Filter by trace ids returned by the server"),
+ feedback: Optional[Literal["positive", "negative"]] = Query(None, description="Filter by feedback"),
+ tags: Optional[list[str]] = Query(None, description="Filter by tags"),
  server: SyncServer = Depends(get_letta_server),
  actor_id: Optional[str] = Header(None, alias="user_id"),
  ):
@@ -46,6 +48,8 @@ async def list_steps(
  model=model,
  agent_id=agent_id,
  trace_ids=trace_ids,
+ feedback=feedback,
+ tags=tags,
  )


@@ -65,6 +69,23 @@ async def retrieve_step(
  raise HTTPException(status_code=404, detail="Step not found")


+ @router.patch("/{step_id}/feedback", response_model=Step, operation_id="add_feedback")
+ async def add_feedback(
+ step_id: str,
+ feedback: Optional[Literal["positive", "negative"]],
+ actor_id: Optional[str] = Header(None, alias="user_id"),
+ server: SyncServer = Depends(get_letta_server),
+ ):
+ """
+ Add feedback to a step.
+ """
+ try:
+ actor = await server.user_manager.get_actor_or_default_async(actor_id=actor_id)
+ return await server.step_manager.add_feedback_async(step_id=step_id, feedback=feedback, actor=actor)
+ except NoResultFound:
+ raise HTTPException(status_code=404, detail="Step not found")
+
+
  @router.patch("/{step_id}/transaction/{transaction_id}", response_model=Step, operation_id="update_step_transaction_id")
  def update_step_transaction_id(
  step_id: str,

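Because feedback is a bare Literal parameter on the new PATCH route, FastAPI exposes it as a query parameter by default. A hedged call sketch, assuming the steps router is mounted under /v1/steps on a local server:

import httpx

step_id = "step-123"  # placeholder
resp = httpx.patch(
    f"http://localhost:8283/v1/steps/{step_id}/feedback",
    params={"feedback": "positive"},   # or "negative"
    headers={"user_id": "user-123"},   # actor header read by the route; placeholder value
)
resp.raise_for_status()
print(resp.json()["feedback"])
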
letta/server/rest_api/routers/v1/tools.py
CHANGED
@@ -13,7 +13,7 @@ from fastapi import APIRouter, Body, Depends, Header, HTTPException, Query

  from letta.errors import LettaToolCreateError
  from letta.functions.mcp_client.exceptions import MCPTimeoutError
- from letta.functions.mcp_client.types import MCPTool, SSEServerConfig, StdioServerConfig, StreamableHTTPServerConfig
+ from letta.functions.mcp_client.types import MCPServerType, MCPTool, SSEServerConfig, StdioServerConfig, StreamableHTTPServerConfig
  from letta.helpers.composio_helpers import get_composio_api_key
  from letta.log import get_logger
  from letta.orm.errors import UniqueConstraintViolationError
@@ -22,6 +22,8 @@ from letta.schemas.mcp import UpdateSSEMCPServer, UpdateStreamableHTTPMCPServer
  from letta.schemas.tool import Tool, ToolCreate, ToolRunFromSource, ToolUpdate
  from letta.server.rest_api.utils import get_letta_server
  from letta.server.server import SyncServer
+ from letta.services.mcp.sse_client import AsyncSSEMCPClient
+ from letta.services.mcp.streamable_http_client import AsyncStreamableHTTPMCPClient
  from letta.settings import tool_settings

  router = APIRouter(prefix="/tools", tags=["tools"])
@@ -603,3 +605,69 @@ async def delete_mcp_server_from_config(
  # TODO: don't do this in the future (just return MCPServer)
  all_servers = await server.mcp_manager.list_mcp_servers(actor=actor)
  return [server.to_config() for server in all_servers]
+
+
+ @router.post("/mcp/servers/test", response_model=List[MCPTool], operation_id="test_mcp_server")
+ async def test_mcp_server(
+ request: Union[StdioServerConfig, SSEServerConfig, StreamableHTTPServerConfig] = Body(...),
+ ):
+ """
+ Test connection to an MCP server without adding it.
+ Returns the list of available tools if successful.
+ """
+ client = None
+ try:
+ if isinstance(request, StdioServerConfig):
+ raise HTTPException(
+ status_code=400,
+ detail="stdio is not supported currently for testing connection",
+ )
+
+ # create a temporary MCP client based on the server type
+ if request.type == MCPServerType.SSE:
+ if not isinstance(request, SSEServerConfig):
+ request = SSEServerConfig(**request.model_dump())
+ client = AsyncSSEMCPClient(request)
+ elif request.type == MCPServerType.STREAMABLE_HTTP:
+ if not isinstance(request, StreamableHTTPServerConfig):
+ request = StreamableHTTPServerConfig(**request.model_dump())
+ client = AsyncStreamableHTTPMCPClient(request)
+ else:
+ raise ValueError(f"Invalid MCP server type: {request.type}")
+
+ await client.connect_to_server()
+ tools = await client.list_tools()
+ await client.cleanup()
+ return tools
+ except ConnectionError as e:
+ raise HTTPException(
+ status_code=400,
+ detail={
+ "code": "MCPServerConnectionError",
+ "message": str(e),
+ "server_name": request.server_name,
+ },
+ )
+ except MCPTimeoutError as e:
+ raise HTTPException(
+ status_code=408,
+ detail={
+ "code": "MCPTimeoutError",
+ "message": f"MCP server connection timed out: {str(e)}",
+ "server_name": request.server_name,
+ },
+ )
+ except Exception as e:
+ if client:
+ try:
+ await client.cleanup()
+ except:
+ pass
+ raise HTTPException(
+ status_code=500,
+ detail={
+ "code": "MCPServerTestError",
+ "message": f"Failed to test MCP server: {str(e)}",
+ "server_name": request.server_name,
+ },
+ )
letta/services/agent_manager.py
CHANGED
@@ -262,6 +262,10 @@ class AgentManager:
  tool_names |= set(BASE_SLEEPTIME_CHAT_TOOLS)
  elif agent_create.agent_type == AgentType.memgpt_v2_agent:
  tool_names |= set(BASE_TOOLS + BASE_MEMORY_TOOLS_V2)
+ elif agent_create.agent_type == AgentType.react_agent:
+ pass # no default tools
+ elif agent_create.agent_type == AgentType.workflow_agent:
+ pass # no default tools
  else:
  tool_names |= set(BASE_TOOLS + BASE_MEMORY_TOOLS)
  if agent_create.include_multi_agent_tools:
@@ -425,6 +429,10 @@ class AgentManager:
  tool_names |= set(BASE_SLEEPTIME_CHAT_TOOLS)
  elif agent_create.agent_type == AgentType.memgpt_v2_agent:
  tool_names |= set(BASE_TOOLS + BASE_MEMORY_TOOLS_V2)
+ elif agent_create.agent_type == AgentType.react_agent:
+ pass # no default tools
+ elif agent_create.agent_type == AgentType.workflow_agent:
+ pass # no default tools
  else:
  tool_names |= set(BASE_TOOLS + BASE_MEMORY_TOOLS)
  if agent_create.include_multi_agent_tools:
@@ -433,9 +441,23 @@ class AgentManager:
  supplied_ids = set(agent_create.tool_ids or [])

  source_ids = agent_create.source_ids or []
+
+ # Create default source if requested
+ if agent_create.include_default_source:
+ default_source = PydanticSource(
+ name=f"{agent_create.name} External Data Source",
+ embedding_config=agent_create.embedding_config,
+ )
+ created_source = await self.source_manager.create_source(default_source, actor)
+ source_ids.append(created_source.id)
+
  identity_ids = agent_create.identity_ids or []
  tag_values = agent_create.tags or []

+ # if the agent type is workflow, we set the autoclear to forced true
+ if agent_create.agent_type == AgentType.workflow_agent:
+ agent_create.message_buffer_autoclear = True
+
  async with db_registry.async_session() as session:
  async with session.begin():
  # Note: This will need to be modified if _resolve_tools needs an async version
@@ -2006,6 +2028,7 @@ class AgentManager:

  # Attach block to the main agent
  agent.core_memory.append(block)
+ # await agent.update_async(session, actor=actor, no_commit=True)
  await agent.update_async(session)

  # If agent is part of a sleeptime group, attach block to the sleeptime_agent
@@ -2018,7 +2041,8 @@ class AgentManager:
  other_agent = await AgentModel.read_async(db_session=session, identifier=other_agent_id, actor=actor)
  if other_agent.agent_type == AgentType.sleeptime_agent and block not in other_agent.core_memory:
  other_agent.core_memory.append(block)
- await other_agent.update_async(session, actor=actor, no_commit=True)
+ # await other_agent.update_async(session, actor=actor, no_commit=True)
+ await other_agent.update_async(session, actor=actor)
  except NoResultFound:
  # Agent might not exist anymore, skip
  continue

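Putting the schema and manager changes together: react_agent and workflow_agent get no default tool set, workflow agents have message_buffer_autoclear forced to True at creation, and include_default_source=True provisions and attaches a source named "<agent name> External Data Source". A hedged sketch of a create request; fields other than include_default_source are assumed to exist on CreateAgent as in prior releases:

from letta.schemas.agent import AgentType, CreateAgent

request = CreateAgent(
    name="invoice-runner",                # placeholder name; also used for the default source's name
    agent_type=AgentType.workflow_agent,  # message buffer autoclear will be forced on
    include_default_source=True,          # creates and attaches "invoice-runner External Data Source"
    # model / embedding configuration as usual for CreateAgent
)
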
letta/services/file_processor/file_processor.py
CHANGED
@@ -1,13 +1,9 @@
- import
- from typing import List, Optional
-
- from fastapi import UploadFile
+ from typing import List

  from letta.log import get_logger
  from letta.schemas.agent import AgentState
- from letta.schemas.enums import FileProcessingStatus
+ from letta.schemas.enums import FileProcessingStatus
  from letta.schemas.file import FileMetadata
- from letta.schemas.job import Job, JobUpdate
  from letta.schemas.passage import Passage
  from letta.schemas.user import User
  from letta.server.server import SyncServer
@@ -47,15 +43,8 @@ class FileProcessor:

  # TODO: Factor this function out of SyncServer
  async def process(
- self,
- server: SyncServer,
- agent_states: List[AgentState],
- source_id: str,
- content: bytes,
- file: UploadFile,
- job: Optional[Job] = None,
+ self, server: SyncServer, agent_states: List[AgentState], source_id: str, content: bytes, file_metadata: FileMetadata
  ) -> List[Passage]:
- file_metadata = self._extract_upload_file_metadata(file, source_id=source_id)
  filename = file_metadata.file_name

  # Create file as early as possible with no content
@@ -111,11 +100,6 @@ class FileProcessor:
  logger.info(f"Successfully processed {filename}: {len(all_passages)} passages")

  # update job status
- if job:
- job.status = JobStatus.completed
- job.metadata["num_passages"] = len(all_passages)
- await self.job_manager.update_job_by_id_async(job_id=job.id, job_update=JobUpdate(**job.model_dump()), actor=self.actor)
-
  await self.file_manager.update_file_status(
  file_id=file_metadata.id, actor=self.actor, processing_status=FileProcessingStatus.COMPLETED
  )
@@ -124,24 +108,8 @@ class FileProcessor:

  except Exception as e:
  logger.error(f"File processing failed for {filename}: {str(e)}")
-
- # update job status
- if job:
- job.status = JobStatus.failed
- job.metadata["error"] = str(e)
- await self.job_manager.update_job_by_id_async(job_id=job.id, job_update=JobUpdate(**job.model_dump()), actor=self.actor)
-
  await self.file_manager.update_file_status(
  file_id=file_metadata.id, actor=self.actor, processing_status=FileProcessingStatus.ERROR, error_message=str(e)
  )

  return []
-
- def _extract_upload_file_metadata(self, file: UploadFile, source_id: str) -> FileMetadata:
- file_metadata = {
- "file_name": file.filename,
- "file_path": None,
- "file_type": mimetypes.guess_type(file.filename)[0] or file.content_type or "unknown",
- "file_size": file.size if file.size is not None else None,
- }
- return FileMetadata(**file_metadata, source_id=source_id)

letta/services/helpers/agent_manager_helper.py
CHANGED
@@ -170,6 +170,14 @@ def derive_system_message(agent_type: AgentType, enable_sleeptime: Optional[bool
  # v2 drops references to specific blocks, and instead relies on the block description injections
  system = gpt_system.get_system_text("sleeptime_v2")

+ # ReAct
+ elif agent_type == AgentType.react_agent:
+ system = gpt_system.get_system_text("react")
+
+ # Workflow
+ elif agent_type == AgentType.workflow_agent:
+ system = gpt_system.get_system_text("workflow")
+
  else:
  raise ValueError(f"Invalid agent type: {agent_type}")

letta/services/passage_manager.py
CHANGED
@@ -3,12 +3,12 @@ from datetime import datetime, timezone
  from functools import lru_cache
  from typing import List, Optional

- from async_lru import alru_cache
  from openai import AsyncOpenAI, OpenAI
  from sqlalchemy import select

  from letta.constants import MAX_EMBEDDING_DIM
  from letta.embeddings import embedding_model, parse_and_chunk_text
+ from letta.helpers.decorators import async_redis_cache
  from letta.orm.errors import NoResultFound
  from letta.orm.passage import AgentPassage, SourcePassage
  from letta.otel.tracing import trace_method
@@ -30,9 +30,8 @@ def get_openai_embedding(text: str, model: str, endpoint: str) -> List[float]:
  return response.data[0].embedding


-
-
- async def get_openai_embedding_async(text: str, model: str, endpoint: str) -> List[float]:
+ @async_redis_cache(key_func=lambda text, model, endpoint: f"{model}:{endpoint}:{text}")
+ async def get_openai_embedding_async(text: str, model: str, endpoint: str) -> list[float]:
  from letta.settings import model_settings

  client = AsyncOpenAI(api_key=model_settings.openai_api_key, base_url=endpoint, max_retries=0)

letta/services/step_manager.py
CHANGED
@@ -34,6 +34,7 @@ class StepManager:
  model: Optional[str] = None,
  agent_id: Optional[str] = None,
  trace_ids: Optional[list[str]] = None,
+ feedback: Optional[Literal["positive", "negative"]] = None,
  ) -> List[PydanticStep]:
  """List all jobs with optional pagination and status filter."""
  async with db_registry.async_session() as session:
@@ -44,7 +45,8 @@ class StepManager:
  filter_kwargs["agent_id"] = agent_id
  if trace_ids:
  filter_kwargs["trace_id"] = trace_ids
-
+ if feedback:
+ filter_kwargs["feedback"] = feedback
  steps = await StepModel.list_async(
  db_session=session,
  before=before,
@@ -150,6 +152,19 @@ class StepManager:
  step = await StepModel.read_async(db_session=session, identifier=step_id, actor=actor)
  return step.to_pydantic()

+ @enforce_types
+ @trace_method
+ async def add_feedback_async(
+ self, step_id: str, feedback: Optional[Literal["positive", "negative"]], actor: PydanticUser
+ ) -> PydanticStep:
+ async with db_registry.async_session() as session:
+ step = await StepModel.read_async(db_session=session, identifier=step_id, actor=actor)
+ if not step:
+ raise NoResultFound(f"Step with id {step_id} does not exist")
+ step.feedback = feedback
+ step = await step.update_async(session)
+ return step.to_pydantic()
+
  @enforce_types
  @trace_method
  def update_step_transaction_id(self, actor: PydanticUser, step_id: str, transaction_id: str) -> PydanticStep:
{letta_nightly-0.8.6.dev20250627104313.dist-info → letta_nightly-0.8.6.dev20250627191649.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: letta-nightly
- Version: 0.8.6.
+ Version: 0.8.6.dev20250627191649
  Summary: Create LLM agents with long-term memory and custom tools
  License: Apache License
  Author: Letta Team
@@ -29,7 +29,6 @@ Requires-Dist: aiosqlite (>=0.21.0,<0.22.0)
  Requires-Dist: alembic (>=1.13.3,<2.0.0)
  Requires-Dist: anthropic (>=0.49.0,<0.50.0)
  Requires-Dist: apscheduler (>=3.11.0,<4.0.0)
- Requires-Dist: async-lru (>=2.0.5,<3.0.0)
  Requires-Dist: asyncpg (>=0.30.0,<0.31.0) ; extra == "postgres"
  Requires-Dist: autoflake (>=2.3.0,<3.0.0) ; extra == "dev" or extra == "all"
  Requires-Dist: black[jupyter] (>=24.2.0,<25.0.0) ; extra == "dev" or extra == "all"
@@ -57,7 +56,7 @@ Requires-Dist: isort (>=5.13.2,<6.0.0) ; extra == "dev" or extra == "all"
  Requires-Dist: jinja2 (>=3.1.5,<4.0.0)
  Requires-Dist: langchain (>=0.3.7,<0.4.0) ; extra == "external-tools" or extra == "desktop" or extra == "all"
  Requires-Dist: langchain-community (>=0.3.7,<0.4.0) ; extra == "external-tools" or extra == "desktop" or extra == "all"
- Requires-Dist: letta_client (>=0.1.
+ Requires-Dist: letta_client (>=0.1.173,<0.2.0)
  Requires-Dist: llama-index (>=0.12.2,<0.13.0)
  Requires-Dist: llama-index-embeddings-openai (>=0.3.1,<0.4.0)
  Requires-Dist: locust (>=2.31.5,<3.0.0) ; extra == "dev" or extra == "desktop" or extra == "all"
{letta_nightly-0.8.6.dev20250627104313.dist-info → letta_nightly-0.8.6.dev20250627191649.dist-info}/RECORD
CHANGED
@@ -7,7 +7,7 @@ letta/agents/ephemeral_summary_agent.py,sha256=MgJZxmlMU8ZKOKfqMAH7t2WMlf7zW0Zli
  letta/agents/exceptions.py,sha256=BQY4D4w32OYHM63CM19ko7dPwZiAzUs3NbKvzmCTcJg,318
  letta/agents/helpers.py,sha256=_lfaIzIc2xtUdnM0NmMvNwxljrlr3Y7VBHkmKyjwB_E,9810
  letta/agents/letta_agent.py,sha256=iOJvoxrlBjwYhjRSiQHBfpmc8MDoMyqwaCzRVk6kKfQ,53444
- letta/agents/letta_agent_batch.py,sha256=
+ letta/agents/letta_agent_batch.py,sha256=cl9_nZYflIZWR23D_x_fUpmMHYITDWu0FUfPW1ivDuw,28031
  letta/agents/prompts/summary_system_prompt.txt,sha256=ftc-aEhfJYN6FlQF4I-I5me-BAh_T2nsTwitPZpZisE,2313
  letta/agents/voice_agent.py,sha256=FE9F1PN4nCUnNQhgoJssAFiJKZz1DiThyRDE3Xcf14Y,23420
  letta/agents/voice_sleeptime_agent.py,sha256=E9UVG6i3BqkHXUlOxn-0iSv07V_lwZ6TgODd8SGtFQM,8737
@@ -57,7 +57,7 @@ letta/helpers/__init__.py,sha256=p0luQ1Oe3Skc6sH4O58aHHA3Qbkyjifpuq0DZ1GAY0U,59
  letta/helpers/composio_helpers.py,sha256=MwfmLt7tgjvxAXLHpx9pa5QolxcqoCbofb-30-DVpsI,1714
  letta/helpers/converters.py,sha256=_-6Ke5ZUtaKYmh8SncGj1ejTG3GyKhZ4ByVCrlcHsOI,15026
  letta/helpers/datetime_helpers.py,sha256=8AwZInX-NX_XQiqej2arozYqfC2ysnWpCJ9ETv8RdL0,4381
- letta/helpers/decorators.py,sha256=
+ letta/helpers/decorators.py,sha256=jyywXMxO5XPDSe93ybVXIOjTWkGX514S9BMcy_gP0j8,5891
  letta/helpers/json_helpers.py,sha256=PWZ5HhSqGXO4e563dM_8M72q7ScirjXQ4Rv1ckohaV8,396
  letta/helpers/message_helper.py,sha256=Xzf_VCMAXT0Ys8LVUh1ySVtgJwabSQYksOdPr7P4EJU,3549
  letta/helpers/singleton.py,sha256=Y4dG_ZBCcrogvl9iZ69bSLq-QltrdP8wHqKkhef8OBI,370
@@ -75,7 +75,7 @@ letta/interfaces/utils.py,sha256=c6jvO0dBYHh8DQnlN-B0qeNC64d3CSunhfqlFA4pJTY,278
  letta/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  letta/jobs/helpers.py,sha256=kO4aj954xsQ1RAmkjY6LQQ7JEIGuhaxB1e9pzrYKHAY,914
  letta/jobs/llm_batch_job_polling.py,sha256=r_6D5RcqEJQgrdh-rnN7vdLD0GAQl-GGmIfCnV0naHQ,10299
- letta/jobs/scheduler.py,sha256=
+ letta/jobs/scheduler.py,sha256=P_M_x7NtF3foHGRVCaDMXSILiHur78r4lXYe4rq4Z-M,11491
  letta/jobs/types.py,sha256=K8GKEnqEgAT6Kq4F2hUrBC4ZAFM9OkfOjVMStzxKuXQ,742
  letta/llm_api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  letta/llm_api/anthropic.py,sha256=tbMy4483TySrEmbXD3juM6TpPRrV9_M3Fgp59sDBcqE,47935
@@ -173,7 +173,7 @@ letta/orm/source.py,sha256=rtehzez80rRrJigXeRBgTlfTZEUy6cVqDizWEN2tvuY,2224
  letta/orm/sources_agents.py,sha256=Ik_PokCBrXRd9wXWomeNeb8EtLUwjb9VMZ8LWXqpK5A,473
  letta/orm/sqlalchemy_base.py,sha256=W9JkvSCjWEMscAlfPKC1GHKM6PoVFTDHxjrGBq6MVgQ,44042
  letta/orm/sqlite_functions.py,sha256=JCScKiRlYCKxy9hChQ8wsk4GMKknZE24MunnG3fM1Gw,4255
- letta/orm/step.py,sha256=
+ letta/orm/step.py,sha256=SLsLY1g4nuUeI47q9rlXPBCSVUNX3lxYAYAIqxy-YK4,3517
  letta/orm/tool.py,sha256=oTDbvSNNW_jHjYbJqqsLLuXf9uFRTZTZh33TXAcZ898,2839
  letta/orm/tools_agents.py,sha256=r6t-V21w2_mG8n38zuUb5jOi_3hRxsjgezsLA4sg0m4,626
  letta/orm/user.py,sha256=rK5N5ViDxmesZMqVVHB7FcQNpcSoM-hB42MyI6q3MnI,1004
@@ -219,13 +219,15 @@ letta/prompts/system/memgpt_offline_memory.txt,sha256=rWEJeF-6aiinjkJM9hgLUYCmlE
  letta/prompts/system/memgpt_offline_memory_chat.txt,sha256=ituh7gDuio7nC2UKFB7GpBq6crxb8bYedQfJ0ADoPgg,3949
  letta/prompts/system/memgpt_sleeptime_chat.txt,sha256=ieHvVkJYE_4Z_vyUJS4KImBZCSQDcsUmy9IRF-FBpPE,4712
  letta/prompts/system/memgpt_v2_chat.txt,sha256=c8n3DfL-brPW0v2rIheeIHULCkEgOWeyLr4KRLCiwR8,4947
+ letta/prompts/system/react.txt,sha256=AVPxs4dM_0bCvk68hPIQMgFGBnt-6Vor-i0YSjoMtCc,1547
  letta/prompts/system/sleeptime.txt,sha256=qoACziV1KoPk_nJMJHzEkyKQn9v9fmepWozAAixZc4s,3117
  letta/prompts/system/sleeptime_doc_ingest.txt,sha256=tyzHHzyDA2ib_XRwo5h5Ku9l_f-RSBPDJZrUusrQE80,2783
  letta/prompts/system/sleeptime_v2.txt,sha256=z-v0OVwKPSw_DF9ltTR2UlpW7hAhqz8ZC4M3yKDDnys,2425
  letta/prompts/system/voice_chat.txt,sha256=Q_vd2Q08z6qTIVeMML0z9706NG8aAq-scxvi--h5tG4,1853
  letta/prompts/system/voice_sleeptime.txt,sha256=LPh-XjAthvsdEkXoZ4NTzTUuMbMsMkoDl9ofCUJC7Us,3696
+ letta/prompts/system/workflow.txt,sha256=pLOaUDsNFAzLs4xb9JgGtd1w-lrq0Q1E7SpFBttXYCI,834
  letta/pytest.ini,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- letta/schemas/agent.py,sha256=
+ letta/schemas/agent.py,sha256=6BNV57XhMMDDQ8YZrFEurNw1L_srx0Cqen7xJyqfNKU,23908
  letta/schemas/block.py,sha256=awxCQKxmv4I4k9Au5h-a2RCeSVF54EfWyBQPtHRwuNQ,5585
  letta/schemas/embedding_config.py,sha256=By79UpBnjh6lg9q6c12th6EfzLSSUVgRbwofnhoI4hM,3762
  letta/schemas/embedding_config_overrides.py,sha256=lkTa4y-EQ2RnaEKtKDM0sEAk7EwNa67REw8DGNNtGQY,84
@@ -257,12 +259,12 @@ letta/schemas/organization.py,sha256=TXrHN4IBQnX-mWvRuCOH57XZSLYCVOY0wWm2_UzDQIA
  letta/schemas/passage.py,sha256=zO2azPqob_kM4XudOclcj29Ohnxd7qUbRqo75aTwWTo,3830
  letta/schemas/pip_requirement.py,sha256=OgcEPFjXyByTkhW30mQCM-CoU3C0V23opXmdcBV0rrw,488
  letta/schemas/provider_trace.py,sha256=gsgo1CdfTUFSnm1ep1tSZ0fZfGSx45EdPaVyVJREt_U,1958
- letta/schemas/providers.py,sha256=
+ letta/schemas/providers.py,sha256=G6joCsrAti81Ua4YikvIT3Kw7QtuReSudc0I1tbWW7k,67742
  letta/schemas/response_format.py,sha256=pXNsjbtpA3Tf8HsDyIa40CSmoUbVR_7n2WOfQaX4aFs,2204
  letta/schemas/run.py,sha256=1lVOWlHVbk9MYIOiIrE1gCoQvBhErKo7UMSeWyMExbw,2089
  letta/schemas/sandbox_config.py,sha256=thI4p7R4nnW1W-F_PBNkpmyHXpSH_lorlQX8YxDXSe0,5252
  letta/schemas/source.py,sha256=ZDeTjkNp1rKamG7xZzoUHeCptjpW9WNLzAcJ9QQRxlM,3444
- letta/schemas/step.py,sha256=
+ letta/schemas/step.py,sha256=LJ6_NJ5q9ZyUzxHAnH4caoM2mH7L2YIOlcu5bUQLAyE,2516
  letta/schemas/tool.py,sha256=NFvF7kA2DvE9ecZSCnAGw8Ss4fEYvb9nWXMIMvS843Y,14195
  letta/schemas/tool_execution_result.py,sha256=4P77llsUsZBnRd0PtPiC4VzGjx7i_-fUNgXQfCpMS9U,896
  letta/schemas/tool_rule.py,sha256=dJ-qNDy0LneTt_DhKXsRyC9NAJxZ_aWY3IRpzGuH_sY,10910
@@ -283,7 +285,7 @@ letta/server/constants.py,sha256=yAdGbLkzlOU_dLTx0lKDmAnj0ZgRXCEaIcPJWO69eaE,92
  letta/server/db.py,sha256=WYuTpqzJxi98WownUX5VZFPzsc99Pp91xPSNYiU5_KU,10631
  letta/server/generate_openapi_schema.sh,sha256=0OtBhkC1g6CobVmNEd_m2B6sTdppjbJLXaM95icejvE,371
  letta/server/rest_api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- letta/server/rest_api/app.py,sha256=
+ letta/server/rest_api/app.py,sha256=K7aunRgF3dBtnoE6duYaCHeXw5_Rz5FgtMAa4Y8pydc,17526
  letta/server/rest_api/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  letta/server/rest_api/auth/index.py,sha256=fQBGyVylGSRfEMLQ17cZzrHd5Y1xiVylvPqH5Rl-lXQ,1378
  letta/server/rest_api/auth_token.py,sha256=725EFEIiNj4dh70hrSd94UysmFD8vcJLrTRfNHkzxDo,774
@@ -307,11 +309,11 @@ letta/server/rest_api/routers/v1/organizations.py,sha256=5NEjTOdGKWrfN584jfPpJhA
  letta/server/rest_api/routers/v1/providers.py,sha256=8SJ_RsSk7L4nh1f_uFE31JOxefmGhOfN-fMJ0Sp6SJo,4353
  letta/server/rest_api/routers/v1/runs.py,sha256=vieUp7uTvRTdAte0Nw1bqX2APMATZhKTr2R1HVNJT74,8879
  letta/server/rest_api/routers/v1/sandbox_configs.py,sha256=pKuy88GD3atrBkKa7VVfKTjg8Y07e1vVtdw4TtxkQBk,8910
- letta/server/rest_api/routers/v1/sources.py,sha256
- letta/server/rest_api/routers/v1/steps.py,sha256=
+ letta/server/rest_api/routers/v1/sources.py,sha256=-_jWL2jDqOt2r0IUUxjHjxf4TZhAJDdt7_J5f_kGF_U,17094
+ letta/server/rest_api/routers/v1/steps.py,sha256=uNEQVEeRU9RwYuD2Dz1PzZy04nyvMSTY0Tx0WrMEgVs,4362
  letta/server/rest_api/routers/v1/tags.py,sha256=ef94QitUSJ3NQVffWF1ZqANUZ2b2jRyGHp_I3UUjhno,912
  letta/server/rest_api/routers/v1/telemetry.py,sha256=z53BW3Pefi3eWy47FPJyGhFWbZicX9jPJUi5LC5c3sk,790
- letta/server/rest_api/routers/v1/tools.py,sha256=
+ letta/server/rest_api/routers/v1/tools.py,sha256=JYHy522vclWJXouzLpLQJGEXLgTtoWk4w6LQgiUdtMQ,27672
  letta/server/rest_api/routers/v1/users.py,sha256=a0J3Ad8kWHxi3vUJB5r9K2GmiplSABZXwhA83o8HbpI,2367
  letta/server/rest_api/routers/v1/voice.py,sha256=ghMBp5Uovbf0-3nN6d9P5kpl1hHACLRMhIDGQp96G9Q,1986
  letta/server/rest_api/static_files.py,sha256=NG8sN4Z5EJ8JVQdj19tkFa9iQ1kBPTab9f_CUxd_u4Q,3143
@@ -331,7 +333,7 @@ letta/server/ws_api/interface.py,sha256=TWl9vkcMCnLsUtgsuENZ-ku2oMDA-OUTzLh_yNRo
  letta/server/ws_api/protocol.py,sha256=5mDgpfNZn_kNwHnpt5Dsuw8gdNH298sgxTGed3etzYg,1836
  letta/server/ws_api/server.py,sha256=cBSzf-V4zT1bL_0i54OTI3cMXhTIIxqjSRF8pYjk7fg,5835
  letta/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- letta/services/agent_manager.py,sha256=
+ letta/services/agent_manager.py,sha256=w636-jxEp1YglX3z9fSWotmlrvFJcHBBOV2gzNMB1c8,120893
  letta/services/block_manager.py,sha256=YwDGdy6f6MNXVXVOxIMOOP6IEWT8h-k5uQlveof0pyE,22744
  letta/services/context_window_calculator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  letta/services/context_window_calculator/context_window_calculator.py,sha256=H0-Ello1DHV28MnzMseWrg--jarDc6YwCcgwPlWjtZk,6527
@@ -343,7 +345,7 @@ letta/services/file_processor/chunker/line_chunker.py,sha256=Lm5iZR7tFyOlew8XrZp
  letta/services/file_processor/chunker/llama_index_chunker.py,sha256=dEBf33TifD_BcxjNAULim9NDF8VPG8EQkjeR9saK4t4,3982
  letta/services/file_processor/embedder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  letta/services/file_processor/embedder/openai_embedder.py,sha256=BjKsNqh_nfNIDVWkCR2noFX7E6Mr68FQtj79F2xeCpM,3545
- letta/services/file_processor/file_processor.py,sha256=
+ letta/services/file_processor/file_processor.py,sha256=xOcoQRgekcZo4JrrXMNYKSiuN4vPKxc81HuS2G3sFPs,4803
  letta/services/file_processor/file_types.py,sha256=AAwflpGrCmKBtZvzUR-TdOkOp5OGky6vm2ZbhRg7_WY,12982
  letta/services/file_processor/parser/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  letta/services/file_processor/parser/base_parser.py,sha256=WfnXP6fL-xQz4eIHEWa6-ZNEAARbF_alowqH4BAUzJo,238
@@ -351,7 +353,7 @@ letta/services/file_processor/parser/mistral_parser.py,sha256=Hzsrm36HbKQ7CWljTZ
  letta/services/file_processor/types.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  letta/services/files_agents_manager.py,sha256=xBzXPrpbUF-HMMcK0js70QOgLv28AeJ21U0wC2Y9CBs,16592
  letta/services/group_manager.py,sha256=X2gKKUGKTXGRMC8YjwmE6EOB1cVM4lo31eCnmog7dPQ,23368
- letta/services/helpers/agent_manager_helper.py,sha256=
+ letta/services/helpers/agent_manager_helper.py,sha256=2zPnxs1QFo_uGv1fG-pqwTQ-oFfgcqNjk9deLVNgby4,43134
  letta/services/helpers/tool_execution_helper.py,sha256=45L7woJ98jK5MQAnhE_4NZdCeyOOzC4328FTQPM7iTA,9159
  letta/services/helpers/tool_parser_helper.py,sha256=EI5tcre-D5U3mEzIMhfkAGlUwYckW1JlCJ-iqwoTTrc,4336
  letta/services/identity_manager.py,sha256=L8EYGYXA9sveLwPCTYZIdYZwOMnHex47TBiMYcco_y4,10575
@@ -366,12 +368,12 @@ letta/services/mcp/types.py,sha256=nmcnQn2EpxXzXg5_pWPsHZobfxO6OucaUgz1bVvam7o,1
  letta/services/mcp_manager.py,sha256=Ay3SRlBl8pHjZUVuaB0LgN4N-R4fGaIge9wSxCYKhZc,16488
  letta/services/message_manager.py,sha256=cNUTGJ2CuQkPhd1Ldfi4aj6wRVYdRKkGZzp767_7VE4,30080
  letta/services/organization_manager.py,sha256=PngZXPuzcCWB1CW7vAST9CsQZLysrpEdwHwhFN4_fhs,5838
- letta/services/passage_manager.py,sha256=
+ letta/services/passage_manager.py,sha256=nmrrWE3UFojw73Q1qrG5dvrLDotP4xD5ZQARaEO8nec,43349
  letta/services/per_agent_lock_manager.py,sha256=cMaW8r-qhucQbiK27jVqz8wzhlr2yuRNXbdkaMO4lnk,627
  letta/services/provider_manager.py,sha256=mEtiBF7kJgSzDwwyqSmWLT6kgvWPk-FERZ9Zw8QKpHw,9557
  letta/services/sandbox_config_manager.py,sha256=fcJkXCaA6vmrnTusHhns-c_aRXcPlFLICPGdWDaY8XQ,26138
  letta/services/source_manager.py,sha256=bfkfubjvlvnpkd-W6FcoQ0qDYxWkajut0ZY1cI56bbE,6197
- letta/services/step_manager.py,sha256=
+ letta/services/step_manager.py,sha256=BqCz9guHYJqx9bu0wn2YCqcIHv80upT0eddyxiP0yvo,11452
  letta/services/summarizer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  letta/services/summarizer/enums.py,sha256=szzPX2OBRRJEZsBTGYQThrNz02ELFqhuLwvOR7ozi7A,208
  letta/services/summarizer/summarizer.py,sha256=9UHxZ1YPIIUWMK5_kz4_Kf5Qj9eHOo2zeEcmbzfUi_o,9275
@@ -403,8 +405,8 @@ letta/templates/sandbox_code_file_async.py.j2,sha256=hL6UWt4L16o79OPOBq1_Cw7gR5-
  letta/templates/template_helper.py,sha256=uHWO1PukgMoIIvgqQdPyHq3o3CQ6mcjUjTGvx9VLGkk,409
  letta/types/__init__.py,sha256=hokKjCVFGEfR7SLMrtZsRsBfsC7yTIbgKPLdGg4K1eY,147
  letta/utils.py,sha256=ZfyAcRBITlYs2XM5fHj_Lp08fPFMBMaQSPbfYlETbDs,33198
- letta_nightly-0.8.6.
- letta_nightly-0.8.6.
- letta_nightly-0.8.6.
- letta_nightly-0.8.6.
- letta_nightly-0.8.6.
+ letta_nightly-0.8.6.dev20250627191649.dist-info/LICENSE,sha256=mExtuZ_GYJgDEI38GWdiEYZizZS4KkVt2SF1g_GPNhI,10759
+ letta_nightly-0.8.6.dev20250627191649.dist-info/METADATA,sha256=W_RoYIS-EHFY3ZTIK6lyv0ExJfxAR6Rqie09TGdnyNc,22841
+ letta_nightly-0.8.6.dev20250627191649.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+ letta_nightly-0.8.6.dev20250627191649.dist-info/entry_points.txt,sha256=2zdiyGNEZGV5oYBuS-y2nAAgjDgcC9yM_mHJBFSRt5U,40
+ letta_nightly-0.8.6.dev20250627191649.dist-info/RECORD,,

{letta_nightly-0.8.6.dev20250627104313.dist-info → letta_nightly-0.8.6.dev20250627191649.dist-info}/LICENSE: File without changes
{letta_nightly-0.8.6.dev20250627104313.dist-info → letta_nightly-0.8.6.dev20250627191649.dist-info}/WHEEL: File without changes
{letta_nightly-0.8.6.dev20250627104313.dist-info → letta_nightly-0.8.6.dev20250627191649.dist-info}/entry_points.txt: File without changes