letta-nightly 0.6.15.dev20250125103914__py3-none-any.whl → 0.6.16.dev20250127040412__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of letta-nightly might be problematic.
- letta/__init__.py +1 -2
- letta/agent.py +5 -1
- letta/cli/cli_config.py +1 -1
- letta/client/client.py +4 -20
- letta/functions/schema_generator.py +24 -11
- letta/llm_api/anthropic.py +485 -7
- letta/llm_api/llm_api_tools.py +28 -13
- letta/llm_api/openai.py +8 -3
- letta/local_llm/constants.py +1 -0
- letta/schemas/message.py +6 -5
- letta/schemas/providers.py +125 -0
- letta/schemas/tool.py +0 -4
- letta/server/rest_api/interface.py +15 -3
- letta/server/rest_api/routers/v1/agents.py +2 -0
- letta/server/rest_api/routers/v1/tools.py +1 -1
- letta/server/server.py +23 -5
- letta/services/helpers/agent_manager_helper.py +22 -1
- letta/services/tool_manager.py +1 -0
- letta/settings.py +3 -0
- letta/streaming_utils.py +5 -1
- {letta_nightly-0.6.15.dev20250125103914.dist-info → letta_nightly-0.6.16.dev20250127040412.dist-info}/METADATA +1 -1
- {letta_nightly-0.6.15.dev20250125103914.dist-info → letta_nightly-0.6.16.dev20250127040412.dist-info}/RECORD +25 -25
- {letta_nightly-0.6.15.dev20250125103914.dist-info → letta_nightly-0.6.16.dev20250127040412.dist-info}/LICENSE +0 -0
- {letta_nightly-0.6.15.dev20250125103914.dist-info → letta_nightly-0.6.16.dev20250127040412.dist-info}/WHEEL +0 -0
- {letta_nightly-0.6.15.dev20250125103914.dist-info → letta_nightly-0.6.16.dev20250127040412.dist-info}/entry_points.txt +0 -0
letta/schemas/providers.py
CHANGED
@@ -1,3 +1,4 @@
+import warnings
 from datetime import datetime
 from typing import List, Optional
 
@@ -210,6 +211,130 @@ class OpenAIProvider(Provider):
         return None
 
 
+class LMStudioOpenAIProvider(OpenAIProvider):
+    name: str = "lmstudio-openai"
+    base_url: str = Field(..., description="Base URL for the LMStudio OpenAI API.")
+    api_key: Optional[str] = Field(None, description="API key for the LMStudio API.")
+
+    def list_llm_models(self) -> List[LLMConfig]:
+        from letta.llm_api.openai import openai_get_model_list
+
+        # For LMStudio, we want to hit 'GET /api/v0/models' instead of 'GET /v1/models'
+        MODEL_ENDPOINT_URL = f"{self.base_url.strip('/v1')}/api/v0"
+        response = openai_get_model_list(MODEL_ENDPOINT_URL)
+
+        """
+        Example response:
+
+        {
+          "object": "list",
+          "data": [
+            {
+              "id": "qwen2-vl-7b-instruct",
+              "object": "model",
+              "type": "vlm",
+              "publisher": "mlx-community",
+              "arch": "qwen2_vl",
+              "compatibility_type": "mlx",
+              "quantization": "4bit",
+              "state": "not-loaded",
+              "max_context_length": 32768
+            },
+          ...
+        """
+        if "data" not in response:
+            warnings.warn(f"LMStudio OpenAI model query response missing 'data' field: {response}")
+            return []
+
+        configs = []
+        for model in response["data"]:
+            assert "id" in model, f"Model missing 'id' field: {model}"
+            model_name = model["id"]
+
+            if "type" not in model:
+                warnings.warn(f"LMStudio OpenAI model missing 'type' field: {model}")
+                continue
+            elif model["type"] not in ["vlm", "llm"]:
+                continue
+
+            if "max_context_length" in model:
+                context_window_size = model["max_context_length"]
+            else:
+                warnings.warn(f"LMStudio OpenAI model missing 'max_context_length' field: {model}")
+                continue
+
+            configs.append(
+                LLMConfig(
+                    model=model_name,
+                    model_endpoint_type="openai",
+                    model_endpoint=self.base_url,
+                    context_window=context_window_size,
+                    handle=self.get_handle(model_name),
+                )
+            )
+
+        return configs
+
+    def list_embedding_models(self) -> List[EmbeddingConfig]:
+        from letta.llm_api.openai import openai_get_model_list
+
+        # For LMStudio, we want to hit 'GET /api/v0/models' instead of 'GET /v1/models'
+        MODEL_ENDPOINT_URL = f"{self.base_url}/api/v0"
+        response = openai_get_model_list(MODEL_ENDPOINT_URL)
+
+        """
+        Example response:
+        {
+          "object": "list",
+          "data": [
+            {
+              "id": "text-embedding-nomic-embed-text-v1.5",
+              "object": "model",
+              "type": "embeddings",
+              "publisher": "nomic-ai",
+              "arch": "nomic-bert",
+              "compatibility_type": "gguf",
+              "quantization": "Q4_0",
+              "state": "not-loaded",
+              "max_context_length": 2048
+            }
+          ...
+        """
+        if "data" not in response:
+            warnings.warn(f"LMStudio OpenAI model query response missing 'data' field: {response}")
+            return []
+
+        configs = []
+        for model in response["data"]:
+            assert "id" in model, f"Model missing 'id' field: {model}"
+            model_name = model["id"]
+
+            if "type" not in model:
+                warnings.warn(f"LMStudio OpenAI model missing 'type' field: {model}")
+                continue
+            elif model["type"] not in ["embeddings"]:
+                continue
+
+            if "max_context_length" in model:
+                context_window_size = model["max_context_length"]
+            else:
+                warnings.warn(f"LMStudio OpenAI model missing 'max_context_length' field: {model}")
+                continue
+
+            configs.append(
+                EmbeddingConfig(
+                    embedding_model=model_name,
+                    embedding_endpoint_type="openai",
+                    embedding_endpoint=self.base_url,
+                    embedding_dim=context_window_size,
+                    embedding_chunk_size=300,
+                    handle=self.get_handle(model_name),
+                ),
+            )
+
+        return configs
+
+
 class AnthropicProvider(Provider):
     name: str = "anthropic"
     api_key: str = Field(..., description="API key for the Anthropic API.")
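For orientation, a minimal usage sketch of the new provider (not taken from the diff): the URL and port are placeholders for a locally running LM Studio server, and the printed fields come from the LLMConfig objects built above.

from letta.schemas.providers import LMStudioOpenAIProvider

# Placeholder base URL; LM Studio commonly serves on port 1234.
provider = LMStudioOpenAIProvider(base_url="http://localhost:1234/v1")

# Queries the LM Studio /api/v0 model listing (per list_llm_models above) and
# yields one LLMConfig per available chat or vision model.
for llm_config in provider.list_llm_models():
    print(llm_config.model, llm_config.context_window)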
letta/schemas/tool.py
CHANGED
@@ -112,7 +112,6 @@ class Tool(BaseTool):
 
 
 class ToolCreate(LettaBase):
-    name: Optional[str] = Field(None, description="The name of the function (auto-generated from source_code if not provided).")
     description: Optional[str] = Field(None, description="The description of the tool.")
     tags: List[str] = Field([], description="Metadata tags.")
     source_code: str = Field(..., description="The source code of the function.")
@@ -155,7 +154,6 @@ class ToolCreate(LettaBase):
         json_schema = generate_tool_schema_for_composio(composio_action_schema.parameters, name=wrapper_func_name, description=description)
 
         return cls(
-            name=wrapper_func_name,
             description=description,
             source_type=source_type,
             tags=tags,
@@ -187,7 +185,6 @@ class ToolCreate(LettaBase):
         json_schema = generate_schema_from_args_schema_v2(langchain_tool.args_schema, name=wrapper_func_name, description=description)
 
         return cls(
-            name=wrapper_func_name,
             description=description,
             source_type=source_type,
             tags=tags,
@@ -198,7 +195,6 @@ class ToolCreate(LettaBase):
 
 class ToolUpdate(LettaBase):
     description: Optional[str] = Field(None, description="The description of the tool.")
-    name: Optional[str] = Field(None, description="The name of the function.")
     tags: Optional[List[str]] = Field(None, description="Metadata tags.")
     source_code: Optional[str] = Field(None, description="The source code of the function.")
     source_type: Optional[str] = Field(None, description="The type of the source code.")
letta/server/rest_api/interface.py
CHANGED
@@ -424,6 +424,16 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
         choice = chunk.choices[0]
         message_delta = choice.delta
 
+        if (
+            message_delta.content is None
+            and message_delta.tool_calls is None
+            and message_delta.function_call is None
+            and choice.finish_reason is None
+            and chunk.model.startswith("claude-")
+        ):
+            # First chunk of Anthropic is empty
+            return None
+
         # inner thoughts
         if message_delta.content is not None:
             processed_chunk = ReasoningMessage(
@@ -515,7 +525,11 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
                 self.function_id_buffer += tool_call.id
 
             if tool_call.function.arguments:
-                updates_main_json, updates_inner_thoughts = self.function_args_reader.process_fragment(tool_call.function.arguments)
+                if chunk.model.startswith("claude-"):
+                    updates_main_json = tool_call.function.arguments
+                    updates_inner_thoughts = ""
+                else:  # OpenAI
+                    updates_main_json, updates_inner_thoughts = self.function_args_reader.process_fragment(tool_call.function.arguments)
 
                 # If we have inner thoughts, we should output them as a chunk
                 if updates_inner_thoughts:
@@ -585,7 +599,6 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
                 ):
                     # do an additional parse on the updates_main_json
                     if self.function_args_buffer:
-
                         updates_main_json = self.function_args_buffer + updates_main_json
                         self.function_args_buffer = None
 
@@ -875,7 +888,6 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
             raise NotImplementedError("OpenAI proxy streaming temporarily disabled")
         else:
            processed_chunk = self._process_chunk_to_letta_style(chunk=chunk, message_id=message_id, message_date=message_date)
-
            if processed_chunk is None:
                return
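A standalone paraphrase of the Anthropic-specific handling added above (a hypothetical helper, not part of the interface's API): the first Claude streaming chunk carries no content, tool calls, or finish reason and is simply dropped, and Claude tool-call argument fragments bypass the inner-thoughts JSON reader.

# Hypothetical restatement; `chunk` is the OpenAI-style streaming chunk processed above.
def is_empty_first_claude_chunk(chunk) -> bool:
    choice = chunk.choices[0]
    delta = choice.delta
    return (
        delta.content is None
        and delta.tool_calls is None
        and delta.function_call is None
        and choice.finish_reason is None
        and chunk.model.startswith("claude-")
    )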
letta/server/rest_api/routers/v1/agents.py
CHANGED
@@ -408,6 +408,7 @@ AgentMessagesResponse = Annotated[
 def list_messages(
     agent_id: str,
     server: "SyncServer" = Depends(get_letta_server),
+    after: Optional[str] = Query(None, description="Message after which to retrieve the returned messages."),
     before: Optional[str] = Query(None, description="Message before which to retrieve the returned messages."),
     limit: int = Query(10, description="Maximum number of messages to retrieve."),
     msg_object: bool = Query(False, description="If true, returns Message objects. If false, return LettaMessage objects."),
@@ -430,6 +431,7 @@ def list_messages(
     return server.get_agent_recall(
         user_id=actor.id,
         agent_id=agent_id,
+        after=after,
         before=before,
         limit=limit,
         reverse=True,
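A hedged client-side sketch of the new cursor parameter: the host, port, agent id, and message id below are placeholders, and the path is inferred from the router above rather than confirmed against the published API reference.

import requests

# Fetch up to 10 messages that come after a known message id (placeholder values).
resp = requests.get(
    "http://localhost:8283/v1/agents/agent-123/messages",
    params={"after": "message-456", "limit": 10},
)
print(resp.json())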
letta/server/rest_api/routers/v1/tools.py
CHANGED
@@ -83,7 +83,7 @@ def create_tool(
     except UniqueConstraintViolationError as e:
         # Log or print the full exception here for debugging
         print(f"Error occurred: {e}")
-        clean_error_message = f"Tool with
+        clean_error_message = f"Tool with this name already exists."
         raise HTTPException(status_code=409, detail=clean_error_message)
     except LettaToolCreateError as e:
         # HTTP 400 == Bad Request
letta/server/server.py
CHANGED
@@ -49,6 +49,7 @@ from letta.schemas.providers import (
     GoogleAIProvider,
     GroqProvider,
     LettaProvider,
+    LMStudioOpenAIProvider,
     OllamaProvider,
     OpenAIProvider,
     Provider,
@@ -186,8 +187,8 @@ def db_error_handler():
         exit(1)
 
 
-print("Creating engine", settings.letta_pg_uri)
 if settings.letta_pg_uri_no_default:
+    print("Creating postgres engine", settings.letta_pg_uri)
     config.recall_storage_type = "postgres"
     config.recall_storage_uri = settings.letta_pg_uri_no_default
     config.archival_storage_type = "postgres"
@@ -204,7 +205,10 @@ if settings.letta_pg_uri_no_default:
     )
 else:
     # TODO: don't rely on config storage
-    engine = create_engine("sqlite:///" + os.path.join(config.recall_storage_path, "sqlite.db"))
+    engine_path = "sqlite:///" + os.path.join(config.recall_storage_path, "sqlite.db")
+    print("Creating sqlite engine", engine_path)
+
+    engine = create_engine(engine_path)
 
 # Store the original connect method
 original_connect = engine.connect
@@ -391,6 +395,18 @@ class SyncServer(Server):
                     aws_region=model_settings.aws_region,
                 )
             )
+        # Attempt to enable LM Studio by default
+        if model_settings.lmstudio_base_url:
+            # Auto-append v1 to the base URL
+            lmstudio_url = (
+                model_settings.lmstudio_base_url
+                if model_settings.lmstudio_base_url.endswith("/v1")
+                else model_settings.lmstudio_base_url + "/v1"
+            )
+            # Set the OpenAI API key to something non-empty
+            if model_settings.openai_api_key is None:
+                model_settings.openai_api_key = "DUMMY"
+            self._enabled_providers.append(LMStudioOpenAIProvider(base_url=lmstudio_url))
 
     def load_agent(self, agent_id: str, actor: User, interface: Union[AgentInterface, None] = None) -> Agent:
         """Updated method to load agents from persisted storage"""
@@ -1266,12 +1282,14 @@ class SyncServer(Server):
         # This will be attached to the POST SSE request used under-the-hood
         letta_agent = self.load_agent(agent_id=agent_id, actor=actor)
 
-        # Disable token streaming if not OpenAI
+        # Disable token streaming if not OpenAI or Anthropic
         # TODO: cleanup this logic
         llm_config = letta_agent.agent_state.llm_config
-        if stream_tokens and (
+        if stream_tokens and (
+            llm_config.model_endpoint_type not in ["openai", "anthropic"] or "inference.memgpt.ai" in llm_config.model_endpoint
+        ):
             warnings.warn(
-                "Token streaming is only supported for models with type 'openai' or `inference.memgpt.ai` in the model_endpoint: agent has endpoint type {llm_config.model_endpoint_type} and {llm_config.model_endpoint}. Setting stream_tokens to False."
+                "Token streaming is only supported for models with type 'openai', 'anthropic', or `inference.memgpt.ai` in the model_endpoint: agent has endpoint type {llm_config.model_endpoint_type} and {llm_config.model_endpoint}. Setting stream_tokens to False."
             )
             stream_tokens = False
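Restated outside the server as a sketch (not the server's actual wiring): when model_settings.lmstudio_base_url is set, "/v1" is appended unless it is already there and an LMStudioOpenAIProvider is registered. The register callable below is a made-up stand-in for the server's internal provider list.

from letta.schemas.providers import LMStudioOpenAIProvider

def enable_lmstudio(lmstudio_base_url: str, register) -> None:
    # Mirror the auto-append behavior above: only add /v1 if it is not already present.
    url = lmstudio_base_url if lmstudio_base_url.endswith("/v1") else lmstudio_base_url + "/v1"
    register(LMStudioOpenAIProvider(base_url=url))

enable_lmstudio("http://localhost:1234", lambda p: print("registered", p.name))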
letta/services/helpers/agent_manager_helper.py
CHANGED
@@ -118,6 +118,27 @@ def compile_memory_metadata_block(
     return memory_metadata_block
 
 
+class PreserveMapping(dict):
+    """Used to preserve (do not modify) undefined variables in the system prompt"""
+
+    def __missing__(self, key):
+        return "{" + key + "}"
+
+
+def safe_format(template: str, variables: dict) -> str:
+    """
+    Safely formats a template string, preserving empty {} and {unknown_vars}
+    while substituting known variables.
+
+    If we simply use {} in format_map, it'll be treated as a positional field
+    """
+    # First escape any empty {} by doubling them
+    escaped = template.replace("{}", "{{}}")
+
+    # Now use format_map with our custom mapping
+    return escaped.format_map(PreserveMapping(variables))
+
+
 def compile_system_message(
     system_prompt: str,
     in_context_memory: Memory,
@@ -169,7 +190,7 @@ def compile_system_message(
 
     # render the variables using the built-in templater
     try:
-        formatted_prompt = system_prompt
+        formatted_prompt = safe_format(system_prompt, variables)
     except Exception as e:
         raise ValueError(f"Failed to format system prompt - {str(e)}. System prompt value:\n{system_prompt}")
 
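A quick illustration of safe_format (the template string is made up; the result follows from the code above): known variables are substituted, while empty braces and unknown placeholders pass through untouched.

from letta.services.helpers.agent_manager_helper import safe_format

template = "Hello {name}, literal braces {} and {unknown} are preserved."
print(safe_format(template, {"name": "Ada"}))
# -> Hello Ada, literal braces {} and {unknown} are preserved.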
letta/services/tool_manager.py
CHANGED
@@ -122,6 +122,7 @@ class ToolManager:
             new_schema = derive_openai_json_schema(source_code=pydantic_tool.source_code)
 
             tool.json_schema = new_schema
+            tool.name = new_schema["name"]
 
             # Save the updated tool to the database
             return tool.update(db_session=session, actor=actor).to_pydantic()
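Together with the removal of name from ToolCreate/ToolUpdate above, the tool name now follows the function defined in source_code. Below is a sketch of that derivation with a made-up example function; the import path of derive_openai_json_schema is assumed from its usage in ToolManager.

from letta.functions.functions import derive_openai_json_schema

source_code = '''
def roll_d20() -> str:
    """
    Simulate the roll of a 20-sided die (d20).

    Returns:
        str: The roll result.
    """
    import random
    return str(random.randint(1, 20))
'''

schema = derive_openai_json_schema(source_code=source_code)
print(schema["name"])  # expected: "roll_d20"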
letta/settings.py
CHANGED
letta/streaming_utils.py
CHANGED
@@ -209,6 +209,11 @@ class JSONInnerThoughtsExtractor:
 
         return updates_main_json, updates_inner_thoughts
 
+    # def process_anthropic_fragment(self, fragment) -> Tuple[str, str]:
+    #     # Add to buffer
+    #     self.main_buffer += fragment
+    #     return fragment, ""
+
     @property
     def main_json(self):
         return self.main_buffer
@@ -233,7 +238,6 @@ class FunctionArgumentsStreamHandler:
 
     def process_json_chunk(self, chunk: str) -> Optional[str]:
         """Process a chunk from the function arguments and return the plaintext version"""
-
         # Use strip to handle only leading and trailing whitespace in control structures
         if self.accumulating:
             clean_chunk = chunk.strip()
{letta_nightly-0.6.15.dev20250125103914.dist-info → letta_nightly-0.6.16.dev20250127040412.dist-info}/RECORD
CHANGED
@@ -1,14 +1,14 @@
-letta/__init__.py,sha256=
+letta/__init__.py,sha256=f4jKUd8LM4lY5v3oLSX-PcCxmD4qajTjqAcpXj_AcQY,919
 letta/__main__.py,sha256=6Hs2PV7EYc5Tid4g4OtcLXhqVHiNYTGzSBdoOnW2HXA,29
-letta/agent.py,sha256=
+letta/agent.py,sha256=aIzscF44Vr6m6s29h9iEHgZaTqzbxneGmvQyMIROErw,56679
 letta/benchmark/benchmark.py,sha256=ebvnwfp3yezaXOQyGXkYCDYpsmre-b9hvNtnyx4xkG0,3701
 letta/benchmark/constants.py,sha256=aXc5gdpMGJT327VuxsT5FngbCK2J41PQYeICBO7g_RE,536
 letta/chat_only_agent.py,sha256=71Lf-df8y3nsE9IFKpEigaZaWHoWnXnhVChkp1L-83I,4760
 letta/cli/cli.py,sha256=_uGKM-RvGLGf7y8iWjkLgLTxIw7uWrdCdL5ETUOCkUs,16472
-letta/cli/cli_config.py,sha256=
+letta/cli/cli_config.py,sha256=2oo4vui1GXQarAD6Ru4SRzPvcW4eX2mCXOBusfYGvJw,8533
 letta/cli/cli_load.py,sha256=xFw-CuzjChcIptaqQ1XpDROENt0JSjyPeiQ0nmEeO1k,2706
 letta/client/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-letta/client/client.py,sha256=
+letta/client/client.py,sha256=S6p3kLpztox4kTob-iwcTuIZMVcQzFWQPwfopVf-Rbc,138725
 letta/client/streaming.py,sha256=Vkf5iz6h9FwiCmpYTrtMR162HQZNqrpHHWQlhUjpxQA,4873
 letta/client/utils.py,sha256=VCGV-op5ZSmurd4yw7Vhf93XDQ0BkyBT8qsuV7EqfiU,2859
 letta/config.py,sha256=JFGY4TWW0Wm5fTbZamOwWqk5G8Nn-TXyhgByGoAqy2c,12375
@@ -24,7 +24,7 @@ letta/functions/function_sets/extras.py,sha256=Z9yEdBpQFtTjpxkgbtkWMA8GtDWC6ai2b
 letta/functions/function_sets/multi_agent.py,sha256=Z5x0J71l_14FDd0QbQ0rNqCb5UVRkSKH-u2cPrQPF4o,3838
 letta/functions/functions.py,sha256=wxxo6MJXBfcPeEc1YYWK5ENOD3RFNTIc65RTDBo77x4,5673
 letta/functions/helpers.py,sha256=iG6KVUDamhJuWNxVANtZB5GSSEUv1KWQTRnb5fVNh4Q,14045
-letta/functions/schema_generator.py,sha256=
+letta/functions/schema_generator.py,sha256=qosgp3p27QRTqOCPLrSkCGVdyQsyTTZunXQ_g-YaTkw,20138
 letta/helpers/__init__.py,sha256=p0luQ1Oe3Skc6sH4O58aHHA3Qbkyjifpuq0DZ1GAY0U,59
 letta/helpers/tool_rule_solver.py,sha256=VnJfqb5L1Lcipc_tBVGj0om60GKQkMkNLgg6X9VZl2c,6210
 letta/humans/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -32,20 +32,20 @@ letta/humans/examples/basic.txt,sha256=Lcp8YESTWvOJgO4Yf_yyQmgo5bKakeB1nIVrwEGG6
 letta/humans/examples/cs_phd.txt,sha256=9C9ZAV_VuG7GB31ksy3-_NAyk8rjE6YtVOkhp08k1xw,297
 letta/interface.py,sha256=JszHyhIK34dpV0h5KL0CD1W4svh4eijaHGgfOYyZOhg,12755
 letta/llm_api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-letta/llm_api/anthropic.py,sha256=
+letta/llm_api/anthropic.py,sha256=5prpeZWHtjors9zT5e_6ozq5HG3BP2FVWAmoPqAW8zA,34162
 letta/llm_api/aws_bedrock.py,sha256=-ms9tdROu8DLrEZJ9XgL-IyIOU_0UJKuhfRbjLs0_Gc,3838
 letta/llm_api/azure_openai.py,sha256=Y1HKPog1XzM_f7ujUK_Gv2zQkoy5pU-1bKiUnvSxSrs,6297
 letta/llm_api/azure_openai_constants.py,sha256=oXtKrgBFHf744gyt5l1thILXgyi8NDNUrKEa2GGGpjw,278
 letta/llm_api/cohere.py,sha256=H5kzYH_aQAnGNq7lip7XyKGLEOKC318Iw0_tiTP6kc4,14772
 letta/llm_api/google_ai.py,sha256=MIX4nmyC6448AvyPPSE8JZ_tzSpKJTArkZSfQGGoy0M,17920
 letta/llm_api/helpers.py,sha256=ov9WHsLSvkceIpSNJ3PUgCvufD862Bcrum-bWrUVJko,16193
-letta/llm_api/llm_api_tools.py,sha256=
+letta/llm_api/llm_api_tools.py,sha256=LxaXhc9x_zj68lzGig832qK7RqJZqFzgeUFMd3_5O3U,20040
 letta/llm_api/mistral.py,sha256=fHdfD9ug-rQIk2qn8tRKay1U6w9maF11ryhKi91FfXM,1593
-letta/llm_api/openai.py,sha256=
+letta/llm_api/openai.py,sha256=AjMJFpFI02cWsv_ih8zLlgifwQ-dWosTDGGuASHqwzg,20247
 letta/local_llm/README.md,sha256=hFJyw5B0TU2jrh9nb0zGZMgdH-Ei1dSRfhvPQG_NSoU,168
 letta/local_llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 letta/local_llm/chat_completion_proxy.py,sha256=ElYR0M5SY2zL4NQzInye21MxqtiP3AUXX9Ia0KbkD4Y,12948
-letta/local_llm/constants.py,sha256=
+letta/local_llm/constants.py,sha256=MwdXcv2TtH4dh4h6jSdIUor7mfhB7VjJHRweXjpl3Zk,1303
 letta/local_llm/function_parser.py,sha256=eRAiP1CmfpiZTHgKZ2gbbMpA1SV8TlMLlgG1Wdfug2I,2607
 letta/local_llm/grammars/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 letta/local_llm/grammars/gbnf_grammar_generator.py,sha256=-ohgZYgKVA8Qj8w2iu1wTYhuhfpYg7dIA56I9OQluZM,56332
@@ -155,7 +155,7 @@ letta/schemas/letta_response.py,sha256=yL0w-cdUazgEqg6_F4LJz2tugKNAZsB83Gr5jfXwa
 letta/schemas/llm_config.py,sha256=d18QhSjy8TxtXior00PQE3Q8Kk2gbyvFPF6GmMnjEuc,4934
 letta/schemas/llm_config_overrides.py,sha256=-oRglCTcajF6UAK3RAa0FLWVuKODPI1v403fDIWMAtA,1815
 letta/schemas/memory.py,sha256=GOYDfPKzbWftUWO9Hv4KW7xAi1EIQmC8zpP7qvEkVHw,10245
-letta/schemas/message.py,sha256=
+letta/schemas/message.py,sha256=ioPZqUYO5Wqjt54XTeaQdyvdJMm70RdQ9gpwXV0rCJI,36216
 letta/schemas/openai/chat_completion_request.py,sha256=AOIwgbN3CZKVqkuXeMHeSa53u4h0wVq69t3T_LJ0vIE,3389
 letta/schemas/openai/chat_completion_response.py,sha256=ub-oVSyLpuJd-5_yzCSIRR8tD3GM83IeDO1c1uAATa4,3970
 letta/schemas/openai/chat_completions.py,sha256=l0e9sT9boTD5VBU5YtJ0s7qUtCfFGB2K-gQLeEZ2LHU,3599
@@ -163,12 +163,12 @@ letta/schemas/openai/embedding_response.py,sha256=WKIZpXab1Av7v6sxKG8feW3ZtpQUNo
 letta/schemas/openai/openai.py,sha256=Hilo5BiLAGabzxCwnwfzK5QrWqwYD8epaEKFa4Pwndk,7970
 letta/schemas/organization.py,sha256=WWbUWVSp_VQRFwWN4fdHg1yObiV6x9rZnvIY8x5BPs0,746
 letta/schemas/passage.py,sha256=pdCLZgOn0gWK1gB6aFHLS0gfdWCBqLaiHDA0iQ12Zd8,3704
-letta/schemas/providers.py,sha256=
+letta/schemas/providers.py,sha256=Wd0d0jgv6z3X5t7cT1ZVoX_Qa85ecsm1gQzkOPgQFUo,34890
 letta/schemas/run.py,sha256=OyuAXXjL96ftOeLdqkiIKi9csGeewy-pN5SgWk-vYGg,2124
 letta/schemas/sandbox_config.py,sha256=v32V5T73X-VxhDk0g_1RGniK985KMvg2xyLVi1dvMQY,4215
 letta/schemas/source.py,sha256=-BQVolcXA2ziCu2ztR6cbTdGUc8G7vGJy7rvpdf1hpg,2880
 letta/schemas/step.py,sha256=cCmDChQMndy7aMJGH0Z19VbzJkAeFTYuA0cJpzjW2g0,1928
-letta/schemas/tool.py,sha256=
+letta/schemas/tool.py,sha256=uv3WxTt9SzaoXzwTLNHT2wegWTcaBFQBnBvNJrxeYvs,11022
 letta/schemas/tool_rule.py,sha256=LJwi1T474-3zbFGiW7_fegyfduC3F2u7cdlBsV0U_IU,1679
 letta/schemas/usage.py,sha256=8oYRH-JX0PfjIu2zkT5Uu3UWQ7Unnz_uHiO8hRGI4m0,912
 letta/schemas/user.py,sha256=V32Tgl6oqB3KznkxUz12y7agkQicjzW7VocSpj78i6Q,1526
@@ -180,10 +180,10 @@ letta/server/rest_api/app.py,sha256=bhF2LyikAQckEAqDHJ6mCCjeIT0QFEvYOkvO6dWPjD4,
 letta/server/rest_api/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 letta/server/rest_api/auth/index.py,sha256=fQBGyVylGSRfEMLQ17cZzrHd5Y1xiVylvPqH5Rl-lXQ,1378
 letta/server/rest_api/auth_token.py,sha256=725EFEIiNj4dh70hrSd94UysmFD8vcJLrTRfNHkzxDo,774
-letta/server/rest_api/interface.py,sha256=
+letta/server/rest_api/interface.py,sha256=5lf5k5GJgUaRb8NCKotSK5PZDAcpYFmKjyPlMqtc964,51881
 letta/server/rest_api/routers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 letta/server/rest_api/routers/v1/__init__.py,sha256=bXEZzmvHNX7N11NDwsxyajjci7yxP-2dYIvbeJi33vA,1070
-letta/server/rest_api/routers/v1/agents.py,sha256=
+letta/server/rest_api/routers/v1/agents.py,sha256=sjcBg_F_ZU00F6Ek2q8uByjV4D7bmgeqArH--6SILns,25171
 letta/server/rest_api/routers/v1/blocks.py,sha256=oJYOpGUTd4AhKwVolVlZPIXO2EoOrBHkyi2PdrmbtmA,3888
 letta/server/rest_api/routers/v1/health.py,sha256=pKCuVESlVOhGIb4VC4K-H82eZqfghmT6kvj2iOkkKuc,401
 letta/server/rest_api/routers/v1/jobs.py,sha256=pKihW12hQdFwt6tHQXs94yOMv6xotlhBB3Vl7Q5ASKQ,2738
@@ -194,11 +194,11 @@ letta/server/rest_api/routers/v1/runs.py,sha256=4w06j5CYfRzVf5Jf9Fzh7zQyVxC1R6q9
 letta/server/rest_api/routers/v1/sandbox_configs.py,sha256=RR7u3Yj2d9llopbUYyXxgJnV-UXY0LvUMoL41a1yXCk,5260
 letta/server/rest_api/routers/v1/sources.py,sha256=g7NKgbZkS7y1vlukJHZ_yoWrk3AxyoWKTVGszp0Ky18,8414
 letta/server/rest_api/routers/v1/tags.py,sha256=45G0cmcP-ER0OO5OanT_fGtGQfl9ZjRKU97mFwtwyfo,878
-letta/server/rest_api/routers/v1/tools.py,sha256=
+letta/server/rest_api/routers/v1/tools.py,sha256=Rp2_YH7KHeP_-WHjlwqu0wxtR27wvaO0wZToZtENB_w,12000
 letta/server/rest_api/routers/v1/users.py,sha256=G5DBHSkPfBgVHN2Wkm-rVYiLQAudwQczIq2Z3YLdbVo,2277
 letta/server/rest_api/static_files.py,sha256=NG8sN4Z5EJ8JVQdj19tkFa9iQ1kBPTab9f_CUxd_u4Q,3143
 letta/server/rest_api/utils.py,sha256=dsjkZzgo9Rk3fjUf1ajjiiql1eeO5DAzmXprttI7bJU,3993
-letta/server/server.py,sha256=
+letta/server/server.py,sha256=bTlqKQDzYudHLflAt6kPy7K28tOhlJEoBj7gAe8tc94,59834
 letta/server/startup.sh,sha256=722uKJWB2C4q3vjn39De2zzPacaZNw_1fN1SpLGjKIo,1569
 letta/server/static_files/assets/index-048c9598.js,sha256=mR16XppvselwKCcNgONs4L7kZEVa4OEERm4lNZYtLSk,146819
 letta/server/static_files/assets/index-0e31b727.css,sha256=SBbja96uiQVLDhDOroHgM6NSl7tS4lpJRCREgSS_hA8,7672
@@ -214,7 +214,7 @@ letta/server/ws_api/server.py,sha256=cBSzf-V4zT1bL_0i54OTI3cMXhTIIxqjSRF8pYjk7fg
 letta/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 letta/services/agent_manager.py,sha256=I6F-nED9Q6O_FY2-O1gHDY-OaQIxbt3FsL88DENVTfE,50287
 letta/services/block_manager.py,sha256=u56TXG46QDMbQZadDGCO7fY1vreJ69Xr_0MUF53xw4k,5519
-letta/services/helpers/agent_manager_helper.py,sha256=
+letta/services/helpers/agent_manager_helper.py,sha256=RH0MXLZASkP2LVbVNUfSYHrcBYZnVxFd9ejGjRK90Hw,11283
 letta/services/job_manager.py,sha256=TR35-2kD1xOWJhO2V_YA4cYZc5HgA7vuafZ_I8ABi3U,13237
 letta/services/message_manager.py,sha256=w6-B9Zz5z9UXcd6mKhinsaCINTsmxDsH9JJsV2_qlH4,8878
 letta/services/organization_manager.py,sha256=h3hrknBhA3YQt90OeBzFnqjYM9NWKnk8jDKzXGm4AUg,3392
@@ -225,15 +225,15 @@ letta/services/sandbox_config_manager.py,sha256=eWDNTscRG9Gt_Ixho3-daOOno_9Kcebx
 letta/services/source_manager.py,sha256=0JLKIv405oS5wc6bY5k2bxxZpS9O-VwUGHVdGPbJ3e4,7676
 letta/services/step_manager.py,sha256=RngrVs2Sd_oDZv_UoQ1ToLY0RnH-6wS1DqIBPRm-Imc,2961
 letta/services/tool_execution_sandbox.py,sha256=Tjufm58V9XzeYr8oF6g5b3OV5zZ7oPWUTqcC8GsBi9k,23162
-letta/services/tool_manager.py,sha256=
+letta/services/tool_manager.py,sha256=O_siJO7ZgipxJm4xLKaioEJMLvZPPucCsoLIDIU7814,8479
 letta/services/user_manager.py,sha256=1U8BQ_-MBkEW2wnSFV_OsTwBmRAZLN8uHLFjnDjK3hA,4308
-letta/settings.py,sha256=
+letta/settings.py,sha256=sHJEaLPtEnWNZDew69F9tC8YTrkBh5BWXXtzOtAneFY,6071
 letta/streaming_interface.py,sha256=lo2VAQRUJOdWTijwnXuKOC9uejqr2siUAEmZiQUXkj8,15710
-letta/streaming_utils.py,sha256=
+letta/streaming_utils.py,sha256=jLqFTVhUL76FeOuYk8TaRQHmPTf3HSRc2EoJwxJNK6U,11946
 letta/system.py,sha256=iCcjvMKXvG1sa18Suy8Gjoa0djYGiPKi3ywMECce40Y,6974
 letta/utils.py,sha256=FQgWuYF0CTCIyH41rVy_rD5_ATPIlBZ24ovBtf3T1tI,33291
-letta_nightly-0.6.
-letta_nightly-0.6.
-letta_nightly-0.6.
-letta_nightly-0.6.
-letta_nightly-0.6.
+letta_nightly-0.6.16.dev20250127040412.dist-info/LICENSE,sha256=mExtuZ_GYJgDEI38GWdiEYZizZS4KkVt2SF1g_GPNhI,10759
+letta_nightly-0.6.16.dev20250127040412.dist-info/METADATA,sha256=mX062VfoNUauFPnIGtHmtjn5_RxuowFb1ubZXaGlhUU,22156
+letta_nightly-0.6.16.dev20250127040412.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+letta_nightly-0.6.16.dev20250127040412.dist-info/entry_points.txt,sha256=2zdiyGNEZGV5oYBuS-y2nAAgjDgcC9yM_mHJBFSRt5U,40
+letta_nightly-0.6.16.dev20250127040412.dist-info/RECORD,,