letta-client 0.1.195__py3-none-any.whl → 0.1.196__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of letta-client has been flagged as potentially problematic; consult the registry's advisory page for details.

@@ -32,6 +32,7 @@ from .. import core
32
32
  from .types.update_agent_tool_rules_item import UpdateAgentToolRulesItem
33
33
  from .types.update_agent_response_format import UpdateAgentResponseFormat
34
34
  import datetime as dt
35
+ from ..types.message_type import MessageType
35
36
  from .types.agents_search_request_search_item import AgentsSearchRequestSearchItem
36
37
  from .types.agents_search_request_sort_by import AgentsSearchRequestSortBy
37
38
  from .types.agents_search_response import AgentsSearchResponse
@@ -1061,6 +1062,117 @@ class AgentsClient:
1061
1062
  raise ApiError(status_code=_response.status_code, body=_response.text)
1062
1063
  raise ApiError(status_code=_response.status_code, body=_response_json)
1063
1064
 
1065
+ def preview_raw_payload(
1066
+ self,
1067
+ agent_id: str,
1068
+ *,
1069
+ messages: typing.Sequence[MessageCreate],
1070
+ max_steps: typing.Optional[int] = OMIT,
1071
+ use_assistant_message: typing.Optional[bool] = OMIT,
1072
+ assistant_message_tool_name: typing.Optional[str] = OMIT,
1073
+ assistant_message_tool_kwarg: typing.Optional[str] = OMIT,
1074
+ include_return_message_types: typing.Optional[typing.Sequence[MessageType]] = OMIT,
1075
+ request_options: typing.Optional[RequestOptions] = None,
1076
+ ) -> typing.Dict[str, typing.Optional[typing.Any]]:
1077
+ """
1078
+ Inspect the raw LLM request payload without sending it.
1079
+
1080
+ This endpoint processes the message through the agent loop up until
1081
+ the LLM request, then returns the raw request payload that would
1082
+ be sent to the LLM provider. Useful for debugging and inspection.
1083
+
1084
+ Parameters
1085
+ ----------
1086
+ agent_id : str
1087
+
1088
+ messages : typing.Sequence[MessageCreate]
1089
+ The messages to be sent to the agent.
1090
+
1091
+ max_steps : typing.Optional[int]
1092
+ Maximum number of steps the agent should take to process the request.
1093
+
1094
+ use_assistant_message : typing.Optional[bool]
1095
+ Whether the server should parse specific tool call arguments (default `send_message`) as `AssistantMessage` objects.
1096
+
1097
+ assistant_message_tool_name : typing.Optional[str]
1098
+ The name of the designated message tool.
1099
+
1100
+ assistant_message_tool_kwarg : typing.Optional[str]
1101
+ The name of the message argument in the designated message tool.
1102
+
1103
+ include_return_message_types : typing.Optional[typing.Sequence[MessageType]]
1104
+ Only return specified message types in the response. If `None` (default) returns all messages.
1105
+
1106
+ request_options : typing.Optional[RequestOptions]
1107
+ Request-specific configuration.
1108
+
1109
+ Returns
1110
+ -------
1111
+ typing.Dict[str, typing.Optional[typing.Any]]
1112
+ Successful Response
1113
+
1114
+ Examples
1115
+ --------
1116
+ from letta_client import Letta, MessageCreate, TextContent
1117
+
1118
+ client = Letta(
1119
+ project="YOUR_PROJECT",
1120
+ token="YOUR_TOKEN",
1121
+ )
1122
+ client.agents.preview_raw_payload(
1123
+ agent_id="agent_id",
1124
+ messages=[
1125
+ MessageCreate(
1126
+ role="user",
1127
+ content=[
1128
+ TextContent(
1129
+ text="text",
1130
+ )
1131
+ ],
1132
+ )
1133
+ ],
1134
+ )
1135
+ """
1136
+ _response = self._client_wrapper.httpx_client.request(
1137
+ f"v1/agents/{jsonable_encoder(agent_id)}/messages/preview-raw-payload",
1138
+ method="POST",
1139
+ json={
1140
+ "messages": convert_and_respect_annotation_metadata(
1141
+ object_=messages, annotation=typing.Sequence[MessageCreate], direction="write"
1142
+ ),
1143
+ "max_steps": max_steps,
1144
+ "use_assistant_message": use_assistant_message,
1145
+ "assistant_message_tool_name": assistant_message_tool_name,
1146
+ "assistant_message_tool_kwarg": assistant_message_tool_kwarg,
1147
+ "include_return_message_types": include_return_message_types,
1148
+ },
1149
+ request_options=request_options,
1150
+ omit=OMIT,
1151
+ )
1152
+ try:
1153
+ if 200 <= _response.status_code < 300:
1154
+ return typing.cast(
1155
+ typing.Dict[str, typing.Optional[typing.Any]],
1156
+ construct_type(
1157
+ type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore
1158
+ object_=_response.json(),
1159
+ ),
1160
+ )
1161
+ if _response.status_code == 422:
1162
+ raise UnprocessableEntityError(
1163
+ typing.cast(
1164
+ HttpValidationError,
1165
+ construct_type(
1166
+ type_=HttpValidationError, # type: ignore
1167
+ object_=_response.json(),
1168
+ ),
1169
+ )
1170
+ )
1171
+ _response_json = _response.json()
1172
+ except JSONDecodeError:
1173
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1174
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1175
+
1064
1176
  def summarize_agent_conversation(
1065
1177
  self, agent_id: str, *, max_message_length: int, request_options: typing.Optional[RequestOptions] = None
1066
1178
  ) -> AgentState:
@@ -2298,6 +2410,125 @@ class AsyncAgentsClient:
2298
2410
  raise ApiError(status_code=_response.status_code, body=_response.text)
2299
2411
  raise ApiError(status_code=_response.status_code, body=_response_json)
2300
2412
 
2413
+ async def preview_raw_payload(
2414
+ self,
2415
+ agent_id: str,
2416
+ *,
2417
+ messages: typing.Sequence[MessageCreate],
2418
+ max_steps: typing.Optional[int] = OMIT,
2419
+ use_assistant_message: typing.Optional[bool] = OMIT,
2420
+ assistant_message_tool_name: typing.Optional[str] = OMIT,
2421
+ assistant_message_tool_kwarg: typing.Optional[str] = OMIT,
2422
+ include_return_message_types: typing.Optional[typing.Sequence[MessageType]] = OMIT,
2423
+ request_options: typing.Optional[RequestOptions] = None,
2424
+ ) -> typing.Dict[str, typing.Optional[typing.Any]]:
2425
+ """
2426
+ Inspect the raw LLM request payload without sending it.
2427
+
2428
+ This endpoint processes the message through the agent loop up until
2429
+ the LLM request, then returns the raw request payload that would
2430
+ be sent to the LLM provider. Useful for debugging and inspection.
2431
+
2432
+ Parameters
2433
+ ----------
2434
+ agent_id : str
2435
+
2436
+ messages : typing.Sequence[MessageCreate]
2437
+ The messages to be sent to the agent.
2438
+
2439
+ max_steps : typing.Optional[int]
2440
+ Maximum number of steps the agent should take to process the request.
2441
+
2442
+ use_assistant_message : typing.Optional[bool]
2443
+ Whether the server should parse specific tool call arguments (default `send_message`) as `AssistantMessage` objects.
2444
+
2445
+ assistant_message_tool_name : typing.Optional[str]
2446
+ The name of the designated message tool.
2447
+
2448
+ assistant_message_tool_kwarg : typing.Optional[str]
2449
+ The name of the message argument in the designated message tool.
2450
+
2451
+ include_return_message_types : typing.Optional[typing.Sequence[MessageType]]
2452
+ Only return specified message types in the response. If `None` (default) returns all messages.
2453
+
2454
+ request_options : typing.Optional[RequestOptions]
2455
+ Request-specific configuration.
2456
+
2457
+ Returns
2458
+ -------
2459
+ typing.Dict[str, typing.Optional[typing.Any]]
2460
+ Successful Response
2461
+
2462
+ Examples
2463
+ --------
2464
+ import asyncio
2465
+
2466
+ from letta_client import AsyncLetta, MessageCreate, TextContent
2467
+
2468
+ client = AsyncLetta(
2469
+ project="YOUR_PROJECT",
2470
+ token="YOUR_TOKEN",
2471
+ )
2472
+
2473
+
2474
+ async def main() -> None:
2475
+ await client.agents.preview_raw_payload(
2476
+ agent_id="agent_id",
2477
+ messages=[
2478
+ MessageCreate(
2479
+ role="user",
2480
+ content=[
2481
+ TextContent(
2482
+ text="text",
2483
+ )
2484
+ ],
2485
+ )
2486
+ ],
2487
+ )
2488
+
2489
+
2490
+ asyncio.run(main())
2491
+ """
2492
+ _response = await self._client_wrapper.httpx_client.request(
2493
+ f"v1/agents/{jsonable_encoder(agent_id)}/messages/preview-raw-payload",
2494
+ method="POST",
2495
+ json={
2496
+ "messages": convert_and_respect_annotation_metadata(
2497
+ object_=messages, annotation=typing.Sequence[MessageCreate], direction="write"
2498
+ ),
2499
+ "max_steps": max_steps,
2500
+ "use_assistant_message": use_assistant_message,
2501
+ "assistant_message_tool_name": assistant_message_tool_name,
2502
+ "assistant_message_tool_kwarg": assistant_message_tool_kwarg,
2503
+ "include_return_message_types": include_return_message_types,
2504
+ },
2505
+ request_options=request_options,
2506
+ omit=OMIT,
2507
+ )
2508
+ try:
2509
+ if 200 <= _response.status_code < 300:
2510
+ return typing.cast(
2511
+ typing.Dict[str, typing.Optional[typing.Any]],
2512
+ construct_type(
2513
+ type_=typing.Dict[str, typing.Optional[typing.Any]], # type: ignore
2514
+ object_=_response.json(),
2515
+ ),
2516
+ )
2517
+ if _response.status_code == 422:
2518
+ raise UnprocessableEntityError(
2519
+ typing.cast(
2520
+ HttpValidationError,
2521
+ construct_type(
2522
+ type_=HttpValidationError, # type: ignore
2523
+ object_=_response.json(),
2524
+ ),
2525
+ )
2526
+ )
2527
+ _response_json = _response.json()
2528
+ except JSONDecodeError:
2529
+ raise ApiError(status_code=_response.status_code, body=_response.text)
2530
+ raise ApiError(status_code=_response.status_code, body=_response_json)
2531
+
2301
2532
  async def summarize_agent_conversation(
2302
2533
  self, agent_id: str, *, max_message_length: int, request_options: typing.Optional[RequestOptions] = None
2303
2534
  ) -> AgentState:
@@ -24,7 +24,7 @@ class BaseClientWrapper:
24
24
  headers: typing.Dict[str, str] = {
25
25
  "X-Fern-Language": "Python",
26
26
  "X-Fern-SDK-Name": "letta-client",
27
- "X-Fern-SDK-Version": "0.1.195",
27
+ "X-Fern-SDK-Version": "0.1.196",
28
28
  }
29
29
  if self._project is not None:
30
30
  headers["X-Project"] = self._project
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: letta-client
3
- Version: 0.1.195
3
+ Version: 0.1.196
4
4
  Summary:
5
5
  Requires-Python: >=3.8,<4.0
6
6
  Classifier: Intended Audience :: Developers
@@ -2,7 +2,7 @@ letta_client/__init__.py,sha256=PaPQ6XnP5KGKv2jElXjj6d483LOtbaRGJu3n88rW1WA,1825
2
2
  letta_client/agents/__init__.py,sha256=9L60SAZIihZzh_KhVxu0uX4RS7z2iKKctzQsS8ycXHc,1954
3
3
  letta_client/agents/blocks/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
4
4
  letta_client/agents/blocks/client.py,sha256=4UGPYxfGwNN3ZW-SkIdfVZK6cvCcumVAw0_AM8OmoBY,25046
5
- letta_client/agents/client.py,sha256=B7ZsJGQqmeTUdP8yybRWh6qaoybopFCujzP99EDwk_k,93753
5
+ letta_client/agents/client.py,sha256=yjzXAxIPz8u0zdRzH4ubZT7Bef1QwM6zDVWbMwCDBC8,102846
6
6
  letta_client/agents/context/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
7
7
  letta_client/agents/context/client.py,sha256=O1gxStQyfzXi4MblatWalLTWM425gS_fndW3W_es08U,4887
8
8
  letta_client/agents/core_memory/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
@@ -65,7 +65,7 @@ letta_client/client_side_access_tokens/types/client_side_access_tokens_create_re
65
65
  letta_client/client_side_access_tokens/types/client_side_access_tokens_create_response_policy_data_item_access_item.py,sha256=R-H25IpNp9feSrW8Yj3h9O3UTMVvFniQJElogKxLuoE,254
66
66
  letta_client/core/__init__.py,sha256=OKbX2aCZXgHCDUsCouqv-OiX32xA6eFFCKIUH9M5Vzk,1591
67
67
  letta_client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
68
- letta_client/core/client_wrapper.py,sha256=UtE6TVKokDXXKnvD1093LlaVoFP_IcL8DoJpDGUCjqY,2336
68
+ letta_client/core/client_wrapper.py,sha256=M4wzmiGIyliMl5J4ktQr7yvZyNx480SW0k8ahbdETto,2336
69
69
  letta_client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
70
70
  letta_client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
71
71
  letta_client/core/http_client.py,sha256=Z77OIxIbL4OAB2IDqjRq_sYa5yNYAWfmdhdCSSvh6Y4,19552
@@ -418,6 +418,6 @@ letta_client/types/web_search_options_user_location_approximate.py,sha256=Ywk01J
418
418
  letta_client/version.py,sha256=bttKLbIhO3UonCYQlqs600zzbQgfhCCMjeXR9WRzid4,79
419
419
  letta_client/voice/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
420
420
  letta_client/voice/client.py,sha256=47iQYCuW_qpKI4hM3pYVxn3hw7kgQj3emU1_oRpkRMA,5811
421
- letta_client-0.1.195.dist-info/METADATA,sha256=3SLZZ8EDus9F2ROhSieJd6cpr_cDPa2WLu6TwbgXnYg,5177
422
- letta_client-0.1.195.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
423
- letta_client-0.1.195.dist-info/RECORD,,
421
+ letta_client-0.1.196.dist-info/METADATA,sha256=HAjEGfvqyk0MiQTDjvv6hDh8Q8Epvwj-B-kgII73P1Y,5177
422
+ letta_client-0.1.196.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
423
+ letta_client-0.1.196.dist-info/RECORD,,