letta-client 0.1.47__py3-none-any.whl → 0.1.48__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of letta-client might be problematic.

letta_client/__init__.py CHANGED
@@ -165,7 +165,22 @@ from .types import (
    ValidationErrorLocItem,
)
from .errors import ConflictError, InternalServerError, NotFoundError, UnprocessableEntityError
- from . import agents, blocks, health, identities, jobs, models, providers, runs, sources, steps, tag, templates, tools
+ from . import (
+     agents,
+     blocks,
+     health,
+     identities,
+     jobs,
+     models,
+     providers,
+     runs,
+     sources,
+     steps,
+     tag,
+     templates,
+     tools,
+     voice,
+ )
from .agents import (
    AgentsSearchRequestSearchItem,
    AgentsSearchRequestSearchItemField,
@@ -512,6 +527,7 @@ from .templates import (
    TemplatesCreateAgentsResponseAgentsItemUpdatedAtItem,
)
from .version import __version__
+ from .voice import CreateVoiceChatCompletionsRequest

__all__ = [
    "ActionModel",
@@ -761,6 +777,7 @@ __all__ = [
    "ContinueToolRule",
    "CreateAgentRequestToolRulesItem",
    "CreateBlock",
+     "CreateVoiceChatCompletionsRequest",
    "E2BSandboxConfig",
    "EmbeddingConfig",
    "EmbeddingConfigEmbeddingEndpointType",
@@ -1036,4 +1053,5 @@ __all__ = [
    "tag",
    "templates",
    "tools",
+     "voice",
]

letta_client/base_client.py CHANGED
@@ -16,6 +16,7 @@ from .providers.client import ProvidersClient
from .runs.client import RunsClient
from .steps.client import StepsClient
from .tag.client import TagClient
+ from .voice.client import VoiceClient
from .templates.client import TemplatesClient
from .core.client_wrapper import AsyncClientWrapper
from .tools.client import AsyncToolsClient
@@ -30,6 +31,7 @@ from .providers.client import AsyncProvidersClient
from .runs.client import AsyncRunsClient
from .steps.client import AsyncStepsClient
from .tag.client import AsyncTagClient
+ from .voice.client import AsyncVoiceClient
from .templates.client import AsyncTemplatesClient


@@ -103,6 +105,7 @@ class LettaBase:
        self.runs = RunsClient(client_wrapper=self._client_wrapper)
        self.steps = StepsClient(client_wrapper=self._client_wrapper)
        self.tag = TagClient(client_wrapper=self._client_wrapper)
+         self.voice = VoiceClient(client_wrapper=self._client_wrapper)
        self.templates = TemplatesClient(client_wrapper=self._client_wrapper)


@@ -176,6 +179,7 @@ class AsyncLettaBase:
        self.runs = AsyncRunsClient(client_wrapper=self._client_wrapper)
        self.steps = AsyncStepsClient(client_wrapper=self._client_wrapper)
        self.tag = AsyncTagClient(client_wrapper=self._client_wrapper)
+         self.voice = AsyncVoiceClient(client_wrapper=self._client_wrapper)
        self.templates = AsyncTemplatesClient(client_wrapper=self._client_wrapper)



letta_client/core/client_wrapper.py CHANGED
@@ -16,7 +16,7 @@ class BaseClientWrapper:
        headers: typing.Dict[str, str] = {
            "X-Fern-Language": "Python",
            "X-Fern-SDK-Name": "letta-client",
-             "X-Fern-SDK-Version": "0.1.47",
+             "X-Fern-SDK-Version": "0.1.48",
        }
        if self.token is not None:
            headers["Authorization"] = f"Bearer {self.token}"

letta_client/voice/__init__.py ADDED
@@ -0,0 +1,5 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ from .types import CreateVoiceChatCompletionsRequest
+
+ __all__ = ["CreateVoiceChatCompletionsRequest"]

letta_client/voice/client.py ADDED
@@ -0,0 +1,176 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+ from ..core.client_wrapper import SyncClientWrapper
+ from .types.create_voice_chat_completions_request import CreateVoiceChatCompletionsRequest
+ from ..core.request_options import RequestOptions
+ from ..core.serialization import convert_and_respect_annotation_metadata
+ from ..core.unchecked_base_model import construct_type
+ from ..errors.unprocessable_entity_error import UnprocessableEntityError
+ from ..types.http_validation_error import HttpValidationError
+ from json.decoder import JSONDecodeError
+ from ..core.api_error import ApiError
+ from ..core.client_wrapper import AsyncClientWrapper
+
+ # this is used as the default value for optional parameters
+ OMIT = typing.cast(typing.Any, ...)
+
+
+ class VoiceClient:
+     def __init__(self, *, client_wrapper: SyncClientWrapper):
+         self._client_wrapper = client_wrapper
+
+     def create_voice_chat_completions(
+         self, *, request: CreateVoiceChatCompletionsRequest, request_options: typing.Optional[RequestOptions] = None
+     ) -> typing.Optional[typing.Any]:
+         """
+         Parameters
+         ----------
+         request : CreateVoiceChatCompletionsRequest
+
+         request_options : typing.Optional[RequestOptions]
+             Request-specific configuration.
+
+         Returns
+         -------
+         typing.Optional[typing.Any]
+             Successful response
+
+         Examples
+         --------
+         from letta_client import (
+             ChatCompletionDeveloperMessageParam,
+             CompletionCreateParamsNonStreaming,
+             Letta,
+         )
+
+         client = Letta(
+             token="YOUR_TOKEN",
+         )
+         client.voice.create_voice_chat_completions(
+             request=CompletionCreateParamsNonStreaming(
+                 messages=[
+                     ChatCompletionDeveloperMessageParam(
+                         content="content",
+                     )
+                 ],
+                 model="model",
+             ),
+         )
+         """
+         _response = self._client_wrapper.httpx_client.request(
+             "v1/voice/chat/completions",
+             method="POST",
+             json=convert_and_respect_annotation_metadata(
+                 object_=request, annotation=CreateVoiceChatCompletionsRequest, direction="write"
+             ),
+             request_options=request_options,
+             omit=OMIT,
+         )
+         try:
+             if 200 <= _response.status_code < 300:
+                 return typing.cast(
+                     typing.Optional[typing.Any],
+                     construct_type(
+                         type_=typing.Optional[typing.Any],  # type: ignore
+                         object_=_response.json(),
+                     ),
+                 )
+             if _response.status_code == 422:
+                 raise UnprocessableEntityError(
+                     typing.cast(
+                         HttpValidationError,
+                         construct_type(
+                             type_=HttpValidationError,  # type: ignore
+                             object_=_response.json(),
+                         ),
+                     )
+                 )
+             _response_json = _response.json()
+         except JSONDecodeError:
+             raise ApiError(status_code=_response.status_code, body=_response.text)
+         raise ApiError(status_code=_response.status_code, body=_response_json)
+
+
+ class AsyncVoiceClient:
+     def __init__(self, *, client_wrapper: AsyncClientWrapper):
+         self._client_wrapper = client_wrapper
+
+     async def create_voice_chat_completions(
+         self, *, request: CreateVoiceChatCompletionsRequest, request_options: typing.Optional[RequestOptions] = None
+     ) -> typing.Optional[typing.Any]:
+         """
+         Parameters
+         ----------
+         request : CreateVoiceChatCompletionsRequest
+
+         request_options : typing.Optional[RequestOptions]
+             Request-specific configuration.
+
+         Returns
+         -------
+         typing.Optional[typing.Any]
+             Successful response
+
+         Examples
+         --------
+         import asyncio
+
+         from letta_client import (
+             AsyncLetta,
+             ChatCompletionDeveloperMessageParam,
+             CompletionCreateParamsNonStreaming,
+         )
+
+         client = AsyncLetta(
+             token="YOUR_TOKEN",
+         )
+
+
+         async def main() -> None:
+             await client.voice.create_voice_chat_completions(
+                 request=CompletionCreateParamsNonStreaming(
+                     messages=[
+                         ChatCompletionDeveloperMessageParam(
+                             content="content",
+                         )
+                     ],
+                     model="model",
+                 ),
+             )
+
+
+         asyncio.run(main())
+         """
+         _response = await self._client_wrapper.httpx_client.request(
+             "v1/voice/chat/completions",
+             method="POST",
+             json=convert_and_respect_annotation_metadata(
+                 object_=request, annotation=CreateVoiceChatCompletionsRequest, direction="write"
+             ),
+             request_options=request_options,
+             omit=OMIT,
+         )
+         try:
+             if 200 <= _response.status_code < 300:
+                 return typing.cast(
+                     typing.Optional[typing.Any],
+                     construct_type(
+                         type_=typing.Optional[typing.Any],  # type: ignore
+                         object_=_response.json(),
+                     ),
+                 )
+             if _response.status_code == 422:
+                 raise UnprocessableEntityError(
+                     typing.cast(
+                         HttpValidationError,
+                         construct_type(
+                             type_=HttpValidationError,  # type: ignore
+                             object_=_response.json(),
+                         ),
+                     )
+                 )
+             _response_json = _response.json()
+         except JSONDecodeError:
+             raise ApiError(status_code=_response.status_code, body=_response.text)
+         raise ApiError(status_code=_response.status_code, body=_response_json)

letta_client/voice/types/__init__.py ADDED
@@ -0,0 +1,5 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ from .create_voice_chat_completions_request import CreateVoiceChatCompletionsRequest
+
+ __all__ = ["CreateVoiceChatCompletionsRequest"]

letta_client/voice/types/create_voice_chat_completions_request.py ADDED
@@ -0,0 +1,7 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+ from ...types.completion_create_params_non_streaming import CompletionCreateParamsNonStreaming
+ from ...types.completion_create_params_streaming import CompletionCreateParamsStreaming
+
+ CreateVoiceChatCompletionsRequest = typing.Union[CompletionCreateParamsNonStreaming, CompletionCreateParamsStreaming]

letta_client-0.1.48.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: letta-client
- Version: 0.1.47
+ Version: 0.1.48
Summary:
Requires-Python: >=3.8,<4.0
Classifier: Intended Audience :: Developers

letta_client-0.1.48.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- letta_client/__init__.py,sha256=9Irh5yC-SkPUD4m7mWClK5N-N0sXmHFV42zihbZp6Vo,55813
+ letta_client/__init__.py,sha256=bd8FHq-fZVf_PugqTtSZUPZoogZxcftmnDUhSK6rF54,55988
letta_client/agents/__init__.py,sha256=KCn7wjwmY7PBtXQYfiFQPNtwPz5dPBB4A6V6V242U6w,22136
letta_client/agents/blocks/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
letta_client/agents/blocks/client.py,sha256=x2651Wftl9J8GhtTkF0os6iposDvOirWa_vdmyEZB38,23746
@@ -201,13 +201,13 @@ letta_client/agents/types/agents_search_response_agents_item_updated_at.py,sha25
letta_client/agents/types/agents_search_response_agents_item_updated_at_item.py,sha256=Anb4fUgBP7Qf9Iggi_OYab0dPcWE-aIA6BvcAk8qIcg,166
letta_client/agents/types/create_agent_request_tool_rules_item.py,sha256=GsXAkmphzNTrk56rg5n4GVWZY_uu--w0dumpskWyKIg,496
letta_client/agents/types/update_agent_tool_rules_item.py,sha256=gCA9oFxIWEcbVV5Z85hL0RYjmRNYwfVrYmj6BFGlGTQ,489
- letta_client/base_client.py,sha256=ATvrb83SfeAseCcUAGyHzJKYfwg5Cv5axZqDirCj3Vc,8169
+ letta_client/base_client.py,sha256=ZD62gkVcq-3piS6Az6L9yM9vD1kssEdtK3c0pHoRWMI,8395
letta_client/blocks/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
letta_client/blocks/client.py,sha256=AeQQ-IdYhV-zqLTt3PTrJOtJ6XtBZcXNC108Y5EogVU,29178
letta_client/client.py,sha256=y2cXN0ApFul2Lz-fVh5TbeYbQ8oUjnXcwJ6wUczEf2c,2457
letta_client/core/__init__.py,sha256=OKbX2aCZXgHCDUsCouqv-OiX32xA6eFFCKIUH9M5Vzk,1591
letta_client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
- letta_client/core/client_wrapper.py,sha256=zmzku-vVVodEjqdPTt_Ya0ia3U-HF76xvJaZMQGvpRs,1997
+ letta_client/core/client_wrapper.py,sha256=geodLXr_BFC34uUcdJw_hXkonpE-j-NqtCaLh6Fu-IA,1997
letta_client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
letta_client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
letta_client/core/http_client.py,sha256=siUQ6UV0ARZALlxubqWSSAAPC9B4VW8y6MGlHStfaeo,19552
@@ -582,6 +582,10 @@ letta_client/types/user_update.py,sha256=0Bl1OjO7bfmlpsGQ36dSh6DH1UB_wJOTNewS0wD
letta_client/types/user_update.py,sha256=0Bl1OjO7bfmlpsGQ36dSh6DH1UB_wJOTNewS0wD
letta_client/types/validation_error.py,sha256=ACDS7wL5nQbS8ymFhWljwbBJmbugNa8bs2O5xEZC3u4,680
letta_client/types/validation_error_loc_item.py,sha256=LAtjCHIllWRBFXvAZ5QZpp7CPXjdtN9EB7HrLVo6EP0,128
letta_client/version.py,sha256=bttKLbIhO3UonCYQlqs600zzbQgfhCCMjeXR9WRzid4,79
- letta_client-0.1.47.dist-info/METADATA,sha256=bSbKbgJK71NZEvlvPkGoAFWlX4kVwi31FHLB8LslksI,4942
- letta_client-0.1.47.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
- letta_client-0.1.47.dist-info/RECORD,,
+ letta_client/voice/__init__.py,sha256=ZrZEuXIukVGhsfM-i0dIFfqjeSOBMPeEgDva7VvnipE,167
+ letta_client/voice/client.py,sha256=j3feSlNzeTVFXE7RUKEHGeMl_w0TJFBRUI3pXpLpUEI,6148
+ letta_client/voice/types/__init__.py,sha256=hBLJcrom99DkDxxsVRU2ni8kPx6SsCy8gtAJvNOz26w,199
+ letta_client/voice/types/create_voice_chat_completions_request.py,sha256=K4__83rXRCshfdobyAmH-5fUDJQ_PeSQetTUeC4Abk0,381
+ letta_client-0.1.48.dist-info/METADATA,sha256=a1tp07dRcEBKr5NqK0_BUC_fMDj74BK5S37Rh7gO4ac,4942
+ letta_client-0.1.48.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+ letta_client-0.1.48.dist-info/RECORD,,
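
Taken together, this release adds a voice sub-client on both the sync and async clients and exposes the CreateVoiceChatCompletionsRequest union type. A minimal usage sketch, adapted from the generated docstring example above; the token, model, and message content values are placeholders, not real credentials or model names:

from letta_client import (
    ChatCompletionDeveloperMessageParam,
    CompletionCreateParamsNonStreaming,
    Letta,
)

# Placeholder token; substitute a real API token.
client = Letta(token="YOUR_TOKEN")

# POSTs to v1/voice/chat/completions via the new client.voice sub-client.
response = client.voice.create_voice_chat_completions(
    request=CompletionCreateParamsNonStreaming(
        messages=[ChatCompletionDeveloperMessageParam(content="content")],
        model="model",
    ),
)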