livekit-plugins-aws 0.1.0__tar.gz → 1.0.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (24)
  1. livekit_plugins_aws-1.0.0/.gitignore +168 -0
  2. {livekit_plugins_aws-0.1.0 → livekit_plugins_aws-1.0.0}/PKG-INFO +14 -24
  3. livekit_plugins_aws-1.0.0/livekit/plugins/aws/llm.py +271 -0
  4. {livekit_plugins_aws-0.1.0 → livekit_plugins_aws-1.0.0}/livekit/plugins/aws/models.py +1 -1
  5. livekit_plugins_aws-1.0.0/livekit/plugins/aws/stt.py +234 -0
  6. {livekit_plugins_aws-0.1.0 → livekit_plugins_aws-1.0.0}/livekit/plugins/aws/tts.py +72 -79
  7. livekit_plugins_aws-1.0.0/livekit/plugins/aws/utils.py +144 -0
  8. {livekit_plugins_aws-0.1.0 → livekit_plugins_aws-1.0.0}/livekit/plugins/aws/version.py +1 -1
  9. livekit_plugins_aws-1.0.0/pyproject.toml +44 -0
  10. livekit_plugins_aws-0.1.0/livekit/plugins/aws/_utils.py +0 -216
  11. livekit_plugins_aws-0.1.0/livekit/plugins/aws/llm.py +0 -350
  12. livekit_plugins_aws-0.1.0/livekit/plugins/aws/stt.py +0 -218
  13. livekit_plugins_aws-0.1.0/livekit_plugins_aws.egg-info/PKG-INFO +0 -53
  14. livekit_plugins_aws-0.1.0/livekit_plugins_aws.egg-info/SOURCES.txt +0 -17
  15. livekit_plugins_aws-0.1.0/livekit_plugins_aws.egg-info/dependency_links.txt +0 -1
  16. livekit_plugins_aws-0.1.0/livekit_plugins_aws.egg-info/requires.txt +0 -4
  17. livekit_plugins_aws-0.1.0/livekit_plugins_aws.egg-info/top_level.txt +0 -1
  18. livekit_plugins_aws-0.1.0/pyproject.toml +0 -3
  19. livekit_plugins_aws-0.1.0/setup.cfg +0 -4
  20. livekit_plugins_aws-0.1.0/setup.py +0 -61
  21. {livekit_plugins_aws-0.1.0 → livekit_plugins_aws-1.0.0}/README.md +0 -0
  22. {livekit_plugins_aws-0.1.0 → livekit_plugins_aws-1.0.0}/livekit/plugins/aws/__init__.py +0 -0
  23. {livekit_plugins_aws-0.1.0 → livekit_plugins_aws-1.0.0}/livekit/plugins/aws/log.py +0 -0
  24. {livekit_plugins_aws-0.1.0 → livekit_plugins_aws-1.0.0}/livekit/plugins/aws/py.typed +0 -0
livekit_plugins_aws-1.0.0/.gitignore
@@ -0,0 +1,168 @@
+**/.vscode
+**/.DS_Store
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+.idea/
+
+node_modules
+
+credentials.json
+pyrightconfig.json
{livekit_plugins_aws-0.1.0 → livekit_plugins_aws-1.0.0}/PKG-INFO
@@ -1,38 +1,28 @@
-Metadata-Version: 2.2
+Metadata-Version: 2.4
 Name: livekit-plugins-aws
-Version: 0.1.0
+Version: 1.0.0
 Summary: LiveKit Agents Plugin for services from AWS
-Home-page: https://github.com/livekit/agents
-License: Apache-2.0
 Project-URL: Documentation, https://docs.livekit.io
 Project-URL: Website, https://livekit.io/
 Project-URL: Source, https://github.com/livekit/agents
-Keywords: webrtc,realtime,audio,video,livekit,aws
+Author-email: LiveKit <hello@livekit.io>
+License-Expression: Apache-2.0
+Keywords: audio,aws,livekit,realtime,video,webrtc
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: Apache Software License
-Classifier: Topic :: Multimedia :: Sound/Audio
-Classifier: Topic :: Multimedia :: Video
-Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
 Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Topic :: Multimedia :: Sound/Audio
+Classifier: Topic :: Multimedia :: Video
+Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
 Requires-Python: >=3.9.0
+Requires-Dist: aioboto3==14.1.0
+Requires-Dist: amazon-transcribe==0.6.2
+Requires-Dist: boto3==1.37.1
+Requires-Dist: livekit-agents>=1.0.0
 Description-Content-Type: text/markdown
-Requires-Dist: livekit-agents>=0.12.0
-Requires-Dist: aiobotocore==2.19.0
-Requires-Dist: boto3==1.36.3
-Requires-Dist: amazon-transcribe>=0.6.2
-Dynamic: classifier
-Dynamic: description
-Dynamic: description-content-type
-Dynamic: home-page
-Dynamic: keywords
-Dynamic: license
-Dynamic: project-url
-Dynamic: requires-dist
-Dynamic: requires-python
-Dynamic: summary

 # LiveKit Plugins AWS

@@ -50,4 +40,4 @@ pip install livekit-plugins-aws

 ## Pre-requisites

-You'll need to specify an AWS Access Key and a Deployment Region. They can be set as environment variables: `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY` and `AWS_DEFAULT_REGION`, respectively.
+You'll need to specify an AWS Access Key and a Deployment Region. They can be set as environment variables: `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY` and `AWS_DEFAULT_REGION`, respectively.
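The pre-requisites above translate into two setup paths. The sketch below is illustrative only, not part of the package: it assumes the plugin is imported as `livekit.plugins.aws`, uses the constructor arguments from the new `llm.py` shown next, and the `<...>` values are placeholders.

```python
# Illustrative sketch: two ways to supply the AWS credentials mentioned in the README.
import os

from livekit.plugins import aws

# Path 1: environment variables, picked up by the plugin's boto3/aioboto3 session.
os.environ.setdefault("AWS_ACCESS_KEY_ID", "<access-key-id>")          # placeholder
os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "<secret-access-key>")  # placeholder
os.environ.setdefault("AWS_DEFAULT_REGION", "us-east-1")

# Path 2: explicit arguments, which take precedence over the environment.
bedrock = aws.LLM(
    model="anthropic.claude-3-5-sonnet-20241022-v2:0",
    api_key=os.environ["AWS_ACCESS_KEY_ID"],
    api_secret=os.environ["AWS_SECRET_ACCESS_KEY"],
    region="us-east-1",
)
```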
livekit_plugins_aws-1.0.0/livekit/plugins/aws/llm.py
@@ -0,0 +1,271 @@
+# Copyright 2023 LiveKit, Inc.
+#
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+from dataclasses import dataclass
+from typing import Any, Literal
+
+import aioboto3
+
+from livekit.agents import APIConnectionError, APIStatusError, llm
+from livekit.agents.llm import ChatContext, FunctionTool, FunctionToolCall, ToolChoice
+from livekit.agents.types import (
+    DEFAULT_API_CONNECT_OPTIONS,
+    NOT_GIVEN,
+    APIConnectOptions,
+    NotGivenOr,
+)
+from livekit.agents.utils import is_given
+
+from .log import logger
+from .utils import get_aws_async_session, to_chat_ctx, to_fnc_ctx
+
+TEXT_MODEL = Literal["anthropic.claude-3-5-sonnet-20241022-v2:0"]
+
+
+@dataclass
+class _LLMOptions:
+    model: str | TEXT_MODEL
+    temperature: NotGivenOr[float]
+    tool_choice: NotGivenOr[ToolChoice]
+    max_output_tokens: NotGivenOr[int]
+    top_p: NotGivenOr[float]
+    additional_request_fields: NotGivenOr[dict[str, Any]]
+
+
+class LLM(llm.LLM):
+    def __init__(
+        self,
+        *,
+        model: NotGivenOr[str | TEXT_MODEL] = NOT_GIVEN,
+        api_key: NotGivenOr[str] = NOT_GIVEN,
+        api_secret: NotGivenOr[str] = NOT_GIVEN,
+        region: NotGivenOr[str] = NOT_GIVEN,
+        temperature: NotGivenOr[float] = NOT_GIVEN,
+        max_output_tokens: NotGivenOr[int] = NOT_GIVEN,
+        top_p: NotGivenOr[float] = NOT_GIVEN,
+        tool_choice: NotGivenOr[ToolChoice] = NOT_GIVEN,
+        additional_request_fields: NotGivenOr[dict[str, Any]] = NOT_GIVEN,
+        session: aioboto3.Session | None = None,
+    ) -> None:
+        """
+        Create a new instance of AWS Bedrock LLM.
+
+        ``api_key`` and ``api_secret`` must be set to your AWS Access key id and secret access key, either using the argument or by setting the
+        ``AWS_ACCESS_KEY_ID`` and ``AWS_SECRET_ACCESS_KEY`` environmental variables.
+
+        See https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/bedrock-runtime/client/converse_stream.html for more details on the AWS Bedrock Runtime API.
+
+        Args:
+            model (TEXT_MODEL, optional): model or inference profile arn to use(https://docs.aws.amazon.com/bedrock/latest/userguide/inference-profiles-use.html). Defaults to 'anthropic.claude-3-5-sonnet-20240620-v1:0'.
+            api_key(str, optional): AWS access key id.
+            api_secret(str, optional): AWS secret access key
+            region (str, optional): The region to use for AWS API requests. Defaults value is "us-east-1".
+            temperature (float, optional): Sampling temperature for response generation. Defaults to 0.8.
+            max_output_tokens (int, optional): Maximum number of tokens to generate in the output. Defaults to None.
+            top_p (float, optional): The nucleus sampling probability for response generation. Defaults to None.
+            tool_choice (ToolChoice, optional): Specifies whether to use tools during response generation. Defaults to "auto".
+            additional_request_fields (dict[str, Any], optional): Additional request fields to send to the AWS Bedrock Converse API. Defaults to None.
+            session (aioboto3.Session, optional): Optional aioboto3 session to use.
+        """  # noqa: E501
+        super().__init__()
+
+        self._session = session or get_aws_async_session(
+            api_key=api_key if is_given(api_key) else None,
+            api_secret=api_secret if is_given(api_secret) else None,
+            region=region if is_given(region) else None,
+        )
+
+        model = model if is_given(model) else os.environ.get("BEDROCK_INFERENCE_PROFILE_ARN")
+        if not model:
+            raise ValueError(
+                "model or inference profile arn must be set using the argument or by setting the BEDROCK_INFERENCE_PROFILE_ARN environment variable."  # noqa: E501
+            )
+        self._opts = _LLMOptions(
+            model=model,
+            temperature=temperature,
+            tool_choice=tool_choice,
+            max_output_tokens=max_output_tokens,
+            top_p=top_p,
+            additional_request_fields=additional_request_fields,
+        )
+
+    def chat(
+        self,
+        *,
+        chat_ctx: ChatContext,
+        tools: list[FunctionTool] | None = None,
+        conn_options: APIConnectOptions = DEFAULT_API_CONNECT_OPTIONS,
+        temperature: NotGivenOr[float] = NOT_GIVEN,
+        tool_choice: NotGivenOr[ToolChoice] = NOT_GIVEN,
+    ) -> LLMStream:
+        opts = {}
+
+        if is_given(self._opts.model):
+            opts["modelId"] = self._opts.model
+
+        def _get_tool_config() -> dict[str, Any] | None:
+            nonlocal tool_choice
+
+            if not tools:
+                return None
+
+            tool_config: dict[str, Any] = {"tools": to_fnc_ctx(tools)}
+            tool_choice = tool_choice if is_given(tool_choice) else self._opts.tool_choice
+            if is_given(tool_choice):
+                if isinstance(tool_choice, dict) and tool_choice.get("type") == "function":
+                    tool_config["toolChoice"] = {"tool": {"name": tool_choice["function"]["name"]}}
+                elif tool_choice == "required":
+                    tool_config["toolChoice"] = {"any": {}}
+                elif tool_choice == "auto":
+                    tool_config["toolChoice"] = {"auto": {}}
+                else:
+                    return None
+
+            return tool_config
+
+        tool_config = _get_tool_config()
+        if tool_config:
+            opts["toolConfig"] = tool_config
+        messages, system_message = to_chat_ctx(chat_ctx, id(self))
+        opts["messages"] = messages
+        if system_message:
+            opts["system"] = [system_message]
+
+        inference_config = {}
+        if is_given(self._opts.max_output_tokens):
+            inference_config["maxTokens"] = self._opts.max_output_tokens
+        temperature = temperature if is_given(temperature) else self._opts.temperature
+        if is_given(temperature):
+            inference_config["temperature"] = temperature
+        if is_given(self._opts.top_p):
+            inference_config["topP"] = self._opts.top_p
+
+        opts["inferenceConfig"] = inference_config
+        if is_given(self._opts.additional_request_fields):
+            opts["additionalModelRequestFields"] = self._opts.additional_request_fields
+
+        return LLMStream(
+            self,
+            chat_ctx=chat_ctx,
+            tools=tools,
+            session=self._session,
+            conn_options=conn_options,
+            extra_kwargs=opts,
+        )
+
+
+class LLMStream(llm.LLMStream):
+    def __init__(
+        self,
+        llm: LLM,
+        *,
+        chat_ctx: ChatContext,
+        session: aioboto3.Session,
+        conn_options: APIConnectOptions,
+        tools: list[FunctionTool] | None,
+        extra_kwargs: dict[str, Any],
+    ) -> None:
+        super().__init__(llm, chat_ctx=chat_ctx, tools=tools, conn_options=conn_options)
+        self._llm: LLM = llm
+        self._opts = extra_kwargs
+        self._session = session
+        self._tool_call_id: str | None = None
+        self._fnc_name: str | None = None
+        self._fnc_raw_arguments: str | None = None
+        self._text: str = ""
+
+    async def _run(self) -> None:
+        retryable = True
+        try:
+            async with self._session.client("bedrock-runtime") as client:
+                response = await client.converse_stream(**self._opts)  # type: ignore
+                request_id = response["ResponseMetadata"]["RequestId"]
+                if response["ResponseMetadata"]["HTTPStatusCode"] != 200:
+                    raise APIStatusError(
+                        f"aws bedrock llm: error generating content: {response}",
+                        retryable=False,
+                        request_id=request_id,
+                    )
+
+                async for chunk in response["stream"]:
+                    chat_chunk = self._parse_chunk(request_id, chunk)
+                    if chat_chunk is not None:
+                        retryable = False
+                        self._event_ch.send_nowait(chat_chunk)
+
+        except Exception as e:
+            raise APIConnectionError(
+                f"aws bedrock llm: error generating content: {e}",
+                retryable=retryable,
+            ) from e
+
+    def _parse_chunk(self, request_id: str, chunk: dict) -> llm.ChatChunk | None:
+        if "contentBlockStart" in chunk:
+            tool_use = chunk["contentBlockStart"]["start"]["toolUse"]
+            self._tool_call_id = tool_use["toolUseId"]
+            self._fnc_name = tool_use["name"]
+            self._fnc_raw_arguments = ""
+
+        elif "contentBlockDelta" in chunk:
+            delta = chunk["contentBlockDelta"]["delta"]
+            if "toolUse" in delta:
+                self._fnc_raw_arguments += delta["toolUse"]["input"]
+            elif "text" in delta:
+                return llm.ChatChunk(
+                    id=request_id,
+                    delta=llm.ChoiceDelta(content=delta["text"], role="assistant"),
+                )
+            else:
+                logger.warning(f"aws bedrock llm: unknown chunk type: {chunk}")
+
+        elif "metadata" in chunk:
+            metadata = chunk["metadata"]
+            return llm.ChatChunk(
+                id=request_id,
+                usage=llm.CompletionUsage(
+                    completion_tokens=metadata["usage"]["outputTokens"],
+                    prompt_tokens=metadata["usage"]["inputTokens"],
+                    total_tokens=metadata["usage"]["totalTokens"],
+                ),
+            )
+        elif "contentBlockStop" in chunk:
+            if self._tool_call_id:
+                if self._tool_call_id is None:
+                    logger.warning("aws bedrock llm: no tool call id in the response")
+                    return None
+                if self._fnc_name is None:
+                    logger.warning("aws bedrock llm: no function name in the response")
+                    return None
+                if self._fnc_raw_arguments is None:
+                    logger.warning("aws bedrock llm: no function arguments in the response")
+                    return None
+                chat_chunk = llm.ChatChunk(
+                    id=request_id,
+                    delta=llm.ChoiceDelta(
+                        role="assistant",
+                        tool_calls=[
+                            FunctionToolCall(
+                                arguments=self._fnc_raw_arguments,
+                                name=self._fnc_name,
+                                call_id=self._tool_call_id,
+                            ),
+                        ],
+                    ),
+                )
+                self._tool_call_id = self._fnc_name = self._fnc_raw_arguments = None
+                return chat_chunk
+        return None
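For orientation, here is a rough usage sketch of the new `LLM`/`LLMStream` pair added above. It is illustrative only and assumes the `ChatContext.add_message` and async-iteration interfaces of livekit-agents 1.0; in a real agent the LLM instance would normally be handed to the session/pipeline rather than iterated directly.

```python
# Illustrative sketch: streaming a reply from the Bedrock-backed LLM added in llm.py.
import asyncio

from livekit.agents.llm import ChatContext
from livekit.plugins import aws


async def main() -> None:
    bedrock = aws.LLM(
        model="anthropic.claude-3-5-sonnet-20241022-v2:0",
        region="us-east-1",
        temperature=0.8,
    )
    chat_ctx = ChatContext()
    chat_ctx.add_message(role="user", content="Say hello in one short sentence.")

    # chat() returns an LLMStream; each ChatChunk carries a text delta,
    # a tool call, or usage metadata (see _parse_chunk above).
    async for chunk in bedrock.chat(chat_ctx=chat_ctx):
        if chunk.delta and chunk.delta.content:
            print(chunk.delta.content, end="", flush=True)


asyncio.run(main())
```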
{livekit_plugins_aws-0.1.0 → livekit_plugins_aws-1.0.0}/livekit/plugins/aws/models.py
@@ -45,4 +45,4 @@ TTS_LANGUAGE = Literal[
     "de-CH",
 ]

-TTS_OUTPUT_FORMAT = Literal["pcm", "mp3"]
+TTS_OUTPUT_FORMAT = Literal["mp3"]