livekit-plugins-anthropic 1.1.7__tar.gz → 1.3.12__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {livekit_plugins_anthropic-1.1.7 → livekit_plugins_anthropic-1.3.12}/.gitignore +8 -1
- {livekit_plugins_anthropic-1.1.7 → livekit_plugins_anthropic-1.3.12}/PKG-INFO +3 -3
- {livekit_plugins_anthropic-1.1.7 → livekit_plugins_anthropic-1.3.12}/livekit/plugins/anthropic/llm.py +17 -6
- livekit_plugins_anthropic-1.3.12/livekit/plugins/anthropic/models.py +15 -0
- livekit_plugins_anthropic-1.3.12/livekit/plugins/anthropic/utils.py +11 -0
- {livekit_plugins_anthropic-1.1.7 → livekit_plugins_anthropic-1.3.12}/livekit/plugins/anthropic/version.py +1 -1
- {livekit_plugins_anthropic-1.1.7 → livekit_plugins_anthropic-1.3.12}/pyproject.toml +2 -2
- livekit_plugins_anthropic-1.1.7/livekit/plugins/anthropic/models.py +0 -9
- livekit_plugins_anthropic-1.1.7/livekit/plugins/anthropic/utils.py +0 -54
- {livekit_plugins_anthropic-1.1.7 → livekit_plugins_anthropic-1.3.12}/README.md +0 -0
- {livekit_plugins_anthropic-1.1.7 → livekit_plugins_anthropic-1.3.12}/livekit/plugins/anthropic/__init__.py +0 -0
- {livekit_plugins_anthropic-1.1.7 → livekit_plugins_anthropic-1.3.12}/livekit/plugins/anthropic/log.py +0 -0
- {livekit_plugins_anthropic-1.1.7 → livekit_plugins_anthropic-1.3.12}/livekit/plugins/anthropic/py.typed +0 -0
{livekit_plugins_anthropic-1.1.7 → livekit_plugins_anthropic-1.3.12}/PKG-INFO

@@ -1,13 +1,13 @@
 Metadata-Version: 2.4
 Name: livekit-plugins-anthropic
-Version: 1.1.7
+Version: 1.3.12
 Summary: Agent Framework plugin for services from Anthropic
 Project-URL: Documentation, https://docs.livekit.io
 Project-URL: Website, https://livekit.io/
 Project-URL: Source, https://github.com/livekit/agents
 Author-email: LiveKit <hello@livekit.io>
 License-Expression: Apache-2.0
-Keywords: audio,livekit,realtime,video,webrtc
+Keywords: ai,audio,livekit,realtime,video,voice,webrtc
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3

@@ -20,7 +20,7 @@ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
 Requires-Python: >=3.9.0
 Requires-Dist: anthropic>=0.41
 Requires-Dist: httpx
-Requires-Dist: livekit-agents>=1.
+Requires-Dist: livekit-agents>=1.3.12
 Description-Content-Type: text/markdown

 # Anthropic plugin for LiveKit Agents

{livekit_plugins_anthropic-1.1.7 → livekit_plugins_anthropic-1.3.12}/livekit/plugins/anthropic/llm.py

@@ -25,7 +25,7 @@ import anthropic
 from livekit.agents import APIConnectionError, APIStatusError, APITimeoutError, llm
 from livekit.agents.llm import ToolChoice
 from livekit.agents.llm.chat_context import ChatContext
-from livekit.agents.llm.tool_context import
+from livekit.agents.llm.tool_context import Tool
 from livekit.agents.types import (
     DEFAULT_API_CONNECT_OPTIONS,
     NOT_GIVEN,

@@ -35,7 +35,7 @@ from livekit.agents.types import (
 from livekit.agents.utils import is_given

 from .models import ChatModels
-from .utils import CACHE_CONTROL_EPHEMERAL
+from .utils import CACHE_CONTROL_EPHEMERAL


 @dataclass

@@ -100,7 +100,7 @@ class LLM(llm.LLM):
         if not anthropic_api_key:
             raise ValueError("Anthropic API key is required")

-        self._client = anthropic.AsyncClient(
+        self._client = client or anthropic.AsyncClient(
             api_key=anthropic_api_key,
             base_url=base_url if is_given(base_url) else None,
             http_client=httpx.AsyncClient(
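The constructor now builds its own AsyncClient only when no client is supplied. A minimal usage sketch, assuming the `client` keyword argument and the package's exported LLM class behave as the hunk above suggests (the API key is a placeholder):

import anthropic
from livekit.plugins.anthropic import LLM

# Reuse one AsyncClient (connection pool, proxies, retries) across LLM instances.
shared_client = anthropic.AsyncClient(api_key="sk-ant-...")  # placeholder key
claude_llm = LLM(model="claude-3-5-haiku-20241022", client=shared_client)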
@@ -114,11 +114,19 @@ class LLM(llm.LLM):
             ),
         )

+    @property
+    def model(self) -> str:
+        return self._opts.model
+
+    @property
+    def provider(self) -> str:
+        return self._client._base_url.netloc.decode("utf-8")
+
     def chat(
         self,
         *,
         chat_ctx: ChatContext,
-        tools: list[
+        tools: list[Tool] | None = None,
         conn_options: APIConnectOptions = DEFAULT_API_CONNECT_OPTIONS,
         parallel_tool_calls: NotGivenOr[bool] = NOT_GIVEN,
         tool_choice: NotGivenOr[ToolChoice] = NOT_GIVEN,

@@ -141,7 +149,7 @@ class LLM(llm.LLM):
         extra["max_tokens"] = self._opts.max_tokens if is_given(self._opts.max_tokens) else 1024

         if tools:
-            extra["tools"] =
+            extra["tools"] = llm.ToolContext(tools).parse_function_tools("anthropic")
             tool_choice = (
                 cast(ToolChoice, tool_choice) if is_given(tool_choice) else self._opts.tool_choice
             )
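The new model and provider properties expose what the instance is configured with. A short sketch of reading them; the printed values are illustrative assumptions, not captured output:

# `claude_llm` is the instance from the previous sketch.
print(claude_llm.model)     # e.g. "claude-3-5-haiku-20241022"
print(claude_llm.provider)  # netloc of the client base URL, e.g. "api.anthropic.com"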
@@ -181,6 +189,9 @@ class LLM(llm.LLM):
         if extra.get("system"):
             extra["system"][-1]["cache_control"] = CACHE_CONTROL_EPHEMERAL

+        if extra.get("tools"):
+            extra["tools"][-1]["cache_control"] = CACHE_CONTROL_EPHEMERAL
+
         seen_assistant = False
         for msg in reversed(messages):
             if (

@@ -219,7 +230,7 @@ class LLMStream(llm.LLMStream):
         *,
         anthropic_stream: Awaitable[anthropic.AsyncStream[anthropic.types.RawMessageStreamEvent]],
         chat_ctx: llm.ChatContext,
-        tools: list[
+        tools: list[Tool],
         conn_options: APIConnectOptions,
     ) -> None:
         super().__init__(llm, chat_ctx=chat_ctx, tools=tools, conn_options=conn_options)
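Together with the existing system-message breakpoint, the new hunk marks the last tool definition for Anthropic prompt caching. An illustrative sketch of the resulting request shape; the dict contents are invented for the example and the cache_control form follows Anthropic's prompt-caching documentation:

extra = {
    "system": [{"type": "text", "text": "You are a helpful voice agent."}],
    "tools": [
        {"name": "get_weather", "description": "Fetch a forecast", "input_schema": {"type": "object"}},
    ],
}
# Mirror of the logic in the hunk above: tag the last entry of each list.
if extra.get("system"):
    extra["system"][-1]["cache_control"] = {"type": "ephemeral"}
if extra.get("tools"):
    extra["tools"][-1]["cache_control"] = {"type": "ephemeral"}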
livekit_plugins_anthropic-1.3.12/livekit/plugins/anthropic/models.py (new file)

@@ -0,0 +1,15 @@
+from typing import Literal
+
+# https://docs.anthropic.com/en/docs/about-claude/model-deprecations#model-status
+
+ChatModels = Literal[
+    "claude-3-5-sonnet-20240620",  # deprecated
+    "claude-3-opus-20240229",  # deprecated
+    "claude-3-5-sonnet-20241022",  # deprecated
+    "claude-3-haiku-20240307",
+    "claude-3-5-haiku-20241022",
+    "claude-3-7-sonnet-20250219",
+    "claude-sonnet-4-20250514",
+    "claude-opus-4-20250514",
+    "claude-opus-4-1-20250805",
+]
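Because ChatModels is a typing.Literal, the accepted model IDs are also available at runtime through typing.get_args. A small sketch; is_known_model is a hypothetical helper, not part of the package:

from typing import get_args

from livekit.plugins.anthropic.models import ChatModels


def is_known_model(model_id: str) -> bool:
    # True only for IDs enumerated in the Literal above.
    return model_id in get_args(ChatModels)


assert is_known_model("claude-sonnet-4-20250514")
assert not is_known_model("claude-2.1")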
livekit_plugins_anthropic-1.3.12/livekit/plugins/anthropic/utils.py (new file)

@@ -0,0 +1,11 @@
+import anthropic
+
+# We can define up to 4 cache breakpoints, we will add them at:
+# - the last tool definition
+# - the last system message
+# - the last assistant message
+# - the last user message before the last assistant message
+# https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching#structuring-your-prompt
+CACHE_CONTROL_EPHEMERAL = anthropic.types.CacheControlEphemeralParam(type="ephemeral")
+
+__all__ = ["CACHE_CONTROL_EPHEMERAL"]
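The new utils.py keeps only the shared cache-control constant; the per-tool conversion moved out of the plugin (see the removed utils.py further below). A hedged example of attaching the constant to an Anthropic tool definition, with the tool itself made up for illustration:

import anthropic

from livekit.plugins.anthropic.utils import CACHE_CONTROL_EPHEMERAL

tool = anthropic.types.ToolParam(
    name="lookup_order",
    description="Look up an order by its id",
    input_schema={"type": "object", "properties": {"order_id": {"type": "string"}}},
)
tool["cache_control"] = CACHE_CONTROL_EPHEMERAL  # marks a prompt-caching breakpoint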
{livekit_plugins_anthropic-1.1.7 → livekit_plugins_anthropic-1.3.12}/pyproject.toml

@@ -10,7 +10,7 @@ readme = "README.md"
 license = "Apache-2.0"
 requires-python = ">=3.9.0"
 authors = [{ name = "LiveKit", email = "hello@livekit.io" }]
-keywords = ["
+keywords = ["voice", "ai", "realtime", "audio", "video", "livekit", "webrtc"]
 classifiers = [
     "Intended Audience :: Developers",
     "License :: OSI Approved :: Apache Software License",

@@ -22,7 +22,7 @@ classifiers = [
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3 :: Only",
 ]
-dependencies = ["livekit-agents>=1.
+dependencies = ["livekit-agents>=1.3.12", "anthropic>=0.41", "httpx"]

 [project.urls]
 Documentation = "https://docs.livekit.io"
livekit_plugins_anthropic-1.1.7/livekit/plugins/anthropic/utils.py (removed)

@@ -1,54 +0,0 @@
-from typing import Literal, Optional, Union
-
-import anthropic
-from livekit.agents import llm
-from livekit.agents.llm import FunctionTool, RawFunctionTool
-from livekit.agents.llm.tool_context import (
-    get_raw_function_info,
-    is_function_tool,
-    is_raw_function_tool,
-)
-
-# We can define up to 4 cache breakpoints, we will add them at:
-# - the last tool definition
-# - the last system message
-# - the last assistant message
-# - the last user message before the last assistant message
-# https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching#structuring-your-prompt
-CACHE_CONTROL_EPHEMERAL = anthropic.types.CacheControlEphemeralParam(type="ephemeral")
-
-__all__ = ["to_fnc_ctx", "CACHE_CONTROL_EPHEMERAL"]
-
-
-def to_fnc_ctx(
-    fncs: list[Union[FunctionTool, RawFunctionTool]], caching: Optional[Literal["ephemeral"]]
-) -> list[anthropic.types.ToolParam]:
-    tools: list[anthropic.types.ToolParam] = []
-    for fnc in fncs:
-        tools.append(_build_anthropic_schema(fnc))
-
-    if tools and caching == "ephemeral":
-        tools[-1]["cache_control"] = CACHE_CONTROL_EPHEMERAL
-
-    return tools
-
-
-def _build_anthropic_schema(
-    function_tool: Union[FunctionTool, RawFunctionTool],
-) -> anthropic.types.ToolParam:
-    if is_function_tool(function_tool):
-        fnc = llm.utils.build_legacy_openai_schema(function_tool, internally_tagged=True)
-        return anthropic.types.ToolParam(
-            name=fnc["name"],
-            description=fnc["description"] or "",
-            input_schema=fnc["parameters"],
-        )
-    elif is_raw_function_tool(function_tool):
-        info = get_raw_function_info(function_tool)
-        return anthropic.types.ToolParam(
-            name=info.name,
-            description=info.raw_schema.get("description", ""),
-            input_schema=info.raw_schema.get("parameters", {}),
-        )
-    else:
-        raise ValueError("Invalid function tool")
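Code that imported to_fnc_ctx from this module no longer finds it in 1.3.12; tool conversion now goes through the framework call shown in the llm.py hunk above. A hedged migration sketch, where my_tools stands in for whatever tool list the caller already has:

from livekit.agents import llm

# my_tools: the list of tools previously passed to to_fnc_ctx (placeholder).
anthropic_tools = llm.ToolContext(my_tools).parse_function_tools("anthropic")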
Files without changes: README.md, livekit/plugins/anthropic/__init__.py, livekit/plugins/anthropic/log.py, livekit/plugins/anthropic/py.typed