agent-framework-anthropic 1.0.0b251104__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of agent-framework-anthropic might be problematic. Click here for more details.
- agent_framework_anthropic/__init__.py +15 -0
- agent_framework_anthropic/_chat_client.py +658 -0
- agent_framework_anthropic-1.0.0b251104.dist-info/METADATA +43 -0
- agent_framework_anthropic-1.0.0b251104.dist-info/RECORD +6 -0
- agent_framework_anthropic-1.0.0b251104.dist-info/WHEEL +4 -0
- agent_framework_anthropic-1.0.0b251104.dist-info/licenses/LICENSE +21 -0
|
# Copyright (c) Microsoft. All rights reserved.

"""Anthropic integration package for the Microsoft Agent Framework."""

import importlib.metadata

from ._chat_client import AnthropicClient

try:
    # Resolve the installed distribution's version so __version__ matches the release.
    __version__ = importlib.metadata.version(__name__)
except importlib.metadata.PackageNotFoundError:
    __version__ = "0.0.0"  # Fallback for development mode

# Public API of the package.
__all__ = [
    "AnthropicClient",
    "__version__",
]
|
|
@@ -0,0 +1,658 @@
|
|
|
1
|
+
# Copyright (c) Microsoft. All rights reserved.
|
|
2
|
+
|
|
3
|
+
from collections.abc import AsyncIterable, MutableMapping, MutableSequence, Sequence
|
|
4
|
+
from typing import Any, ClassVar, Final, TypeVar
|
|
5
|
+
|
|
6
|
+
from agent_framework import (
|
|
7
|
+
AGENT_FRAMEWORK_USER_AGENT,
|
|
8
|
+
AIFunction,
|
|
9
|
+
Annotations,
|
|
10
|
+
BaseChatClient,
|
|
11
|
+
ChatMessage,
|
|
12
|
+
ChatOptions,
|
|
13
|
+
ChatResponse,
|
|
14
|
+
ChatResponseUpdate,
|
|
15
|
+
CitationAnnotation,
|
|
16
|
+
Contents,
|
|
17
|
+
FinishReason,
|
|
18
|
+
FunctionCallContent,
|
|
19
|
+
FunctionResultContent,
|
|
20
|
+
HostedCodeInterpreterTool,
|
|
21
|
+
HostedMCPTool,
|
|
22
|
+
HostedWebSearchTool,
|
|
23
|
+
Role,
|
|
24
|
+
TextContent,
|
|
25
|
+
TextReasoningContent,
|
|
26
|
+
TextSpanRegion,
|
|
27
|
+
ToolProtocol,
|
|
28
|
+
UsageContent,
|
|
29
|
+
UsageDetails,
|
|
30
|
+
get_logger,
|
|
31
|
+
prepare_function_call_results,
|
|
32
|
+
use_chat_middleware,
|
|
33
|
+
use_function_invocation,
|
|
34
|
+
)
|
|
35
|
+
from agent_framework._pydantic import AFBaseSettings
|
|
36
|
+
from agent_framework.exceptions import ServiceInitializationError
|
|
37
|
+
from agent_framework.observability import use_observability
|
|
38
|
+
from anthropic import AsyncAnthropic
|
|
39
|
+
from anthropic.types.beta import (
|
|
40
|
+
BetaContentBlock,
|
|
41
|
+
BetaMessage,
|
|
42
|
+
BetaMessageDeltaUsage,
|
|
43
|
+
BetaRawContentBlockDelta,
|
|
44
|
+
BetaRawMessageStreamEvent,
|
|
45
|
+
BetaTextBlock,
|
|
46
|
+
BetaUsage,
|
|
47
|
+
)
|
|
48
|
+
from pydantic import SecretStr, ValidationError
|
|
49
|
+
|
|
50
|
+
logger = get_logger("agent_framework.anthropic")

# Default used when the caller does not specify max_tokens on a request.
ANTHROPIC_DEFAULT_MAX_TOKENS: Final[int] = 1024
# Beta feature flags sent with every request (hosted MCP client + code execution).
BETA_FLAGS: Final[list[str]] = ["mcp-client-2025-04-04", "code-execution-2025-08-25"]

# Maps framework roles onto the roles accepted by the Anthropic messages API.
# SYSTEM maps to "user" because the leading system message is hoisted into the
# separate `system` request parameter (see _create_run_options); any remaining
# system/tool messages are sent as user turns.
ROLE_MAP: dict[Role, str] = {
    Role.USER: "user",
    Role.ASSISTANT: "assistant",
    Role.SYSTEM: "user",
    Role.TOOL: "user",
}

# Maps Anthropic stop reasons onto framework finish reasons.
FINISH_REASON_MAP: dict[str, FinishReason] = {
    "stop_sequence": FinishReason.STOP,
    "max_tokens": FinishReason.LENGTH,
    "tool_use": FinishReason.TOOL_CALLS,
    "end_turn": FinishReason.STOP,
    "refusal": FinishReason.CONTENT_FILTER,
    "pause_turn": FinishReason.STOP,
}
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
class AnthropicSettings(AFBaseSettings):
    """Anthropic Project settings.

    The settings are first loaded from environment variables with the prefix 'ANTHROPIC_'.
    If the environment variables are not found, the settings can be loaded from a .env file
    with the encoding 'utf-8'. If the settings are not found in the .env file, the settings
    are ignored; however, validation will fail alerting that the settings are missing.

    Keyword Args:
        api_key: The Anthropic API key.
        chat_model_id: The Anthropic chat model ID.
        env_file_path: If provided, the .env settings are read from this file path location.
        env_file_encoding: The encoding of the .env file, defaults to 'utf-8'.

    Examples:
        .. code-block:: python

            from agent_framework.anthropic import AnthropicSettings

            # Using environment variables
            # Set ANTHROPIC_API_KEY=your_anthropic_api_key
            # ANTHROPIC_CHAT_MODEL_ID=claude-sonnet-4-5-20250929

            # Or passing parameters directly
            settings = AnthropicSettings(chat_model_id="claude-sonnet-4-5-20250929")

            # Or loading from a .env file
            settings = AnthropicSettings(env_file_path="path/to/.env")
    """

    env_prefix: ClassVar[str] = "ANTHROPIC_"

    # Secret API key; loaded from ANTHROPIC_API_KEY when not passed explicitly.
    api_key: SecretStr | None = None
    # Default chat model; loaded from ANTHROPIC_CHAT_MODEL_ID when not passed explicitly.
    chat_model_id: str | None = None


# Type variable bound to AnthropicClient, for APIs returning the concrete subclass.
TAnthropicClient = TypeVar("TAnthropicClient", bound="AnthropicClient")
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
@use_function_invocation
@use_observability
@use_chat_middleware
class AnthropicClient(BaseChatClient):
    """Anthropic Chat client."""

    # Provider name reported by the observability decorator.
    OTEL_PROVIDER_NAME: ClassVar[str] = "anthropic"  # type: ignore[reportIncompatibleVariableOverride, misc]

    def __init__(
        self,
        *,
        api_key: str | None = None,
        model_id: str | None = None,
        anthropic_client: AsyncAnthropic | None = None,
        env_file_path: str | None = None,
        env_file_encoding: str | None = None,
        **kwargs: Any,
    ) -> None:
        """Initialize an Anthropic Agent client.

        Keyword Args:
            api_key: The Anthropic API key to use for authentication.
            model_id: The ID of the model to use.
            anthropic_client: An existing Anthropic client to use. If not provided, one will be created.
                This can be used to further configure the client before passing it in.
                For instance if you need to set a different base_url for testing or private deployments.
            env_file_path: Path to environment file for loading settings.
            env_file_encoding: Encoding of the environment file.
            kwargs: Additional keyword arguments passed to the parent class.

        Raises:
            ServiceInitializationError: If settings validation fails, or if no
                ``anthropic_client`` is supplied and no API key can be resolved.

        Examples:
            .. code-block:: python

                from agent_framework.anthropic import AnthropicClient

                # Using environment variables
                # Set ANTHROPIC_API_KEY=your_anthropic_api_key
                # ANTHROPIC_CHAT_MODEL_ID=claude-sonnet-4-5-20250929

                # Or passing parameters directly
                client = AnthropicClient(
                    model_id="claude-sonnet-4-5-20250929",
                    api_key="your_anthropic_api_key",
                )

                # Or loading from a .env file
                client = AnthropicClient(env_file_path="path/to/.env")

                # Or passing in an existing client
                from anthropic import AsyncAnthropic

                anthropic_client = AsyncAnthropic(
                    api_key="your_anthropic_api_key", base_url="https://custom-anthropic-endpoint.com"
                )
                client = AnthropicClient(
                    model_id="claude-sonnet-4-5-20250929",
                    anthropic_client=anthropic_client,
                )

        """
        try:
            # Merge explicit arguments with environment/.env configuration.
            anthropic_settings = AnthropicSettings(
                api_key=api_key,  # type: ignore[arg-type]
                chat_model_id=model_id,
                env_file_path=env_file_path,
                env_file_encoding=env_file_encoding,
            )
        except ValidationError as ex:
            raise ServiceInitializationError("Failed to create Anthropic settings.", ex) from ex

        if anthropic_client is None:
            # An API key is only mandatory when we must construct the SDK client ourselves.
            if not anthropic_settings.api_key:
                raise ServiceInitializationError(
                    "Anthropic API key is required. Set via 'api_key' parameter "
                    "or 'ANTHROPIC_API_KEY' environment variable."
                )

            anthropic_client = AsyncAnthropic(
                api_key=anthropic_settings.api_key.get_secret_value(),
                default_headers={"User-Agent": AGENT_FRAMEWORK_USER_AGENT},
            )

        # Initialize parent
        super().__init__(**kwargs)

        # Initialize instance variables
        self.anthropic_client = anthropic_client
        self.model_id = anthropic_settings.chat_model_id
        # streaming requires tracking the last function call ID and name
        self._last_call_id_name: tuple[str, str] | None = None
|
|
202
|
+
|
|
203
|
+
# region Get response methods
|
|
204
|
+
|
|
205
|
+
async def _inner_get_response(
|
|
206
|
+
self,
|
|
207
|
+
*,
|
|
208
|
+
messages: MutableSequence[ChatMessage],
|
|
209
|
+
chat_options: ChatOptions,
|
|
210
|
+
**kwargs: Any,
|
|
211
|
+
) -> ChatResponse:
|
|
212
|
+
# Extract necessary state from messages and options
|
|
213
|
+
run_options = self._create_run_options(messages, chat_options, **kwargs)
|
|
214
|
+
message = await self.anthropic_client.beta.messages.create(**run_options, stream=False)
|
|
215
|
+
return self._process_message(message)
|
|
216
|
+
|
|
217
|
+
async def _inner_get_streaming_response(
|
|
218
|
+
self,
|
|
219
|
+
*,
|
|
220
|
+
messages: MutableSequence[ChatMessage],
|
|
221
|
+
chat_options: ChatOptions,
|
|
222
|
+
**kwargs: Any,
|
|
223
|
+
) -> AsyncIterable[ChatResponseUpdate]:
|
|
224
|
+
# Extract necessary state from messages and options
|
|
225
|
+
run_options = self._create_run_options(messages, chat_options, **kwargs)
|
|
226
|
+
async for chunk in await self.anthropic_client.beta.messages.create(**run_options, stream=True):
|
|
227
|
+
parsed_chunk = self._process_stream_event(chunk)
|
|
228
|
+
if parsed_chunk:
|
|
229
|
+
yield parsed_chunk
|
|
230
|
+
|
|
231
|
+
# region Create Run Options and Helpers
|
|
232
|
+
|
|
233
|
+
def _create_run_options(
|
|
234
|
+
self,
|
|
235
|
+
messages: MutableSequence[ChatMessage],
|
|
236
|
+
chat_options: ChatOptions,
|
|
237
|
+
**kwargs: Any,
|
|
238
|
+
) -> dict[str, Any]:
|
|
239
|
+
"""Create run options for the Anthropic client based on messages and chat options.
|
|
240
|
+
|
|
241
|
+
Args:
|
|
242
|
+
messages: The list of chat messages.
|
|
243
|
+
chat_options: The chat options.
|
|
244
|
+
kwargs: Additional keyword arguments.
|
|
245
|
+
|
|
246
|
+
Returns:
|
|
247
|
+
A dictionary of run options for the Anthropic client.
|
|
248
|
+
"""
|
|
249
|
+
run_options: dict[str, Any] = {
|
|
250
|
+
"model": chat_options.model_id or self.model_id,
|
|
251
|
+
"messages": self._convert_messages_to_anthropic_format(messages),
|
|
252
|
+
"max_tokens": chat_options.max_tokens or ANTHROPIC_DEFAULT_MAX_TOKENS,
|
|
253
|
+
"extra_headers": {"User-Agent": AGENT_FRAMEWORK_USER_AGENT},
|
|
254
|
+
"betas": BETA_FLAGS,
|
|
255
|
+
}
|
|
256
|
+
|
|
257
|
+
# Add any additional options from chat_options or kwargs
|
|
258
|
+
if chat_options.temperature is not None:
|
|
259
|
+
run_options["temperature"] = chat_options.temperature
|
|
260
|
+
if chat_options.top_p is not None:
|
|
261
|
+
run_options["top_p"] = chat_options.top_p
|
|
262
|
+
if chat_options.stop is not None:
|
|
263
|
+
run_options["stop_sequences"] = chat_options.stop
|
|
264
|
+
if messages and isinstance(messages[0], ChatMessage) and messages[0].role == Role.SYSTEM:
|
|
265
|
+
# first system message is passed as instructions
|
|
266
|
+
run_options["system"] = messages[0].text
|
|
267
|
+
if chat_options.tool_choice is not None:
|
|
268
|
+
match (
|
|
269
|
+
chat_options.tool_choice if isinstance(chat_options.tool_choice, str) else chat_options.tool_choice.mode
|
|
270
|
+
):
|
|
271
|
+
case "auto":
|
|
272
|
+
run_options["tool_choice"] = {"type": "auto"}
|
|
273
|
+
if chat_options.allow_multiple_tool_calls is not None:
|
|
274
|
+
run_options["tool_choice"][ # type:ignore[reportArgumentType]
|
|
275
|
+
"disable_parallel_tool_use"
|
|
276
|
+
] = not chat_options.allow_multiple_tool_calls
|
|
277
|
+
case "required":
|
|
278
|
+
if chat_options.tool_choice.required_function_name:
|
|
279
|
+
run_options["tool_choice"] = {
|
|
280
|
+
"type": "tool",
|
|
281
|
+
"name": chat_options.tool_choice.required_function_name,
|
|
282
|
+
}
|
|
283
|
+
if chat_options.allow_multiple_tool_calls is not None:
|
|
284
|
+
run_options["tool_choice"][ # type:ignore[reportArgumentType]
|
|
285
|
+
"disable_parallel_tool_use"
|
|
286
|
+
] = not chat_options.allow_multiple_tool_calls
|
|
287
|
+
else:
|
|
288
|
+
run_options["tool_choice"] = {"type": "any"}
|
|
289
|
+
if chat_options.allow_multiple_tool_calls is not None:
|
|
290
|
+
run_options["tool_choice"][ # type:ignore[reportArgumentType]
|
|
291
|
+
"disable_parallel_tool_use"
|
|
292
|
+
] = not chat_options.allow_multiple_tool_calls
|
|
293
|
+
case "none":
|
|
294
|
+
run_options["tool_choice"] = {"type": "none"}
|
|
295
|
+
case _:
|
|
296
|
+
logger.debug(f"Ignoring unsupported tool choice mode: {chat_options.tool_choice.mode} for now")
|
|
297
|
+
if tools_and_mcp := self._convert_tools_to_anthropic_format(chat_options.tools):
|
|
298
|
+
run_options.update(tools_and_mcp)
|
|
299
|
+
if chat_options.additional_properties:
|
|
300
|
+
run_options.update(chat_options.additional_properties)
|
|
301
|
+
run_options.update(kwargs)
|
|
302
|
+
return run_options
|
|
303
|
+
|
|
304
|
+
def _convert_messages_to_anthropic_format(self, messages: MutableSequence[ChatMessage]) -> list[dict[str, Any]]:
|
|
305
|
+
"""Convert a list of ChatMessages to the format expected by the Anthropic client.
|
|
306
|
+
|
|
307
|
+
This skips the first message if it is a system message,
|
|
308
|
+
as Anthropic expects system instructions as a separate parameter.
|
|
309
|
+
"""
|
|
310
|
+
# first system message is passed as instructions
|
|
311
|
+
if messages and isinstance(messages[0], ChatMessage) and messages[0].role == Role.SYSTEM:
|
|
312
|
+
return [self._convert_message_to_anthropic_format(msg) for msg in messages[1:]]
|
|
313
|
+
return [self._convert_message_to_anthropic_format(msg) for msg in messages]
|
|
314
|
+
|
|
315
|
+
def _convert_message_to_anthropic_format(self, message: ChatMessage) -> dict[str, Any]:
|
|
316
|
+
"""Convert a ChatMessage to the format expected by the Anthropic client.
|
|
317
|
+
|
|
318
|
+
Args:
|
|
319
|
+
message: The ChatMessage to convert.
|
|
320
|
+
|
|
321
|
+
Returns:
|
|
322
|
+
A dictionary representing the message in Anthropic format.
|
|
323
|
+
"""
|
|
324
|
+
a_content: list[dict[str, Any]] = []
|
|
325
|
+
for content in message.contents:
|
|
326
|
+
match content.type:
|
|
327
|
+
case "text":
|
|
328
|
+
a_content.append({"type": "text", "text": content.text})
|
|
329
|
+
case "data":
|
|
330
|
+
if content.has_top_level_media_type("image"):
|
|
331
|
+
a_content.append({
|
|
332
|
+
"type": "image",
|
|
333
|
+
"source": {"data": content.uri, "media_type": content.media_type},
|
|
334
|
+
})
|
|
335
|
+
case "uri":
|
|
336
|
+
if content.has_top_level_media_type("image"):
|
|
337
|
+
a_content.append({"type": "image", "source": {"type": "url", "url": content.uri}})
|
|
338
|
+
case "function_call":
|
|
339
|
+
a_content.append({
|
|
340
|
+
"type": "tool_use",
|
|
341
|
+
"id": content.call_id,
|
|
342
|
+
"name": content.name,
|
|
343
|
+
"input": content.parse_arguments(),
|
|
344
|
+
})
|
|
345
|
+
case "function_result":
|
|
346
|
+
a_content.append({
|
|
347
|
+
"type": "tool_result",
|
|
348
|
+
"tool_use_id": content.call_id,
|
|
349
|
+
"content": prepare_function_call_results(content.result),
|
|
350
|
+
"is_error": content.exception is not None,
|
|
351
|
+
})
|
|
352
|
+
case "text_reasoning":
|
|
353
|
+
a_content.append({"type": "thinking", "thinking": content.text})
|
|
354
|
+
case _:
|
|
355
|
+
logger.debug(f"Ignoring unsupported content type: {content.type} for now")
|
|
356
|
+
|
|
357
|
+
return {
|
|
358
|
+
"role": ROLE_MAP.get(message.role, "user"),
|
|
359
|
+
"content": a_content,
|
|
360
|
+
}
|
|
361
|
+
|
|
362
|
+
def _convert_tools_to_anthropic_format(
|
|
363
|
+
self, tools: list[ToolProtocol | MutableMapping[str, Any]] | None
|
|
364
|
+
) -> dict[str, Any] | None:
|
|
365
|
+
if not tools:
|
|
366
|
+
return None
|
|
367
|
+
tool_list: list[MutableMapping[str, Any]] = []
|
|
368
|
+
mcp_server_list: list[MutableMapping[str, Any]] = []
|
|
369
|
+
for tool in tools:
|
|
370
|
+
match tool:
|
|
371
|
+
case MutableMapping():
|
|
372
|
+
tool_list.append(tool)
|
|
373
|
+
case AIFunction():
|
|
374
|
+
tool_list.append({
|
|
375
|
+
"type": "custom",
|
|
376
|
+
"name": tool.name,
|
|
377
|
+
"description": tool.description,
|
|
378
|
+
"input_schema": tool.parameters(),
|
|
379
|
+
})
|
|
380
|
+
case HostedWebSearchTool():
|
|
381
|
+
search_tool: dict[str, Any] = {
|
|
382
|
+
"type": "web_search_20250305",
|
|
383
|
+
"name": "web_search",
|
|
384
|
+
}
|
|
385
|
+
if tool.additional_properties:
|
|
386
|
+
search_tool.update(tool.additional_properties)
|
|
387
|
+
tool_list.append(search_tool)
|
|
388
|
+
case HostedCodeInterpreterTool():
|
|
389
|
+
code_tool: dict[str, Any] = {
|
|
390
|
+
"type": "code_execution_20250825",
|
|
391
|
+
"name": "code_interpreter",
|
|
392
|
+
}
|
|
393
|
+
tool_list.append(code_tool)
|
|
394
|
+
case HostedMCPTool():
|
|
395
|
+
server_def: dict[str, Any] = {
|
|
396
|
+
"type": "url",
|
|
397
|
+
"name": tool.name,
|
|
398
|
+
"url": str(tool.url),
|
|
399
|
+
}
|
|
400
|
+
if tool.allowed_tools:
|
|
401
|
+
server_def["tool_configuration"] = {"allowed_tools": list(tool.allowed_tools)}
|
|
402
|
+
if tool.headers and (auth := tool.headers.get("authorization")):
|
|
403
|
+
server_def["authorization_token"] = auth
|
|
404
|
+
mcp_server_list.append(server_def)
|
|
405
|
+
case _:
|
|
406
|
+
logger.debug(f"Ignoring unsupported tool type: {type(tool)} for now")
|
|
407
|
+
|
|
408
|
+
all_tools: dict[str, list[MutableMapping[str, Any]]] = {}
|
|
409
|
+
if tool_list:
|
|
410
|
+
all_tools["tools"] = tool_list
|
|
411
|
+
if mcp_server_list:
|
|
412
|
+
all_tools["mcp_servers"] = mcp_server_list
|
|
413
|
+
return all_tools
|
|
414
|
+
|
|
415
|
+
# region Response Processing Methods
|
|
416
|
+
|
|
417
|
+
def _process_message(self, message: BetaMessage) -> ChatResponse:
|
|
418
|
+
"""Process the response from the Anthropic client.
|
|
419
|
+
|
|
420
|
+
Args:
|
|
421
|
+
message: The message returned by the Anthropic client.
|
|
422
|
+
|
|
423
|
+
Returns:
|
|
424
|
+
A ChatResponse object containing the processed response.
|
|
425
|
+
"""
|
|
426
|
+
return ChatResponse(
|
|
427
|
+
response_id=message.id,
|
|
428
|
+
messages=[
|
|
429
|
+
ChatMessage(
|
|
430
|
+
role=Role.ASSISTANT,
|
|
431
|
+
contents=self._parse_message_contents(message.content),
|
|
432
|
+
raw_representation=message,
|
|
433
|
+
)
|
|
434
|
+
],
|
|
435
|
+
usage_details=self._parse_message_usage(message.usage),
|
|
436
|
+
model_id=message.model,
|
|
437
|
+
finish_reason=FINISH_REASON_MAP.get(message.stop_reason) if message.stop_reason else None,
|
|
438
|
+
raw_response=message,
|
|
439
|
+
)
|
|
440
|
+
|
|
441
|
+
    def _process_stream_event(self, event: BetaRawMessageStreamEvent) -> ChatResponseUpdate | None:
        """Process a streaming event from the Anthropic client.

        Args:
            event: The streaming event returned by the Anthropic client.

        Returns:
            A ChatResponseUpdate object containing the processed update, or None
            for events that carry no content (message_stop, content_block_stop,
            and any unrecognized event type).
        """
        match event.type:
            case "message_start":
                # The opening event may already carry usage counts; surface them
                # as a UsageContent alongside any initial message content.
                usage_details: list[UsageContent] = []
                if event.message.usage and (details := self._parse_message_usage(event.message.usage)):
                    usage_details.append(UsageContent(details=details))

                return ChatResponseUpdate(
                    response_id=event.message.id,
                    contents=[*self._parse_message_contents(event.message.content), *usage_details],
                    model_id=event.message.model,
                    finish_reason=FINISH_REASON_MAP.get(event.message.stop_reason)
                    if event.message.stop_reason
                    else None,
                    raw_response=event,
                )
            case "message_delta":
                # Deltas carry incremental usage counts only.
                usage = self._parse_message_usage(event.usage)
                return ChatResponseUpdate(
                    contents=[UsageContent(details=usage, raw_representation=event.usage)] if usage else [],
                    raw_response=event,
                )
            case "message_stop":
                logger.debug("Received message_stop event; no content to process.")
            case "content_block_start":
                # A new content block (text, tool_use, thinking, ...) has opened.
                contents = self._parse_message_contents([event.content_block])
                return ChatResponseUpdate(
                    contents=contents,
                    raw_response=event,
                )
            case "content_block_delta":
                # Incremental payload for the currently open content block.
                contents = self._parse_message_contents([event.delta])
                return ChatResponseUpdate(
                    contents=contents,
                    raw_response=event,
                )
            case "content_block_stop":
                logger.debug("Received content_block_stop event; no content to process.")
            case _:
                logger.debug(f"Ignoring unsupported event type: {event.type}")
        return None
|
|
490
|
+
|
|
491
|
+
def _parse_message_usage(self, usage: BetaUsage | BetaMessageDeltaUsage | None) -> UsageDetails | None:
|
|
492
|
+
"""Parse usage details from the Anthropic message usage."""
|
|
493
|
+
if not usage:
|
|
494
|
+
return None
|
|
495
|
+
usage_details = UsageDetails(output_token_count=usage.output_tokens)
|
|
496
|
+
if usage.input_tokens is not None:
|
|
497
|
+
usage_details.input_token_count = usage.input_tokens
|
|
498
|
+
if usage.cache_creation_input_tokens is not None:
|
|
499
|
+
usage_details.additional_counts["anthropic.cache_creation_input_tokens"] = usage.cache_creation_input_tokens
|
|
500
|
+
if usage.cache_read_input_tokens is not None:
|
|
501
|
+
usage_details.additional_counts["anthropic.cache_read_input_tokens"] = usage.cache_read_input_tokens
|
|
502
|
+
return usage_details
|
|
503
|
+
|
|
504
|
+
def _parse_message_contents(
|
|
505
|
+
self, content: Sequence[BetaContentBlock | BetaRawContentBlockDelta | BetaTextBlock]
|
|
506
|
+
) -> list[Contents]:
|
|
507
|
+
"""Parse contents from the Anthropic message."""
|
|
508
|
+
contents: list[Contents] = []
|
|
509
|
+
for content_block in content:
|
|
510
|
+
match content_block.type:
|
|
511
|
+
case "text" | "text_delta":
|
|
512
|
+
contents.append(
|
|
513
|
+
TextContent(
|
|
514
|
+
text=content_block.text,
|
|
515
|
+
raw_representation=content_block,
|
|
516
|
+
annotations=self._parse_citations(content_block),
|
|
517
|
+
)
|
|
518
|
+
)
|
|
519
|
+
case "tool_use":
|
|
520
|
+
self._last_call_id_name = (content_block.id, content_block.name)
|
|
521
|
+
contents.append(
|
|
522
|
+
FunctionCallContent(
|
|
523
|
+
call_id=content_block.id,
|
|
524
|
+
name=content_block.name,
|
|
525
|
+
arguments=content_block.input,
|
|
526
|
+
raw_representation=content_block,
|
|
527
|
+
)
|
|
528
|
+
)
|
|
529
|
+
case "mcp_tool_use" | "server_tool_use":
|
|
530
|
+
self._last_call_id_name = (content_block.id, content_block.name)
|
|
531
|
+
contents.append(
|
|
532
|
+
FunctionCallContent(
|
|
533
|
+
call_id=content_block.id,
|
|
534
|
+
name=content_block.name,
|
|
535
|
+
arguments=content_block.input,
|
|
536
|
+
raw_representation=content_block,
|
|
537
|
+
)
|
|
538
|
+
)
|
|
539
|
+
case "mcp_tool_result":
|
|
540
|
+
call_id, name = self._last_call_id_name or (None, None)
|
|
541
|
+
contents.append(
|
|
542
|
+
FunctionResultContent(
|
|
543
|
+
call_id=content_block.tool_use_id,
|
|
544
|
+
name=name if name and call_id == content_block.tool_use_id else "mcp_tool",
|
|
545
|
+
result=self._parse_message_contents(content_block.content)
|
|
546
|
+
if isinstance(content_block.content, list)
|
|
547
|
+
else content_block.content,
|
|
548
|
+
raw_representation=content_block,
|
|
549
|
+
)
|
|
550
|
+
)
|
|
551
|
+
case "web_search_tool_result" | "web_fetch_tool_result":
|
|
552
|
+
call_id, name = self._last_call_id_name or (None, None)
|
|
553
|
+
contents.append(
|
|
554
|
+
FunctionResultContent(
|
|
555
|
+
call_id=content_block.tool_use_id,
|
|
556
|
+
name=name if name and call_id == content_block.tool_use_id else "web_tool",
|
|
557
|
+
result=content_block.content,
|
|
558
|
+
raw_representation=content_block,
|
|
559
|
+
)
|
|
560
|
+
)
|
|
561
|
+
case (
|
|
562
|
+
"code_execution_tool_result"
|
|
563
|
+
| "bash_code_execution_tool_result"
|
|
564
|
+
| "text_editor_code_execution_tool_result"
|
|
565
|
+
):
|
|
566
|
+
call_id, name = self._last_call_id_name or (None, None)
|
|
567
|
+
contents.append(
|
|
568
|
+
FunctionResultContent(
|
|
569
|
+
call_id=content_block.tool_use_id,
|
|
570
|
+
name=name if name and call_id == content_block.tool_use_id else "code_execution_tool",
|
|
571
|
+
result=content_block.content,
|
|
572
|
+
raw_representation=content_block,
|
|
573
|
+
)
|
|
574
|
+
)
|
|
575
|
+
case "input_json_delta":
|
|
576
|
+
call_id, name = self._last_call_id_name if self._last_call_id_name else ("", "")
|
|
577
|
+
contents.append(
|
|
578
|
+
FunctionCallContent(
|
|
579
|
+
call_id=call_id,
|
|
580
|
+
name=name,
|
|
581
|
+
arguments=content_block.partial_json,
|
|
582
|
+
raw_representation=content_block,
|
|
583
|
+
)
|
|
584
|
+
)
|
|
585
|
+
case "thinking" | "thinking_delta":
|
|
586
|
+
contents.append(TextReasoningContent(text=content_block.thinking, raw_representation=content_block))
|
|
587
|
+
case _:
|
|
588
|
+
logger.debug(f"Ignoring unsupported content type: {content_block.type} for now")
|
|
589
|
+
return contents
|
|
590
|
+
|
|
591
|
+
def _parse_citations(
|
|
592
|
+
self, content_block: BetaContentBlock | BetaRawContentBlockDelta | BetaTextBlock
|
|
593
|
+
) -> list[Annotations] | None:
|
|
594
|
+
content_citations = getattr(content_block, "citations", None)
|
|
595
|
+
if not content_citations:
|
|
596
|
+
return None
|
|
597
|
+
annotations: list[Annotations] = []
|
|
598
|
+
for citation in content_citations:
|
|
599
|
+
cit = CitationAnnotation(raw_representation=citation)
|
|
600
|
+
match citation.type:
|
|
601
|
+
case "char_location":
|
|
602
|
+
cit.title = citation.title
|
|
603
|
+
cit.snippet = citation.cited_text
|
|
604
|
+
if citation.file_id:
|
|
605
|
+
cit.file_id = citation.file_id
|
|
606
|
+
if not cit.annotated_regions:
|
|
607
|
+
cit.annotated_regions = []
|
|
608
|
+
cit.annotated_regions.append(
|
|
609
|
+
TextSpanRegion(start_index=citation.start_char_index, end_index=citation.end_char_index)
|
|
610
|
+
)
|
|
611
|
+
case "page_location":
|
|
612
|
+
cit.title = citation.document_title
|
|
613
|
+
cit.snippet = citation.cited_text
|
|
614
|
+
if citation.file_id:
|
|
615
|
+
cit.file_id = citation.file_id
|
|
616
|
+
if not cit.annotated_regions:
|
|
617
|
+
cit.annotated_regions = []
|
|
618
|
+
cit.annotated_regions.append(
|
|
619
|
+
TextSpanRegion(
|
|
620
|
+
start_index=citation.start_page_number,
|
|
621
|
+
end_index=citation.end_page_number,
|
|
622
|
+
)
|
|
623
|
+
)
|
|
624
|
+
case "content_block_location":
|
|
625
|
+
cit.title = citation.document_title
|
|
626
|
+
cit.snippet = citation.cited_text
|
|
627
|
+
if citation.file_id:
|
|
628
|
+
cit.file_id = citation.file_id
|
|
629
|
+
if not cit.annotated_regions:
|
|
630
|
+
cit.annotated_regions = []
|
|
631
|
+
cit.annotated_regions.append(
|
|
632
|
+
TextSpanRegion(start_index=citation.start_block_index, end_index=citation.end_block_index)
|
|
633
|
+
)
|
|
634
|
+
case "web_search_result_location":
|
|
635
|
+
cit.title = citation.title
|
|
636
|
+
cit.snippet = citation.cited_text
|
|
637
|
+
cit.url = citation.url
|
|
638
|
+
case "search_result_location":
|
|
639
|
+
cit.title = citation.title
|
|
640
|
+
cit.snippet = citation.cited_text
|
|
641
|
+
cit.url = citation.source
|
|
642
|
+
if not cit.annotated_regions:
|
|
643
|
+
cit.annotated_regions = []
|
|
644
|
+
cit.annotated_regions.append(
|
|
645
|
+
TextSpanRegion(start_index=citation.start_block_index, end_index=citation.end_block_index)
|
|
646
|
+
)
|
|
647
|
+
case _:
|
|
648
|
+
logger.debug(f"Unknown citation type encountered: {citation.type}")
|
|
649
|
+
annotations.append(cit)
|
|
650
|
+
return annotations or None
|
|
651
|
+
|
|
652
|
+
def service_url(self) -> str:
|
|
653
|
+
"""Get the service URL for the chat client.
|
|
654
|
+
|
|
655
|
+
Returns:
|
|
656
|
+
The service URL for the chat client, or None if not set.
|
|
657
|
+
"""
|
|
658
|
+
return str(self.anthropic_client.base_url)
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: agent-framework-anthropic
|
|
3
|
+
Version: 1.0.0b251104
|
|
4
|
+
Summary: Anthropic integration for Microsoft Agent Framework.
|
|
5
|
+
Author-email: Microsoft <af-support@microsoft.com>
|
|
6
|
+
Requires-Python: >=3.10
|
|
7
|
+
Description-Content-Type: text/markdown
|
|
8
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
9
|
+
Classifier: Development Status :: 4 - Beta
|
|
10
|
+
Classifier: Intended Audience :: Developers
|
|
11
|
+
Classifier: Programming Language :: Python :: 3
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
16
|
+
Classifier: Typing :: Typed
|
|
17
|
+
License-File: LICENSE
|
|
18
|
+
Requires-Dist: agent-framework-core
|
|
19
|
+
Requires-Dist: anthropic>=0.70.0,<1
|
|
20
|
+
Project-URL: homepage, https://aka.ms/agent-framework
|
|
21
|
+
Project-URL: issues, https://github.com/microsoft/agent-framework/issues
|
|
22
|
+
Project-URL: release_notes, https://github.com/microsoft/agent-framework/releases?q=tag%3Apython-1&expanded=true
|
|
23
|
+
Project-URL: source, https://github.com/microsoft/agent-framework/tree/main/python
|
|
24
|
+
|
|
25
|
+
# Get Started with Microsoft Agent Framework Anthropic
|
|
26
|
+
|
|
27
|
+
Please install this package via pip:
|
|
28
|
+
|
|
29
|
+
```bash
|
|
30
|
+
pip install agent-framework-anthropic --pre
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
## Anthropic Integration
|
|
34
|
+
|
|
35
|
+
The Anthropic integration enables communication with the Anthropic API, allowing your Agent Framework applications to leverage Anthropic's capabilities.
|
|
36
|
+
|
|
37
|
+
### Basic Usage Example
|
|
38
|
+
|
|
39
|
+
See the [Anthropic agent examples](https://github.com/microsoft/agent-framework/tree/main/python/samples/getting_started/agents/anthropic/) which demonstrate:
|
|
40
|
+
|
|
41
|
+
- Connecting to an Anthropic endpoint with an agent
|
|
42
|
+
- Streaming and non-streaming responses
|
|
43
|
+
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
agent_framework_anthropic/__init__.py,sha256=xJ6Y1YjhBISmOCW2GS0pxo02Gz-NPFFoGqwPaoE8qVU,343
|
|
2
|
+
agent_framework_anthropic/_chat_client.py,sha256=Ph2kU8WruF2x5N4fkSJW_TPmHHRRVyY_veihxsmg0Qg,28617
|
|
3
|
+
agent_framework_anthropic-1.0.0b251104.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
|
|
4
|
+
agent_framework_anthropic-1.0.0b251104.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
|
|
5
|
+
agent_framework_anthropic-1.0.0b251104.dist-info/METADATA,sha256=1DticVOxBHy7R7EWDuNWjTCtecHwAEnDol8LHjhUIJA,1697
|
|
6
|
+
agent_framework_anthropic-1.0.0b251104.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) Microsoft Corporation.
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE
|