mirascope 2.0.0a2__py3-none-any.whl → 2.0.0a4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (252)
  1. mirascope/__init__.py +2 -2
  2. mirascope/api/__init__.py +6 -0
  3. mirascope/api/_generated/README.md +207 -0
  4. mirascope/api/_generated/__init__.py +141 -0
  5. mirascope/api/_generated/client.py +163 -0
  6. mirascope/api/_generated/core/__init__.py +52 -0
  7. mirascope/api/_generated/core/api_error.py +23 -0
  8. mirascope/api/_generated/core/client_wrapper.py +58 -0
  9. mirascope/api/_generated/core/datetime_utils.py +30 -0
  10. mirascope/api/_generated/core/file.py +70 -0
  11. mirascope/api/_generated/core/force_multipart.py +16 -0
  12. mirascope/api/_generated/core/http_client.py +619 -0
  13. mirascope/api/_generated/core/http_response.py +55 -0
  14. mirascope/api/_generated/core/jsonable_encoder.py +102 -0
  15. mirascope/api/_generated/core/pydantic_utilities.py +310 -0
  16. mirascope/api/_generated/core/query_encoder.py +60 -0
  17. mirascope/api/_generated/core/remove_none_from_dict.py +11 -0
  18. mirascope/api/_generated/core/request_options.py +35 -0
  19. mirascope/api/_generated/core/serialization.py +282 -0
  20. mirascope/api/_generated/docs/__init__.py +4 -0
  21. mirascope/api/_generated/docs/client.py +95 -0
  22. mirascope/api/_generated/docs/raw_client.py +132 -0
  23. mirascope/api/_generated/environment.py +9 -0
  24. mirascope/api/_generated/errors/__init__.py +17 -0
  25. mirascope/api/_generated/errors/bad_request_error.py +15 -0
  26. mirascope/api/_generated/errors/conflict_error.py +15 -0
  27. mirascope/api/_generated/errors/forbidden_error.py +15 -0
  28. mirascope/api/_generated/errors/internal_server_error.py +15 -0
  29. mirascope/api/_generated/errors/not_found_error.py +15 -0
  30. mirascope/api/_generated/health/__init__.py +7 -0
  31. mirascope/api/_generated/health/client.py +96 -0
  32. mirascope/api/_generated/health/raw_client.py +129 -0
  33. mirascope/api/_generated/health/types/__init__.py +8 -0
  34. mirascope/api/_generated/health/types/health_check_response.py +24 -0
  35. mirascope/api/_generated/health/types/health_check_response_status.py +5 -0
  36. mirascope/api/_generated/organizations/__init__.py +25 -0
  37. mirascope/api/_generated/organizations/client.py +380 -0
  38. mirascope/api/_generated/organizations/raw_client.py +876 -0
  39. mirascope/api/_generated/organizations/types/__init__.py +23 -0
  40. mirascope/api/_generated/organizations/types/organizations_create_response.py +24 -0
  41. mirascope/api/_generated/organizations/types/organizations_create_response_role.py +7 -0
  42. mirascope/api/_generated/organizations/types/organizations_get_response.py +24 -0
  43. mirascope/api/_generated/organizations/types/organizations_get_response_role.py +7 -0
  44. mirascope/api/_generated/organizations/types/organizations_list_response_item.py +24 -0
  45. mirascope/api/_generated/organizations/types/organizations_list_response_item_role.py +7 -0
  46. mirascope/api/_generated/organizations/types/organizations_update_response.py +24 -0
  47. mirascope/api/_generated/organizations/types/organizations_update_response_role.py +7 -0
  48. mirascope/api/_generated/projects/__init__.py +17 -0
  49. mirascope/api/_generated/projects/client.py +458 -0
  50. mirascope/api/_generated/projects/raw_client.py +1016 -0
  51. mirascope/api/_generated/projects/types/__init__.py +15 -0
  52. mirascope/api/_generated/projects/types/projects_create_response.py +30 -0
  53. mirascope/api/_generated/projects/types/projects_get_response.py +30 -0
  54. mirascope/api/_generated/projects/types/projects_list_response_item.py +30 -0
  55. mirascope/api/_generated/projects/types/projects_update_response.py +30 -0
  56. mirascope/api/_generated/reference.md +753 -0
  57. mirascope/api/_generated/traces/__init__.py +55 -0
  58. mirascope/api/_generated/traces/client.py +162 -0
  59. mirascope/api/_generated/traces/raw_client.py +168 -0
  60. mirascope/api/_generated/traces/types/__init__.py +95 -0
  61. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item.py +36 -0
  62. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource.py +31 -0
  63. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item.py +25 -0
  64. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value.py +54 -0
  65. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value_array_value.py +23 -0
  66. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value_kvlist_value.py +28 -0
  67. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_resource_attributes_item_value_kvlist_value_values_item.py +24 -0
  68. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item.py +35 -0
  69. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope.py +35 -0
  70. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item.py +27 -0
  71. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value.py +54 -0
  72. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value_array_value.py +23 -0
  73. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value_kvlist_value.py +28 -0
  74. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_scope_attributes_item_value_kvlist_value_values_item.py +24 -0
  75. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item.py +60 -0
  76. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item.py +29 -0
  77. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value.py +54 -0
  78. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value_array_value.py +23 -0
  79. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value_kvlist_value.py +28 -0
  80. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_attributes_item_value_kvlist_value_values_item.py +24 -0
  81. mirascope/api/_generated/traces/types/traces_create_request_resource_spans_item_scope_spans_item_spans_item_status.py +24 -0
  82. mirascope/api/_generated/traces/types/traces_create_response.py +27 -0
  83. mirascope/api/_generated/traces/types/traces_create_response_partial_success.py +28 -0
  84. mirascope/api/_generated/types/__init__.py +37 -0
  85. mirascope/api/_generated/types/already_exists_error.py +24 -0
  86. mirascope/api/_generated/types/already_exists_error_tag.py +5 -0
  87. mirascope/api/_generated/types/database_error.py +24 -0
  88. mirascope/api/_generated/types/database_error_tag.py +5 -0
  89. mirascope/api/_generated/types/http_api_decode_error.py +29 -0
  90. mirascope/api/_generated/types/http_api_decode_error_tag.py +5 -0
  91. mirascope/api/_generated/types/issue.py +40 -0
  92. mirascope/api/_generated/types/issue_tag.py +17 -0
  93. mirascope/api/_generated/types/not_found_error_body.py +24 -0
  94. mirascope/api/_generated/types/not_found_error_tag.py +5 -0
  95. mirascope/api/_generated/types/permission_denied_error.py +24 -0
  96. mirascope/api/_generated/types/permission_denied_error_tag.py +7 -0
  97. mirascope/api/_generated/types/property_key.py +7 -0
  98. mirascope/api/_generated/types/property_key_key.py +27 -0
  99. mirascope/api/_generated/types/property_key_key_tag.py +5 -0
  100. mirascope/api/client.py +255 -0
  101. mirascope/api/settings.py +81 -0
  102. mirascope/llm/__init__.py +45 -11
  103. mirascope/llm/calls/calls.py +81 -57
  104. mirascope/llm/calls/decorator.py +121 -115
  105. mirascope/llm/content/__init__.py +3 -2
  106. mirascope/llm/context/_utils.py +19 -6
  107. mirascope/llm/exceptions.py +30 -16
  108. mirascope/llm/formatting/_utils.py +9 -5
  109. mirascope/llm/formatting/format.py +2 -2
  110. mirascope/llm/formatting/from_call_args.py +2 -2
  111. mirascope/llm/messages/message.py +13 -5
  112. mirascope/llm/models/__init__.py +2 -2
  113. mirascope/llm/models/models.py +189 -81
  114. mirascope/llm/prompts/__init__.py +13 -12
  115. mirascope/llm/prompts/_utils.py +27 -24
  116. mirascope/llm/prompts/decorator.py +133 -204
  117. mirascope/llm/prompts/prompts.py +424 -0
  118. mirascope/llm/prompts/protocols.py +25 -59
  119. mirascope/llm/providers/__init__.py +44 -0
  120. mirascope/llm/{clients → providers}/_missing_import_stubs.py +8 -6
  121. mirascope/llm/providers/anthropic/__init__.py +29 -0
  122. mirascope/llm/providers/anthropic/_utils/__init__.py +23 -0
  123. mirascope/llm/providers/anthropic/_utils/beta_decode.py +271 -0
  124. mirascope/llm/providers/anthropic/_utils/beta_encode.py +216 -0
  125. mirascope/llm/{clients → providers}/anthropic/_utils/decode.py +44 -11
  126. mirascope/llm/providers/anthropic/_utils/encode.py +356 -0
  127. mirascope/llm/providers/anthropic/beta_provider.py +322 -0
  128. mirascope/llm/providers/anthropic/model_id.py +23 -0
  129. mirascope/llm/providers/anthropic/model_info.py +87 -0
  130. mirascope/llm/providers/anthropic/provider.py +416 -0
  131. mirascope/llm/{clients → providers}/base/__init__.py +3 -3
  132. mirascope/llm/{clients → providers}/base/_utils.py +25 -8
  133. mirascope/llm/{clients/base/client.py → providers/base/base_provider.py} +255 -126
  134. mirascope/llm/providers/google/__init__.py +21 -0
  135. mirascope/llm/{clients → providers}/google/_utils/decode.py +61 -7
  136. mirascope/llm/{clients → providers}/google/_utils/encode.py +44 -30
  137. mirascope/llm/providers/google/model_id.py +22 -0
  138. mirascope/llm/providers/google/model_info.py +62 -0
  139. mirascope/llm/providers/google/provider.py +442 -0
  140. mirascope/llm/providers/load_provider.py +54 -0
  141. mirascope/llm/providers/mlx/__init__.py +24 -0
  142. mirascope/llm/providers/mlx/_utils.py +129 -0
  143. mirascope/llm/providers/mlx/encoding/__init__.py +8 -0
  144. mirascope/llm/providers/mlx/encoding/base.py +69 -0
  145. mirascope/llm/providers/mlx/encoding/transformers.py +147 -0
  146. mirascope/llm/providers/mlx/mlx.py +237 -0
  147. mirascope/llm/providers/mlx/model_id.py +17 -0
  148. mirascope/llm/providers/mlx/provider.py +415 -0
  149. mirascope/llm/providers/model_id.py +16 -0
  150. mirascope/llm/providers/ollama/__init__.py +19 -0
  151. mirascope/llm/providers/ollama/provider.py +71 -0
  152. mirascope/llm/providers/openai/__init__.py +6 -0
  153. mirascope/llm/providers/openai/completions/__init__.py +25 -0
  154. mirascope/llm/{clients → providers}/openai/completions/_utils/__init__.py +2 -0
  155. mirascope/llm/{clients → providers}/openai/completions/_utils/decode.py +60 -6
  156. mirascope/llm/{clients → providers}/openai/completions/_utils/encode.py +37 -26
  157. mirascope/llm/providers/openai/completions/base_provider.py +513 -0
  158. mirascope/llm/providers/openai/completions/provider.py +22 -0
  159. mirascope/llm/providers/openai/model_id.py +31 -0
  160. mirascope/llm/providers/openai/model_info.py +303 -0
  161. mirascope/llm/providers/openai/provider.py +398 -0
  162. mirascope/llm/providers/openai/responses/__init__.py +21 -0
  163. mirascope/llm/{clients → providers}/openai/responses/_utils/decode.py +59 -6
  164. mirascope/llm/{clients → providers}/openai/responses/_utils/encode.py +34 -23
  165. mirascope/llm/providers/openai/responses/provider.py +469 -0
  166. mirascope/llm/providers/provider_id.py +23 -0
  167. mirascope/llm/providers/provider_registry.py +169 -0
  168. mirascope/llm/providers/together/__init__.py +19 -0
  169. mirascope/llm/providers/together/provider.py +40 -0
  170. mirascope/llm/responses/__init__.py +3 -0
  171. mirascope/llm/responses/base_response.py +14 -5
  172. mirascope/llm/responses/base_stream_response.py +35 -6
  173. mirascope/llm/responses/finish_reason.py +1 -0
  174. mirascope/llm/responses/response.py +33 -13
  175. mirascope/llm/responses/root_response.py +12 -13
  176. mirascope/llm/responses/stream_response.py +35 -23
  177. mirascope/llm/responses/usage.py +95 -0
  178. mirascope/llm/tools/__init__.py +9 -2
  179. mirascope/llm/tools/_utils.py +12 -3
  180. mirascope/llm/tools/protocols.py +4 -4
  181. mirascope/llm/tools/tool_schema.py +44 -9
  182. mirascope/llm/tools/tools.py +10 -9
  183. mirascope/ops/__init__.py +156 -0
  184. mirascope/ops/_internal/__init__.py +5 -0
  185. mirascope/ops/_internal/closure.py +1118 -0
  186. mirascope/ops/_internal/configuration.py +126 -0
  187. mirascope/ops/_internal/context.py +76 -0
  188. mirascope/ops/_internal/exporters/__init__.py +26 -0
  189. mirascope/ops/_internal/exporters/exporters.py +342 -0
  190. mirascope/ops/_internal/exporters/processors.py +104 -0
  191. mirascope/ops/_internal/exporters/types.py +165 -0
  192. mirascope/ops/_internal/exporters/utils.py +29 -0
  193. mirascope/ops/_internal/instrumentation/__init__.py +8 -0
  194. mirascope/ops/_internal/instrumentation/llm/__init__.py +8 -0
  195. mirascope/ops/_internal/instrumentation/llm/encode.py +238 -0
  196. mirascope/ops/_internal/instrumentation/llm/gen_ai_types/__init__.py +38 -0
  197. mirascope/ops/_internal/instrumentation/llm/gen_ai_types/gen_ai_input_messages.py +31 -0
  198. mirascope/ops/_internal/instrumentation/llm/gen_ai_types/gen_ai_output_messages.py +38 -0
  199. mirascope/ops/_internal/instrumentation/llm/gen_ai_types/gen_ai_system_instructions.py +18 -0
  200. mirascope/ops/_internal/instrumentation/llm/gen_ai_types/shared.py +100 -0
  201. mirascope/ops/_internal/instrumentation/llm/llm.py +1288 -0
  202. mirascope/ops/_internal/propagation.py +198 -0
  203. mirascope/ops/_internal/protocols.py +51 -0
  204. mirascope/ops/_internal/session.py +139 -0
  205. mirascope/ops/_internal/spans.py +232 -0
  206. mirascope/ops/_internal/traced_calls.py +371 -0
  207. mirascope/ops/_internal/traced_functions.py +394 -0
  208. mirascope/ops/_internal/tracing.py +276 -0
  209. mirascope/ops/_internal/types.py +13 -0
  210. mirascope/ops/_internal/utils.py +75 -0
  211. mirascope/ops/_internal/versioned_calls.py +512 -0
  212. mirascope/ops/_internal/versioned_functions.py +346 -0
  213. mirascope/ops/_internal/versioning.py +303 -0
  214. mirascope/ops/exceptions.py +21 -0
  215. {mirascope-2.0.0a2.dist-info → mirascope-2.0.0a4.dist-info}/METADATA +78 -3
  216. mirascope-2.0.0a4.dist-info/RECORD +247 -0
  217. {mirascope-2.0.0a2.dist-info → mirascope-2.0.0a4.dist-info}/WHEEL +1 -1
  218. mirascope/graphs/__init__.py +0 -22
  219. mirascope/graphs/finite_state_machine.py +0 -625
  220. mirascope/llm/agents/__init__.py +0 -15
  221. mirascope/llm/agents/agent.py +0 -97
  222. mirascope/llm/agents/agent_template.py +0 -45
  223. mirascope/llm/agents/decorator.py +0 -176
  224. mirascope/llm/calls/base_call.py +0 -33
  225. mirascope/llm/clients/__init__.py +0 -34
  226. mirascope/llm/clients/anthropic/__init__.py +0 -25
  227. mirascope/llm/clients/anthropic/_utils/encode.py +0 -243
  228. mirascope/llm/clients/anthropic/clients.py +0 -819
  229. mirascope/llm/clients/anthropic/model_ids.py +0 -8
  230. mirascope/llm/clients/google/__init__.py +0 -20
  231. mirascope/llm/clients/google/clients.py +0 -853
  232. mirascope/llm/clients/google/model_ids.py +0 -15
  233. mirascope/llm/clients/openai/__init__.py +0 -25
  234. mirascope/llm/clients/openai/completions/__init__.py +0 -28
  235. mirascope/llm/clients/openai/completions/_utils/model_features.py +0 -81
  236. mirascope/llm/clients/openai/completions/clients.py +0 -833
  237. mirascope/llm/clients/openai/completions/model_ids.py +0 -8
  238. mirascope/llm/clients/openai/responses/__init__.py +0 -26
  239. mirascope/llm/clients/openai/responses/_utils/__init__.py +0 -13
  240. mirascope/llm/clients/openai/responses/_utils/model_features.py +0 -87
  241. mirascope/llm/clients/openai/responses/clients.py +0 -832
  242. mirascope/llm/clients/openai/responses/model_ids.py +0 -8
  243. mirascope/llm/clients/openai/shared/__init__.py +0 -7
  244. mirascope/llm/clients/openai/shared/_utils.py +0 -55
  245. mirascope/llm/clients/providers.py +0 -175
  246. mirascope-2.0.0a2.dist-info/RECORD +0 -102
  247. /mirascope/llm/{clients → providers}/base/kwargs.py +0 -0
  248. /mirascope/llm/{clients → providers}/base/params.py +0 -0
  249. /mirascope/llm/{clients/anthropic → providers/google}/_utils/__init__.py +0 -0
  250. /mirascope/llm/{clients → providers}/google/message.py +0 -0
  251. /mirascope/llm/{clients/google → providers/openai/responses}/_utils/__init__.py +0 -0
  252. {mirascope-2.0.0a2.dist-info → mirascope-2.0.0a4.dist-info}/licenses/LICENSE +0 -0
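The file list above amounts to a package-level reorganization: `mirascope/llm/clients/...` is removed in favor of `mirascope/llm/providers/...`, the `mirascope/llm/agents` and `mirascope/graphs` modules are dropped, and new `mirascope/api` (generated HTTP client) and `mirascope/ops` (tracing, exporters, versioning) packages are added. As a quick sanity check against an installed copy, the sketch below probes for those subpackages; the module paths come from the file list, but treating each directory as an importable subpackage at runtime is an assumption, not something this diff states.

```python
"""Probe which mirascope subpackages an installed wheel exposes."""

import importlib.util


def has_module(name: str) -> bool:
    try:
        return importlib.util.find_spec(name) is not None
    except ModuleNotFoundError:
        # Parent package missing entirely (e.g. mirascope not installed).
        return False


for candidate in (
    "mirascope.llm.clients",    # present in 2.0.0a2, removed in 2.0.0a4
    "mirascope.llm.agents",     # present in 2.0.0a2, removed in 2.0.0a4
    "mirascope.llm.providers",  # added in 2.0.0a4
    "mirascope.ops",            # added in 2.0.0a4
    "mirascope.api",            # added in 2.0.0a4
):
    print(f"{candidate}: {'present' if has_module(candidate) else 'absent'}")
```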
mirascope/llm/agents/agent.py (removed)
@@ -1,97 +0,0 @@
- """The `Agent` class for LLM agents."""
-
- from abc import ABC
- from collections.abc import Sequence
- from dataclasses import dataclass
- from typing import Generic
-
- from ..context import Context, DepsT
- from ..formatting import FormattableT
- from ..messages import UserContent
- from ..models import Model
- from ..responses import AsyncResponse, AsyncStreamResponse, Response, StreamResponse
- from ..tools import AsyncContextTool, AsyncTool, ContextTool, Tool
-
-
- @dataclass
- class BaseAgent(Generic[DepsT, FormattableT], ABC):
-     """Agent class for generating responses using LLMs with tools."""
-
-     ctx: Context[DepsT]
-     """The context for the agent, such as the history of messages."""
-
-     format: type[FormattableT] | None
-     """The response format for the generated response."""
-
-     model: Model
-     """The default model the agent will use if not specified through context."""
-
-
- @dataclass
- class Agent(BaseAgent[DepsT, FormattableT]):
-     """Agent class for generating responses using LLMs with tools."""
-
-     tools: Sequence[Tool | ContextTool[DepsT]] | None
-     """The tools available to the agent, if any."""
-
-     def __call__(
-         self,
-         content: UserContent,
-         *,
-         ctx: Context[DepsT] | None = None,
-     ) -> Response[FormattableT]:
-         """Generates a response by running the agent loop."""
-         raise NotImplementedError()
-
-     def call(
-         self,
-         content: UserContent,
-         *,
-         ctx: Context[DepsT] | None = None,
-     ) -> Response[FormattableT]:
-         """Generates a response by running the agent loop."""
-         raise NotImplementedError()
-
-     def stream(
-         self,
-         content: UserContent,
-         *,
-         ctx: Context[DepsT] | None = None,
-     ) -> StreamResponse[FormattableT]:
-         """Streams the response generated by running the agent loop."""
-         raise NotImplementedError()
-
-
- @dataclass
- class AsyncAgent(BaseAgent[DepsT, FormattableT]):
-     """Asynchronous agent class for generating responses using LLMs with tools."""
-
-     tools: Sequence[AsyncTool | AsyncContextTool[DepsT]] | None
-     """The tools available to the agent, if any."""
-
-     async def __call__(
-         self,
-         content: UserContent,
-         *,
-         ctx: Context[DepsT] | None = None,
-     ) -> AsyncResponse[FormattableT]:
-         """Generates a response by running the agent loop asynchronously."""
-         raise NotImplementedError()
-
-     async def call(
-         self,
-         content: UserContent,
-         *,
-         ctx: Context[DepsT] | None = None,
-     ) -> AsyncResponse[FormattableT]:
-         """Generates a response by running the agent loop asynchronously."""
-         raise NotImplementedError()
-
-     async def stream(
-         self,
-         content: UserContent,
-         *,
-         ctx: Context[DepsT] | None = None,
-     ) -> AsyncStreamResponse[FormattableT]:
-         """Streams the response generated by running the agent loop asynchronously."""
-         raise NotImplementedError()
mirascope/llm/agents/agent_template.py (removed)
@@ -1,45 +0,0 @@
- from typing import Generic, overload
-
- from ..context import DepsT
- from ..formatting import FormattableT
- from .agent import Agent, AsyncAgent
-
-
- class AgentTemplate(Generic[DepsT, FormattableT]):
-     @overload
-     def __call__(
-         self: "AgentTemplate[None, FormattableT]",
-     ) -> Agent[None, FormattableT]:
-         """Create an Agent with no deps"""
-
-     @overload
-     def __call__(
-         self: "AgentTemplate[DepsT, FormattableT]", deps: DepsT
-     ) -> Agent[DepsT, FormattableT]:
-         """Create an Agent with deps"""
-
-     def __call__(
-         self: "AgentTemplate[None, FormattableT] | AgentTemplate[DepsT, FormattableT]",
-         deps: DepsT | None = None,
-     ) -> Agent[None, FormattableT] | Agent[DepsT, FormattableT]:
-         raise NotImplementedError()
-
-
- class AsyncAgentTemplate(Generic[DepsT, FormattableT]):
-     @overload
-     async def __call__(
-         self: "AsyncAgentTemplate[None, FormattableT]",
-     ) -> AsyncAgent[None, FormattableT]:
-         """Create an AsyncAgent with no deps"""
-
-     @overload
-     async def __call__(
-         self: "AsyncAgentTemplate[DepsT, FormattableT]", deps: DepsT
-     ) -> AsyncAgent[DepsT, FormattableT]:
-         """Create an AsyncAgent with deps"""
-
-     async def __call__(
-         self: "AsyncAgentTemplate[None, FormattableT] | AsyncAgentTemplate[DepsT, FormattableT]",
-         deps: DepsT | None = None,
-     ) -> AsyncAgent[None, FormattableT] | AsyncAgent[DepsT, FormattableT]:
-         raise NotImplementedError()
mirascope/llm/agents/decorator.py (removed)
@@ -1,176 +0,0 @@
- """The `llm.agent` decorator for turning a function into an agent."""
-
- from __future__ import annotations
-
- from typing import TYPE_CHECKING, Any, Literal, Protocol, overload
- from typing_extensions import TypeVar, Unpack
-
- from ..tools import AsyncContextTool, AsyncTool, ContextTool, Tool
- from .agent_template import AgentTemplate, AsyncAgentTemplate
-
- if TYPE_CHECKING:
-     from ..clients import (
-         AnthropicClient,
-         AnthropicModelId,
-         BaseClient,
-         GoogleClient,
-         GoogleModelId,
-         ModelId,
-         OpenAICompletionsClient,
-         OpenAICompletionsModelId,
-         Params,
-         Provider,
-     )
-
- from ..context import Context, DepsT
- from ..formatting import FormattableT
- from ..types import P
-
- AgentToolT = TypeVar(
-     "AgentToolT",
-     bound="Tool | AsyncTool | ContextTool[Any] | AsyncContextTool[Any] | None",
-     covariant=True,
-     default=None,
- )
-
-
- class SystemPrompt(Protocol[P]):
-     """Protocol for a prompt template function that returns a system prompt as a string (no context)."""
-
-     def __call__(self) -> str: ...
-
-
- class ContextSystemPrompt(Protocol[P, DepsT]):
-     """Protocol for a prompt template function that returns a system prompt as a string (with context)."""
-
-     def __call__(self, ctx: Context[DepsT]) -> str: ...
-
-
- class AsyncSystemPrompt(Protocol[P]):
-     """Protocol for an async prompt template function that returns a system prompt as a string (no context)."""
-
-     async def __call__(self) -> str: ...
-
-
- class AsyncContextSystemPrompt(Protocol[P, DepsT]):
-     """Protocol for an async prompt template function that returns a system prompt as a string (with context)."""
-
-     async def __call__(self, ctx: Context[DepsT]) -> str: ...
-
-
- class AgentDecorator(Protocol[P, AgentToolT, FormattableT]):
-     """Protocol for the `agent` decorator."""
-
-     @overload
-     def __call__(
-         self: AgentDecorator[
-             P,
-             None | Tool | ContextTool[DepsT],
-             FormattableT,
-         ],
-         fn: SystemPrompt[P] | ContextSystemPrompt[P, DepsT],
-     ) -> AgentTemplate[DepsT, FormattableT]:
-         """Decorator for creating a sync agent."""
-         ...
-
-     @overload
-     def __call__(
-         self: AgentDecorator[
-             P,
-             None | AsyncTool | AsyncContextTool[DepsT],
-             FormattableT,
-         ],
-         fn: AsyncSystemPrompt[P] | AsyncContextSystemPrompt[P, DepsT],
-     ) -> AsyncAgentTemplate[DepsT, FormattableT]:
-         """Decorator for creating an async agent."""
-         ...
-
-     def __call__(
-         self,
-         fn: SystemPrompt[P]
-         | ContextSystemPrompt[P, DepsT]
-         | AsyncSystemPrompt[P]
-         | AsyncContextSystemPrompt[P, DepsT],
-     ) -> AgentTemplate[DepsT, FormattableT] | AsyncAgentTemplate[DepsT, FormattableT]:
-         """Decorator for creating an agent."""
-         raise NotImplementedError()
-
-
- @overload
- def agent(
-     *,
-     provider: Literal["anthropic"],
-     model_id: AnthropicModelId,
-     tools: list[AgentToolT] | None = None,
-     format: type[FormattableT] | None = None,
-     client: AnthropicClient | None = None,
-     **params: Unpack[Params],
- ) -> AgentDecorator[..., AgentToolT, FormattableT]:
-     """Decorator for creating an Anthropic agent."""
-     ...
-
-
- @overload
- def agent(
-     *,
-     provider: Literal["google"],
-     model_id: GoogleModelId,
-     tools: list[AgentToolT] | None = None,
-     format: type[FormattableT] | None = None,
-     client: GoogleClient | None = None,
-     **params: Unpack[Params],
- ) -> AgentDecorator[..., AgentToolT, FormattableT]:
-     """Decorator for creating a Google agent."""
-     ...
-
-
- @overload
- def agent(
-     *,
-     provider: Literal["openai:completions"],
-     model_id: OpenAICompletionsModelId,
-     tools: list[AgentToolT] | None = None,
-     format: type[FormattableT] | None = None,
-     client: OpenAICompletionsClient | None = None,
-     **params: Unpack[Params],
- ) -> AgentDecorator[..., AgentToolT, FormattableT]:
-     """Decorator for creating an OpenAI agent."""
-     ...
-
-
- @overload
- def agent(
-     *,
-     provider: Provider,
-     model_id: ModelId,
-     tools: list[AgentToolT] | None = None,
-     format: type[FormattableT] | None = None,
-     client: None = None,
-     **params: Unpack[Params],
- ) -> AgentDecorator[..., AgentToolT, FormattableT]:
-     """Decorator for creating an agent using any registered model."""
-     ...
-
-
- def agent(
-     *,
-     provider: Provider,
-     model_id: ModelId,
-     tools: list[AgentToolT] | None = None,
-     format: type[FormattableT] | None = None,
-     client: BaseClient | None = None,
-     **params: Unpack[Params],
- ) -> AgentDecorator[..., AgentToolT, FormattableT]:
-     """Decorator for creating an agent or structured agent.
-
-     Args:
-         model_id: The model to use for the agent.
-         tools: The tools available to the agent.
-         format: The response format type for the agent.
-         client: The client to use for the agent.
-         **params: Additional parameters for the model.
-
-     Returns:
-         An of `AgentDecorator`.
-     """
-     raise NotImplementedError()
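The deleted `decorator.py` above is typing scaffolding: the `AgentDecorator` protocol's overloads return a sync `AgentTemplate` when the decorated system-prompt function is sync and an `AsyncAgentTemplate` when it is async, while every body still raises `NotImplementedError`. The standalone sketch below mirrors only that sync/async overload dispatch; it is not Mirascope code, the names are hypothetical, and it flattens away the provider/model configuration layer that the real decorator factory carried.

```python
from __future__ import annotations

import inspect
from collections.abc import Awaitable, Callable
from typing import overload


class Template:
    """Stand-in for the deleted AgentTemplate."""


class AsyncTemplate:
    """Stand-in for the deleted AsyncAgentTemplate."""


# Overloads dispatch on whether the decorated system-prompt function is async,
# mirroring the sync/async overload pair on the deleted AgentDecorator protocol.
@overload
def agent(fn: Callable[[], str]) -> Template: ...
@overload
def agent(fn: Callable[[], Awaitable[str]]) -> AsyncTemplate: ...
def agent(fn: Callable[[], str] | Callable[[], Awaitable[str]]) -> Template | AsyncTemplate:
    return AsyncTemplate() if inspect.iscoroutinefunction(fn) else Template()


@agent
def librarian() -> str:
    return "You are a helpful librarian."


@agent
async def async_librarian() -> str:
    return "You are a helpful librarian."


assert isinstance(librarian, Template)
assert isinstance(async_librarian, AsyncTemplate)
```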
mirascope/llm/calls/base_call.py (removed)
@@ -1,33 +0,0 @@
- """The `BaseCall` class for LLM calls."""
-
- from abc import ABC
- from dataclasses import dataclass
- from typing import Generic
-
- from ..formatting import Format, FormattableT
- from ..models import Model, get_model_from_context
- from ..prompts import PromptT
- from ..tools import ToolkitT
- from ..types import P
-
-
- @dataclass
- class BaseCall(Generic[P, PromptT, ToolkitT, FormattableT], ABC):
-     """A base class for generating responses using LLMs."""
-
-     default_model: Model
-     """The default model that will be used if no model is set in context."""
-
-     toolkit: ToolkitT
-     """The toolkit containing this call's tools."""
-
-     format: type[FormattableT] | Format[FormattableT] | None
-     """The response format for the generated response."""
-
-     fn: PromptT
-     """The Prompt function that generates the Prompt."""
-
-     @property
-     def model(self) -> Model:
-         """The model used for generating responses. May be overwritten via `with llm.model(...)."""
-         return get_model_from_context() or self.default_model
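The `model` property in the deleted `BaseCall` resolves to a context-supplied model first and only falls back to `default_model` afterwards, which is what makes `with llm.model(...)` overrides possible. Below is a minimal standalone sketch of that fallback pattern built on `contextvars`; Mirascope's real `get_model_from_context` and `llm.model` may be implemented differently, so treat the helper names here as illustrative.

```python
from collections.abc import Iterator
from contextlib import contextmanager
from contextvars import ContextVar

# Hypothetical stand-ins for get_model_from_context / llm.model; only the
# "context override first, then default" behavior mirrors the deleted property.
_model_override: ContextVar[str | None] = ContextVar("model_override", default=None)


@contextmanager
def model(model_id: str) -> Iterator[None]:
    """Temporarily override the active model for the enclosed block."""
    token = _model_override.set(model_id)
    try:
        yield
    finally:
        _model_override.reset(token)


def resolve_model(default_model: str) -> str:
    # Same shape as BaseCall.model: context override wins, otherwise the default.
    return _model_override.get() or default_model


assert resolve_model("default-model") == "default-model"
with model("override-model"):
    assert resolve_model("default-model") == "override-model"
assert resolve_model("default-model") == "default-model"
```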
mirascope/llm/clients/__init__.py (removed)
@@ -1,34 +0,0 @@
- """Client interfaces for LLM providers."""
-
- from .anthropic import (
-     AnthropicClient,
-     AnthropicModelId,
- )
- from .base import BaseClient, ClientT, Params
- from .google import GoogleClient, GoogleModelId
- from .openai import (
-     OpenAICompletionsClient,
-     OpenAICompletionsModelId,
-     OpenAIResponsesClient,
-     OpenAIResponsesModelId,
- )
- from .providers import PROVIDERS, ModelId, Provider, client, get_client
-
- __all__ = [
-     "PROVIDERS",
-     "AnthropicClient",
-     "AnthropicModelId",
-     "BaseClient",
-     "ClientT",
-     "GoogleClient",
-     "GoogleModelId",
-     "ModelId",
-     "OpenAICompletionsClient",
-     "OpenAICompletionsModelId",
-     "OpenAIResponsesClient",
-     "OpenAIResponsesModelId",
-     "Params",
-     "Provider",
-     "client",
-     "get_client",
- ]
mirascope/llm/clients/anthropic/__init__.py (removed)
@@ -1,25 +0,0 @@
- """Anthropic client implementation."""
-
- from typing import TYPE_CHECKING, Any
-
- if TYPE_CHECKING:
-     from .clients import AnthropicClient, client, get_client
-     from .model_ids import AnthropicModelId
- else:
-     try:
-         from .clients import AnthropicClient, client, get_client
-         from .model_ids import AnthropicModelId
-     except ImportError: # pragma: no cover
-         from .._missing_import_stubs import create_client_stub, create_import_error_stub
-
-         AnthropicClient = create_client_stub("anthropic", "AnthropicClient")
-         AnthropicModelId = str
-         client = create_import_error_stub("anthropic", "AnthropicClient")
-         get_client = create_import_error_stub("anthropic", "AnthropicClient")
-
- __all__ = [
-     "AnthropicClient",
-     "AnthropicModelId",
-     "client",
-     "get_client",
- ]
mirascope/llm/clients/anthropic/_utils/encode.py (removed)
@@ -1,243 +0,0 @@
- """Anthropic message encoding and request preparation."""
-
- import json
- from collections.abc import Sequence
- from functools import lru_cache
- from typing import Literal, TypedDict, cast
- from typing_extensions import Required
-
- from anthropic import Omit, types as anthropic_types
-
- from ....content import ContentPart, ImageMimeType
- from ....exceptions import FeatureNotSupportedError, FormattingModeNotSupportedError
- from ....formatting import (
-     Format,
-     FormattableT,
-     _utils as _formatting_utils,
-     resolve_format,
- )
- from ....messages import AssistantMessage, Message, UserMessage
- from ....tools import FORMAT_TOOL_NAME, BaseToolkit, ToolSchema
- from ...base import Params, _utils as _base_utils
- from ..model_ids import AnthropicModelId
-
- DEFAULT_MAX_TOKENS = 16000
-
- AnthropicImageMimeType = Literal["image/jpeg", "image/png", "image/gif", "image/webp"]
-
-
- def encode_image_mime_type(
-     mime_type: ImageMimeType,
- ) -> AnthropicImageMimeType:
-     """Convert an ImageMimeType into anthropic supported mime type"""
-     if mime_type in ("image/jpeg", "image/png", "image/gif", "image/webp"):
-         return mime_type
-     raise FeatureNotSupportedError(
-         feature=f"Image with mime_type: {mime_type}", provider="anthropic"
-     ) # pragma: no cover
-
-
- class MessageCreateKwargs(TypedDict, total=False):
-     """Kwargs for Anthropic Message.create method."""
-
-     model: Required[str]
-     max_tokens: Required[int]
-     messages: Sequence[anthropic_types.MessageParam]
-     system: str | Omit
-     tools: Sequence[anthropic_types.ToolParam] | Omit
-     tool_choice: anthropic_types.ToolChoiceParam | Omit
-     temperature: float | Omit
-     top_p: float | Omit
-     top_k: int | Omit
-     stop_sequences: list[str] | Omit
-     thinking: anthropic_types.ThinkingConfigParam | Omit
-
-
- def _encode_content(
-     content: Sequence[ContentPart], encode_thoughts: bool
- ) -> str | Sequence[anthropic_types.ContentBlockParam]:
-     """Convert mirascope content to Anthropic content format."""
-
-     if len(content) == 1 and content[0].type == "text":
-         return content[0].text
-
-     blocks: list[anthropic_types.ContentBlockParam] = []
-
-     for part in content:
-         if part.type == "text":
-             blocks.append(anthropic_types.TextBlockParam(type="text", text=part.text))
-         elif part.type == "image":
-             source: (
-                 anthropic_types.Base64ImageSourceParam
-                 | anthropic_types.URLImageSourceParam
-             )
-             if part.source.type == "base64_image_source":
-                 source = anthropic_types.Base64ImageSourceParam(
-                     type="base64",
-                     media_type=encode_image_mime_type(part.source.mime_type),
-                     data=part.source.data,
-                 )
-             else: # url_image_source
-                 source = anthropic_types.URLImageSourceParam(
-                     type="url",
-                     url=part.source.url,
-                 )
-             blocks.append(anthropic_types.ImageBlockParam(type="image", source=source))
-         elif part.type == "audio":
-             raise FeatureNotSupportedError(
-                 "audio input",
-                 "anthropic",
-                 message="Anthropic does not support audio inputs.",
-             )
-         elif part.type == "tool_output":
-             blocks.append(
-                 anthropic_types.ToolResultBlockParam(
-                     type="tool_result",
-                     tool_use_id=part.id,
-                     content=str(part.value),
-                 )
-             )
-         elif part.type == "tool_call":
-             blocks.append(
-                 anthropic_types.ToolUseBlockParam(
-                     type="tool_use",
-                     id=part.id,
-                     name=part.name,
-                     input=json.loads(part.args),
-                 )
-             )
-         elif part.type == "thought":
-             if encode_thoughts:
-                 blocks.append(
-                     anthropic_types.TextBlockParam(
-                         type="text", text="**Thinking:** " + part.thought
-                     )
-                 )
-         else:
-             raise NotImplementedError(f"Unsupported content type: {part.type}")
-
-     return blocks
-
-
- def _encode_message(
-     message: UserMessage | AssistantMessage,
-     model_id: AnthropicModelId,
-     encode_thoughts: bool,
- ) -> anthropic_types.MessageParam:
-     """Convert user or assistant `Message`s to Anthropic `MessageParam` format.
-
-     Args:
-         messages: A Sequence containing `UserMessage`s or `AssistantMessage`s
-         model_id: The Anthropic model ID being used
-
-     Returns:
-         A Sequence of converted Anthropic `MessageParam`
-     """
-
-     if (
-         message.role == "assistant"
-         and message.provider == "anthropic"
-         and message.model_id == model_id
-         and message.raw_message
-         and not encode_thoughts
-     ):
-         return cast(anthropic_types.MessageParam, message.raw_message)
-     return {
-         "role": message.role,
-         "content": _encode_content(message.content, encode_thoughts),
-     }
-
-
- @lru_cache(maxsize=128)
- def _convert_tool_to_tool_param(tool: ToolSchema) -> anthropic_types.ToolParam:
-     """Convert a single Mirascope tool to Anthropic tool format with caching."""
-     schema_dict = tool.parameters.model_dump(by_alias=True, exclude_none=True)
-     schema_dict["type"] = "object"
-     return anthropic_types.ToolParam(
-         name=tool.name,
-         description=tool.description,
-         input_schema=schema_dict,
-     )
-
-
- def encode_request(
-     *,
-     model_id: AnthropicModelId,
-     messages: Sequence[Message],
-     tools: Sequence[ToolSchema] | BaseToolkit | None,
-     format: type[FormattableT] | Format[FormattableT] | None,
-     params: Params,
- ) -> tuple[Sequence[Message], Format[FormattableT] | None, MessageCreateKwargs]:
-     """Prepares a request for the `Anthropic.messages.create` method."""
-     kwargs: MessageCreateKwargs = MessageCreateKwargs(
-         {
-             "model": model_id,
-             "max_tokens": DEFAULT_MAX_TOKENS,
-         }
-     )
-     encode_thoughts = False
-
-     with _base_utils.ensure_all_params_accessed(
-         params=params, provider="anthropic", unsupported_params=["seed"]
-     ) as param_accessor:
-         if param_accessor.temperature is not None:
-             kwargs["temperature"] = param_accessor.temperature
-         if param_accessor.max_tokens is not None:
-             kwargs["max_tokens"] = param_accessor.max_tokens
-         if param_accessor.top_p is not None:
-             kwargs["top_p"] = param_accessor.top_p
-         if param_accessor.top_k is not None:
-             kwargs["top_k"] = param_accessor.top_k
-         if param_accessor.stop_sequences is not None:
-             kwargs["stop_sequences"] = param_accessor.stop_sequences
-         if param_accessor.thinking is not None:
-             if param_accessor.thinking:
-                 # Set budget to 50% of max_tokens with minimum of 1024
-                 budget_tokens = max(1024, kwargs["max_tokens"] // 2)
-                 kwargs["thinking"] = {"type": "enabled", "budget_tokens": budget_tokens}
-             else:
-                 kwargs["thinking"] = {"type": "disabled"}
-         if param_accessor.encode_thoughts_as_text:
-             encode_thoughts = True
-
-     tools = tools.tools if isinstance(tools, BaseToolkit) else tools or []
-     anthropic_tools = [_convert_tool_to_tool_param(tool) for tool in tools]
-     format = resolve_format(format, default_mode="tool")
-     if format is not None:
-         if format.mode == "strict":
-             raise FormattingModeNotSupportedError(
-                 formatting_mode="strict", provider="anthropic"
-             )
-         elif format.mode == "tool":
-             format_tool_schema = _formatting_utils.create_tool_schema(format)
-             anthropic_tools.append(_convert_tool_to_tool_param(format_tool_schema))
-             if tools:
-                 kwargs["tool_choice"] = {"type": "any"}
-             else:
-                 kwargs["tool_choice"] = {
-                     "type": "tool",
-                     "name": FORMAT_TOOL_NAME,
-                     "disable_parallel_tool_use": True,
-                 }
-
-         if format.formatting_instructions:
-             messages = _base_utils.add_system_instructions(
-                 messages, format.formatting_instructions
-             )
-
-     if anthropic_tools:
-         kwargs["tools"] = anthropic_tools
-
-     system_message_content, remaining_messages = _base_utils.extract_system_message(
-         messages
-     )
-
-     kwargs["messages"] = [
-         _encode_message(remaining_message, model_id, encode_thoughts)
-         for remaining_message in remaining_messages
-     ]
-
-     if system_message_content:
-         kwargs["system"] = system_message_content
-
-     return messages, format, kwargs
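One behavior worth noting in the deleted `encode_request` above: when `thinking` is enabled, the Anthropic thinking budget is set to half of `max_tokens` with a floor of 1024, and `max_tokens` itself defaults to 16000 when the caller does not set it. The standalone check below just restates that arithmetic; the helper name is ours, not part of Mirascope.

```python
DEFAULT_MAX_TOKENS = 16000  # same default as the deleted module


def thinking_budget(max_tokens: int = DEFAULT_MAX_TOKENS) -> int:
    """Mirror of the budget rule in the deleted encode_request:
    50% of max_tokens, floored at 1024."""
    return max(1024, max_tokens // 2)


assert thinking_budget() == 8000      # default 16000 -> 8000-token budget
assert thinking_budget(1500) == 1024  # small requests are floored at 1024
assert thinking_budget(4096) == 2048
```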