mirascope 1.24.2__py3-none-any.whl → 1.25.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
@@ -0,0 +1,70 @@
+ """Centralized thinking-related imports and compatibility handling for Anthropic."""
+
+ from typing import TYPE_CHECKING, Literal
+
+ from pydantic import BaseModel
+ from typing_extensions import TypedDict
+
+
+ # Always define the stubs with underscore names
+ class _ThinkingConfigParam(TypedDict):
+     type: Literal["enabled"]
+     budget_tokens: int
+
+
+ class _ThinkingBlock(BaseModel):
+     signature: str
+     thinking: str
+     type: Literal["thinking"]
+
+
+ class _ThinkingDelta(BaseModel):
+     thinking: str
+     type: Literal["thinking_delta"]
+
+
+ class _SignatureDelta(BaseModel):
+     signature: str
+     type: Literal["signature_delta"]
+
+
+ HAS_THINKING_SUPPORT = True
+
+ # Define the public names based on what's available
+ if TYPE_CHECKING:
+     # For static analysis, always use our stubs so types are consistent
+     ThinkingConfigParam = _ThinkingConfigParam
+     ThinkingBlock = _ThinkingBlock
+     ThinkingDelta = _ThinkingDelta
+     SignatureDelta = _SignatureDelta
+ else:
+     # At runtime, use real types if available, otherwise stubs
+     try:
+         from anthropic.types import ( # pyright: ignore [reportAttributeAccessIssue]
+             ThinkingBlock,
+             ThinkingDelta,
+         )
+         from anthropic.types.signature_delta import (
+             SignatureDelta, # pyright: ignore [reportMissingImports]
+         )
+         from anthropic.types.thinking_config_param import (
+             ThinkingConfigParam, # pyright: ignore [reportMissingImports]
+         )
+
+         HAS_THINKING_SUPPORT = True
+
+     except ImportError: # pragma: no cover
+         ThinkingConfigParam = _ThinkingConfigParam
+         ThinkingBlock = _ThinkingBlock
+         ThinkingDelta = _ThinkingDelta
+         SignatureDelta = _SignatureDelta
+         HAS_THINKING_SUPPORT = False
+
+
+ __all__ = [
+     "HAS_THINKING_SUPPORT",
+     "SignatureDelta",
+     "ThinkingBlock",
+     "ThinkingConfigParam",
+     "ThinkingDelta",
+ ]
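The new `_thinking` module acts as a compatibility shim: type checkers always see the local stubs, while at runtime the real `anthropic` SDK types are used when they exist. A minimal sketch of how downstream code can lean on the shim; the helper function below is illustrative, not part of the package, and only the imports are taken from the diff:

    # Hypothetical helper built on the shim above.
    from mirascope.core.anthropic._thinking import (
        HAS_THINKING_SUPPORT,
        ThinkingConfigParam,
    )

    def build_thinking_param(budget_tokens: int) -> ThinkingConfigParam | None:
        """Return a thinking config, or None if the installed SDK lacks support."""
        if not HAS_THINKING_SUPPORT:
            return None
        return {"type": "enabled", "budget_tokens": budget_tokens}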
@@ -19,6 +19,7 @@ from ...base import BaseMessageParam, BaseTool, _utils
  from ...base._utils import AsyncCreateFn, CreateFn
  from ...base.stream_config import StreamConfig
  from .._call_kwargs import AnthropicCallKwargs
+ from .._thinking import HAS_THINKING_SUPPORT
  from ..call_params import AnthropicCallParams
  from ..dynamic_config import AnthropicDynamicConfig, AsyncAnthropicDynamicConfig
  from ..tool import AnthropicTool
@@ -95,6 +96,15 @@ def setup_call(
      list[type[AnthropicTool]] | None,
      AnthropicCallKwargs,
  ]:
+     thinking_enabled = call_params.get("thinking") is not None
+
+     # Validate thinking parameter before processing
+     if thinking_enabled and not HAS_THINKING_SUPPORT:
+         raise ValueError( # pragma: no cover
+             "Thinking parameter requires anthropic>=0.47.0. "
+             "Please upgrade: pip install 'anthropic>=0.47.0'"
+         )
+
      prompt_template, messages, tool_types, base_call_kwargs = _utils.setup_call(
          fn,
          fn_args,
@@ -111,7 +121,9 @@ def setup_call(
      if messages[0]["role"] == "system":
          call_kwargs["system"] = messages.pop(0)["content"] # pyright: ignore [reportGeneralTypeIssues]

-     if json_mode:
+     use_json_mode = json_mode or (response_model and thinking_enabled)
+
+     if use_json_mode:
          json_mode_content = _utils.json_mode_content(response_model)
          if isinstance(messages[-1]["content"], str):
              messages[-1]["content"] += json_mode_content
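The `use_json_mode` change means that when a call combines `response_model` with `thinking`, the JSON-mode instructions are now appended to the final message even though `json_mode=True` was never passed. A hedged sketch of such a call, assuming the mirascope v1 decorator API; the model name and token numbers are illustrative:

    from pydantic import BaseModel

    from mirascope.core import anthropic

    class Answer(BaseModel):
        answer: str

    @anthropic.call(
        "claude-3-7-sonnet-latest",  # illustrative model name
        response_model=Answer,
        call_params={
            "max_tokens": 2048,
            "thinking": {"type": "enabled", "budget_tokens": 1024},
        },
    )
    def extract_answer(question: str) -> str:
        return f"Answer concisely: {question}"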
@@ -11,6 +11,7 @@ from httpx import Timeout
  from typing_extensions import NotRequired

  from ..base import BaseCallParams
+ from ._thinking import ThinkingConfigParam


  class AnthropicCallParams(BaseCallParams):
@@ -21,6 +22,7 @@ class AnthropicCallParams(BaseCallParams):
      Attributes:
          max_tokens: ...
          tool_choice: ...
+         thinking: ...
          metadata: ...
          stop_sequences: ...
          temperature: ...
@@ -32,6 +34,7 @@ class AnthropicCallParams(BaseCallParams):
      extra_headers: NotRequired[dict[str, str] | None]
      max_tokens: int
      tool_choice: NotRequired[ToolChoice | None]
+     thinking: NotRequired[ThinkingConfigParam | None]
      metadata: NotRequired[Metadata | None]
      stop_sequences: NotRequired[list[str] | None]
      system: NotRequired[str | Iterable[TextBlockParam] | None]
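With the new `thinking` key on `AnthropicCallParams`, extended thinking can be requested per call. A minimal usage sketch, assuming the mirascope v1 decorator API; the model name and token numbers are illustrative:

    from mirascope.core import anthropic

    @anthropic.call(
        "claude-3-7-sonnet-latest",  # illustrative model name
        call_params={
            "max_tokens": 2048,  # must exceed the thinking budget
            "thinking": {"type": "enabled", "budget_tokens": 1024},
        },
    )
    def answer(question: str) -> str:
        return f"Answer this question: {question}"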
@@ -71,9 +71,29 @@ class AnthropicCallResponse(
      @computed_field
      @property
      def content(self) -> str:
-         """Returns the string text of the 0th text block."""
-         block = self.response.content[0]
-         return block.text if block.type == "text" else ""
+         """Returns the text content from the first text block."""
+         for block in self.response.content:
+             if block.type == "text":
+                 return block.text
+         return ""
+
+     @computed_field
+     @property
+     def thinking(self) -> str | None:
+         """Returns the thinking content from the first thinking block."""
+         for block in self.response.content:
+             if block.type == "thinking":
+                 return block.thinking
+         return None
+
+     @computed_field
+     @property
+     def signature(self) -> str | None:
+         """Returns the signature from the first thinking block."""
+         for block in self.response.content:
+             if block.type == "thinking":
+                 return block.signature
+         return None

      @computed_field
      @property
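The new computed fields expose the reasoning alongside the regular text, so a non-streaming call can read both directly. A short sketch continuing the hypothetical `answer` function above:

    response = answer("Why is the sky blue?")
    print(response.thinking)   # text of the first thinking block, or None
    print(response.signature)  # signature of that thinking block, or None
    print(response.content)    # text of the first text block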
@@ -61,6 +61,26 @@ class AnthropicCallResponseChunk(
              else ""
          )

+     @property
+     def thinking(self) -> str:
+         """Returns the thinking content from thinking deltas."""
+         return (
+             self.chunk.delta.thinking
+             if self.chunk.type == "content_block_delta"
+             and self.chunk.delta.type == "thinking_delta"
+             else ""
+         )
+
+     @property
+     def signature(self) -> str:
+         """Returns the signature content from signature deltas."""
+         return (
+             self.chunk.delta.signature
+             if self.chunk.type == "content_block_delta"
+             and self.chunk.delta.type == "signature_delta"
+             else ""
+         )
+
      @property
      def finish_reasons(self) -> list[FinishReason] | None:
          """Returns the finish reason of the response."""
@@ -3,6 +3,9 @@
  usage docs: learn/streams.md
  """

+ from collections.abc import AsyncGenerator, Generator
+ from typing import Any
+
  from anthropic.types import (
      Message,
      MessageParam,
@@ -16,8 +19,11 @@ from anthropic.types.text_block_param import TextBlockParam
  from anthropic.types.tool_use_block_param import ToolUseBlockParam
  from pydantic import BaseModel

+ from ..base.call_kwargs import BaseCallKwargs
+ from ..base.metadata import Metadata
  from ..base.stream import BaseStream
  from ..base.types import CostMetadata
+ from ._thinking import ThinkingBlock
  from .call_params import AnthropicCallParams
  from .call_response import AnthropicCallResponse
  from .call_response_chunk import AnthropicCallResponseChunk
@@ -64,6 +70,47 @@ class AnthropicStream(

      _provider = "anthropic"

+     def __init__(
+         self,
+         *,
+         stream: Generator[
+             tuple[AnthropicCallResponseChunk, AnthropicTool | None], None, None
+         ]
+         | AsyncGenerator[tuple[AnthropicCallResponseChunk, AnthropicTool | None], None],
+         metadata: Metadata,
+         tool_types: list[type[AnthropicTool]] | None,
+         call_response_type: type[AnthropicCallResponse],
+         model: str,
+         prompt_template: str | None,
+         fn_args: dict[str, Any],
+         dynamic_config: AsyncAnthropicDynamicConfig | AnthropicDynamicConfig,
+         messages: list[MessageParam],
+         call_params: AnthropicCallParams,
+         call_kwargs: BaseCallKwargs[ToolParam],
+     ) -> None:
+         """Initialize AnthropicStream with thinking content tracking."""
+         super().__init__(
+             stream=stream,
+             metadata=metadata,
+             tool_types=tool_types,
+             call_response_type=call_response_type,
+             model=model,
+             prompt_template=prompt_template,
+             fn_args=fn_args,
+             dynamic_config=dynamic_config,
+             messages=messages,
+             call_params=call_params,
+             call_kwargs=call_kwargs,
+         )
+         self.thinking = ""
+         self.signature = ""
+
+     def _update_properties(self, chunk: AnthropicCallResponseChunk) -> None:
+         """Updates the properties of the stream, including thinking content."""
+         super()._update_properties(chunk)
+         self.thinking += chunk.thinking
+         self.signature += chunk.signature
+
      def _construct_message_param(
          self, tool_calls: list[ToolUseBlock] | None = None, content: str | None = None
      ) -> MessageParam:
@@ -102,6 +149,16 @@ class AnthropicStream(

          content_blocks: list[ContentBlock] = []

+         # Add thinking block first if we have thinking content
+         if hasattr(self, "thinking") and self.thinking:
+             content_blocks.append(
+                 ThinkingBlock( # pyright: ignore [reportArgumentType]
+                     type="thinking",
+                     thinking=self.thinking,
+                     signature=getattr(self, "signature", ""),
+                 )
+             )
+
          if isinstance(self.message_param["content"], str):
              content_blocks.append(
                  TextBlock(text=self.message_param["content"], type="text")
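Because `AnthropicCallResponseChunk` now surfaces `thinking`/`signature` deltas and `AnthropicStream` accumulates them, streaming callers can separate reasoning from answer text. A hedged streaming sketch, using the same illustrative model name and budgets as above:

    from mirascope.core import anthropic

    @anthropic.call(
        "claude-3-7-sonnet-latest",  # illustrative model name
        stream=True,
        call_params={
            "max_tokens": 2048,
            "thinking": {"type": "enabled", "budget_tokens": 1024},
        },
    )
    def answer_stream(question: str) -> str:
        return f"Answer this question: {question}"

    stream = answer_stream("Why is the sky blue?")
    for chunk, _ in stream:
        if chunk.thinking:  # thinking_delta content
            print(chunk.thinking, end="", flush=True)
        elif chunk.content:  # regular text_delta content
            print(chunk.content, end="", flush=True)
    print(stream.thinking)  # full accumulated reasoning after iteration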
@@ -13,6 +13,7 @@ from google.genai.types import (
      FunctionResponseDict,
      GenerateContentResponse,
      GenerateContentResponseUsageMetadata,
+     Part,
      PartDict,
      # Import manually SchemaDict to avoid Pydantic error
      SchemaDict, # noqa: F401
@@ -72,11 +73,33 @@ class GoogleCallResponse(

      _provider = "google"

+     def _parts(self) -> list[Part]:
+         """Returns the parts of the 0th candidate."""
+         if (
+             not (candidates := self.response.candidates)
+             or not (content := candidates[0].content)
+             or not (parts := content.parts)
+         ):
+             return []
+         return parts
+
      @computed_field
      @property
      def content(self) -> str:
          """Returns the contained string content for the 0th choice."""
-         return self.response.candidates[0].content.parts[0].text # pyright: ignore [reportOptionalSubscript, reportReturnType, reportOptionalMemberAccess, reportOptionalIterable]
+         for part in self._parts():
+             if not part.thought and part.text:
+                 return part.text
+         return ""
+
+     @computed_field
+     @property
+     def thinking(self) -> str | None:
+         """Returns the thought content from the first thought part"""
+         for part in self._parts():
+             if part.thought and part.text:
+                 return part.text
+         return None

      @computed_field
      @property
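The Google provider gains the same read path: `content` now returns the first non-thought text part and `thinking` the first part flagged as a thought. A hedged sketch of reading both; the model name is illustrative, and the provider must actually return thought parts for `thinking` to be non-None:

    from mirascope.core import google

    @google.call("gemini-2.5-flash")  # illustrative model name
    def summarize(text: str) -> str:
        return f"Summarize this text: {text}"

    response = summarize("Large language models are ...")
    print(response.thinking)  # text of the first thought part, or None
    print(response.content)   # text of the first non-thought part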
@@ -52,10 +52,28 @@ class GoogleCallResponseChunk(
              not (candidates := self.chunk.candidates)
              or not (content := candidates[0].content)
              or not (parts := content.parts)
-             or not (text := parts[0].text)
          ):
              return ""
-         return text
+
+         for part in parts:
+             if not part.thought and part.text:
+                 return part.text
+         return ""
+
+     @property
+     def thinking(self) -> str:
+         """Returns the thinking content from thinking parts."""
+         if (
+             not (candidates := self.chunk.candidates)
+             or not (content := candidates[0].content)
+             or not (parts := content.parts)
+         ):
+             return ""
+
+         for part in parts:
+             if part.thought and part.text:
+                 return part.text
+         return ""

      @property
      def finish_reasons(self) -> list[GoogleFinishReason]:
@@ -3,7 +3,8 @@
  usage docs: learn/streams.md
  """

- from typing import cast
+ from collections.abc import AsyncGenerator, Generator
+ from typing import Any, cast

  from google.genai.types import (
      Candidate,
@@ -19,6 +20,8 @@ from google.genai.types import (
      Tool,
  )

+ from ..base.call_kwargs import BaseCallKwargs
+ from ..base.metadata import Metadata
  from ..base.stream import BaseStream
  from ..base.types import CostMetadata
  from .call_params import GoogleCallParams
@@ -64,6 +67,43 @@ class GoogleStream(

      _provider = "google"

+     def __init__(
+         self,
+         *,
+         stream: Generator[tuple[GoogleCallResponseChunk, GoogleTool | None], None, None]
+         | AsyncGenerator[tuple[GoogleCallResponseChunk, GoogleTool | None], None],
+         metadata: Metadata,
+         tool_types: list[type[GoogleTool]] | None,
+         call_response_type: type[GoogleCallResponse],
+         model: str,
+         prompt_template: str | None,
+         fn_args: dict[str, Any],
+         dynamic_config: GoogleDynamicConfig,
+         messages: list[ContentListUnion | ContentListUnionDict],
+         call_params: GoogleCallParams,
+         call_kwargs: BaseCallKwargs[Tool],
+     ) -> None:
+         """Initialize GoogleStream with thinking content tracking."""
+         super().__init__(
+             stream=stream,
+             metadata=metadata,
+             tool_types=tool_types,
+             call_response_type=call_response_type,
+             model=model,
+             prompt_template=prompt_template,
+             fn_args=fn_args,
+             dynamic_config=dynamic_config,
+             messages=messages,
+             call_params=call_params,
+             call_kwargs=call_kwargs,
+         )
+         self.thinking = ""
+
+     def _update_properties(self, chunk: GoogleCallResponseChunk) -> None:
+         """Updates the properties of the stream, including thinking content."""
+         super()._update_properties(chunk)
+         self.thinking += chunk.thinking
+
      def _construct_message_param(
          self, tool_calls: list[FunctionCall] | None = None, content: str | None = None
      ) -> ContentDict:
@@ -103,6 +143,22 @@ class GoogleStream(
          total_token_count = int(candidates_token_count or 0) + int(
              prompt_token_count or 0
          )
+
+         parts: list[PartDict] = []
+
+         # Add thinking part first if we have thinking content
+         if self.thinking:
+             parts.append({"text": self.thinking, "thought": True})
+
+         for pd in self.message_param.get("parts") or []:
+             if pd.get("text") == "":
+                 # These parts are generated based only on chunk content;
+                 # thinking parts have empty content and are reconstructed separately.
+                 # Skip this so the thinking parts aren't duplicated.
+                 continue
+             else:
+                 parts.append(pd)
+
          response = GenerateContentResponse(
              candidates=[
                  Candidate(
@@ -111,7 +167,7 @@ class GoogleStream(
                      else FinishReason.STOP,
                      content=Content(
                          role=self.message_param["role"], # pyright: ignore [reportTypedDictNotRequiredAccess]
-                         parts=self.message_param["parts"], # pyright: ignore [reportTypedDictNotRequiredAccess, reportArgumentType]
+                         parts=parts, # pyright: ignore [reportArgumentType]
                      ),
                  )
              ],
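`GoogleStream` mirrors the Anthropic change: `stream.thinking` accumulates the thought parts, and the reconstructed call response re-inserts them as a leading thought part. A hedged streaming sketch, reusing the illustrative model name from above:

    from mirascope.core import google

    @google.call("gemini-2.5-flash", stream=True)  # illustrative model name
    def summarize_stream(text: str) -> str:
        return f"Summarize this text: {text}"

    stream = summarize_stream("Large language models are ...")
    for chunk, _ in stream:
        if chunk.thinking:  # text from parts flagged as thoughts
            print(chunk.thinking, end="", flush=True)
        elif chunk.content:
            print(chunk.content, end="", flush=True)
    print(stream.thinking)  # accumulated thought text after iteration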
@@ -29,7 +29,7 @@ def _get_call_response_observation(
          "metadata": result.response,
          "tags": tags,
          "model": result.model,
-         "output": result.message_param.get("content", None),
+         "output": result.content,
      }


@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mirascope
- Version: 1.24.2
+ Version: 1.25.1
  Summary: LLM abstractions that aren't obstructions
  Project-URL: Homepage, https://mirascope.com
  Project-URL: Documentation, https://mirascope.com/WELCOME
@@ -46,12 +46,13 @@ mirascope/core/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  mirascope/core/anthropic/__init__.py,sha256=GB-CULa3jYEPv1ZDyZjNCKQJbrc6ojqu8WNRSFElQ-4,918
  mirascope/core/anthropic/_call.py,sha256=LXUR__AyexD-hsPMPKpA7IFuh8Cfc0uAg1GrJSxiWnU,2358
  mirascope/core/anthropic/_call_kwargs.py,sha256=EoXSl2B5FoLD_Nv03-ttXjiKlpBihZGXu6U-Ol3qwZ8,389
- mirascope/core/anthropic/call_params.py,sha256=K51kCyIf6us3Tl2SPgkqrZoacZTNwaMuVj23hFJcVBk,1238
- mirascope/core/anthropic/call_response.py,sha256=xtXtsZ6Aze7fAgOMAtNQosa0wyURzc2kX8zZ06gHEp4,6390
- mirascope/core/anthropic/call_response_chunk.py,sha256=Pzpc4nHUqA-OhMaBvdT7sJOexgRbrevNebqHm1HGrrA,4113
+ mirascope/core/anthropic/_thinking.py,sha256=huHH20tdXgS6GpDz5cYFgf5HzmzrJ8FaUNfrBepkf4w,1882
+ mirascope/core/anthropic/call_params.py,sha256=OUUTHGCXxKilTmjqpgmAdgKuF-oO_WX7tZJdEtWarS0,1357
+ mirascope/core/anthropic/call_response.py,sha256=ufUDPAqZcGQtA5YDLirvJiY9MWnDr01O4ZzXS7PCk8c,7004
+ mirascope/core/anthropic/call_response_chunk.py,sha256=ERv3arJxwOUJTtGRcAVj4xHPbzZfo6U5itC0lYvrEfg,4738
  mirascope/core/anthropic/dynamic_config.py,sha256=kZV4ApAnm3P1X5gKPJ3hbr45K6tgaNX8L6Ca8NjTkxU,1192
  mirascope/core/anthropic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- mirascope/core/anthropic/stream.py,sha256=VviSkdl_K6v6kCWXH8x0zOpspt13ZcafU7aUjZiqXsk,4852
+ mirascope/core/anthropic/stream.py,sha256=bQPuNs9vuwO5qdqGskXoj1WgzUUW0JxVgtAXkMnsaOs,6988
  mirascope/core/anthropic/tool.py,sha256=HtbYV5j4itV8v6lTyLDY72NMX2kxRaXVgpZ_m89HqIk,2891
  mirascope/core/anthropic/_utils/__init__.py,sha256=GDO3G2dvWsE8UhFyQ1lKkRVMeOrqqogBISRKJYJmoEQ,493
  mirascope/core/anthropic/_utils/_convert_common_call_params.py,sha256=ILd7AH_atmPUPj7I74EsmxG3rmWC7b5tgjnlR24jKUs,765
@@ -60,7 +61,7 @@ mirascope/core/anthropic/_utils/_convert_message_params.py,sha256=paDIPksOzZK5yh
  mirascope/core/anthropic/_utils/_get_json_output.py,sha256=vkHvhc96RLrGREYVCKr14Umq80EUa7pCtlcImjXB5gA,1157
  mirascope/core/anthropic/_utils/_handle_stream.py,sha256=6Ll2FQt1KWrz5jqgeP1NikHEjlrSbfPUQCH4eoX4eVA,4010
  mirascope/core/anthropic/_utils/_message_param_converter.py,sha256=1Blj5YT-ifJw12Y9reUK740CO2Fwp-m21_bLEal2PHw,6205
- mirascope/core/anthropic/_utils/_setup_call.py,sha256=tR-SFT_ZJd_Gk7RY4NOU3do536RRO3US4IfOuyAapOw,4340
+ mirascope/core/anthropic/_utils/_setup_call.py,sha256=xPY6O7MMOwaot5xZSSHqyGyazJxVHPx77W20jZ-cK6E,4812
  mirascope/core/azure/__init__.py,sha256=7Dpkf10T-TGxk7Lstej6x6s6On7QjI0qeE2ABO7QWmQ,852
  mirascope/core/azure/_call.py,sha256=SHqSJe6_4zgn4Y9PkpDl4vXvLuT4QmVnWUcws9e_RR8,2237
  mirascope/core/azure/_call_kwargs.py,sha256=q38xKSgCBWi8DLScepG-KnUfgi67AU6xr2uOHwCZ2mI,435
@@ -213,10 +214,10 @@ mirascope/core/google/__init__.py,sha256=5EhyiomPnjOS59FgfQP2uPCXS74ZJrGYvJ_CZbY
  mirascope/core/google/_call.py,sha256=GJOPyvHzVlSXvJpgQhJFg4wFHFUYsvvrbjhNxU-nSl8,2344
  mirascope/core/google/_call_kwargs.py,sha256=baCYcxWsmV06ATw6nuQhh6FPm3k6oWmKOn0MyjESDGc,372
  mirascope/core/google/call_params.py,sha256=9Dt5m1pPVjpl5Qppz6Egl_9FyGjjz9aGCnXkVps7C_Q,538
- mirascope/core/google/call_response.py,sha256=nAlH1XWQMiMfCKgi6sG3qIA0YccxIzLWzY_NNl2JtqM,7372
- mirascope/core/google/call_response_chunk.py,sha256=14ggxCYtl92LdyA3nxY8awKQVSBvFV1Wse2Jm7rfvJc,3535
+ mirascope/core/google/call_response.py,sha256=SuNN7WV5HuZBda36Vd0CLnMuzwcnZbDYDgM6z2P8pjY,7922
+ mirascope/core/google/call_response_chunk.py,sha256=4d4YS-NyGxidB_8EkD8V_XzohcBxnTg3h8JEvgrKWPk,4028
  mirascope/core/google/dynamic_config.py,sha256=O6j8F0fLVFuuNwURneu5OpPuu_bMEtbDEFHhJXRT6V0,857
- mirascope/core/google/stream.py,sha256=bTxB8OUrKXxzmcX0C7_-LqtBfaAAazA5HjKZGSxxtLw,4466
+ mirascope/core/google/stream.py,sha256=voWrLRDfoG7NtNSNNRS44x9tNM6Y_i_9_V8bO1-Sx_k,6551
  mirascope/core/google/tool.py,sha256=61a9Ejdxz41pwaab9VE2yvP_J1Aebua3BeRPJ_GJSnE,5138
  mirascope/core/google/_utils/__init__.py,sha256=vL0hx6WKW5lqpUcFTFCFGvmwtR-pts0JzWgCXhaUVrI,388
  mirascope/core/google/_utils/_convert_common_call_params.py,sha256=TF7GWBHcpfzb7XmrxKp3gnaONITYF93lqr4XkSVz_uU,1195
@@ -324,7 +325,7 @@ mirascope/integrations/__init__.py,sha256=ieLWknpbkO_gABIVl9790YTTCCRO9ISQ35-1Se
  mirascope/integrations/_middleware_factory.py,sha256=v-S-hVU5S7P9Lu8PiKyUhmtp_rbQaIJ1droC8BcKBAE,17362
  mirascope/integrations/tenacity.py,sha256=jk64MBncCMbgoQMaXQgjxg9Y9UstRqTt2RCeA86pdCU,326
  mirascope/integrations/langfuse/__init__.py,sha256=wG3eBXwGPbFedB28L1K_q1iCf_dERjVmTDCWK4nHZyM,71
- mirascope/integrations/langfuse/_utils.py,sha256=SsZfQkp-_Ytxzoq03XwEy3a-PuHB6oVs3uy3fAZKMOg,3457
+ mirascope/integrations/langfuse/_utils.py,sha256=FPqmiBhzWKitFXn6RkWJ1unH8z_ebNLtaBuRD73fQLs,3430
  mirascope/integrations/langfuse/_with_langfuse.py,sha256=UTA--jCB9K8SqUmaVz_X_rBJA9W0adpbrPWJtPOFU-4,1880
  mirascope/integrations/logfire/__init__.py,sha256=OWceKOygazwUG1XLGvSu3T2-AqnuBxf3--fbwGZ1G9o,68
  mirascope/integrations/logfire/_utils.py,sha256=2BU37ucJPPYviQvMbrD0Qcqhh5hLifYN7PeaBK4bAk0,7108
@@ -371,7 +372,7 @@ mirascope/v0/base/ops_utils.py,sha256=1Qq-VIwgHBaYutiZsS2MUQ4OgPC3APyywI5bTiTAmA
  mirascope/v0/base/prompts.py,sha256=FM2Yz98cSnDceYogiwPrp4BALf3_F3d4fIOCGAkd-SE,1298
  mirascope/v0/base/types.py,sha256=ZfatJoX0Yl0e3jhv0D_MhiSVHLYUeJsdN3um3iE10zY,352
  mirascope/v0/base/utils.py,sha256=XREPENRQTu8gpMhHU8RC8qH_am3FfGUvY-dJ6x8i-mw,681
- mirascope-1.24.2.dist-info/METADATA,sha256=Q3ljkU5ZnGDTLtmg_AcFwR3kdGnzDqxFMrB-7xG6ygU,8542
- mirascope-1.24.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- mirascope-1.24.2.dist-info/licenses/LICENSE,sha256=LAs5Q8mdawTsVdONpDGukwsoc4KEUBmmonDEL39b23Y,1072
- mirascope-1.24.2.dist-info/RECORD,,
+ mirascope-1.25.1.dist-info/METADATA,sha256=dF-bij3SLbRZKCoSH4U5259IqmYcZwJxESGcSKw0Jqg,8542
+ mirascope-1.25.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ mirascope-1.25.1.dist-info/licenses/LICENSE,sha256=LAs5Q8mdawTsVdONpDGukwsoc4KEUBmmonDEL39b23Y,1072
+ mirascope-1.25.1.dist-info/RECORD,,