promptbuilder 0.4.22__py3-none-any.whl → 0.4.23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
promptbuilder/llm_client/base_client.py
@@ -179,7 +179,7 @@ class BaseLLMClient(ABC, utils.InheritDecoratorsMixin):
             tool_config=ToolConfig(function_calling_config=FunctionCallingConfig(mode=tool_choice_mode)),
         )
 
-        while autocomplete and response.candidates and response.candidates[0].finish_reason not in [FinishReason.STOP, FinishReason.MAX_TOKENS]:
+        while autocomplete and response.candidates and response.candidates[0].finish_reason in [FinishReason.STOP, FinishReason.MAX_TOKENS]:
            BaseLLMClient._append_generated_part(messages, response)
 
            response = self.create(
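To make the behavioural flip in this hunk concrete, here is a minimal sketch of the predicate that gates the autocomplete loop before and after the change. The FinishReason enum and the should_continue_* helpers below are illustrative stand-ins, not promptbuilder's real definitions; only the membership test mirrors the diff (the real loop additionally requires autocomplete to be set and candidates to be non-empty).

# Hedged sketch: the loop's gating condition in 0.4.22 vs 0.4.23.
from enum import Enum


class FinishReason(Enum):  # stand-in, not promptbuilder's enum
    STOP = "STOP"
    MAX_TOKENS = "MAX_TOKENS"
    SAFETY = "SAFETY"


def should_continue_old(finish_reason: FinishReason) -> bool:
    # 0.4.22: keep generating while the model has NOT stopped cleanly or hit the token limit
    return finish_reason not in [FinishReason.STOP, FinishReason.MAX_TOKENS]


def should_continue_new(finish_reason: FinishReason) -> bool:
    # 0.4.23: keep generating only while the model stopped cleanly or hit the token limit
    return finish_reason in [FinishReason.STOP, FinishReason.MAX_TOKENS]


assert should_continue_old(FinishReason.SAFETY) is True
assert should_continue_new(FinishReason.SAFETY) is False
assert should_continue_new(FinishReason.MAX_TOKENS) is True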
promptbuilder/llm_client/types.py
@@ -1,9 +1,9 @@
 import logging
 from abc import ABC, abstractmethod
-from typing import Optional, Any, Callable, Literal, TypeVar, Self
+from typing import Optional, Any, Callable, Literal, TypeVar, Self, Protocol, runtime_checkable
 from enum import Enum
 
-from pydantic import BaseModel, model_validator
+from pydantic import BaseModel, model_validator, ConfigDict
 
 
 logger = logging.getLogger(__name__)
@@ -17,6 +17,16 @@ PydanticStructure = TypeVar("PydanticStructure", bound=BaseModel)
 type ResultType = Literal["json"] | type[PydanticStructure] | None
 
 
+@runtime_checkable
+class PartLike(Protocol):
+    """Protocol for Part-like objects that have the same interface as Part."""
+    text: Optional[str]
+    function_call: Optional[Any]  # Using Any to allow different FunctionCall types
+    function_response: Optional[Any]  # Using Any to allow different FunctionResponse types
+    thought: Optional[bool]
+    inline_data: Optional[Any]  # Using Any to allow different Blob types
+
+
 class CustomApiKey(ABC):
     @abstractmethod
     def __hash__(self):
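A hedged sketch of what the new @runtime_checkable protocol enables: isinstance now matches any object that merely exposes the five attributes, without inheriting from promptbuilder's Part. ForeignPart below is a hypothetical third-party type, and PartLike is re-declared only to keep the snippet self-contained.

# Structural (duck-typed) matching against a PartLike-style protocol.
from dataclasses import dataclass
from typing import Any, Optional, Protocol, runtime_checkable


@runtime_checkable
class PartLike(Protocol):  # re-declared for a self-contained example
    text: Optional[str]
    function_call: Optional[Any]
    function_response: Optional[Any]
    thought: Optional[bool]
    inline_data: Optional[Any]


@dataclass
class ForeignPart:  # hypothetical provider SDK type, unrelated to promptbuilder
    text: Optional[str] = None
    function_call: Optional[Any] = None
    function_response: Optional[Any] = None
    thought: Optional[bool] = None
    inline_data: Optional[Any] = None


print(isinstance(ForeignPart(text="hello"), PartLike))  # True: matched by attributes, not inheritance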
@@ -80,14 +90,16 @@ class Part(BaseModel):
         return cls(inline_data=inline_data)
 
 class Content(BaseModel):
-    parts: Optional[list[Part]] = None
-    role: Optional[Role] = None
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
+    parts: list[Part | PartLike] | None = None
+    role: Role | None = None
 
     def as_str(self) -> str:
         if self.parts is None:
             return ""
         else:
-            return "\n".join([part.as_str() for part in self.parts])
+            return "\n".join([(part.text or "") for part in self.parts])
 
 
 class FinishReason(Enum):
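And a hedged sketch of how the relaxed Content model behaves after this hunk: arbitrary_types_allowed lets pydantic store non-Part objects in parts, and as_str now joins the raw text attribute instead of calling Part.as_str(). The Part, DuckPart, and string role used below are simplified stand-ins; the real model annotates parts with Part | PartLike and role with the library's Role type.

# Simplified stand-ins showing the new Content behaviour (pydantic v2).
from typing import Any, Optional
from pydantic import BaseModel, ConfigDict


class Part(BaseModel):  # trimmed stand-in for promptbuilder's Part
    text: Optional[str] = None
    function_call: Optional[Any] = None
    function_response: Optional[Any] = None
    thought: Optional[bool] = None
    inline_data: Optional[Any] = None


class DuckPart:  # not a pydantic model, just Part-shaped
    def __init__(self, text: str):
        self.text = text
        self.function_call = None
        self.function_response = None
        self.thought = None
        self.inline_data = None


class Content(BaseModel):
    model_config = ConfigDict(arbitrary_types_allowed=True)  # needed to accept DuckPart instances

    parts: list[Part | DuckPart] | None = None
    role: str | None = None

    def as_str(self) -> str:
        if self.parts is None:
            return ""
        return "\n".join([(part.text or "") for part in self.parts])


content = Content(parts=[Part(text="hello"), DuckPart("world")], role="user")
print(content.as_str())  # "hello\nworld"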
promptbuilder-0.4.22.dist-info/METADATA → promptbuilder-0.4.23.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: promptbuilder
-Version: 0.4.22
+Version: 0.4.23
 Summary: Library for building prompts for LLMs
 Home-page: https://github.com/kapulkin/promptbuilder
 Author: Kapulkin Stanislav
promptbuilder-0.4.22.dist-info/RECORD → promptbuilder-0.4.23.dist-info/RECORD
@@ -9,7 +9,7 @@ promptbuilder/agent/utils.py,sha256=vTkphKw04v_QDIJtoB2JKK0RGY6iI1t_0LbmuStunzI,
 promptbuilder/llm_client/__init__.py,sha256=wJ33cnRtZX_YPsbcGxEu3SEZMOhPX7-fHI59MEPUe7I,517
 promptbuilder/llm_client/aisuite_client.py,sha256=aMqg05zefzck9Lz7pm7jZoKFdzr_ymFYhrAjZtzdHlQ,15561
 promptbuilder/llm_client/anthropic_client.py,sha256=vWuyFZL_LohOE0UYjB1-zTr4tJZMUcGk8H10gpjzdkk,28074
-promptbuilder/llm_client/base_client.py,sha256=xDDCIAIuLVf6ouItLVzGhP0a3hkr71oTH9jH790zkWE,24556
+promptbuilder/llm_client/base_client.py,sha256=tOTdv_dQOKNVcPaK_O-6aDCsK93TpXh_uqUXDIFHo4c,24552
 promptbuilder/llm_client/bedrock_client.py,sha256=e9vUClbybQb32028oDBW6IbyPYqj1ZSSv9y36ZqUWxM,27941
 promptbuilder/llm_client/config.py,sha256=exQEm35wp7lK5SfXNpN5H9VZEb2LVa4pyZ-cxGt1U-U,1124
 promptbuilder/llm_client/exceptions.py,sha256=t-X7r_a8B1jNu8eEavde1jXu5dz97yV3IG4YHOtgh0Y,4836
@@ -17,10 +17,10 @@ promptbuilder/llm_client/google_client.py,sha256=y1_CFXBijUiRTyAJsh-8a6CGIwwlZBs
 promptbuilder/llm_client/logfire_decorators.py,sha256=un_QnIekypOEcqTZ5v1y9pwijGnF95xwnwKO5rFSHVY,9667
 promptbuilder/llm_client/main.py,sha256=5r_MhKVTD4cS90AHR89JJRKiWYBk35Y3JvhvmOxkYHc,8110
 promptbuilder/llm_client/openai_client.py,sha256=GdyTbUPsbACXZYF0BnCRyLVw24_WM1R_MMr6pDpiiV4,24787
-promptbuilder/llm_client/types.py,sha256=2E-aPRb5uAkLFJocmjF1Lh2aQRq9r8a5JRIw-duHfjA,7460
+promptbuilder/llm_client/types.py,sha256=P7lM-Q1e0z6r5y_X3Vy107TR__b59f26ALCUOs-ap58,8015
 promptbuilder/llm_client/utils.py,sha256=79lvSppjrrItHB5MIozbp_5Oq7TsOK4Qzt9Ae3XMLFw,7624
-promptbuilder-0.4.22.dist-info/licenses/LICENSE,sha256=fqXmInzgsvEOIaKSBgcrwKyYCGYF0MKErJ0YivtODcc,1096
-promptbuilder-0.4.22.dist-info/METADATA,sha256=GtIz1H1kWvHz_1nyCYbkzmkUwsGLYE5jtaKCpt59eHc,3729
-promptbuilder-0.4.22.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-promptbuilder-0.4.22.dist-info/top_level.txt,sha256=UBVcYn4UgrPy3O3fmmnPEU_kieuplBMgheetIMei4EI,14
-promptbuilder-0.4.22.dist-info/RECORD,,
+promptbuilder-0.4.23.dist-info/licenses/LICENSE,sha256=fqXmInzgsvEOIaKSBgcrwKyYCGYF0MKErJ0YivtODcc,1096
+promptbuilder-0.4.23.dist-info/METADATA,sha256=tsEFAL-TTW24bAybDiFX9BmbOO8AFvk8ZO4D1870vEQ,3729
+promptbuilder-0.4.23.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+promptbuilder-0.4.23.dist-info/top_level.txt,sha256=UBVcYn4UgrPy3O3fmmnPEU_kieuplBMgheetIMei4EI,14
+promptbuilder-0.4.23.dist-info/RECORD,,