promptbuilder 0.4.32__py3-none-any.whl → 0.4.34__py3-none-any.whl
This diff shows the changes between package versions that have been publicly released to a supported registry, as they appear in that registry. It is provided for informational purposes only.
- promptbuilder/agent/agent.py +2 -2
- promptbuilder/agent/context.py +12 -2
- promptbuilder/llm_client/logfire_decorators.py +3 -3
- promptbuilder/llm_client/types.py +6 -4
- promptbuilder/llm_client/vertex_client.py +9 -0
- {promptbuilder-0.4.32.dist-info → promptbuilder-0.4.34.dist-info}/METADATA +1 -1
- {promptbuilder-0.4.32.dist-info → promptbuilder-0.4.34.dist-info}/RECORD +10 -10
- {promptbuilder-0.4.32.dist-info → promptbuilder-0.4.34.dist-info}/WHEEL +0 -0
- {promptbuilder-0.4.32.dist-info → promptbuilder-0.4.34.dist-info}/licenses/LICENSE +0 -0
- {promptbuilder-0.4.32.dist-info → promptbuilder-0.4.34.dist-info}/top_level.txt +0 -0
promptbuilder/agent/agent.py
CHANGED

@@ -49,7 +49,7 @@ class Agent(Generic[MessageType, ContextType]):
         return messages
 
     async def _answer_with_llm(self, **kwargs: Any) -> Response:
-        messages = self._formatted_messages(self.context.dialog_history.
+        messages = self._formatted_messages(self.context.dialog_history.last_content_messages())
         return await run_async(self.llm_client.create,
             messages=messages,
             system_message=self.system_message(**kwargs),
@@ -73,7 +73,7 @@ class AgentRouter(Agent[MessageType, ContextType]):
         callable_trs = [self.tools.get(name) or self.routes.get(name) for name in self.tr_names if name not in trs_to_exclude]
         trs = [callable_tr.tool for callable_tr in callable_trs]
 
-        messages = self._formatted_messages(self.context.dialog_history.
+        messages = self._formatted_messages(self.context.dialog_history.last_content_messages())
        response = await run_async(self.llm_client.create,
            messages=messages,
            system_message=self.system_message(callable_trs=callable_trs),
promptbuilder/agent/context.py
CHANGED

@@ -6,12 +6,15 @@ from typing import Any, TypeVar, Generic
 MessageType = TypeVar("MessageType", bound=Any)
 
 class DialogHistory(Generic[MessageType]):
-    def
+    def last_content_messages(self, n: int = 0) -> list[Content]:
         raise NotImplementedError("Subclasses must implement this method")
 
     def add_message(self, message: MessageType):
         raise NotImplementedError("Subclasses must implement this method")
 
+    def last_messages(self, n: int = 0) -> list[MessageType]:
+        raise NotImplementedError("Subclasses must implement this method")
+
     def clear(self):
         raise NotImplementedError("Subclasses must implement this method")
 
@@ -19,12 +22,19 @@ class InMemoryDialogHistory(DialogHistory[Content]):
     def __init__(self):
         self.messages: list[Content] = []
 
-    def
+    def last_content_messages(self, n: int = 0) -> list[Content]:
+        if n == 0:
+            return self.messages
         return self.messages[-n:]
 
     def add_message(self, message: Content):
         self.messages.append(message)
 
+    def last_messages(self, n: int = 0) -> list[Content]:
+        if n == 0:
+            return self.messages
+        return self.messages[-n:]
+
     def clear(self):
         self.messages = []
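For `InMemoryDialogHistory` the two accessors coincide, since it stores `Content` directly. The split matters for histories whose `MessageType` is an application type: `last_messages` returns raw records while `last_content_messages` yields the `Content` the LLM client consumes. A hedged sketch of such a subclass; `ChatRecord` and `RecordHistory` are hypothetical names, and the `Content`/`Part` constructors are assumed from their usage elsewhere in this diff:

```python
from dataclasses import dataclass

from promptbuilder.agent.context import DialogHistory
from promptbuilder.llm_client.types import Content, Part


@dataclass
class ChatRecord:          # hypothetical app-specific message type
    role: str
    text: str


class RecordHistory(DialogHistory[ChatRecord]):
    def __init__(self):
        self.records: list[ChatRecord] = []

    def add_message(self, message: ChatRecord):
        self.records.append(message)

    def last_messages(self, n: int = 0) -> list[ChatRecord]:
        # raw records, in the application's own type
        return self.records if n == 0 else self.records[-n:]

    def last_content_messages(self, n: int = 0) -> list[Content]:
        # the same window, converted to the Content type the agent forwards to the LLM
        return [Content(role=r.role, parts=[Part(text=r.text)]) for r in self.last_messages(n)]

    def clear(self):
        self.records = []
```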
promptbuilder/llm_client/logfire_decorators.py
CHANGED

@@ -7,7 +7,7 @@ import logfire
 
 from promptbuilder.llm_client.utils import inherited_decorator
 from promptbuilder.llm_client.config import GLOBAL_CONFIG
-from promptbuilder.llm_client.types import Response, UsageMetadata
+from promptbuilder.llm_client.types import Response, UsageMetadata, content_to_str
 
 
 P = ParamSpec("P")
@@ -24,9 +24,9 @@ def extract_span_data(self, *args, **kwargs) -> dict[str, Any]:
         logfire_messages.append({"role": "system", "content": system_message})
     for content in messages:
         if content.role == "user":
-            logfire_messages.append({"role": "user", "content": content
+            logfire_messages.append({"role": "user", "content": content_to_str(content)})
         elif content.role == "model":
-            logfire_messages.append({"role": "assistant", "content": content
+            logfire_messages.append({"role": "assistant", "content": content_to_str(content)})
     span_data["request_data"] = {"messages": logfire_messages}
     span_data["messages"] = messages
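The per-message string building moves to the shared `content_to_str` helper from types.py. A standalone sketch of the mapping `extract_span_data` performs; `to_logfire_messages` is a hypothetical name for illustration only:

```python
from promptbuilder.llm_client.types import Content, content_to_str


def to_logfire_messages(messages: list[Content], system_message: str | None = None) -> list[dict[str, str]]:
    logfire_messages = []
    if system_message is not None:
        logfire_messages.append({"role": "system", "content": system_message})
    for content in messages:
        if content.role == "user":
            logfire_messages.append({"role": "user", "content": content_to_str(content)})
        elif content.role == "model":
            # "model" turns are recorded under the OpenAI-style "assistant" role
            logfire_messages.append({"role": "assistant", "content": content_to_str(content)})
    return logfire_messages
```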
promptbuilder/llm_client/types.py
CHANGED

@@ -96,11 +96,13 @@ class Content(BaseModel):
     role: Role | None = None
 
     def as_str(self) -> str:
-        if self.parts is None:
-            return ""
-        else:
-            return "\n".join([(part.text or "") for part in self.parts])
+        return content_to_str(self)
 
+def content_to_str(content: Content) -> str:
+    if content.parts is None:
+        return ""
+    else:
+        return "".join([part.text for part in content.parts if part.text])
 
 class FinishReason(Enum):
     """Output only. The reason why the model stopped generating tokens.
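Note this is not a pure refactor: the new module-level helper joins parts with `""` instead of `"\n"` and skips empty or `None` texts rather than mapping them to `""`, and `as_str()` now delegates to it. A sketch of the difference, assuming `Part(text=...)` can be constructed as implied by the attribute access in the diff:

```python
from promptbuilder.llm_client.types import Content, Part, content_to_str

c = Content(role="model", parts=[Part(text="foo"), Part(text=None), Part(text="bar")])

content_to_str(c)  # "foobar": joined with "", empty texts dropped
c.as_str()         # same result, since as_str() now delegates to content_to_str()
# The removed inline body joined with "\n" and kept empty strings, so the same
# Content previously rendered as "foo\n\nbar".
```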
promptbuilder/llm_client/vertex_client.py
CHANGED

@@ -198,6 +198,15 @@ class VertexLLMClient(BaseLLMClient):
         v_messages = _to_vertex_content(messages)
         GenerationConfig = getattr(importlib.import_module("vertexai.generative_models"), "GenerationConfig")
         gen_cfg = GenerationConfig(max_output_tokens=max_tokens or self.default_max_tokens)
+
+        # Handle thinking config
+        if thinking_config is None:
+            thinking_config = self.default_thinking_config
+        if thinking_config is not None:
+            # Vertex AI supports thinking via response_logprobs and logprobs parameters
+            # but the exact implementation may vary - for now, we'll store it for potential future use
+            pass
+
         req_opts: dict[str, Any] | None = {}
         if timeout is not None:
             req_opts["timeout"] = timeout
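As the added comments say, the new block only resolves which `thinking_config` applies; nothing is forwarded to Vertex yet (the guarded body is `pass`). The precedence it establishes, sketched in isolation with a hypothetical helper name:

```python
def resolve_thinking_config(per_call, client_default):
    """Per-call config wins; otherwise fall back to the client-level default."""
    return per_call if per_call is not None else client_default


assert resolve_thinking_config(None, "default-cfg") == "default-cfg"    # fallback applies
assert resolve_thinking_config("call-cfg", "default-cfg") == "call-cfg" # explicit wins
assert resolve_thinking_config(None, None) is None                      # nothing configured
```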
{promptbuilder-0.4.32.dist-info → promptbuilder-0.4.34.dist-info}/RECORD
RENAMED

@@ -2,8 +2,8 @@ promptbuilder/__init__.py,sha256=o_NdXl7NppM399-fy5VGfYkSN8iYDAaFAwJNhdkW3bI,56
 promptbuilder/embeddings.py,sha256=bu-soCNYiHxshc1jejGmI5iJTIdotqEhmvpImSjlFTY,8087
 promptbuilder/prompt_builder.py,sha256=kK6WHr2umYmsanYb2fQVxqEajs_dzGPXRulTo40g36E,12428
 promptbuilder/agent/__init__.py,sha256=qG4Jq4wbmCH5NKLOX6ZMtZ7lFURhJXf464BntR-u5rU,56
-promptbuilder/agent/agent.py,sha256=
-promptbuilder/agent/context.py,sha256=
+promptbuilder/agent/agent.py,sha256=A1-r2J6bmVQW5yCSzJOTzknQhkGkKw8wUUzL1BtAEAA,9290
+promptbuilder/agent/context.py,sha256=xJwNxx-NKq6SDdy4Rpa4sU60nnhQ1a2AXiUQIlNhlG4,1481
 promptbuilder/agent/tool.py,sha256=VDbIHK3_Q62Ei7hwLF7nIgHq-PTMKnv1NSjHpDYkUZE,2651
 promptbuilder/agent/utils.py,sha256=vTkphKw04v_QDIJtoB2JKK0RGY6iI1t_0LbmuStunzI,356
 promptbuilder/llm_client/__init__.py,sha256=wJ33cnRtZX_YPsbcGxEu3SEZMOhPX7-fHI59MEPUe7I,517
@@ -15,14 +15,14 @@ promptbuilder/llm_client/config.py,sha256=exQEm35wp7lK5SfXNpN5H9VZEb2LVa4pyZ-cxG
 promptbuilder/llm_client/exceptions.py,sha256=t-X7r_a8B1jNu8eEavde1jXu5dz97yV3IG4YHOtgh0Y,4836
 promptbuilder/llm_client/google_client.py,sha256=ZjJjDUQZH6zAIRoi4xUx3IDEm8jRkVWGyehy5P_Ba_M,12170
 promptbuilder/llm_client/litellm_client.py,sha256=NxVJORvQy5fSoqzPOagBliAhg2fjnFNnV5ZF7qckbVI,25561
-promptbuilder/llm_client/logfire_decorators.py,sha256=
+promptbuilder/llm_client/logfire_decorators.py,sha256=pEPm3nTfqkg9kZr3s5ORs3fMex_z4Ce8CAt301m3OIk,9697
 promptbuilder/llm_client/main.py,sha256=2Q7J5FwivX2YwvptzoSEtCfvfcI9p5HC55D3mMb2se4,8243
 promptbuilder/llm_client/openai_client.py,sha256=QMXX7VPYWFo1VvX8bWF6jpi95ZIOk_MMBpz-14GrT-k,25274
-promptbuilder/llm_client/types.py,sha256=
+promptbuilder/llm_client/types.py,sha256=fnkSMFjK9ViaRQsD6LILpLz8R2_E1TI9efjy8VNO0RQ,8139
 promptbuilder/llm_client/utils.py,sha256=79lvSppjrrItHB5MIozbp_5Oq7TsOK4Qzt9Ae3XMLFw,7624
-promptbuilder/llm_client/vertex_client.py,sha256=
-promptbuilder-0.4.
-promptbuilder-0.4.
-promptbuilder-0.4.
-promptbuilder-0.4.
-promptbuilder-0.4.
+promptbuilder/llm_client/vertex_client.py,sha256=OgbmRIYQXeK2kAh27ijhXuTzeg8APP38IyJ9WJqvDkY,15405
+promptbuilder-0.4.34.dist-info/licenses/LICENSE,sha256=fqXmInzgsvEOIaKSBgcrwKyYCGYF0MKErJ0YivtODcc,1096
+promptbuilder-0.4.34.dist-info/METADATA,sha256=vGhJa-lzL9vGZm-4GTwWmYtqzpjPGEkVt7SL5Lvkskg,3799
+promptbuilder-0.4.34.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+promptbuilder-0.4.34.dist-info/top_level.txt,sha256=UBVcYn4UgrPy3O3fmmnPEU_kieuplBMgheetIMei4EI,14
+promptbuilder-0.4.34.dist-info/RECORD,,
{promptbuilder-0.4.32.dist-info → promptbuilder-0.4.34.dist-info}/WHEEL
RENAMED
File without changes

{promptbuilder-0.4.32.dist-info → promptbuilder-0.4.34.dist-info}/licenses/LICENSE
RENAMED
File without changes

{promptbuilder-0.4.32.dist-info → promptbuilder-0.4.34.dist-info}/top_level.txt
RENAMED
File without changes