promptbuilder 0.4.32__py3-none-any.whl → 0.4.33__py3-none-any.whl
This diff compares two publicly released versions of the package as they appear in a supported public registry. It is provided for informational purposes only.
- promptbuilder/agent/agent.py +2 -2
- promptbuilder/agent/context.py +12 -2
- promptbuilder/llm_client/vertex_client.py +9 -0
- {promptbuilder-0.4.32.dist-info → promptbuilder-0.4.33.dist-info}/METADATA +1 -1
- {promptbuilder-0.4.32.dist-info → promptbuilder-0.4.33.dist-info}/RECORD +8 -8
- {promptbuilder-0.4.32.dist-info → promptbuilder-0.4.33.dist-info}/WHEEL +0 -0
- {promptbuilder-0.4.32.dist-info → promptbuilder-0.4.33.dist-info}/licenses/LICENSE +0 -0
- {promptbuilder-0.4.32.dist-info → promptbuilder-0.4.33.dist-info}/top_level.txt +0 -0
promptbuilder/agent/agent.py
CHANGED
@@ -49,7 +49,7 @@ class Agent(Generic[MessageType, ContextType]):
         return messages

     async def _answer_with_llm(self, **kwargs: Any) -> Response:
-        messages = self._formatted_messages(self.context.dialog_history.
+        messages = self._formatted_messages(self.context.dialog_history.last_content_messages())
         return await run_async(self.llm_client.create,
                                messages=messages,
                                system_message=self.system_message(**kwargs),
@@ -73,7 +73,7 @@ class AgentRouter(Agent[MessageType, ContextType]):
         callable_trs = [self.tools.get(name) or self.routes.get(name) for name in self.tr_names if name not in trs_to_exclude]
         trs = [callable_tr.tool for callable_tr in callable_trs]

-        messages = self._formatted_messages(self.context.dialog_history.
+        messages = self._formatted_messages(self.context.dialog_history.last_content_messages())
         response = await run_async(self.llm_client.create,
                                    messages=messages,
                                    system_message=self.system_message(callable_trs=callable_trs),
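Both hunks swap the old history accessor for the new `last_content_messages()` method introduced in `context.py` below. The following is a minimal, hedged sketch of the resulting call pattern; every name except `last_content_messages` is a stand-in (the real `Agent`, `run_async`, and LLM client are not reproduced here), and `asyncio.to_thread` merely approximates `run_async`:

```python
# Hedged sketch only: the promptbuilder classes are replaced by stand-ins so the
# snippet runs on its own. Only last_content_messages() mirrors the real API.
import asyncio


class StubHistory:
    def __init__(self, messages: list[str]) -> None:
        self._messages = messages

    def last_content_messages(self, n: int = 0) -> list[str]:
        # n == 0 (the default) returns the whole history, as in InMemoryDialogHistory
        return self._messages if n == 0 else self._messages[-n:]


class StubLLMClient:
    def create(self, messages: list[str], system_message: str) -> str:
        return f"saw {len(messages)} messages with system={system_message!r}"


async def answer_with_llm(history: StubHistory, client: StubLLMClient) -> str:
    # Same shape as the diff: fetch the history explicitly, then hand it to the client.
    messages = history.last_content_messages()
    return await asyncio.to_thread(client.create, messages=messages, system_message="be brief")


print(asyncio.run(answer_with_llm(StubHistory(["hi", "hello"]), StubLLMClient())))
```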
promptbuilder/agent/context.py
CHANGED
@@ -6,12 +6,15 @@ from typing import Any, TypeVar, Generic
 MessageType = TypeVar("MessageType", bound=Any)

 class DialogHistory(Generic[MessageType]):
-    def
+    def last_content_messages(self, n: int = 0) -> list[Content]:
         raise NotImplementedError("Subclasses must implement this method")

     def add_message(self, message: MessageType):
         raise NotImplementedError("Subclasses must implement this method")

+    def last_messages(self, n: int = 0) -> list[MessageType]:
+        raise NotImplementedError("Subclasses must implement this method")
+
     def clear(self):
         raise NotImplementedError("Subclasses must implement this method")

@@ -19,12 +22,19 @@ class InMemoryDialogHistory(DialogHistory[Content]):
     def __init__(self):
         self.messages: list[Content] = []

-    def
+    def last_content_messages(self, n: int = 0) -> list[Content]:
+        if n == 0:
+            return self.messages
         return self.messages[-n:]

     def add_message(self, message: Content):
         self.messages.append(message)

+    def last_messages(self, n: int = 0) -> list[Content]:
+        if n == 0:
+            return self.messages
+        return self.messages[-n:]
+
     def clear(self):
         self.messages = []

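The base class now declares two accessors, `last_content_messages()` and `last_messages()`, both defaulting to `n = 0` meaning "return everything". As an illustration of what a custom history must now provide, here is a hedged, standalone sketch of a bounded history; `BoundedHistory` and its `max_len` parameter are hypothetical and the class does not inherit the real `DialogHistory` base:

```python
# Illustrative only: a deque-backed history implementing the four methods the
# expanded DialogHistory interface declares. Not part of promptbuilder.
from collections import deque
from typing import Any


class BoundedHistory:
    def __init__(self, max_len: int = 50) -> None:
        self._messages: deque[Any] = deque(maxlen=max_len)

    def last_content_messages(self, n: int = 0) -> list[Any]:
        # n == 0 means "all retained messages", matching InMemoryDialogHistory
        items = list(self._messages)
        return items if n == 0 else items[-n:]

    def last_messages(self, n: int = 0) -> list[Any]:
        # InMemoryDialogHistory returns the same list from both accessors; a store
        # holding richer raw messages could return those here instead.
        return self.last_content_messages(n)

    def add_message(self, message: Any) -> None:
        self._messages.append(message)

    def clear(self) -> None:
        self._messages.clear()


history = BoundedHistory(max_len=3)
for i in range(5):
    history.add_message(f"message {i}")
print(history.last_content_messages())  # ['message 2', 'message 3', 'message 4']
print(history.last_messages(n=1))       # ['message 4']
```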
promptbuilder/llm_client/vertex_client.py
CHANGED
@@ -198,6 +198,15 @@ class VertexLLMClient(BaseLLMClient):
         v_messages = _to_vertex_content(messages)
         GenerationConfig = getattr(importlib.import_module("vertexai.generative_models"), "GenerationConfig")
         gen_cfg = GenerationConfig(max_output_tokens=max_tokens or self.default_max_tokens)
+
+        # Handle thinking config
+        if thinking_config is None:
+            thinking_config = self.default_thinking_config
+        if thinking_config is not None:
+            # Vertex AI supports thinking via response_logprobs and logprobs parameters
+            # but the exact implementation may vary - for now, we'll store it for potential future use
+            pass
+
         req_opts: dict[str, Any] | None = {}
         if timeout is not None:
             req_opts["timeout"] = timeout
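The added block only resolves which `thinking_config` to use (an explicit argument wins over `self.default_thinking_config`) and then deliberately does nothing with it yet, as the `pass` and comments make explicit. A tiny sketch of the same fallback logic, using a hypothetical `resolve_thinking_config` helper that is not part of promptbuilder:

```python
# Hypothetical helper mirroring the fallback added above; not promptbuilder API.
from typing import Any


def resolve_thinking_config(explicit: Any | None, default: Any | None) -> Any | None:
    # An explicitly passed config wins; otherwise fall back to the client default.
    return explicit if explicit is not None else default


assert resolve_thinking_config(None, {"budget": 1024}) == {"budget": 1024}
assert resolve_thinking_config({"budget": 0}, {"budget": 1024}) == {"budget": 0}
# When both are None there is nothing to apply, which is what the `pass` reflects.
assert resolve_thinking_config(None, None) is None
```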
{promptbuilder-0.4.32.dist-info → promptbuilder-0.4.33.dist-info}/RECORD
CHANGED
@@ -2,8 +2,8 @@ promptbuilder/__init__.py,sha256=o_NdXl7NppM399-fy5VGfYkSN8iYDAaFAwJNhdkW3bI,56
 promptbuilder/embeddings.py,sha256=bu-soCNYiHxshc1jejGmI5iJTIdotqEhmvpImSjlFTY,8087
 promptbuilder/prompt_builder.py,sha256=kK6WHr2umYmsanYb2fQVxqEajs_dzGPXRulTo40g36E,12428
 promptbuilder/agent/__init__.py,sha256=qG4Jq4wbmCH5NKLOX6ZMtZ7lFURhJXf464BntR-u5rU,56
-promptbuilder/agent/agent.py,sha256=
-promptbuilder/agent/context.py,sha256=
+promptbuilder/agent/agent.py,sha256=A1-r2J6bmVQW5yCSzJOTzknQhkGkKw8wUUzL1BtAEAA,9290
+promptbuilder/agent/context.py,sha256=xJwNxx-NKq6SDdy4Rpa4sU60nnhQ1a2AXiUQIlNhlG4,1481
 promptbuilder/agent/tool.py,sha256=VDbIHK3_Q62Ei7hwLF7nIgHq-PTMKnv1NSjHpDYkUZE,2651
 promptbuilder/agent/utils.py,sha256=vTkphKw04v_QDIJtoB2JKK0RGY6iI1t_0LbmuStunzI,356
 promptbuilder/llm_client/__init__.py,sha256=wJ33cnRtZX_YPsbcGxEu3SEZMOhPX7-fHI59MEPUe7I,517
@@ -20,9 +20,9 @@ promptbuilder/llm_client/main.py,sha256=2Q7J5FwivX2YwvptzoSEtCfvfcI9p5HC55D3mMb2
 promptbuilder/llm_client/openai_client.py,sha256=QMXX7VPYWFo1VvX8bWF6jpi95ZIOk_MMBpz-14GrT-k,25274
 promptbuilder/llm_client/types.py,sha256=kgbg5FRzvZwu98y1OhAZJDneXBNPnsFZueQCr9HXIY4,8063
 promptbuilder/llm_client/utils.py,sha256=79lvSppjrrItHB5MIozbp_5Oq7TsOK4Qzt9Ae3XMLFw,7624
-promptbuilder/llm_client/vertex_client.py,sha256=
-promptbuilder-0.4.
-promptbuilder-0.4.
-promptbuilder-0.4.
-promptbuilder-0.4.
-promptbuilder-0.4.
+promptbuilder/llm_client/vertex_client.py,sha256=OgbmRIYQXeK2kAh27ijhXuTzeg8APP38IyJ9WJqvDkY,15405
+promptbuilder-0.4.33.dist-info/licenses/LICENSE,sha256=fqXmInzgsvEOIaKSBgcrwKyYCGYF0MKErJ0YivtODcc,1096
+promptbuilder-0.4.33.dist-info/METADATA,sha256=LzmyQ7EyHXCNSmW_M_cXFdKimdlQvizbK85I5qf-uPs,3799
+promptbuilder-0.4.33.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+promptbuilder-0.4.33.dist-info/top_level.txt,sha256=UBVcYn4UgrPy3O3fmmnPEU_kieuplBMgheetIMei4EI,14
+promptbuilder-0.4.33.dist-info/RECORD,,
{promptbuilder-0.4.32.dist-info → promptbuilder-0.4.33.dist-info}/WHEEL
File without changes
{promptbuilder-0.4.32.dist-info → promptbuilder-0.4.33.dist-info}/licenses/LICENSE
File without changes
{promptbuilder-0.4.32.dist-info → promptbuilder-0.4.33.dist-info}/top_level.txt
File without changes