promptbuilder 0.4.31__tar.gz → 0.4.33__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {promptbuilder-0.4.31/promptbuilder.egg-info → promptbuilder-0.4.33}/PKG-INFO +1 -1
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/agent/agent.py +2 -2
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/agent/context.py +12 -2
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/base_client.py +4 -4
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/litellm_client.py +8 -8
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/vertex_client.py +9 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33/promptbuilder.egg-info}/PKG-INFO +1 -1
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/setup.py +1 -1
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/LICENSE +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/MANIFEST.in +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/Readme.md +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/__init__.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/agent/__init__.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/agent/tool.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/agent/utils.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/embeddings.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/__init__.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/aisuite_client.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/anthropic_client.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/bedrock_client.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/config.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/exceptions.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/google_client.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/logfire_decorators.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/main.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/openai_client.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/types.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/utils.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/prompt_builder.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder.egg-info/SOURCES.txt +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder.egg-info/dependency_links.txt +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder.egg-info/requires.txt +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder.egg-info/top_level.txt +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/pyproject.toml +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/setup.cfg +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/tests/test_llm_client.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/tests/test_llm_client_async.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/tests/test_timeout_google.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/tests/test_timeout_litellm.py +0 -0
- {promptbuilder-0.4.31 → promptbuilder-0.4.33}/tests/test_timeout_openai.py +0 -0
{promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/agent/agent.py

@@ -49,7 +49,7 @@ class Agent(Generic[MessageType, ContextType]):
         return messages
 
     async def _answer_with_llm(self, **kwargs: Any) -> Response:
-        messages = self._formatted_messages(self.context.dialog_history.
+        messages = self._formatted_messages(self.context.dialog_history.last_content_messages())
         return await run_async(self.llm_client.create,
                                messages=messages,
                                system_message=self.system_message(**kwargs),
@@ -73,7 +73,7 @@ class AgentRouter(Agent[MessageType, ContextType]):
         callable_trs = [self.tools.get(name) or self.routes.get(name) for name in self.tr_names if name not in trs_to_exclude]
         trs = [callable_tr.tool for callable_tr in callable_trs]
 
-        messages = self._formatted_messages(self.context.dialog_history.
+        messages = self._formatted_messages(self.context.dialog_history.last_content_messages())
         response = await run_async(self.llm_client.create,
                                    messages=messages,
                                    system_message=self.system_message(callable_trs=callable_trs),
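Both agent call sites now read history through the `last_content_messages()` accessor instead of a raw attribute, so any `DialogHistory` subclass controls exactly what reaches the LLM. A minimal sketch of a subclass exploiting that seam (the class below is illustrative and not part of the package):

```python
from collections import deque
from typing import Any

# Illustrative only: a history that silently caps its length. Because
# Agent._answer_with_llm() now calls last_content_messages(), the agent
# automatically sees at most `maxlen` messages. `Any` stands in for the
# package's Content type.
class BoundedDialogHistory:
    def __init__(self, maxlen: int = 50):
        self._messages: deque[Any] = deque(maxlen=maxlen)

    def last_content_messages(self, n: int = 0) -> list[Any]:
        messages = list(self._messages)
        return messages if n == 0 else messages[-n:]

    def last_messages(self, n: int = 0) -> list[Any]:
        return self.last_content_messages(n)

    def add_message(self, message: Any) -> None:
        self._messages.append(message)

    def clear(self) -> None:
        self._messages.clear()
```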
{promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/agent/context.py

@@ -6,12 +6,15 @@ from typing import Any, TypeVar, Generic
 MessageType = TypeVar("MessageType", bound=Any)
 
 class DialogHistory(Generic[MessageType]):
-    def
+    def last_content_messages(self, n: int = 0) -> list[Content]:
         raise NotImplementedError("Subclasses must implement this method")
 
     def add_message(self, message: MessageType):
         raise NotImplementedError("Subclasses must implement this method")
 
+    def last_messages(self, n: int = 0) -> list[MessageType]:
+        raise NotImplementedError("Subclasses must implement this method")
+
     def clear(self):
         raise NotImplementedError("Subclasses must implement this method")
@@ -19,12 +22,19 @@ class InMemoryDialogHistory(DialogHistory[Content]):
     def __init__(self):
         self.messages: list[Content] = []
 
-    def
+    def last_content_messages(self, n: int = 0) -> list[Content]:
+        if n == 0:
+            return self.messages
         return self.messages[-n:]
 
     def add_message(self, message: Content):
         self.messages.append(message)
 
+    def last_messages(self, n: int = 0) -> list[Content]:
+        if n == 0:
+            return self.messages
+        return self.messages[-n:]
+
     def clear(self):
         self.messages = []
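Both new accessors share the same `n` semantics: the default `n = 0` means the whole history, and a positive `n` slices the tail. A quick demonstration of the exact logic added above, using strings as stand-ins for `Content` objects:

```python
# Same logic as InMemoryDialogHistory.last_content_messages / last_messages.
def last_messages(msgs: list[str], n: int = 0) -> list[str]:
    if n == 0:          # default: return the entire history
        return msgs
    return msgs[-n:]    # otherwise: the n most recent messages

history = ["m1", "m2", "m3", "m4"]
assert last_messages(history) == ["m1", "m2", "m3", "m4"]
assert last_messages(history, n=2) == ["m3", "m4"]
```

Note that neither accessor guards against negative `n`; `msgs[-n:]` with, say, `n = -1` would silently drop the oldest message rather than raise.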
{promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/base_client.py

@@ -98,7 +98,7 @@ class BaseLLMClient(ABC, utils.InheritDecoratorsMixin):
             result_type=result_type,
             thinking_config=thinking_config,
             system_message=system_message,
-            max_tokens=max_tokens,
+            max_tokens=max_tokens if not autocomplete else None,
             timeout=timeout,
             tools=tools,
             tool_config=tool_config,
@@ -116,7 +116,7 @@ class BaseLLMClient(ABC, utils.InheritDecoratorsMixin):
             result_type=result_type,
             thinking_config=thinking_config,
             system_message=system_message,
-            max_tokens=max_tokens,
+            max_tokens=max_tokens if not autocomplete else None,
             timeout=timeout,
             tools=tools,
             tool_config=tool_config,
@@ -449,7 +449,7 @@ class BaseLLMClientAsync(ABC, utils.InheritDecoratorsMixin):
             result_type=result_type,
             thinking_config=thinking_config,
             system_message=system_message,
-            max_tokens=max_tokens,
+            max_tokens=max_tokens if not autocomplete else None,
             timeout=timeout,
             tools=tools,
             tool_config=tool_config,
@@ -467,7 +467,7 @@ class BaseLLMClientAsync(ABC, utils.InheritDecoratorsMixin):
             result_type=result_type,
             thinking_config=thinking_config,
             system_message=system_message,
-            max_tokens=max_tokens,
+            max_tokens=max_tokens if not autocomplete else None,
             timeout=timeout,
             tools=tools,
             tool_config=tool_config,
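The same one-line change lands in all four forwarding sites (two in `BaseLLMClient`, two in `BaseLLMClientAsync`): when `autocomplete` is enabled, the caller's `max_tokens` cap is replaced with `None`, presumably so the provider's default limit governs each continuation step instead of the user's cap. The conditional is equivalent to this small helper (hypothetical, for illustration only):

```python
def effective_max_tokens(max_tokens: int | None, autocomplete: bool) -> int | None:
    # Mirrors `max_tokens if not autocomplete else None` from the diff.
    return None if autocomplete else max_tokens

assert effective_max_tokens(1024, autocomplete=False) == 1024
assert effective_max_tokens(1024, autocomplete=True) is None
assert effective_max_tokens(None, autocomplete=False) is None
```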
{promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/litellm_client.py

@@ -23,7 +23,7 @@ from promptbuilder.llm_client.config import DecoratorConfigs
 from promptbuilder.prompt_builder import PromptBuilder
 
 
-class
+class LiteLLMClient(BaseLLMClient):
     provider: str = ""
     user_tag: Role = "user"
     assistant_tag: Role = "model"
@@ -241,7 +241,7 @@ class LiteLLMLLMClient(BaseLLMClient):
             finish_reason_val = first_choice.get("finish_reason")
         else:
             finish_reason_val = getattr(first_choice, "finish_reason", None)
-        mapped_finish_reason =
+        mapped_finish_reason = LiteLLMClient._map_finish_reason(finish_reason_val)
 
         content_parts: list[Part | Any] = list(parts)
         return Response(
@@ -293,7 +293,7 @@ class LiteLLMLLMClient(BaseLLMClient):
             finish_reason_val = first_choice.get("finish_reason")
         else:
             finish_reason_val = getattr(first_choice, "finish_reason", None)
-        mapped_finish_reason =
+        mapped_finish_reason = LiteLLMClient._map_finish_reason(finish_reason_val)
 
         content_parts2: list[Part | Any] = list(parts)
         return Response(
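All four response-assembly paths (two in the sync client above, two in the async client below) now normalize the raw LiteLLM finish reason through `LiteLLMClient._map_finish_reason` before building the `Response`. The method's body is not shown in this diff; the sketch below is an assumption about what such a mapper typically does, with an invented mapping table:

```python
# Hypothetical sketch only: _map_finish_reason's real body is not in this diff.
_FINISH_REASON_MAP = {
    "stop": "STOP",
    "length": "MAX_TOKENS",
    "tool_calls": "TOOL_CALLS",
    "content_filter": "SAFETY",
}

def map_finish_reason(raw: str | None) -> str | None:
    if raw is None:
        return None
    return _FINISH_REASON_MAP.get(raw, "OTHER")
```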
@@ -311,7 +311,7 @@ class LiteLLMLLMClient(BaseLLMClient):
             raise ValueError(f"Unsupported result_type: {result_type}. Supported types are: None, 'json', or a Pydantic model.")
 
 
-class
+class LiteLLMClientAsync(BaseLLMClientAsync):
     provider: str = ""
     user_tag: Role = "user"
     assistant_tag: Role = "model"
@@ -341,11 +341,11 @@ class LiteLLMLLMClientAsync(BaseLLMClientAsync):
 
     @staticmethod
     def make_function_call(tool_call) -> FunctionCall | None:
-        return
+        return LiteLLMClient.make_function_call(tool_call)
 
     @staticmethod
     def make_usage_metadata(usage) -> UsageMetadata:
-        return
+        return LiteLLMClient.make_usage_metadata(usage)
 
     async def _create(
         self,
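The async client's two static helpers stop reimplementing conversion logic and now delegate to the sync `LiteLLMClient`, leaving a single source of truth for both clients. The pattern in miniature (class names and the usage fields are illustrative, not the package's actual types):

```python
class SyncClient:
    @staticmethod
    def make_usage_metadata(usage: dict) -> dict:
        # The one authoritative conversion.
        return {"prompt": usage["prompt_tokens"], "completion": usage["completion_tokens"]}

class AsyncClient:
    @staticmethod
    def make_usage_metadata(usage: dict) -> dict:
        # Reuse the sync logic instead of copying it.
        return SyncClient.make_usage_metadata(usage)

assert AsyncClient.make_usage_metadata(
    {"prompt_tokens": 10, "completion_tokens": 5}
) == {"prompt": 10, "completion": 5}
```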
@@ -450,7 +450,7 @@ class LiteLLMLLMClientAsync(BaseLLMClientAsync):
             finish_reason_val = first_choice.get("finish_reason")
         else:
             finish_reason_val = getattr(first_choice, "finish_reason", None)
-        mapped_finish_reason =
+        mapped_finish_reason = LiteLLMClient._map_finish_reason(finish_reason_val)
 
         content_parts3: list[Part | Any] = list(parts)
         return Response(
@@ -502,7 +502,7 @@ class LiteLLMLLMClientAsync(BaseLLMClientAsync):
             finish_reason_val = first_choice.get("finish_reason")
         else:
             finish_reason_val = getattr(first_choice, "finish_reason", None)
-        mapped_finish_reason =
+        mapped_finish_reason = LiteLLMClient._map_finish_reason(finish_reason_val)
 
         content_parts4: list[Part | Any] = list(parts)
         return Response(
{promptbuilder-0.4.31 → promptbuilder-0.4.33}/promptbuilder/llm_client/vertex_client.py

@@ -198,6 +198,15 @@ class VertexLLMClient(BaseLLMClient):
         v_messages = _to_vertex_content(messages)
         GenerationConfig = getattr(importlib.import_module("vertexai.generative_models"), "GenerationConfig")
         gen_cfg = GenerationConfig(max_output_tokens=max_tokens or self.default_max_tokens)
+
+        # Handle thinking config
+        if thinking_config is None:
+            thinking_config = self.default_thinking_config
+        if thinking_config is not None:
+            # Vertex AI supports thinking via response_logprobs and logprobs parameters
+            # but the exact implementation may vary - for now, we'll store it for potential future use
+            pass
+
         req_opts: dict[str, Any] | None = {}
         if timeout is not None:
             req_opts["timeout"] = timeout
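The new Vertex block resolves `thinking_config` (falling back to `self.default_thinking_config`) and then, per its own comment, deliberately does nothing with the result; it is a forward-compatibility placeholder, not active behavior. Stripped of the class context, the added logic reduces to this (a sketch using the diff's own names):

```python
def resolve_thinking_config(thinking_config, default_thinking_config):
    # Fall back to the client-level default when no per-call config is given.
    if thinking_config is None:
        thinking_config = default_thinking_config
    # The resolved value is currently unused; the diff's comment notes the
    # exact Vertex AI mechanism "may vary".
    return thinking_config
```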