vectorvein 0.1.20__tar.gz → 0.1.22__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. {vectorvein-0.1.20 → vectorvein-0.1.22}/PKG-INFO +1 -1
  2. {vectorvein-0.1.20 → vectorvein-0.1.22}/pyproject.toml +1 -1
  3. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/base_client.py +2 -0
  4. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/gemini_client.py +18 -0
  5. {vectorvein-0.1.20 → vectorvein-0.1.22}/README.md +0 -0
  6. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/__init__.py +0 -0
  7. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/__init__.py +0 -0
  8. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/anthropic_client.py +0 -0
  9. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/baichuan_client.py +0 -0
  10. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/deepseek_client.py +0 -0
  11. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/groq_client.py +0 -0
  12. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/local_client.py +0 -0
  13. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/minimax_client.py +0 -0
  14. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/mistral_client.py +0 -0
  15. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/moonshot_client.py +0 -0
  16. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/openai_client.py +0 -0
  17. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/openai_compatible_client.py +0 -0
  18. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/qwen_client.py +0 -0
  19. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/utils.py +0 -0
  20. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/yi_client.py +0 -0
  21. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/chat_clients/zhipuai_client.py +0 -0
  22. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/settings/__init__.py +0 -0
  23. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/types/defaults.py +0 -0
  24. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/types/enums.py +0 -0
  25. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/types/llm_parameters.py +0 -0
  26. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/utilities/media_processing.py +0 -0
  27. {vectorvein-0.1.20 → vectorvein-0.1.22}/src/vectorvein/utilities/retry.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: vectorvein
- Version: 0.1.20
+ Version: 0.1.22
  Summary: Default template for PDM package
  Author-Email: Anderson <andersonby@163.com>
  License: MIT
@@ -16,7 +16,7 @@ description = "Default template for PDM package"
  name = "vectorvein"
  readme = "README.md"
  requires-python = ">=3.10"
- version = "0.1.20"
+ version = "0.1.22"

  [project.license]
  text = "MIT"
@@ -59,6 +59,7 @@ class BaseChatClient(ABC):
  max_tokens: int | None = None,
  tools: list | NotGiven = NOT_GIVEN,
  tool_choice: str | NotGiven = NOT_GIVEN,
+ response_format: dict | None = None,
  **kwargs,
  ) -> ChatCompletionMessage | Generator[ChatCompletionDeltaMessage, Any, None]:
  pass
@@ -130,6 +131,7 @@ class BaseAsyncChatClient(ABC):
  max_tokens: int | None = None,
  tools: list | NotGiven = NOT_GIVEN,
  tool_choice: str | NotGiven = NOT_GIVEN,
+ response_format: dict | None = None,
  **kwargs,
  ) -> ChatCompletionMessage | AsyncGenerator[ChatCompletionDeltaMessage, None]:
  pass
@@ -52,6 +52,8 @@ class GeminiChatClient(BaseChatClient):
  max_tokens: int | None = None,
  tools: list | None = None,
  tool_choice: str | None = None,
+ response_format: dict | None = None,
+ **kwargs,
  ):
  if model is not None:
  self.model = model
@@ -81,6 +83,11 @@ class GeminiChatClient(BaseChatClient):
  else:
  tools_params = {}

+ if response_format:
+ response_format_params = {"generationConfig": {"response_mime_type": "application/json"}}
+ else:
+ response_format_params = {}
+
  if self.random_endpoint:
  self.random_endpoint = True
  self.endpoint_id = random.choice(self.backend_settings.models[self.model].endpoints)
@@ -99,6 +106,8 @@ class GeminiChatClient(BaseChatClient):
  "maxOutputTokens": max_tokens,
  },
  **tools_params,
+ **response_format_params,
+ **kwargs,
  }
  if system_prompt:
  request_body["systemInstruction"] = {"parts": [{"text": system_prompt}]}
@@ -226,6 +235,8 @@ class AsyncGeminiChatClient(BaseAsyncChatClient):
  max_tokens: int | None = None,
  tools: list | None = None,
  tool_choice: str | None = None,
+ response_format: dict | None = None,
+ **kwargs,
  ):
  if model is not None:
  self.model = model
@@ -255,6 +266,11 @@ class AsyncGeminiChatClient(BaseAsyncChatClient):
  else:
  tools_params = {}

+ if response_format:
+ response_format_params = {"generationConfig": {"response_mime_type": "application/json"}}
+ else:
+ response_format_params = {}
+
  if self.random_endpoint:
  self.random_endpoint = True
  self.endpoint_id = random.choice(self.backend_settings.models[self.model].endpoints)
@@ -273,6 +289,8 @@ class AsyncGeminiChatClient(BaseAsyncChatClient):
  "maxOutputTokens": max_tokens,
  },
  **tools_params,
+ **response_format_params,
+ **kwargs,
  }
  if system_prompt:
  request_body["systemInstruction"] = {"parts": [{"text": system_prompt}]}
File without changes