versionhq 1.2.4.10__py3-none-any.whl → 1.2.4.11__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
versionhq/__init__.py CHANGED
@@ -35,7 +35,7 @@ from versionhq.agent_network.formation import form_agent_network
 from versionhq.task_graph.draft import workflow
 
 
-__version__ = "1.2.4.10"
+__version__ = "1.2.4.11"
 __all__ = [
     "Agent",
 
versionhq/agent/model.py CHANGED
@@ -356,7 +356,6 @@ class Agent(BaseModel):
         response_format: Optional[Dict[str, Any]] = None,
         tools: Optional[List[InstanceOf[Tool]| InstanceOf[ToolSet] | Type[Tool]]] = None,
         tool_res_as_final: bool = False,
-        # task: Any = None
     ) -> Tuple[str, UsageMetrics]:
         """
         Create formatted prompts using the developer prompt and the agent's backstory, then call the base model.
versionhq/llm/llm_vars.py CHANGED
@@ -29,7 +29,7 @@ PROVIDERS = {
     },
     "azure": {
         "api_base": "AZURE_OPENAI_ENDPOINT",
-        "api_key":"AZURE_OPENAI_API_KEY",
+        "api_key": "AZURE_OPENAI_API_KEY",
         "api_version": "AZURE_OPENAI_API_VERSION",
     },
     "azure_ai": {
versionhq/llm/model.py CHANGED
@@ -9,7 +9,7 @@ from typing import Any, Dict, List, Optional
 from typing_extensions import Self
 
 import litellm
-from litellm import JSONSchemaValidationError, get_supported_openai_params
+from litellm import JSONSchemaValidationError, get_supported_openai_params, supports_response_schema
 from pydantic import BaseModel, Field, PrivateAttr, model_validator, ConfigDict
 
 from versionhq.llm.llm_vars import LLM_CONTEXT_WINDOW_SIZES, TEXT_MODELS, MODEL_PARAMS, PROVIDERS, ENDPOINTS
@@ -239,6 +239,13 @@ class LLM(BaseModel):
         return valid_cred
 
 
+    def _supports_response_schema(self) -> bool:
+        try:
+            return supports_response_schema(model=self.model, custom_llm_provider=self.endpoint_provider)
+        except:
+            return False
+
+
     def _supports_function_calling(self) -> bool:
         try:
             if self.model:
@@ -300,6 +307,8 @@ class LLM(BaseModel):
 
         if self.provider == "gemini":
             self.response_format = { "type": "json_object" } if not tools and self.model != "gemini/gemini-2.0-flash-thinking-exp" else None
+        elif response_format and "json_schema" in response_format:
+            self.response_format = response_format if self._supports_function_calling() else { "type": "json_object" }
         else:
             self.response_format = response_format
 
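For reference, the new _supports_response_schema helper wraps litellm's supports_response_schema, and the added response_format branch keeps a json_schema-style format only when a capability check passes, otherwise falling back to { "type": "json_object" }. The sketch below shows that check-and-fallback pattern in isolation; it is not code from the package, the function name, model name, and schema are illustrative placeholders, and it assumes litellm is installed.

# Minimal standalone sketch of the capability-check-and-fallback pattern (illustrative only).
from typing import Any, Dict, Optional

from litellm import supports_response_schema


def pick_response_format(model: str, requested: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
    """Keep a json_schema response format only if the model supports it; otherwise fall back to JSON-object mode."""
    if not requested or "json_schema" not in requested:
        return requested
    try:
        supported = supports_response_schema(model=model)
    except Exception:
        supported = False
    return requested if supported else {"type": "json_object"}


# Usage example; the model name and schema are placeholders.
schema_format = {
    "type": "json_schema",
    "json_schema": {"name": "answer", "schema": {"type": "object", "properties": {"text": {"type": "string"}}}},
}
print(pick_response_format("gpt-4o-mini", schema_format))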
versionhq/task/model.py CHANGED
@@ -398,7 +398,7 @@ class Task(BaseModel):
 
 
     def _structure_response_format(self, data_type: str = "object", model_provider: str = "gemini") -> Dict[str, Any] | None:
-        """Structures `response_schema` into the LLM response format."""
+        """Structures `response_schema` to the LLM response format."""
 
         from versionhq.task.structured_response import StructuredOutput
 
versionhq-1.2.4.10.dist-info/METADATA → versionhq-1.2.4.11.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: versionhq
-Version: 1.2.4.10
+Version: 1.2.4.11
 Summary: Autonomous agent networks for task automation with multi-step reasoning.
 Author-email: Kuriko Iwai <kuriko@versi0n.io>
 License: MIT License
versionhq-1.2.4.10.dist-info/RECORD → versionhq-1.2.4.11.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
-versionhq/__init__.py,sha256=nZhzt7FGZ7YWD3qOnbmac9HAXo-ES51K6CyEH1V2mQY,3356
+versionhq/__init__.py,sha256=5dce8lm9xq8PjAQJ6N1jlSmSdhuq1GlDpDjnLKHKNOo,3356
 versionhq/_prompt/auto_feedback.py,sha256=bbj37yTa11lRHpx-sV_Wmpb4dVnDBB7_v8ageUobHXY,3780
 versionhq/_prompt/constants.py,sha256=DOwUFnVVObEFqgnaMCDnW8fnw1oPMgS8JAqOiTuqleI,932
 versionhq/_prompt/model.py,sha256=wJlDM9yzrqlXWxyw4HkYQzPii2MPfqkgTF3qhXoJN2M,8038
@@ -14,7 +14,7 @@ versionhq/_utils/usage_metrics.py,sha256=gDK6fZgT1njX4iPIPFapWxfxIiz-zZYv72p0u6M
 versionhq/_utils/vars.py,sha256=bZ5Dx_bFKlt3hi4-NNGXqdk7B23If_WaTIju2fiTyPQ,57
 versionhq/agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 versionhq/agent/inhouse_agents.py,sha256=D2WAiXCYsnQK3_Fe7CbbtvXsHWOaN6vde6m_QoW7fH4,2629
-versionhq/agent/model.py,sha256=HW_-k9wBhHpo1mtvDW6mGV2X20qQ7_ds4ZBubpTf_JU,26969
+versionhq/agent/model.py,sha256=Cw9BdkDq45Ubzayq62A-nFqREBEIxMY0wfm_Xy8yP_w,26942
 versionhq/agent/parser.py,sha256=riG0dkdQCxH7uJ0AbdVdg7WvL0BXhUgJht0VtQvxJBc,4082
 versionhq/agent/rpm_controller.py,sha256=grezIxyBci_lDlwAlgWFRyR5KOocXeOhYkgN02dNFNE,2360
 versionhq/agent/TEMPLATES/Backstory.py,sha256=dkfuATUQ2g2WoUKkmgAIch-RB--bektGoQaUlsDOn0g,529
@@ -38,8 +38,8 @@ versionhq/knowledge/source.py,sha256=-hEUPtJUHHMx4rUKtiHl19J8xAMw-WVBw34zwa2jZ08
 versionhq/knowledge/source_docling.py,sha256=XpavmLvh4dLcuTikj8MCE9KG52oQMafy7_wBneliMK0,4994
 versionhq/knowledge/storage.py,sha256=Kd-4r6aWM5EDaoXrzKXbgi1hY6tysSQARPGXM95qMmU,8266
 versionhq/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-versionhq/llm/llm_vars.py,sha256=e_MdU8VHnbhW3ZyQivtygvdNs_oQHDhf_Ci9-fqiUuw,8907
-versionhq/llm/model.py,sha256=mwATPR2nqHcdVtB8ibOXbzFwc4IyDLNta34JvUtqH98,16864
+versionhq/llm/llm_vars.py,sha256=jPa7m-321w6NK_FjGeVBLR7w9HfuG6FuoKtum-86yLk,8908
+versionhq/llm/model.py,sha256=BEjWyFk0IWSUibHNc9apdFp3QdbGeBMQv4ZfvdgRjgE,17305
 versionhq/memory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 versionhq/memory/contextual_memory.py,sha256=QEMVvHuEXxY7M6-12S8HhyFKf108KfX8Zzt7paPW048,3882
 versionhq/memory/model.py,sha256=VQR1229t7GQPMItlGAHLtJrb6LrZfSoRA1DRW4z0SOU,8234
@@ -53,7 +53,7 @@ versionhq/storage/utils.py,sha256=r5ghA_ktdR2IuzlzKqZYCjsNxztEMzyhWLneA4cFuWY,74
 versionhq/task/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 versionhq/task/evaluation.py,sha256=9jFOmjP-yy1vxRn781KmpdQ_d4J_ZA1UX_21Q3m-iuE,4122
 versionhq/task/formatter.py,sha256=N8Kmk9vtrMtBdgJ8J7RmlKNMdZWSmV8O1bDexmCWgU0,643
-versionhq/task/model.py,sha256=XSiz36waswozJ0kU_xhNxpl1SXwobd9dYIzJsVUrykw,29615
+versionhq/task/model.py,sha256=-dqCQVRYF918RDM9mK_J7r4lMRwFqZ2G9NSePAU7DJY,29613
 versionhq/task/structured_response.py,sha256=tqOHpch8CVmMj0aZXjdDWtPNcVmBW8DVZnBvPBwS4PM,5053
 versionhq/task/TEMPLATES/Description.py,sha256=hKhpbz0ztbkUMXz9KiL-P40fis9OB5ICOdL9jCtgAhU,864
 versionhq/task_graph/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -74,8 +74,8 @@ versionhq/tool/gpt/_enum.py,sha256=VaONDFZJNVe30Wf3Pl9s0XvxP_Xxqv3RNFcnqyigGFk,5
 versionhq/tool/gpt/cua.py,sha256=5yrgz_fc3IH_uB70J51wmRBWkfH53Qx-a29nmwWyOcs,12078
 versionhq/tool/gpt/file_search.py,sha256=r5JVlf-epKB8DDXyrzlkezguHUMir0JW-77LUHoy-w8,5813
 versionhq/tool/gpt/web_search.py,sha256=bpqEQopbq9KtqQ_0W7QAAJ5TyoKGiVM94-SMp5oqNFE,3483
-versionhq-1.2.4.10.dist-info/licenses/LICENSE,sha256=cRoGGdM73IiDs6nDWKqPlgSv7aR4n-qBXYnJlCMHCeE,1082
-versionhq-1.2.4.10.dist-info/METADATA,sha256=3f9k13Vg5g9m7xCnCeNsy_8MvMx119NEB2DVLsCrN6Q,21349
-versionhq-1.2.4.10.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
-versionhq-1.2.4.10.dist-info/top_level.txt,sha256=DClQwxDWqIUGeRJkA8vBlgeNsYZs4_nJWMonzFt5Wj0,10
-versionhq-1.2.4.10.dist-info/RECORD,,
+versionhq-1.2.4.11.dist-info/licenses/LICENSE,sha256=cRoGGdM73IiDs6nDWKqPlgSv7aR4n-qBXYnJlCMHCeE,1082
+versionhq-1.2.4.11.dist-info/METADATA,sha256=Q0GiGODsDeyH1lgzQvZGdslcxLzQKKgzFD1wt7pMZko,21349
+versionhq-1.2.4.11.dist-info/WHEEL,sha256=1tXe9gY0PYatrMPMDd6jXqjfpz_B-Wqm32CPfRC58XU,91
+versionhq-1.2.4.11.dist-info/top_level.txt,sha256=DClQwxDWqIUGeRJkA8vBlgeNsYZs4_nJWMonzFt5Wj0,10
+versionhq-1.2.4.11.dist-info/RECORD,,