versionhq 1.1.9.14__py3-none-any.whl → 1.1.10.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,142 @@
1
+ #! FIXME
2
+ from typing import Dict, Optional, Type, List, Any, TypeVar
3
+
4
+ from pydantic import BaseModel, Field, InstanceOf
5
+
6
+ from versionhq.llm.llm_vars import SchemaType
7
+ from versionhq.llm.model import LLM
8
+
9
+
10
+ """
11
+ Structure a response schema (json schema) from the given Pydantic model.
12
+ """
13
+
14
+
15
class StructuredObject:
    """
    Builds a JSON-schema "object" node (OpenAI structured-output style) from a
    single Pydantic field, keyed by the field's name.
    """
    provider: str = "openai"
    field: Type[Field]

    title: str
    dtype: str = "object"
    properties: Dict[str, Dict[str, str]] = dict()
    required: List[str] = list()
    additionalProperties: bool = False

    def __init__(self, name, field: Type[Field], provider: str | InstanceOf[LLM] = "openai"):
        self.title = name
        self.field = field
        self.dtype = "object"
        self.additionalProperties = False
        self.provider = provider if isinstance(provider, str) else provider.provider
        # Fresh per-instance containers. The class-level `properties`/`required`
        # defaults are shared mutable objects, so without this every instance
        # (and every repeated _format() call) accumulates entries from the others.
        self.properties = dict()
        self.required = list()

    def _format(self):
        """
        Return ``{self.title: <json schema object>}`` built from the stored field,
        or None when no field was provided (the original fell through with
        `description` unbound in that case).
        """
        if not self.field:
            return None

        # Missing/None descriptions are normalized to an empty string.
        description = self.field.description if getattr(self.field, "description", None) is not None else ""

        #! REFINEME - nested property is hard-coded under the key `item`.
        self.properties.update({"item": {"type": SchemaType(self.field.annotation.__args__).convert()}})
        self.required.append("item")

        return {
            self.title: {
                "type": self.dtype,
                "description": description,
                "properties": self.properties,
                "additionalProperties": self.additionalProperties,
                "required": self.required,
            }
        }
52
+
53
+
54
+
55
class StructuredList:
    """
    Builds a JSON-schema "array" node with one nested object from a single
    Pydantic field, keyed by the field's name.
    """
    provider: str = "openai"
    field: Type[Field]
    title: str = ""
    dtype: str = "array"
    items: Dict[str, Dict[str, str]] = dict()

    def __init__(self, name, field: Type[Field], provider: str | LLM = "openai"):
        self.provider = provider if isinstance(provider, str) else provider.provider
        self.field = field
        self.title = name
        self.dtype = "array"
        self.items = dict()

    def _format(self):
        """
        Return ``{self.title: <json schema array>}`` built from the stored field,
        or None when no field was provided (the original fell through with
        `description` unbound in that case).
        """
        field = self.field
        if not field:
            return None

        description = "" if field.description is None else field.description
        props = {}

        for item in field.annotation.__args__:
            # Unwrap generic aliases (e.g. Dict[str, str] -> dict) to branch on
            # the container kind.
            nested_object_type = item.__origin__ if hasattr(item, "__origin__") else item

            if nested_object_type == dict:
                props.update({
                    "nest": {
                        "type": "object",
                        "properties": {"item": {"type": "string"}},  #! REFINEME - field title <>`item`
                        "required": ["item",],
                        "additionalProperties": False
                    }})

            elif nested_object_type == list:
                props.update({
                    "nest": {
                        "type": "array",
                        "items": {"item": {"type": "string"}},  #! REFINEME - field title <>`item`
                    }})
            else:
                props.update({"nest": {"type": SchemaType(nested_object_type).convert()}})

        # NOTE(review): every annotation arg writes the same "nest" key, so only
        # the last one survives — confirm whether multiple args are expected here.
        self.items = {**props}
        return {
            self.title: {
                "type": self.dtype,
                "description": description,
                "items": self.items,
            }
        }
110
+
111
+
112
+
113
+
114
class StructuredOutput(BaseModel):
    """
    Converts a Pydantic model class (``response_format``) into an OpenAI-style
    ``{"type": "json_schema", ...}`` response-format payload.
    """

    # The Pydantic model class whose fields define the schema.
    response_format: Any = None
    provider: str = "openai"
    applicable_models: List[InstanceOf[LLM] | str] = list()
    name: str = ""
    # NOTE(review): the field name `schema` shadows pydantic's own `schema`
    # attribute/method name — kept for interface compatibility, but verify it
    # does not trip pydantic warnings in the version pinned by the package.
    schema: Dict[str, Any] = dict(type="object", additionalProperties=False, properties=dict(), required=list())


    def _format(self, **kwargs):
        """
        Build and return the json_schema payload, or None when no
        ``response_format`` model was set. (The original only `pass`ed on None
        and then crashed on `.__name__`.)
        """
        if self.response_format is None:
            return None

        self.name = self.response_format.__name__

        for name, field in self.response_format.model_fields.items():
            # Every field is listed as required, matching strict-schema output.
            self.schema["required"].append(name)

            origin = getattr(field.annotation, "__origin__", None)
            if origin == dict:
                self.schema["properties"].update(StructuredObject(name=name, field=field)._format())
            elif origin == list:
                self.schema["properties"].update(StructuredList(name=name, field=field)._format())
            else:
                self.schema["properties"].update({name: {"type": SchemaType(field.annotation).convert(), **kwargs}})

        return {
            "type": "json_schema",
            "json_schema": {"name": self.name, "schema": self.schema}
        }
versionhq/team/model.py CHANGED
@@ -435,11 +435,8 @@ class Team(BaseModel):
435
435
  if not agent.function_calling_llm and self.function_calling_llm:
436
436
  agent.function_calling_llm = self.function_calling_llm
437
437
 
438
- # if agent.allow_code_execution:
439
- # agent.tools += agent.get_code_execution_tools()
440
-
441
- if not agent.step_callback and self.step_callback:
442
- agent.step_callback = self.step_callback
438
+ if self.step_callback:
439
+ agent.callbacks.append(self.step_callback)
443
440
 
444
441
  if self.process is None:
445
442
  self.process = TaskHandlingProcess.sequential
@@ -1,6 +1,6 @@
1
1
  import os
2
2
  from dotenv import load_dotenv
3
- from typing import Any, List, Optional
3
+ from typing import Any, List, Optional, Dict
4
4
  from pydantic import BaseModel, Field
5
5
 
6
6
  load_dotenv(override=True)
@@ -42,10 +42,9 @@ class TeamPlanner:
42
42
  Based on the following task summary, draft a AI agent's role and goal in concise manner.
43
43
  Task summary: {unassgined_task.summary}
44
44
  """,
45
- expected_output_json=True,
46
- output_field_list=[
47
- ResponseField(title="goal", type=str, required=True),
48
- ResponseField(title="role", type=str, required=True),
45
+ response_fields=[
46
+ ResponseField(title="goal", data_type=str, required=True),
47
+ ResponseField(title="role", data_type=str, required=True),
49
48
  ],
50
49
  )
51
50
  res = task.execute_sync(agent=agent_creator)
@@ -67,7 +66,7 @@ class TeamPlanner:
67
66
  """
68
67
 
69
68
  from versionhq.agent.model import Agent
70
- from versionhq.task.model import Task, ResponseField
69
+ from versionhq.task.model import Task
71
70
 
72
71
  team_planner = Agent(
73
72
  role="team planner",
@@ -76,18 +75,18 @@ class TeamPlanner:
76
75
  )
77
76
 
78
77
  task_summary_list = [task.summary for task in self.tasks]
78
+
79
+ class TeamPlanIdea(BaseModel):
80
+ plan: str | Dict[str, Any] = Field(default=None, description="a decriptive plan to be executed by the team")
81
+
82
+
79
83
  task = Task(
80
84
  description=f"""
81
85
  Based on the following task summaries, create the most descriptive plan that the team can execute most efficiently. Take all the task summaries - task's description and tools available - into consideration. Your answer only contains a dictionary.
82
86
 
83
87
  Task summaries: {" ".join(task_summary_list)}
84
- """,
85
- expected_output_json=False,
86
- expected_output_pydantic=True,
87
- output_field_list=[
88
- ResponseField(title="task", type=str, required=True)
89
- for task in self.tasks
90
- ],
88
+ """,
89
+ pydantic_custom_output=TeamPlanIdea
91
90
  )
92
91
  output = task.execute_sync(agent=team_planner, context=context, tools=tools)
93
92
  return output
@@ -1,56 +0,0 @@
1
- from enum import Enum
2
-
3
- DEFAULT_AUTH_SCHEME = "OAUTH2"
4
-
5
- class ComposioAuthScheme(str, Enum):
6
- OAUTH2 = "OAUTH2"
7
- BEARER_TOKEN = "BEARER_TOKEN"
8
- API_KEY = "API_KEY"
9
-
10
-
11
- class ComposioAppName(str, Enum):
12
- """
13
- Enum to store app names that we can connect via Composio as data pipelines or destination services.
14
- """
15
-
16
- SALESFORCE = "salesforce"
17
- AIRTABLE = "airtable"
18
- MAILCHIMP = "mailchimp"
19
- HUBSPOT = "hubspot"
20
- KLAVIYO = "klaviyo"
21
- GOOGLESHEET = "googlesheets"
22
- GMAIL = "gmail"
23
- FACEBOOK = "facebook"
24
- TWITTER = "twitter"
25
- TWITTER_MEDIA = "twitter_media"
26
- LINKEDIN = "linkedin"
27
-
28
-
29
- composio_app_set = [
30
- (ComposioAppName.SALESFORCE, ComposioAuthScheme.OAUTH2),
31
- (ComposioAppName.AIRTABLE, ComposioAuthScheme.OAUTH2, ComposioAuthScheme.API_KEY, ComposioAuthScheme.BEARER_TOKEN),
32
- (ComposioAppName.MAILCHIMP, ComposioAuthScheme.OAUTH2),
33
- (ComposioAppName.HUBSPOT, ComposioAuthScheme.OAUTH2, ComposioAuthScheme.BEARER_TOKEN),
34
- (ComposioAppName.KLAVIYO, ComposioAuthScheme.OAUTH2, ComposioAuthScheme.API_KEY),
35
- (ComposioAppName.GOOGLESHEET, ComposioAuthScheme.OAUTH2),
36
- (ComposioAppName.GMAIL, ComposioAuthScheme.OAUTH2, ComposioAuthScheme.BEARER_TOKEN),
37
- (ComposioAppName.TWITTER, ComposioAuthScheme.OAUTH2),
38
- (ComposioAppName.TWITTER_MEDIA, ComposioAuthScheme.OAUTH2),
39
- (ComposioAppName.FACEBOOK, ComposioAuthScheme.OAUTH2),
40
- (ComposioAppName.LINKEDIN, ComposioAuthScheme.OAUTH2),
41
- ]
42
-
43
- class ComposioStatus(str, Enum):
44
- INITIATED = "INITIATED"
45
- ACTIVE = "ACTIVE"
46
- FAILED = "FAILED"
47
-
48
-
49
-
50
-
51
- class ComposioAction(str, Enum):
52
- """
53
- Enum to store composio's action that can be called via `Actions.xxx`
54
- """
55
- # HUBSPOT_INITIATE_DATA_IMPORT_PROCESS = "hubspot_initate_date_import_process"
56
- HUBSPOT_CREATE_PIPELINE_STAGE = "hubspot_create_pipeline_stage"
@@ -0,0 +1,40 @@
1
+ from typing import Any, Dict, Optional
2
+
3
+ from pydantic import BaseModel, PrivateAttr, Field, InstanceOf
4
+
5
+
6
class CacheHandler(BaseModel):
    """
    Adds and reads cached tool outputs, keyed by "<tool_name>-<input>".
    """

    # Private per-instance store; PrivateAttr keeps it out of the model schema.
    _cache: Dict[str, Any] = PrivateAttr(default_factory=dict)

    def add(self, tool_name: str, input: str, output: Any) -> None:
        """Cache `output` under the composite tool/input key."""
        self._cache[f"{tool_name}-{input}"] = output

    def read(self, tool_name: str, input: str) -> Optional[Any]:
        """
        Return the cached output for the tool/input pair, or None on a miss.
        (Return type fixed: `add` stores Any, so `Optional[str]` was wrong.)
        """
        return self._cache.get(f"{tool_name}-{input}")
18
+
19
+
20
+
21
class CacheTool(BaseModel):
    """
    A cache tool to read a cached result from a CacheHandler.
    """

    name: str = "Cache Tool"
    cache_handler: InstanceOf[CacheHandler] = Field(default_factory=CacheHandler)

    def read_cache(self, key):
        """
        Parse a key of the form "tool:<name>|input:<value>" and return the
        cached output, or None. Uses `partition` so a malformed key yields a
        cache miss instead of the original's IndexError.
        """
        _, _, remainder = key.partition("tool:")
        tool, _, tool_input = remainder.partition("|input:")
        return self.cache_handler.read(tool.strip(), tool_input.strip())

    def tool(self):
        """Wrap `read_cache` as a Tool instance."""
        # NOTE(review): `Tool` is not imported anywhere in this module in the
        # diff — this raises NameError when called. Confirm and import the
        # correct Tool class (likely from versionhq.tool.model).
        return Tool(
            func=self.read_cache,
            name=self.name,
            description="Read from cache"
        )
@@ -11,9 +11,10 @@ from pydantic_core import PydanticCustomError
11
11
  from composio import ComposioToolSet
12
12
  from composio_langchain import action
13
13
 
14
- from versionhq.tool import ComposioAppName, ComposioAuthScheme, composio_app_set, ComposioStatus, ComposioAction
14
+ from versionhq.tool.composio_tool_vars import ComposioAppName, ComposioAuthScheme, composio_app_set, ComposioStatus, ComposioAction
15
+ from versionhq.tool.cache_handler import CacheHandler
15
16
  from versionhq._utils.logger import Logger
16
- from versionhq._utils.cache_handler import CacheHandler
17
+
17
18
 
18
19
  load_dotenv(override=True)
19
20
 
@@ -0,0 +1,56 @@
1
+ from enum import Enum
2
+
3
# Default auth scheme used when none is specified for a Composio connection.
DEFAULT_AUTH_SCHEME = "OAUTH2"


class ComposioAuthScheme(str, Enum):
    """Authentication schemes available for Composio app connections."""

    OAUTH2 = "OAUTH2"
    BEARER_TOKEN = "BEARER_TOKEN"
    API_KEY = "API_KEY"


class ComposioAppName(str, Enum):
    """
    Enum to store app names that we can connect via Composio as data pipelines or destination services.
    """

    SALESFORCE = "salesforce"
    AIRTABLE = "airtable"
    MAILCHIMP = "mailchimp"
    HUBSPOT = "hubspot"
    KLAVIYO = "klaviyo"
    GOOGLESHEET = "googlesheets"
    GMAIL = "gmail"
    FACEBOOK = "facebook"
    TWITTER = "twitter"
    TWITTER_MEDIA = "twitter_media"
    LINKEDIN = "linkedin"


# Each entry pairs an app with the auth schemes it supports (first element is
# the app, the rest are its accepted schemes).
composio_app_set = [
    (ComposioAppName.SALESFORCE, ComposioAuthScheme.OAUTH2),
    (ComposioAppName.AIRTABLE, ComposioAuthScheme.OAUTH2, ComposioAuthScheme.API_KEY, ComposioAuthScheme.BEARER_TOKEN),
    (ComposioAppName.MAILCHIMP, ComposioAuthScheme.OAUTH2),
    (ComposioAppName.HUBSPOT, ComposioAuthScheme.OAUTH2, ComposioAuthScheme.BEARER_TOKEN),
    (ComposioAppName.KLAVIYO, ComposioAuthScheme.OAUTH2, ComposioAuthScheme.API_KEY),
    (ComposioAppName.GOOGLESHEET, ComposioAuthScheme.OAUTH2),
    (ComposioAppName.GMAIL, ComposioAuthScheme.OAUTH2, ComposioAuthScheme.BEARER_TOKEN),
    (ComposioAppName.TWITTER, ComposioAuthScheme.OAUTH2),
    (ComposioAppName.TWITTER_MEDIA, ComposioAuthScheme.OAUTH2),
    (ComposioAppName.FACEBOOK, ComposioAuthScheme.OAUTH2),
    (ComposioAppName.LINKEDIN, ComposioAuthScheme.OAUTH2),
]


class ComposioStatus(str, Enum):
    """Connection lifecycle states reported by Composio."""

    INITIATED = "INITIATED"
    ACTIVE = "ACTIVE"
    FAILED = "FAILED"


class ComposioAction(str, Enum):
    """
    Enum to store composio's action that can be called via `Actions.xxx`
    """

    # HUBSPOT_INITIATE_DATA_IMPORT_PROCESS = "hubspot_initate_date_import_process"
    HUBSPOT_CREATE_PIPELINE_STAGE = "hubspot_create_pipeline_stage"
@@ -10,12 +10,11 @@ def tool(*args):
10
10
  """
11
11
 
12
12
  def create_tool(tool_name: str) -> Callable:
13
-
14
- def _make_tool(f: Callable) -> Tool:
15
- if f.__doc__ is None:
13
+ def _make_tool(func: Callable) -> Tool:
14
+ if func.__doc__ is None:
16
15
  raise ValueError("Function must have a docstring")
17
16
 
18
- if f.__annotations__ is None:
17
+ if func.__annotations__ is None:
19
18
  raise ValueError("Function must have type annotations")
20
19
 
21
20
  class_name = "".join(tool_name.split()).title()
@@ -24,11 +23,11 @@ def tool(*args):
24
23
  (BaseModel,),
25
24
  {
26
25
  "__annotations__": {
27
- k: v for k, v in f.__annotations__.items() if k != "return"
26
+ k: v for k, v in func.__annotations__.items() if k != "return"
28
27
  },
29
28
  },
30
29
  )
31
- return Tool(name=tool_name, function=f, args_schema=args_schema)
30
+ return Tool(name=tool_name, func=func, args_schema=args_schema)
32
31
 
33
32
  return _make_tool
34
33