gofannon 0.1.0__py3-none-any.whl → 0.25.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,2 +0,0 @@
- from .search import Search
- from .get_article import GetArticle
gofannon/base/__init__.py CHANGED
@@ -1,30 +1,17 @@
  import time
  from abc import ABC, abstractmethod
  from dataclasses import dataclass
- from typing import Dict, Any, Callable
+ from typing import Any, Callable
  import json
  import logging
  from pathlib import Path
  from ..config import ToolConfig

- from typing import Any, Dict
+ from .smol_agents import SmolAgentsMixin
+ from .langchain import LangchainMixin
+ from .bedrock import BedrockMixin


- try:
-     from smolagents.tools import Tool as SmolTool
-     from smolagents.tools import tool as smol_tool_decorator
-     _HAS_SMOLAGENTS = True
- except ImportError:
-     _HAS_SMOLAGENTS = False
-
- try:
-     from langchain.tools import BaseTool as LangchainBaseTool
-     from langchain.pydantic_v1 import BaseModel, Field
-     from typing import Type, Optional
-     _HAS_LANGCHAIN = True
- except ImportError:
-     _HAS_LANGCHAIN = False
-
  @dataclass
  class ToolResult:
      success: bool
@@ -32,6 +19,7 @@ class ToolResult:
      error: str = None
      retryable: bool = False

+
  class WorkflowContext:
      def __init__(self, firebase_config=None):
          self.data = {}
@@ -48,34 +36,37 @@ class WorkflowContext:

      def _save_local(self, name):
          path = self.local_storage / f"{name}.json"
-         with open(path, 'w') as f:
-             json.dump({
-                 'data': self.data,
-                 'execution_log': self.execution_log
-             }, f)
+         with open(path, "w") as f:
+             json.dump({"data": self.data, "execution_log": self.execution_log}, f)

      def _save_to_firebase(self, name):
          from firebase_admin import firestore
+
          db = firestore.client()
-         doc_ref = db.collection('checkpoints').document(name)
-         doc_ref.set({
-             'data': self.data,
-             'execution_log': self.execution_log,
-             'timestamp': firestore.SERVER_TIMESTAMP
-         })
+         doc_ref = db.collection("checkpoints").document(name)
+         doc_ref.set(
+             {
+                 "data": self.data,
+                 "execution_log": self.execution_log,
+                 "timestamp": firestore.SERVER_TIMESTAMP,
+             }
+         )

      def log_execution(self, tool_name, duration, input_data, output_data):
          entry = {
-             'tool': tool_name,
-             'duration': duration,
-             'input': input_data,
-             'output': output_data
+             "tool": tool_name,
+             "duration": duration,
+             "input": input_data,
+             "output": output_data,
          }
          self.execution_log.append(entry)

- class BaseTool(ABC):
+
+ class BaseTool(SmolAgentsMixin, LangchainMixin, BedrockMixin, ABC):
      def __init__(self, **kwargs):
-         self.logger = logging.getLogger(f"{self.__class__.__module__}.{self.__class__.__name__}")
+         self.logger = logging.getLogger(
+             f"{self.__class__.__module__}.{self.__class__.__name__}"
+         )
          self._load_config()
          self._configure(**kwargs)
          self.logger.debug("Initialized %s tool", self.__class__.__name__)
@@ -88,7 +79,7 @@ class BaseTool(ABC):

      def _load_config(self):
          """Auto-load config based on tool type"""
-         if hasattr(self, 'API_SERVICE'):
+         if hasattr(self, "API_SERVICE"):
              self.api_key = ToolConfig.get(f"{self.API_SERVICE}_api_key")

      @property
@@ -98,7 +89,7 @@ class BaseTool(ABC):

      @property
      def output_schema(self):
-         return self.definition.get('function', {}).get('parameters', {})
+         return self.definition.get("function", {}).get("parameters", {})

      @abstractmethod
      def fn(self, *args, **kwargs):
@@ -114,138 +105,9 @@ class BaseTool(ABC):
                  tool_name=self.__class__.__name__,
                  duration=duration,
                  input_data=kwargs,
-                 output_data=result
+                 output_data=result,
              )

              return ToolResult(success=True, output=result)
          except Exception as e:
-             return ToolResult(
-                 success=False,
-                 output=None,
-                 error=str(e),
-                 retryable=True
-             )
-
-     def import_from_smolagents(self, smol_tool: "SmolTool"):
-         """
-         Takes a smolagents Tool instance and adapts it into this Tool.
-         """
-         if not _HAS_SMOLAGENTS:
-             raise RuntimeError(
-                 "smolagents is not installed or could not be imported. "
-                 "Install it or check your environment."
-             )
-         self.name = smol_tool.name[0]
-         self.description = smol_tool.description #getattr(smol_tool, "description", "No description provided.")
-
-
-         def adapted_fn(*args, **kwargs):
-             return smol_tool.forward(*args, **kwargs)
-
-
-         self.fn = adapted_fn
-
-     def export_to_smolagents(self) -> "SmolTool":
-         """
-         Export this Tool as a smolagents Tool instance.
-         This sets up a smolagents-style forward method that calls self.fn.
-         """
-         if not _HAS_SMOLAGENTS:
-             raise RuntimeError(
-                 "smolagents is not installed or could not be imported. "
-                 "Install it or check your environment."
-             )
-
-         # Provide a standard forward function that calls self.fn
-         def smol_forward(*args, **kwargs):
-             return self.fn(*args, **kwargs)
-
-
-         inputs_definition = {
-             "example_arg": {
-                 "type": "string",
-                 "description": "Example argument recognized by this tool"
-             }
-         }
-         output_type = "string"
-
-         # Construct a new smolagents Tool with the minimal fields
-         exported_tool = SmolTool()
-         exported_tool.name = getattr(self, "name", "exported_base_tool")
-         exported_tool.description = getattr(self, "description", "Exported from Tool")
-         exported_tool.inputs = inputs_definition
-         exported_tool.output_type = output_type
-         exported_tool.forward = smol_forward
-         exported_tool.is_initialized = True
-
-         return exported_tool
-
-     def import_from_langchain(self, langchain_tool: "LangchainBaseTool"):
-         if not _HAS_LANGCHAIN:
-             raise RuntimeError("langchain is not installed. Install with `pip install langchain-core`")
-
-         self.name = getattr(langchain_tool, "name", "exported_langchain_tool")
-         self.description = getattr(langchain_tool, "description", "No description provided.")
-
-         maybe_args_schema = getattr(langchain_tool, "args_schema", None)
-         if maybe_args_schema and hasattr(maybe_args_schema, "schema") and callable(maybe_args_schema.schema):
-             args_schema = maybe_args_schema.schema()
-         else:
-             args_schema = {}
-
-         # Store parameters to avoid modifying the definition property directly
-         self._parameters = args_schema.get("properties", {})
-         self._required = args_schema.get("required", [])
-
-         # Adapt the LangChain tool's execution method
-         def adapted_fn(*args, **kwargs):
-             return langchain_tool._run(*args, **kwargs)
-
-         self.fn = adapted_fn
-
-     def export_to_langchain(self) -> "LangchainBaseTool":
-         if not _HAS_LANGCHAIN:
-             raise RuntimeError(
-                 "langchain is not installed. Install with `pip install langchain-core`"
-             )
-
-         from pydantic import create_model
-
-         # Create type mapping from JSON schema types to Python types
-         type_map = {
-             "number": float,
-             "string": str,
-             "integer": int,
-             "boolean": bool,
-             "object": dict,
-             "array": list
-         }
-
-         parameters = self.definition.get("function", {}).get("parameters", {})
-         param_properties = parameters.get("properties", {})
-
-         # Dynamically create ArgsSchema using pydantic.create_model
-         fields = {}
-         for param_name, param_def in param_properties.items():
-             param_type = param_def.get("type", "string")
-             description = param_def.get("description", "")
-             fields[param_name] = (
-                 type_map.get(param_type, str),
-                 Field(..., description=description)
-             )
-
-         ArgsSchema = create_model('ArgsSchema', **fields)
-
-         # Create tool subclass with our functionality
-         class ExportedTool(LangchainBaseTool):
-             name: str = self.definition.get("function", {}).get("name", "")
-             description: str = self.definition.get("function", {}).get("description", "")
-             args_schema: Type[BaseModel] = ArgsSchema
-             fn: Callable = self.fn
-
-             def _run(self, *args, **kwargs):
-                 return self.fn(*args, **kwargs)
-
-         # Instantiate and return the tool
-         tool = ExportedTool()
-         return tool
+             return ToolResult(success=False, output=None, error=str(e), retryable=True)
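
Taken together, this change drops the optional-dependency guards and the per-framework import_from_*/export_to_* methods from BaseTool and moves that responsibility into SmolAgentsMixin, LangchainMixin, and BedrockMixin, while execute() continues to wrap fn() in a ToolResult. A minimal sketch of what a tool built on the refactored base might look like follows; EchoTool, its parameter schema, and the trailing usage comments are hypothetical illustrations, not code from the package.

# Hypothetical sketch, not part of gofannon: assumes subclasses still provide a
# `definition` property and an `fn` implementation, as the base class above implies;
# other hooks (e.g. _configure) referenced by __init__ are not visible in this hunk.
from gofannon.base import BaseTool


class EchoTool(BaseTool):
    @property
    def definition(self):
        # OpenAI-style function definition; output_schema is derived from "parameters".
        return {
            "function": {
                "name": "echo",
                "description": "Return the input text unchanged",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "text": {"type": "string", "description": "Text to echo"}
                    },
                    "required": ["text"],
                },
            }
        }

    def fn(self, text):
        return text


# Calling the tool's execute(...) wraps fn() in a ToolResult, and the mixins are
# presumably where adapters such as export_to_langchain() now live (assumption
# based on the methods removed from BaseTool in this diff).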