gofannon 0.1.0__py3-none-any.whl → 0.25.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,85 @@
+ from typing import Type, Callable
+
+ try:
+     from langchain.tools import BaseTool as LangchainBaseTool
+     from langchain.pydantic_v1 import BaseModel, Field
+
+     _HAS_LANGCHAIN = True
+ except ImportError:
+     _HAS_LANGCHAIN = False
+
+
+ class LangchainMixin:
+     def import_from_langchain(self, langchain_tool):
+         if not _HAS_LANGCHAIN:
+             raise RuntimeError(
+                 "langchain is not installed. Install with `pip install langchain-core`"
+             )
+
+         self.name = getattr(langchain_tool, "name", "exported_langchain_tool")
+         self.description = getattr(
+             langchain_tool, "description", "No description provided."
+         )
+
+         maybe_args_schema = getattr(langchain_tool, "args_schema", None)
+         if (
+             maybe_args_schema
+             and hasattr(maybe_args_schema, "schema")
+             and callable(maybe_args_schema.schema)
+         ):
+             args_schema = maybe_args_schema.schema()
+         else:
+             args_schema = {}
+
+         self._parameters = args_schema.get("properties", {})
+         self._required = args_schema.get("required", [])
+
+         def adapted_fn(*args, **kwargs):
+             return langchain_tool._run(*args, **kwargs)
+
+         self.fn = adapted_fn
+
+     def export_to_langchain(self):
+         if not _HAS_LANGCHAIN:
+             raise RuntimeError(
+                 "langchain is not installed. Install with `pip install langchain-core`"
+             )
+
+         from pydantic import create_model
+
+         type_map = {
+             "number": float,
+             "string": str,
+             "integer": int,
+             "boolean": bool,
+             "object": dict,
+             "array": list,
+         }
+
+         parameters = self.definition.get("function", {}).get("parameters", {})
+         param_properties = parameters.get("properties", {})
+
+         fields = {}
+         for param_name, param_def in param_properties.items():
+             param_type = param_def.get("type", "string")
+             description = param_def.get("description", "")
+             fields[param_name] = (
+                 type_map.get(param_type, str),
+                 Field(..., description=description),
+             )
+
+         ArgsSchema = create_model("ArgsSchema", **fields)
+
+         class ExportedTool(LangchainBaseTool):
+             name: str = self.definition.get("function", {}).get("name", "")
+             description: str = self.definition.get("function", {}).get(
+                 "description", ""
+             )
+             args_schema: Type[BaseModel] = ArgsSchema
+             fn: Callable = self.fn
+
+             def _run(self, *args, **kwargs):
+                 return self.fn(*args, **kwargs)
+
+         tool = ExportedTool()
+         return tool
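The mixin above converts between gofannon's JSON-schema tool definitions and LangChain's `BaseTool`/Pydantic interface in both directions. A minimal usage sketch, assuming `langchain-core` is installed and that `MyTool` stands in for any concrete gofannon tool that composes `LangchainMixin` (the class name and import path below are hypothetical):

```python
from my_project.tools import MyTool  # hypothetical concrete gofannon tool

tool = MyTool()

# Export: builds a Pydantic args schema from the tool's definition and wraps
# self.fn in a LangChain BaseTool subclass.
lc_tool = tool.export_to_langchain()
print(lc_tool.name, lc_tool.description)

# Import: copies name/description/args_schema from an existing LangChain tool
# and routes subsequent tool.fn(...) calls through the LangChain tool's _run.
tool.import_from_langchain(lc_tool)
```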
@@ -0,0 +1,51 @@
+ try:
+     from smolagents.tools import Tool as SmolTool
+     from smolagents.tools import tool as smol_tool_decorator
+
+     _HAS_SMOLAGENTS = True
+ except ImportError:
+     _HAS_SMOLAGENTS = False
+
+
+ class SmolAgentsMixin:
+     def import_from_smolagents(self, smol_tool):
+         if not _HAS_SMOLAGENTS:
+             raise RuntimeError(
+                 "smolagents is not installed or could not be imported. "
+                 "Install it or check your environment."
+             )
+         self.name = smol_tool.name[0]
+         self.description = smol_tool.description
+
+         def adapted_fn(*args, **kwargs):
+             return smol_tool.forward(*args, **kwargs)
+
+         self.fn = adapted_fn
+
+     def export_to_smolagents(self):
+         if not _HAS_SMOLAGENTS:
+             raise RuntimeError(
+                 "smolagents is not installed or could not be imported. "
+                 "Install it or check your environment."
+             )
+
+         def smol_forward(*args, **kwargs):
+             return self.fn(*args, **kwargs)
+
+         inputs_definition = {
+             "example_arg": {
+                 "type": "string",
+                 "description": "Example argument recognized by this tool",
+             }
+         }
+         output_type = "string"
+
+         exported_tool = SmolTool()
+         exported_tool.name = getattr(self, "name", "exported_base_tool")
+         exported_tool.description = getattr(self, "description", "Exported from Tool")
+         exported_tool.inputs = inputs_definition
+         exported_tool.output_type = output_type
+         exported_tool.forward = smol_forward
+         exported_tool.is_initialized = True
+
+         return exported_tool
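A similar round trip exists for smolagents. Note that `export_to_smolagents` attaches a placeholder `inputs` schema (`example_arg`) and a `"string"` output type rather than deriving them from the tool's definition. A sketch, assuming smolagents is installed and `MyTool` is again a hypothetical gofannon tool composing `SmolAgentsMixin`:

```python
from my_project.tools import MyTool  # hypothetical tool class

tool = MyTool()
smol = tool.export_to_smolagents()
print(smol.name, smol.inputs)   # inputs is the hard-coded placeholder schema
result = smol.forward("hello")  # delegates to the original tool's fn
```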
@@ -1,5 +1 @@
- from .exponents import Exponents
- from .multiplication import Multiplication
- from .division import Division
- from .addition import Addition
- from .subtraction import Subtraction
+
gofannon/config.py CHANGED
@@ -11,7 +11,9 @@ class ToolConfig:
          self.config = {
              'github_api_key': os.getenv('GITHUB_API_KEY'),
              'deepinfra_api_key': os.getenv('DEEPINFRA_API_KEY'),
-             'arxiv_api_key': os.getenv('ARXIV_API_KEY')
+             'arxiv_api_key': os.getenv('ARXIV_API_KEY'),
+             'google_search_api_key': os.getenv('GOOGLE_SEARCH_API_KEY'),
+             'google_search_engine_id': os.getenv('GOOGLE_SEARCH_ENGINE_ID')
          }

      @classmethod
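This change wires two new environment variables into `ToolConfig`. A sketch of supplying them at runtime (values are placeholders; in practice they would come from the shell environment or a secrets manager, and this assumes the config is built after the environment is populated):

```python
import os

# Placeholder credentials; set real values in your environment instead.
os.environ["GOOGLE_SEARCH_API_KEY"] = "my-google-api-key"
os.environ["GOOGLE_SEARCH_ENGINE_ID"] = "my-search-engine-id"

from gofannon.config import ToolConfig

# ToolConfig.get is how other tools in this release (e.g. GoogleSearch)
# read these keys.
print(ToolConfig.get("google_search_api_key") is not None)
```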
@@ -1,11 +1,4 @@

- from .create_issue import CreateIssue
- from .get_repo_contents import GetRepoContents
- from .commit_file import CommitFile
- from .commit_files import CommitFiles
- from .search import SearchRepos
- from .read_issue import ReadIssue
-



@@ -0,0 +1,104 @@
+ import os
+ import json
+ import logging
+ import importlib.util
+ from github import Github
+ from openai import OpenAI
+ from gofannon.config import FunctionRegistry
+ from gofannon.base import BaseTool
+
+ logger = logging.getLogger(__name__)
+
+ def load_review_checks():
+     """
+     Dynamically load the review checks module from a configurable file.
+     The path is specified via the environment variable PR_REVIEW_CHECKS_PATH.
+     If not defined, it defaults to ".github/scripts/pr_review_checks.py".
+     Returns a list of check classes (having names ending with 'Check').
+     """
+     checks_path = os.getenv("PR_REVIEW_CHECKS_PATH", ".github/scripts/pr_review_checks.py")
+     spec = importlib.util.spec_from_file_location("pr_review_checks", checks_path)
+     module = importlib.util.module_from_spec(spec)
+     spec.loader.exec_module(module)
+     # Collect all classes in the module with names ending in 'Check'
+     checks = [cls for name, cls in module.__dict__.items() if name.endswith("Check") and isinstance(cls, type)]
+     return checks
+
+ @FunctionRegistry.register
+ class PRReviewTool(BaseTool):
+     def __init__(self, name="pr_review_tool"):
+         super().__init__()
+         self.name = name
+         self.api_key = os.getenv("OPENAI_API_KEY")
+         self.base_url = os.getenv("OPENAI_BASE_URL")
+         self.model_name = os.getenv("OPENAI_MODEL_NAME")
+         self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)
+
+     @property
+     def definition(self):
+         return {
+             "type": "function",
+             "function": {
+                 "name": self.name,
+                 "description": "Perform an automated pull request review using gofannon tools. "
+                                "It aggregates configurable checks (e.g. code quality and schema validation).",
+                 "parameters": {
+                     "type": "object",
+                     "properties": {
+                         "pr_number": {
+                             "type": "integer",
+                             "description": "The pull request number."
+                         },
+                         "repo_name": {
+                             "type": "string",
+                             "description": "The repository name in the format owner/repo."
+                         }
+                     },
+                     "required": ["pr_number", "repo_name"]
+                 }
+             }
+         }
+
+     def fn(self, pr_number, repo_name):
+         # Connect to GitHub and get pull request details.
+         g = Github(os.getenv("GITHUB_TOKEN"))
+         repo = g.get_repo(repo_name)
+         pr = repo.get_pull(pr_number)
+         all_comments = []
+         check_results = {}
+
+         # Load review check classes dynamically.
+         check_classes = load_review_checks()
+         checks = [check_class(self.client, self.model_name) for check_class in check_classes]
+
+         for check in checks:
+             check_name = check.__class__.__name__
+             check_results[check_name] = []
+             if hasattr(check, 'process_pr_file'):
+                 for file in pr.get_files():
+                     file_comments, analyzed = check.process_pr_file(file, repo, pr)
+                     if analyzed:
+                         for comment in file_comments:
+                             comment['check_name'] = check_name
+                             all_comments.append(comment)
+                             check_results[check_name].append(comment)
+             if hasattr(check, 'process_pr'):
+                 pr_comments, _ = check.process_pr(pr)
+                 for comment in pr_comments:
+                     comment['check_name'] = check_name
+                     all_comments.append(comment)
+                     check_results[check_name].append(comment)
+
+         summary = "## 🤖 Automated PR Review Summary 🤖\n\n"
+         for check_name, comments in check_results.items():
+             summary += f"### 🤖 {check_name} 🤖\n\n"
+             for comment in comments:
+                 body = comment.get("body", "")
+                 file_path = comment.get("path", "")
+                 if file_path and file_path != "GENERAL":
+                     body = f"**File:** `{file_path}`\n{body}"
+                 summary += f"{body}\n\n"
+         if not all_comments:
+             summary += "\nNo issues found. Code looks good!"
+
+         return summary
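A usage sketch for the PR review tool, assuming `GITHUB_TOKEN`, `OPENAI_API_KEY`, and optionally `OPENAI_BASE_URL`, `OPENAI_MODEL_NAME`, and `PR_REVIEW_CHECKS_PATH` are set; the import path is only illustrative, since the hunk does not name the file:

```python
from gofannon.pr_review_tool import PRReviewTool  # import path assumed

tool = PRReviewTool()
# Placeholder PR number and repository. fn loads the check classes from
# PR_REVIEW_CHECKS_PATH, runs them over the PR, and returns a markdown summary.
summary = tool.fn(pr_number=42, repo_name="owner/repo")
print(summary)
```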
File without changes
@@ -0,0 +1,55 @@
+ from ..base import BaseTool
+ from googleapiclient.discovery import build
+ from ..config import ToolConfig, FunctionRegistry
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+ @FunctionRegistry.register
+ class GoogleSearch(BaseTool):
+     def __init__(self, api_key=None, engine_id=None, name="google_search"):
+         super().__init__()
+         self.api_key = api_key or ToolConfig.get("google_search_api_key")
+         self.engine_id = engine_id or ToolConfig.get("google_search_engine_id")
+         self.name = name
+         self.API_SERVICE = 'google_search'
+
+     @property
+     def definition(self):
+         return {
+             "type": "function",
+             "function": {
+                 "name": self.name,
+                 "description": "Searches Google for the given query and returns snippets from the results.",
+                 "parameters": {
+                     "type": "object",
+                     "properties": {
+                         "query": {
+                             "type": "string",
+                             "description": "The search query."
+                         },
+                         "num_results": {
+                             "type": "integer",
+                             "description": "The maximum number of results to return (default: 5)."
+                         }
+                     },
+                     "required": ["query"]
+                 }
+             }
+         }
+
+     def fn(self, query, num_results=5):
+         logger.debug(f"Searching Google for: {query}")
+         try:
+             service = build("customsearch", "v1", developerKey=self.api_key)
+             cse = service.cse()
+             result = cse.list(q=query, cx=self.engine_id, num=num_results).execute()
+
+             search_results = []
+             for item in result['items']:
+                 search_results.append(f"Title: {item['title']}\nSnippet: {item['snippet']}\nLink: {item['link']}")
+             return "\n\n".join(search_results)
+
+         except Exception as e:
+             logger.error(f"Error during Google Search: {e}")
+             return f"Error during Google Search: {e}"
File without changes
@@ -0,0 +1,42 @@
+ from selenium import webdriver
+ from selenium.webdriver.chrome.options import Options
+ from ..base import BaseTool
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+ class HeadlessBrowserBase(BaseTool):
+     """
+     Base class for headless browser tools.
+     On initialization, the provider parameter may be one of:
+     "selenium-chrome", "selenium-firefox", "lightpanda", or "remote".
+     Currently, only "selenium-chrome" is supported.
+     """
+     def __init__(self, provider="selenium-chrome", **kwargs):
+         super().__init__(**kwargs)
+         self.provider = provider.lower()
+         supported = ["selenium-chrome", "selenium-firefox", "lightpanda", "remote"]
+         if self.provider not in supported:
+             raise ValueError(f"Unsupported provider: {self.provider}. Supported providers: {supported}")
+
+     def _get_driver(self):
+         if self.provider == "selenium-chrome":
+             chrome_options = Options()
+             chrome_options.add_argument("--headless")
+             chrome_options.add_argument("--disable-gpu")
+             chrome_options.add_argument("--no-sandbox")
+             # Adjust the executable_path if needed or ensure the chromedriver is in PATH.
+             driver = webdriver.Chrome(options=chrome_options)
+             return driver
+         else:
+             raise NotImplementedError(f"Provider {self.provider} is not supported in the current implementation.")
+
+     def get_page_source(self, url):
+         driver = self._get_driver()
+         try:
+             driver.get(url)
+             # Optionally, add waits here if needed so that JavaScript can fully execute.
+             page_source = driver.page_source
+         finally:
+             driver.quit()
+         return page_source
@@ -0,0 +1,35 @@
+ from .base import HeadlessBrowserBase
+ from ..config import FunctionRegistry
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+ @FunctionRegistry.register
+ class HeadlessBrowserGet(HeadlessBrowserBase):
+     def __init__(self, provider="selenium-chrome", name="headless_browser_get"):
+         super().__init__(provider=provider)
+         self.name = name
+
+     @property
+     def definition(self):
+         return {
+             "type": "function",
+             "function": {
+                 "name": self.name,
+                 "description": "Retrieve the contents of a web page with JavaScript rendered using a headless browser.",
+                 "parameters": {
+                     "type": "object",
+                     "properties": {
+                         "url": {
+                             "type": "string",
+                             "description": "The URL of the web page to fetch"
+                         }
+                     },
+                     "required": ["url"]
+                 }
+             }
+         }
+
+     def fn(self, url):
+         logger.debug(f"Fetching URL with headless browser using provider {self.provider}: {url}")
+         return self.get_page_source(url)
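A sketch of fetching a rendered page with the concrete tool, assuming Chrome and a matching chromedriver are installed and on PATH (the import path is illustrative). Providers other than `selenium-chrome` pass the constructor's validation but raise `NotImplementedError` when a driver is requested:

```python
from gofannon.headless_browser.headless_browser_get import HeadlessBrowserGet  # path assumed

page_tool = HeadlessBrowserGet()            # defaults to the selenium-chrome provider
html = page_tool.fn("https://example.com")  # page source after JavaScript has run
print(html[:200])
```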
@@ -0,0 +1 @@
+
@@ -0,0 +1,126 @@
+ import jsonschema
+ import jsonschema.exceptions
+ from ..base import BaseTool
+ from ..config import FunctionRegistry
+
+ import logging
+ import json
+
+ import requests
+ from jsonschema import validate
+
+ logger = logging.getLogger(__name__)
+
+ valid_iss_schema = {
+     "type": "object",
+     "properties": {
+         "message": {"type": "string", "enum": ["success"]},
+         "timestamp": {"type": "integer"},
+         "iss_position": {
+             "type": "object",
+             "properties": {
+                 "longitude": {"type": "string"},
+                 "latitude": {"type": "string"},
+             },
+             "required": ["longitude", "latitude"],
+         },
+     },
+     "required": ["message", "timestamp", "iss_position"],
+ }
+
+ error_response_string = "Error: The ISS endpoint returned an error. No location for the ISS can be determined"
+ error_response_json = {
+     "message": "failure",
+     "error": "Error: The ISS endpoint returned an error. No location for the ISS can be determined",
+ }
+
+
+ @FunctionRegistry.register
+ class IssLocator(BaseTool):
+     def __init__(self, name="iss_locator", format_json=True):
+         super().__init__()
+         self.name = name
+         self.format_json = format_json
+
+     @property
+     def definition(self):
+         return {
+             "type": "function",
+             "function": {
+                 "name": self.name,
+                 "description": "Returns the latitude / longitude of the current location of the International Space Station (ISS)",
+                 "parameters": {},
+                 "required": [],
+             },
+         }
+
+     # Function returns one of two messages, depending on the requested format:
+     #
+     # With format_json=False, returns one of two strings:
+     # "According to OpenNotify.org, the International Space Station can be found at (lat, long) (x,y)"
+     # or
+     # "The ISS endpoint returned an error. No location for the ISS can be determined"
+     #
+     # With format_json=True (default), one of the following strings:
+     # "{
+     #     "message": "success",
+     #     "timestamp": 1739999640,
+     #     "iss_position": {"longitude": "-11.6885", "latitude": "-50.0654"},
+     # }"
+     # or
+     # "{
+     #     "message": "failure",
+     #     "error": "Error: The ISS endpoint returned an error. No location for the ISS can be determined",
+     # }"
+
+     def fn(self):
+         base_url = "http://api.open-notify.org/iss-now.json"
+         logger.debug(f"Fetching ISS pos from OpenNotify.org at {base_url}")
+         if self.format_json:
+             response = json.dumps(error_response_json)
+         else:
+             response = error_response_string
+
+         try:
+             http_response = requests.get(base_url)
+             response_json = http_response.json()
+             # Validate the returned schema is valid.
+             validate(response_json, valid_iss_schema)
+             # Does not seem to be a way to evaluate strings as floats in a range in jsonschema,
+             # other than using a REALLY ugly regex.
+             lat = float(response_json["iss_position"]["latitude"])
+             long = float(response_json["iss_position"]["longitude"])
+             if (lat > -90 and lat < 90) and (long > -180 and long < 180):
+                 if self.format_json:
+                     response = json.dumps(response_json)
+                 else:
+                     response = f"According to OpenNotify.org, the International Space Station can be found at (lat, long) ({lat}, {long})"
+             else:
+                 raise ValueError(f"(latitude, longitude) out of range: ({lat},{long})")
+         except requests.exceptions.HTTPError as errh:
+             logger.debug(f"HTTP exception: GET at {base_url} returns {errh}")
+             pass
+         except requests.exceptions.ConnectionError as errc:
+             logger.debug(f"HTTP connection exception: GET at {base_url} returns {errc}")
+             pass
+         except requests.exceptions.Timeout as errt:
+             logger.debug(f"HTTP timeout exception: GET at {base_url} returns {errt}")
+             pass
+         except requests.exceptions.RequestException as err:
+             logger.debug(f"Requests exception: GET at {base_url} returns {err}")
+             pass
+         except jsonschema.exceptions.ValidationError as errj:
+             logger.debug(
+                 f"JSON validation failure GET at {base_url} malformed response: {errj}"
+             )
+             pass
+         except ValueError as errv:
+             logger.debug(
+                 f"Value Exception GET at {base_url} malformed response: {errv}"
+             )
+             pass
+         except Exception as erre:
+             logger.debug(f"General exception GET at {base_url} Error: {erre}")
+             pass
+
+         return response
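A sketch of the two output formats, assuming network access to api.open-notify.org (the import path is illustrative). On any request, validation, or range error the tool logs at debug level and returns the canned failure response instead of raising:

```python
from gofannon.open_notify_space import IssLocator  # import path assumed

print(IssLocator().fn())                   # JSON string: success payload or the failure object
print(IssLocator(format_json=False).fn())  # human-readable sentence or the error string
```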
@@ -1,3 +0,0 @@
- from .sequential_cot import SequentialCoT
- from .hierarchical_cot import HierarchicalCoT
- from .tree_of_thought import TreeOfThought