golf-mcp 0.1.0 (golf_mcp-0.1.0-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of golf-mcp might be problematic.
- golf/__init__.py +1 -0
- golf/auth/__init__.py +109 -0
- golf/auth/helpers.py +56 -0
- golf/auth/oauth.py +798 -0
- golf/auth/provider.py +110 -0
- golf/cli/__init__.py +1 -0
- golf/cli/main.py +223 -0
- golf/commands/__init__.py +3 -0
- golf/commands/build.py +78 -0
- golf/commands/init.py +197 -0
- golf/commands/run.py +68 -0
- golf/core/__init__.py +1 -0
- golf/core/builder.py +1169 -0
- golf/core/builder_auth.py +157 -0
- golf/core/builder_telemetry.py +208 -0
- golf/core/config.py +205 -0
- golf/core/parser.py +509 -0
- golf/core/transformer.py +168 -0
- golf/examples/__init__.py +1 -0
- golf/examples/basic/.env +3 -0
- golf/examples/basic/.env.example +3 -0
- golf/examples/basic/README.md +117 -0
- golf/examples/basic/golf.json +9 -0
- golf/examples/basic/pre_build.py +28 -0
- golf/examples/basic/prompts/welcome.py +30 -0
- golf/examples/basic/resources/current_time.py +41 -0
- golf/examples/basic/resources/info.py +27 -0
- golf/examples/basic/resources/weather/common.py +48 -0
- golf/examples/basic/resources/weather/current.py +32 -0
- golf/examples/basic/resources/weather/forecast.py +32 -0
- golf/examples/basic/tools/github_user.py +67 -0
- golf/examples/basic/tools/hello.py +29 -0
- golf/examples/basic/tools/payments/charge.py +50 -0
- golf/examples/basic/tools/payments/common.py +34 -0
- golf/examples/basic/tools/payments/refund.py +50 -0
- golf_mcp-0.1.0.dist-info/METADATA +78 -0
- golf_mcp-0.1.0.dist-info/RECORD +41 -0
- golf_mcp-0.1.0.dist-info/WHEEL +5 -0
- golf_mcp-0.1.0.dist-info/entry_points.txt +2 -0
- golf_mcp-0.1.0.dist-info/licenses/LICENSE +201 -0
- golf_mcp-0.1.0.dist-info/top_level.txt +1 -0
golf/core/parser.py
ADDED
@@ -0,0 +1,509 @@
"""Python file parser for extracting tools, resources, and prompts using AST."""

import ast
import re
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, Optional
import hashlib

from rich.console import Console

console = Console()


class ComponentType(str, Enum):
    """Type of component discovered by the parser."""

    TOOL = "tool"
    RESOURCE = "resource"
    PROMPT = "prompt"
    UNKNOWN = "unknown"


@dataclass
class ParsedComponent:
    """Represents a parsed MCP component (tool, resource, or prompt)."""

    name: str  # Derived from file path or explicit name
    type: ComponentType
    file_path: Path
    module_path: str
    docstring: Optional[str] = None
    input_schema: Optional[Dict[str, Any]] = None
    output_schema: Optional[Dict[str, Any]] = None
    uri_template: Optional[str] = None  # For resources
    parameters: Optional[List[str]] = None  # For resources with URI params
    parent_module: Optional[str] = None  # For nested components
    entry_function: Optional[str] = None  # Store the name of the function to use


class AstParser:
    """AST-based parser for extracting MCP components from Python files."""

    def __init__(self, project_root: Path):
        """Initialize the parser.

        Args:
            project_root: Root directory of the project
        """
        self.project_root = project_root
        self.components: Dict[str, ParsedComponent] = {}

    def parse_directory(self, directory: Path) -> List[ParsedComponent]:
        """Parse all Python files in a directory recursively."""
        components = []

        for file_path in directory.glob("**/*.py"):
            # Skip __pycache__ and other hidden directories
            if "__pycache__" in file_path.parts or any(part.startswith('.') for part in file_path.parts):
                continue

            try:
                file_components = self.parse_file(file_path)
                components.extend(file_components)
            except Exception as e:
                relative_path = file_path.relative_to(self.project_root)
                console.print(f"[bold red]Error parsing {relative_path}:[/bold red] {e}")

        return components

    def parse_file(self, file_path: Path) -> List[ParsedComponent]:
        """Parse a single Python file using AST to extract MCP components."""
        # Handle common.py files
        if file_path.name == "common.py":
            # Register as a known shared module but don't return as a component
            return []

        # Skip __init__.py files for direct parsing
        if file_path.name == "__init__.py":
            return []

        # Determine component type based on directory structure
        rel_path = file_path.relative_to(self.project_root)
        parent_dir = rel_path.parts[0] if rel_path.parts else None

        component_type = ComponentType.UNKNOWN
        if parent_dir == "tools":
            component_type = ComponentType.TOOL
        elif parent_dir == "resources":
            component_type = ComponentType.RESOURCE
        elif parent_dir == "prompts":
            component_type = ComponentType.PROMPT

        if component_type == ComponentType.UNKNOWN:
            return []  # Not in a recognized directory

        # Read the file content and parse it with AST
        with open(file_path, 'r', encoding='utf-8') as f:
            file_content = f.read()

        try:
            tree = ast.parse(file_content)
        except SyntaxError as e:
            raise ValueError(f"Syntax error in {file_path}: {e}")

        # Extract module docstring
        module_docstring = ast.get_docstring(tree)
        if not module_docstring:
            raise ValueError(f"Missing module docstring in {file_path}")

        # Find the entry function - look for "export = function_name" pattern,
        # or any top-level function (like "run") as a fallback
        entry_function = None
        export_target = None

        # Look for export = function_name assignment
        for node in tree.body:
            if isinstance(node, ast.Assign):
                for target in node.targets:
                    if isinstance(target, ast.Name) and target.id == "export":
                        if isinstance(node.value, ast.Name):
                            export_target = node.value.id
                            break

        # Find all top-level functions
        functions = []
        for node in tree.body:
            if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
                functions.append(node)
                # If this function matches our export target, it's our entry function
                if export_target and node.name == export_target:
                    entry_function = node

        # Check for the run function as a fallback
        run_function = None
        for func in functions:
            if func.name == "run":
                run_function = func

        # If we have an export but didn't find the target function, warn
        if export_target and not entry_function:
            console.print(f"[yellow]Warning: Export target '{export_target}' not found in {file_path}[/yellow]")

        # Use the export target function if found, otherwise fall back to run
        entry_function = entry_function or run_function

        # If no valid function found, skip this file
        if not entry_function:
            return []

        # Create component
        component = ParsedComponent(
            name="",  # Will be set later
            type=component_type,
            file_path=file_path,
            module_path=file_path.relative_to(self.project_root).as_posix(),
            docstring=module_docstring,
            entry_function=export_target or "run"  # Store the name of the entry function
        )

        # Process the entry function
        self._process_entry_function(component, entry_function, tree, file_path)

        # Process other component-specific information
        if component_type == ComponentType.TOOL:
            self._process_tool(component, tree)
        elif component_type == ComponentType.RESOURCE:
            self._process_resource(component, tree)
        elif component_type == ComponentType.PROMPT:
            self._process_prompt(component, tree)

        # Set component name based on file path
        component.name = self._derive_component_name(file_path, component_type)

        # Set parent module if it's in a nested structure
        if len(rel_path.parts) > 2:  # More than just "tools/file.py"
            parent_parts = rel_path.parts[1:-1]  # Skip the root category and the file itself
            if parent_parts:
                component.parent_module = ".".join(parent_parts)

        return [component]

    def _process_entry_function(self, component: ParsedComponent, func_node: ast.FunctionDef | ast.AsyncFunctionDef, tree: ast.Module, file_path: Path) -> None:
        """Process the entry function to extract parameters and return type."""
        # Extract function docstring
        func_docstring = ast.get_docstring(func_node)

        # Extract parameter names and annotations
        parameters = []
        for arg in func_node.args.args:
            # Skip self, cls parameters
            if arg.arg in ("self", "cls"):
                continue

            # Skip ctx parameter - GolfMCP will inject this
            if arg.arg == "ctx":
                continue

            parameters.append(arg.arg)

        # Check for return annotation - STRICT requirement
        if func_node.returns is None:
            raise ValueError(f"Missing return annotation for {func_node.name} function in {file_path}")

        # Store parameters
        component.parameters = parameters

    def _process_tool(self, component: ParsedComponent, tree: ast.Module) -> None:
        """Process a tool component to extract input/output schemas."""
        # Look for Input and Output classes in the AST
        input_class = None
        output_class = None

        for node in tree.body:
            if isinstance(node, ast.ClassDef):
                if node.name == "Input":
                    input_class = node
                elif node.name == "Output":
                    output_class = node

        # Process Input class if found
        if input_class:
            # Check if it inherits from BaseModel
            for base in input_class.bases:
                if isinstance(base, ast.Name) and base.id == "BaseModel":
                    component.input_schema = self._extract_pydantic_schema_from_ast(input_class)
                    break

        # Process Output class if found
        if output_class:
            # Check if it inherits from BaseModel
            for base in output_class.bases:
                if isinstance(base, ast.Name) and base.id == "BaseModel":
                    component.output_schema = self._extract_pydantic_schema_from_ast(output_class)
                    break

    def _process_resource(self, component: ParsedComponent, tree: ast.Module) -> None:
        """Process a resource component to extract URI template."""
        # Look for resource_uri assignment in the AST
        for node in tree.body:
            if isinstance(node, ast.Assign):
                for target in node.targets:
                    if isinstance(target, ast.Name) and target.id == "resource_uri":
                        if isinstance(node.value, ast.Constant):
                            uri_template = node.value.value
                            component.uri_template = uri_template

                            # Extract URI parameters (parts in {})
                            uri_params = re.findall(r"{([^}]+)}", uri_template)
                            if uri_params:
                                component.parameters = uri_params
                            break

    def _process_prompt(self, component: ParsedComponent, tree: ast.Module) -> None:
        """Process a prompt component (no special processing needed)."""
        pass

    def _derive_component_name(self, file_path: Path, component_type: ComponentType) -> str:
        """Derive a component name from its file path according to the spec.

        Following the spec: <filename> + ("-" + "-".join(PathRev) if PathRev else "")
        where PathRev is the reversed list of parent directories under the category.
        """
        rel_path = file_path.relative_to(self.project_root)

        # Find which category directory this is in
        category = None
        category_idx = -1
        for i, part in enumerate(rel_path.parts):
            if part in ["tools", "resources", "prompts"]:
                category = part
                category_idx = i
                break

        if category_idx == -1:
            return ""

        # Get the filename without extension
        filename = rel_path.stem

        # Get parent directories between category and file
        parent_dirs = list(rel_path.parts[category_idx+1:-1])

        # Reverse parent dirs according to spec
        parent_dirs.reverse()

        # Form the ID according to spec
        if parent_dirs:
            return f"{filename}-{'-'.join(parent_dirs)}"
        else:
            return filename

    def _extract_pydantic_schema_from_ast(self, class_node: ast.ClassDef) -> Dict[str, Any]:
        """Extract a JSON schema from an AST class definition.

        This is a simplified version that extracts basic field information.
        For complex annotations, a more sophisticated approach would be needed.
        """
        schema = {
            "type": "object",
            "properties": {},
            "required": []
        }

        for node in class_node.body:
            if isinstance(node, ast.AnnAssign) and isinstance(node.target, ast.Name):
                field_name = node.target.id

                # Extract type annotation as string
                annotation = ""
                if isinstance(node.annotation, ast.Name):
                    annotation = node.annotation.id
                elif isinstance(node.annotation, ast.Subscript):
                    # Simple handling of things like List[str]
                    annotation = ast.unparse(node.annotation)
                else:
                    annotation = ast.unparse(node.annotation)

                # Create property definition
                prop = {
                    "type": self._type_hint_to_json_type(annotation),
                    "title": field_name.replace('_', ' ').title()
                }

                # Extract default value if present
                if node.value is not None:
                    if isinstance(node.value, ast.Constant):
                        # Simple constant default
                        prop["default"] = node.value.value
                    elif (isinstance(node.value, ast.Call) and
                          isinstance(node.value.func, ast.Name) and
                          node.value.func.id == "Field"):
                        # Field object - extract its parameters
                        for keyword in node.value.keywords:
                            if keyword.arg == "default" or keyword.arg == "default_factory":
                                if isinstance(keyword.value, ast.Constant):
                                    prop["default"] = keyword.value.value
                            elif keyword.arg == "description":
                                if isinstance(keyword.value, ast.Constant):
                                    prop["description"] = keyword.value.value
                            elif keyword.arg == "title":
                                if isinstance(keyword.value, ast.Constant):
                                    prop["title"] = keyword.value.value

                        # Check for position default argument (Field(..., "description"))
                        if node.value.args:
                            for i, arg in enumerate(node.value.args):
                                if i == 0 and isinstance(arg, ast.Constant) and arg.value != Ellipsis:
                                    prop["default"] = arg.value
                                elif i == 1 and isinstance(arg, ast.Constant):
                                    prop["description"] = arg.value

                # Add to properties
                schema["properties"][field_name] = prop

                # Check if required (no default value or Field(...))
                is_required = True
                if node.value is not None:
                    if isinstance(node.value, ast.Constant):
                        is_required = False
                    elif (isinstance(node.value, ast.Call) and
                          isinstance(node.value.func, ast.Name) and
                          node.value.func.id == "Field"):
                        # Field has default if it doesn't use ... or if it has a default keyword
                        has_ellipsis = False
                        has_default = False

                        if node.value.args and isinstance(node.value.args[0], ast.Constant):
                            has_ellipsis = node.value.args[0].value is Ellipsis

                        for keyword in node.value.keywords:
                            if keyword.arg == "default" or keyword.arg == "default_factory":
                                has_default = True

                        is_required = has_ellipsis and not has_default

                if is_required:
                    schema["required"].append(field_name)

        return schema

    def _type_hint_to_json_type(self, type_hint: str) -> str:
        """Convert a Python type hint to a JSON schema type.

        This is a simplified version. A more sophisticated approach would
        handle complex types correctly.
        """
        type_map = {
            "str": "string",
            "int": "integer",
            "float": "number",
            "bool": "boolean",
            "list": "array",
            "dict": "object",
        }

        # Handle simple types
        for py_type, json_type in type_map.items():
            if py_type in type_hint.lower():
                return json_type

        # Default to string for unknown types
        return "string"


def parse_project(project_path: Path) -> Dict[ComponentType, List[ParsedComponent]]:
    """Parse a GolfMCP project to extract all components."""
    parser = AstParser(project_path)

    components: Dict[ComponentType, List[ParsedComponent]] = {
        ComponentType.TOOL: [],
        ComponentType.RESOURCE: [],
        ComponentType.PROMPT: []
    }

    # Parse each directory
    for comp_type, dir_name in [
        (ComponentType.TOOL, "tools"),
        (ComponentType.RESOURCE, "resources"),
        (ComponentType.PROMPT, "prompts")
    ]:
        dir_path = project_path / dir_name
        if dir_path.exists() and dir_path.is_dir():
            dir_components = parser.parse_directory(dir_path)
            components[comp_type].extend([c for c in dir_components if c.type == comp_type])

    # Check for ID collisions
    all_ids = []
    for comp_type, comps in components.items():
        for comp in comps:
            if comp.name in all_ids:
                raise ValueError(f"ID collision detected: {comp.name} is used by multiple components")
            all_ids.append(comp.name)

    return components


def parse_project_incremental(project_path: Path, fingerprints: Dict[str, str]) -> Dict[ComponentType, List[ParsedComponent]]:
    """Parse a project with incremental file checking."""
    parser = AstParser(project_root=project_path)

    components: Dict[ComponentType, List[ParsedComponent]] = {
        ComponentType.TOOL: [],
        ComponentType.RESOURCE: [],
        ComponentType.PROMPT: []
    }

    # Process each directory
    for comp_type, dir_name in [
        (ComponentType.TOOL, "tools"),
        (ComponentType.RESOURCE, "resources"),
        (ComponentType.PROMPT, "prompts")
    ]:
        dir_path = project_path / dir_name
        if not dir_path.exists() or not dir_path.is_dir():
            continue

        # Parse only changed files using fingerprints
        for file_path in dir_path.glob("**/*.py"):
            if "__pycache__" in file_path.parts or file_path.name == "__init__.py":
                continue

            # Check if file changed
            with open(file_path, "rb") as f:
                content = f.read()
                file_hash = hashlib.sha1(content).hexdigest()

            rel_path = str(file_path.relative_to(project_path))

            if rel_path not in fingerprints or fingerprints[rel_path] != file_hash:
                try:
                    file_components = parser.parse_file(file_path)
                    components[comp_type].extend([c for c in file_components if c.type == comp_type])
                    fingerprints[rel_path] = file_hash
                except Exception as e:
                    console.print(f"[bold red]Error parsing {rel_path}: {e}[/bold red]")

    return components


def parse_common_files(project_path: Path) -> Dict[str, Path]:
    """Find all common.py files in the project.

    Args:
        project_path: Path to the project root

    Returns:
        Dictionary mapping directory paths to common.py file paths
    """
    common_files = {}

    # Search for common.py files in tools, resources, and prompts directories
    for dir_name in ["tools", "resources", "prompts"]:
        base_dir = project_path / dir_name
        if not base_dir.exists() or not base_dir.is_dir():
            continue

        # Find all common.py files (recursively)
        for common_file in base_dir.glob("**/common.py"):
            # Skip files in __pycache__ or other hidden directories
            if "__pycache__" in common_file.parts or any(part.startswith('.') for part in common_file.parts):
                continue

            # Get the parent directory as the module path
            module_path = str(common_file.parent.relative_to(project_path))
            common_files[module_path] = common_file

    return common_files
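For orientation (this sketch is not part of the package contents): a minimal example of driving the parser, assuming a project laid out like the bundled golf/examples/basic directory. The project path is hypothetical; parse_project, ComponentType, and the ParsedComponent fields used below come from the module above.

from pathlib import Path

from golf.core.parser import ComponentType, parse_project

# Hypothetical project root with top-level tools/, resources/ and prompts/
# directories, as in golf/examples/basic.
project_root = Path("my_golf_project")

components = parse_project(project_root)

# Per _derive_component_name, a nested file such as tools/payments/charge.py
# is assigned the ID "charge-payments": the file stem, then the reversed
# parent directories under the category directory.
for tool in components[ComponentType.TOOL]:
    print(tool.name, tool.entry_function, tool.module_path)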
golf/core/transformer.py
ADDED
@@ -0,0 +1,168 @@
"""Transform GolfMCP components into standalone FastMCP code.

This module provides utilities for transforming GolfMCP's convention-based code
into explicit FastMCP component registrations.
"""

import ast
from pathlib import Path
from typing import Dict, Any

from golf.core.parser import ParsedComponent


class ImportTransformer(ast.NodeTransformer):
    """AST transformer for rewriting imports in component files."""

    def __init__(self,
                 original_path: Path,
                 target_path: Path,
                 import_map: Dict[str, str],
                 project_root: Path):
        """Initialize the import transformer.

        Args:
            original_path: Path to the original file
            target_path: Path to the target file
            import_map: Mapping of original module paths to generated paths
            project_root: Root path of the project
        """
        self.original_path = original_path
        self.target_path = target_path
        self.import_map = import_map
        self.project_root = project_root

    def visit_Import(self, node: ast.Import) -> Any:
        """Transform import statements."""
        return node

    def visit_ImportFrom(self, node: ast.ImportFrom) -> Any:
        """Transform import from statements."""
        if node.module is None:
            return node

        # Handle relative imports
        if node.level > 0:
            # Calculate the source module path
            source_dir = self.original_path.parent
            for _ in range(node.level - 1):
                source_dir = source_dir.parent

            if node.module:
                source_module = source_dir / node.module.replace(".", "/")
            else:
                source_module = source_dir

            # Check if this is a common module import
            source_str = str(source_module.relative_to(self.project_root))
            if source_str in self.import_map:
                # Replace with absolute import
                new_module = self.import_map[source_str]
                return ast.ImportFrom(
                    module=new_module,
                    names=node.names,
                    level=0
                )

        return node


def transform_component(
    component: ParsedComponent,
    output_file: Path,
    project_path: Path,
    import_map: Dict[str, str],
    source_file: Path = None,
) -> str:
    """Transform a GolfMCP component into a standalone FastMCP component.

    Args:
        component: Parsed component to transform
        output_file: Path to write the transformed component to
        project_path: Path to the project root
        import_map: Mapping of original module paths to generated paths
        source_file: Optional path to source file (for common.py files)

    Returns:
        Generated component code
    """
    # Read the original file
    if source_file is not None:
        file_path = source_file
    elif component is not None:
        file_path = Path(component.file_path)
    else:
        raise ValueError("Either component or source_file must be provided")

    with open(file_path, "r") as f:
        source_code = f.read()

    # Parse the source code into an AST
    tree = ast.parse(source_code)

    # Transform imports
    transformer = ImportTransformer(
        file_path,
        output_file,
        import_map,
        project_path
    )
    tree = transformer.visit(tree)

    # Get all imports and docstring
    imports = []
    docstring = None

    # Find the module docstring if present
    if (len(tree.body) > 0 and isinstance(tree.body[0], ast.Expr) and
            isinstance(tree.body[0].value, ast.Constant) and
            isinstance(tree.body[0].value.value, str)):
        docstring = tree.body[0].value.value

    # Find imports
    for node in tree.body:
        if isinstance(node, (ast.Import, ast.ImportFrom)):
            imports.append(node)

    # Generate the transformed code
    transformed_imports = ast.unparse(ast.Module(body=imports, type_ignores=[]))

    # Build full transformed code
    transformed_code = transformed_imports + "\n\n"

    # Add docstring if present, using proper triple quotes for multi-line docstrings
    if docstring:
        # Check if docstring contains newlines
        if "\n" in docstring:
            # Use triple quotes for multi-line docstrings
            transformed_code += f'"""{docstring}"""\n\n'
        else:
            # Use single quotes for single-line docstrings
            transformed_code += f'"{docstring}"\n\n'

    # Add the rest of the code except imports and the original docstring
    remaining_nodes = []
    for node in tree.body:
        # Skip imports
        if isinstance(node, (ast.Import, ast.ImportFrom)):
            continue

        # Skip the original docstring
        if (isinstance(node, ast.Expr) and
                isinstance(node.value, ast.Constant) and
                isinstance(node.value.value, str)):
            continue

        remaining_nodes.append(node)

    remaining_code = ast.unparse(ast.Module(body=remaining_nodes, type_ignores=[]))
    transformed_code += remaining_code

    # Ensure the directory exists
    output_file.parent.mkdir(parents=True, exist_ok=True)

    # Write the transformed code to the output file
    with open(output_file, "w") as f:
        f.write(transformed_code)

    return transformed_code
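Again for orientation (not part of the package contents): a hedged sketch of how transform_component might be called for each discovered tool. The output directory and the import_map entries are assumptions here; in the released package the real import map is assembled by golf/core/builder.py, which is not reproduced in this section. The call signature matches the code above.

from pathlib import Path

from golf.core.parser import ComponentType, parse_project
from golf.core.transformer import transform_component

project = Path("my_golf_project")  # hypothetical project root
build_dir = Path("build_out")      # hypothetical output directory

# Hypothetical mapping from a shared module's project-relative path (as the
# ImportTransformer resolves relative imports) to its module name in the
# generated code; the real mapping comes from the builder.
import_map = {"tools/payments/common": "components.tools.payments.common"}

for tool in parse_project(project)[ComponentType.TOOL]:
    out_file = build_dir / "components" / "tools" / f"{tool.name}.py"
    code = transform_component(tool, out_file, project, import_map)
    print(f"wrote {out_file} ({len(code.splitlines())} lines)")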
golf/examples/basic/.env
ADDED