fleet-python 0.2.13__py3-none-any.whl → 0.2.15__py3-none-any.whl

This diff shows the contents of publicly released package versions as they appear in their public registries, and is provided for informational purposes only.

Potentially problematic release: this version of fleet-python might be problematic.

Files changed (41)
  1. examples/diff_example.py +161 -0
  2. examples/dsl_example.py +50 -1
  3. examples/example_action_log.py +28 -0
  4. examples/example_mcp_anthropic.py +77 -0
  5. examples/example_mcp_openai.py +27 -0
  6. examples/example_task.py +199 -0
  7. examples/example_verifier.py +71 -0
  8. examples/query_builder_example.py +117 -0
  9. fleet/__init__.py +51 -40
  10. fleet/_async/base.py +14 -1
  11. fleet/_async/client.py +137 -19
  12. fleet/_async/env/client.py +4 -4
  13. fleet/_async/instance/__init__.py +1 -2
  14. fleet/_async/instance/client.py +3 -2
  15. fleet/_async/playwright.py +2 -2
  16. fleet/_async/resources/sqlite.py +654 -0
  17. fleet/_async/tasks.py +44 -0
  18. fleet/_async/verifiers/__init__.py +17 -0
  19. fleet/_async/verifiers/bundler.py +699 -0
  20. fleet/_async/verifiers/verifier.py +301 -0
  21. fleet/base.py +14 -1
  22. fleet/client.py +645 -12
  23. fleet/config.py +1 -1
  24. fleet/instance/__init__.py +1 -2
  25. fleet/instance/client.py +15 -5
  26. fleet/models.py +171 -4
  27. fleet/resources/browser.py +7 -8
  28. fleet/resources/mcp.py +60 -0
  29. fleet/resources/sqlite.py +654 -0
  30. fleet/tasks.py +44 -0
  31. fleet/types.py +18 -0
  32. fleet/verifiers/__init__.py +11 -5
  33. fleet/verifiers/bundler.py +699 -0
  34. fleet/verifiers/decorator.py +103 -0
  35. fleet/verifiers/verifier.py +301 -0
  36. {fleet_python-0.2.13.dist-info → fleet_python-0.2.15.dist-info}/METADATA +3 -42
  37. fleet_python-0.2.15.dist-info/RECORD +69 -0
  38. fleet_python-0.2.13.dist-info/RECORD +0 -52
  39. {fleet_python-0.2.13.dist-info → fleet_python-0.2.15.dist-info}/WHEEL +0 -0
  40. {fleet_python-0.2.13.dist-info → fleet_python-0.2.15.dist-info}/licenses/LICENSE +0 -0
  41. {fleet_python-0.2.13.dist-info → fleet_python-0.2.15.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,699 @@
+"""Fleet SDK Function Bundler - Dependency Detection and Bundle Creation.
+
+Handles dependency detection and bundle creation for verifier functions with basic static analysis.
+The client performs dependency detection and creates lightweight bundles.
+The server uses uv to resolve dependencies and create the execution environment.
+"""
+
+import inspect
+import tempfile
+import zipfile
+from pathlib import Path
+from typing import Any, Callable, Dict, Optional, List, Set
+from io import BytesIO
+import logging
+import ast
+from collections import defaultdict
+
+try:
+    import importlib.metadata as imd
+except ImportError:
+    import importlib_metadata as imd
+
+logger = logging.getLogger(__name__)
+
+
+class FunctionBundler:
+    """Handles dependency detection and bundle creation for verifier functions with basic static analysis."""
+
+    def __init__(self):
+        pass
+
+    def create_bundle(
+        self,
+        func: Callable,
+        extra_requirements: Optional[List[str]] = None,
+        verifier_id: Optional[str] = None,
+    ) -> bytes:
+        """Create a function bundle with statically extracted code."""
+
+        logger.info(f"Creating function bundle for {func.__name__}")
+
+        # 1. Parse the main function and find dependencies
+        mod_file = Path(func.__code__.co_filename)
+        project_root = self._find_project_root(mod_file)
+
+        # 2. Analyze dependencies with static analysis
+        dependencies = self._analyze_dependencies_with_static_analysis(
+            func, mod_file, project_root
+        )
+
+        # 3. Map external packages
+        requirements = self._map_to_pypi_packages(dependencies["external_packages"])
+
+        # Merge with extra requirements, handling version conflicts
+        if extra_requirements:
+            requirements = self._merge_requirements(requirements, extra_requirements)
+
+        # 4. Build optimized bundle
+        # Get source without decorator
+        src = self._get_function_source_without_decorator(func)
+        bundle_bytes = self._build_function_bundle(
+            func,
+            src,
+            requirements,
+            dependencies["extracted_code"],
+            project_root,
+            verifier_id,
+            dependencies.get("same_module_deps", []),
+        )
+
+        return bundle_bytes
+
+    def _analyze_dependencies_with_static_analysis(
+        self, func: Callable, mod_file: Path, project_root: Path
+    ) -> Dict[str, Any]:
+        """Analyze dependencies and extract functions using basic static analysis."""
+
+        # Parse the main function - handle indentation
+        main_func_code = inspect.getsource(func)
+        # Remove decorator and normalize indentation
+        main_func_lines = main_func_code.split("\n")
+
+        # Find the actual function definition line (skip decorators)
+        func_start_idx = 0
+        for i, line in enumerate(main_func_lines):
+            if line.strip().startswith("def "):
+                func_start_idx = i
+                break
+
+        # Extract function definition and body
+        func_lines = main_func_lines[func_start_idx:]
+
+        # Remove common leading whitespace
+        if func_lines:
+            import textwrap
+
+            normalized_func_code = textwrap.dedent("\n".join(func_lines))
+            main_func_ast = ast.parse(normalized_func_code)
+        else:
+            main_func_ast = ast.parse("")
+
+        # Find all import statements in the main function
+        imports_in_func = self._extract_imports_from_ast(main_func_ast)
+
+        # Also analyze the module containing the function
+        with open(mod_file, "r", encoding="utf-8") as f:
+            module_content = f.read()
+        module_ast = ast.parse(module_content)
+
+        # Find imports at module level
+        module_imports = self._extract_imports_from_ast(module_ast)
+
+        # Combine all imports
+        all_imports = {**imports_in_func, **module_imports}
+
+        # Find function calls within the verifier function
+        called_functions = self._extract_function_calls(main_func_ast)
+        logger.debug(f"Functions called in verifier: {called_functions}")
+
+        # Find all functions defined in the module
+        module_functions = {}
+        for node in ast.walk(module_ast):
+            if isinstance(node, ast.FunctionDef):
+                module_functions[node.name] = node
+
+        # Check which called functions are defined in the same module
+        same_module_deps = []
+        for func_name in called_functions:
+            if func_name in module_functions and func_name != func.__name__:
+                same_module_deps.append(func_name)
+                logger.debug(f"Found same-module dependency: {func_name}")
+
+        # Separate local and external imports
+        local_imports = {}
+        external_packages = set()
+        extracted_code = {}
+
+        for import_type, import_list in all_imports.items():
+            for import_info in import_list:
+                if import_type == "from_import":
+                    module_name = import_info["module"]
+                    imported_names = import_info["names"]
+
+                    # Try to resolve as local import
+                    local_path = self._resolve_local_import(
+                        module_name, mod_file, project_root
+                    )
+                    if local_path and local_path.exists():
+                        # Extract only the specific functions we need
+                        extracted_functions = self._extract_specific_functions(
+                            local_path, imported_names
+                        )
+
+                        if extracted_functions:
+                            relative_path = str(local_path.relative_to(project_root))
+                            extracted_code[relative_path] = extracted_functions
+                            local_imports[module_name] = imported_names
+                    else:
+                        # External package
+                        external_packages.add(module_name)
+
+                elif import_type == "import":
+                    module_name = import_info["name"]
+                    # Check if it's a local or external import
+                    if not self._is_likely_stdlib(module_name):
+                        try:
+                            dist = imd.distribution(module_name)
+                            external_packages.add(dist.metadata["Name"])
+                        except imd.PackageNotFoundError:
+                            # Could be local, but for now assume external
+                            external_packages.add(module_name)
+
+        return {
+            "local_imports": local_imports,
+            "external_packages": external_packages,
+            "extracted_code": extracted_code,
+            "same_module_deps": same_module_deps,  # Add same-module dependencies
+        }
+
+    def _extract_imports_from_ast(
+        self, tree: ast.AST
+    ) -> Dict[str, List[Dict[str, Any]]]:
+        """Extract import statements from AST."""
+        imports = defaultdict(list)
+
+        for node in ast.walk(tree):
+            if isinstance(node, ast.Import):
+                for alias in node.names:
+                    imports["import"].append(
+                        {"name": alias.name, "asname": alias.asname}
+                    )
+            elif isinstance(node, ast.ImportFrom):
+                if node.module:  # Skip relative imports without module
+                    imports["from_import"].append(
+                        {
+                            "module": node.module,
+                            "names": [alias.name for alias in node.names],
+                            "level": node.level,
+                        }
+                    )
+
+        return dict(imports)
+
+    def _extract_function_calls(self, tree: ast.AST) -> Set[str]:
+        """Extract function calls from AST."""
+        function_calls = set()
+
+        for node in ast.walk(tree):
+            if isinstance(node, ast.Call):
+                # Handle direct function calls (e.g., func())
+                if isinstance(node.func, ast.Name):
+                    function_calls.add(node.func.id)
+                # Handle method calls (e.g., obj.method())
+                elif isinstance(node.func, ast.Attribute):
+                    # We might want to handle these differently
+                    pass
+
+        return function_calls
+
+    def _extract_specific_functions(
+        self, file_path: Path, function_names: List[str]
+    ) -> str:
+        """Extract specific functions from a file, including their dependencies."""
+
+        try:
+            with open(file_path, "r", encoding="utf-8") as f:
+                content = f.read()
+
+            tree = ast.parse(content)
+
+            # Find all function definitions
+            functions = {}
+            classes = {}
+            imports = []
+
+            for node in ast.walk(tree):
+                if isinstance(node, ast.FunctionDef):
+                    functions[node.name] = node
+                elif isinstance(node, ast.ClassDef):
+                    classes[node.name] = node
+                elif isinstance(node, (ast.Import, ast.ImportFrom)):
+                    imports.append(node)
+
+            # Extract required functions and their dependencies
+            required_functions = set(function_names)
+            extracted_nodes = []
+
+            # Add necessary imports
+            used_names = set()
+            for func_name in function_names:
+                if func_name in functions:
+                    # Find all names used in this function
+                    for node in ast.walk(functions[func_name]):
+                        if isinstance(node, ast.Name):
+                            used_names.add(node.id)
+
+            # Add imports that provide these names
+            for import_node in imports:
+                if isinstance(import_node, ast.Import):
+                    for alias in import_node.names:
+                        if alias.name in used_names:
+                            extracted_nodes.append(import_node)
+                            break
+                elif isinstance(import_node, ast.ImportFrom):
+                    for alias in import_node.names:
+                        if alias.name in used_names:
+                            extracted_nodes.append(import_node)
+                            break
+
+            # Add required functions
+            for func_name in required_functions:
+                if func_name in functions:
+                    extracted_nodes.append(functions[func_name])
+
+                    # Check if this function calls other local functions
+                    for node in ast.walk(functions[func_name]):
+                        if isinstance(node, ast.Call) and isinstance(
+                            node.func, ast.Name
+                        ):
+                            called_func = node.func.id
+                            if (
+                                called_func in functions
+                                and called_func not in required_functions
+                            ):
+                                required_functions.add(called_func)
+                                extracted_nodes.append(functions[called_func])
+
+            # Convert back to source code
+            extracted_code = []
+            for node in extracted_nodes:
+                try:
+                    code = ast.unparse(node)
+                    extracted_code.append(code)
+                except Exception as e:
+                    logger.warning(f"Could not unparse AST node: {e}")
+                    # Fallback to original source extraction
+                    lines = content.split("\n")
+                    start_line = node.lineno - 1
+                    end_line = (
+                        node.end_lineno
+                        if hasattr(node, "end_lineno")
+                        else start_line + 1
+                    )
+                    code = "\n".join(lines[start_line:end_line])
+                    extracted_code.append(code)
+
+            result = "\n\n".join(extracted_code)
+            logger.debug(f"Extracted {len(extracted_code)} items from {file_path}")
+            return result
+
+        except Exception as e:
+            logger.warning(f"Failed to extract functions from {file_path}: {e}")
+            # Fallback to including the entire file
+            with open(file_path, "r", encoding="utf-8") as f:
+                return f.read()
+
+    def _resolve_local_import(
+        self, module_name: str, current_file: Path, project_root: Path
+    ) -> Optional[Path]:
+        """Try to resolve a module name to a local file path."""
+
+        # Handle dotted imports (e.g., utils.helpers -> utils/helpers.py)
+        module_parts = module_name.split(".")
+
+        # Search from current file's directory up to project root
+        search_dirs = [current_file.parent]
+
+        # Add project root and its subdirectories to search path
+        current = current_file.parent
+        while current != project_root.parent:
+            search_dirs.append(current)
+            if current == project_root:
+                break
+            current = current.parent
+
+        for search_dir in search_dirs:
+            # Try as a package (directory with __init__.py)
+            package_dir = search_dir
+            for part in module_parts:
+                package_dir = package_dir / part
+
+            init_file = package_dir / "__init__.py"
+            if init_file.exists():
+                return init_file
+
+            # Try as a module (file.py)
+            module_file = search_dir
+            for part in module_parts[:-1]:
+                module_file = module_file / part
+            module_file = module_file / f"{module_parts[-1]}.py"
+
+            if module_file.exists():
+                return module_file
+
+        return None
+
+    def _find_project_root(self, mod_file: Path) -> Path:
+        """Find the project root by looking for common markers."""
+        current = mod_file.parent
+
+        # Look for common project root markers
+        markers = [
+            "pyproject.toml",
+            "setup.py",
+            "setup.cfg",
+            ".git",
+            ".hg",
+            "requirements.txt",
+            "Pipfile",
+        ]
+
+        while current != current.parent:  # Not at filesystem root
+            if any((current / marker).exists() for marker in markers):
+                return current
+            current = current.parent
+
+        # Fallback to the directory containing the source file
+        return mod_file.parent
+
+    def _is_likely_stdlib(self, module_name: str) -> bool:
+        """Check if a module is likely part of the standard library."""
+        stdlib_modules = {
+            "os",
+            "sys",
+            "json",
+            "datetime",
+            "time",
+            "random",
+            "math",
+            "re",
+            "collections",
+            "itertools",
+            "functools",
+            "operator",
+            "pathlib",
+            "urllib",
+            "http",
+            "socket",
+            "threading",
+            "multiprocessing",
+            "logging",
+            "argparse",
+            "configparser",
+            "csv",
+            "xml",
+            "html",
+            "base64",
+            "hashlib",
+            "hmac",
+            "secrets",
+            "uuid",
+            "pickle",
+            "sqlite3",
+            "dbm",
+            "zipfile",
+            "tarfile",
+            "gzip",
+            "shutil",
+            "tempfile",
+            "glob",
+            "fnmatch",
+            "linecache",
+            "fileinput",
+            "stat",
+            "filecmp",
+            "calendar",
+            "zoneinfo",
+            "locale",
+            "gettext",
+            "io",
+            "traceback",
+            "inspect",
+            "types",
+            "copy",
+            "pprint",
+            "reprlib",
+            "enum",
+            "contextlib",
+            "abc",
+            "atexit",
+            "gc",
+            "weakref",
+            "typing",
+            "dataclasses",
+            "heapq",
+            "bisect",
+            "array",
+            "struct",
+            "codecs",
+            "unicodedata",
+            "stringprep",
+            "ast",
+        }
+        return module_name in stdlib_modules
+
+    def _map_to_pypi_packages(self, package_names: Set[str]) -> List[str]:
+        """Map module names to PyPI package names with versions."""
+        packages = set()
+
+        for mod in package_names:
+            try:
+                dist = imd.distribution(mod)
+                package_name = dist.metadata["Name"]
+                version = dist.version  # Get the installed version
+                package_with_version = f"{package_name}=={version}"
+                packages.add(package_with_version)
+                logger.debug(f"Mapped {mod} -> {package_with_version}")
+            except imd.PackageNotFoundError:
+                # Skip stdlib or local modules
+                logger.debug(f"Skipping {mod} (stdlib or local)")
+                continue
+
+        package_list = list(packages)
+        logger.debug(f"Final package list: {package_list}")
+        return package_list
+
+    def _merge_requirements(
+        self, auto_detected: List[str], explicit: List[str]
+    ) -> List[str]:
+        """Merge requirements, preferring explicit versions over auto-detected ones."""
+        import re
+
+        # Parse package names from requirements
+        def parse_requirement(req: str) -> tuple:
+            """Extract package name and version spec from requirement string."""
+            # Match patterns like: package==1.0, package>=1.0, package~=1.0, etc.
+            match = re.match(r"^([a-zA-Z0-9\-_]+)(.*)$", req)
+            if match:
+                return match.group(1).lower(), match.group(2)
+            return req.lower(), ""
+
+        # Build a map of explicit requirements
+        explicit_map = {}
+        for req in explicit:
+            pkg_name, version_spec = parse_requirement(req)
+            explicit_map[pkg_name] = req
+
+        # Build final requirements list
+        final_requirements = []
+        seen_packages = set()
+
+        # First, add all explicit requirements
+        for req in explicit:
+            final_requirements.append(req)
+            pkg_name, _ = parse_requirement(req)
+            seen_packages.add(pkg_name)
+
+        # Then add auto-detected requirements that don't conflict
+        for req in auto_detected:
+            pkg_name, _ = parse_requirement(req)
+            if pkg_name not in seen_packages:
+                final_requirements.append(req)
+                seen_packages.add(pkg_name)
+            else:
+                logger.debug(
+                    f"Skipping auto-detected {req}, using explicit version instead"
+                )
+
+        # Always ensure fleet-python is included
+        if "fleet-python" not in seen_packages:
+            final_requirements.append("fleet-python")
+
+        return sorted(final_requirements)
+
+    def _build_function_bundle(
+        self,
+        func: Callable,
+        src: str,
+        requirements: List[str],
+        extracted_code: Dict[str, str],
+        project_root: Path,
+        verifier_id: Optional[str] = None,
+        same_module_deps: List[str] = [],
+    ) -> bytes:
+        """Build a function bundle with statically extracted code."""
+
+        with tempfile.TemporaryDirectory() as temp_dir:
+            build_dir = Path(temp_dir) / "build"
+            build_dir.mkdir()
+
+            try:
+                # Create requirements.txt
+                requirements_file = build_dir / "requirements.txt"
+                # Ensure fleet-python is always included
+                if not requirements:
+                    requirements = ["fleet-python"]
+                elif "fleet-python" not in [r.split("==")[0].split(">=")[0] for r in requirements]:
+                    requirements.append("fleet-python")
+                requirements_file.write_text("\n".join(sorted(set(requirements))))
+
+                # Extract same-module dependencies
+                same_module_code = ""
+                if same_module_deps:
+                    # Read the module file that contains the verifier function
+                    mod_file = Path(func.__code__.co_filename)
+                    with open(mod_file, "r", encoding="utf-8") as f:
+                        module_content = f.read()
+
+                    # Extract the source code for each dependency
+                    for dep_name in same_module_deps:
+                        dep_src = self._extract_function_source(
+                            module_content, dep_name
+                        )
+                        if dep_src:
+                            same_module_code += f"\n{dep_src}\n"
+                            logger.debug(
+                                f"Extracted same-module dependency: {dep_name}"
+                            )
+
+                # Create verifier.py with the main function
+                verifier_file = build_dir / "verifier.py"
+                verifier_content = f"""# Auto-generated verifier module
+{same_module_code}
+{src}
+"""
+                verifier_file.write_text(verifier_content)
+
+                # Create local files with only extracted functions
+                for relative_path, code in extracted_code.items():
+                    dest_path = build_dir / relative_path
+                    dest_path.parent.mkdir(parents=True, exist_ok=True)
+
+                    extracted_content = f"""# Extracted module (static analysis)
+{code}
+"""
+                    dest_path.write_text(extracted_content)
+                    logger.debug(f"Created extracted file: {relative_path}")
+
+                    # Ensure __init__.py files exist
+                    self._ensure_init_files(Path(relative_path), build_dir)
+
+                # Create zip bundle
+                return self._create_zip_bundle(build_dir)
+
+            except Exception as e:
+                logger.error(f"Failed to build function bundle: {e}")
+                raise RuntimeError(f"Function bundle creation failed: {e}")
+
+    def _ensure_init_files(self, rel_path: Path, build_dir: Path):
+        """Ensure __init__.py files exist for all parent directories."""
+        current = rel_path.parent
+
+        while current != Path("."):
+            init_file = build_dir / current / "__init__.py"
+            if not init_file.exists():
+                init_file.parent.mkdir(parents=True, exist_ok=True)
+                init_file.write_text("# Auto-generated __init__.py")
+                logger.debug(f"Created __init__.py: {current}")
+            current = current.parent
+
+    def _create_zip_bundle(self, build_dir: Path) -> bytes:
+        """Create the final zip bundle in memory."""
+        zip_buffer = BytesIO()
+
+        with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zf:
+            for file_path in build_dir.rglob("*"):
+                if file_path.is_file():
+                    arcname = file_path.relative_to(build_dir)
+                    zf.write(file_path, arcname)
+
+        bundle_size = len(zip_buffer.getvalue())
+        logger.debug(f"Created function bundle ({bundle_size:,} bytes)")
+        return zip_buffer.getvalue()
+
+    def _extract_function_source(
+        self, module_content: str, function_name: str
+    ) -> Optional[str]:
+        """Extract the source code of a specific function from module content."""
+        try:
+            tree = ast.parse(module_content)
+
+            for node in ast.walk(tree):
+                if isinstance(node, ast.FunctionDef) and node.name == function_name:
+                    # Get the source lines for this function
+                    lines = module_content.split("\n")
+                    start_line = node.lineno - 1
+                    end_line = (
+                        node.end_lineno
+                        if hasattr(node, "end_lineno")
+                        else start_line + 1
+                    )
+
+                    # Extract the function lines
+                    func_lines = lines[start_line:end_line]
+
+                    # Find the minimum indentation (excluding empty lines)
+                    min_indent = float("inf")
+                    for line in func_lines:
+                        if line.strip():  # Non-empty line
+                            indent = len(line) - len(line.lstrip())
+                            min_indent = min(min_indent, indent)
+
+                    # Remove the common indentation
+                    if min_indent < float("inf"):
+                        func_lines = [
+                            line[min_indent:] if line.strip() else line
+                            for line in func_lines
+                        ]
+
+                    return "\n".join(func_lines)
+
+        except Exception as e:
+            logger.warning(f"Failed to extract function {function_name}: {e}")
+
+        return None
+
+    def _get_function_source_without_decorator(self, func: Callable) -> str:
+        """Get function source code without the @verifier decorator."""
+        source = inspect.getsource(func)
+        lines = source.split('\n')
+
+        # Find where the function definition starts
+        func_start = -1
+        for i, line in enumerate(lines):
+            if line.strip().startswith('def '):
+                func_start = i
+                break
+
+        if func_start == -1:
+            # Couldn't find function definition, return original
+            return source
+
+        # Return only from the function definition onward
+        func_lines = lines[func_start:]
+
+        # Remove common indentation
+        if func_lines:
+            # Find minimum indentation (excluding empty lines)
+            min_indent = float('inf')
+            for line in func_lines:
+                if line.strip():
+                    indent = len(line) - len(line.lstrip())
+                    min_indent = min(min_indent, indent)
+
+            # Remove the common indentation
+            if min_indent < float('inf'):
+                func_lines = [line[min_indent:] if line.strip() else line for line in func_lines]
+
+        return '\n'.join(func_lines)
+ return '\n'.join(func_lines)