bubble_analysis-0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bubble/__init__.py +3 -0
- bubble/cache.py +207 -0
- bubble/cli.py +470 -0
- bubble/config.py +52 -0
- bubble/detectors.py +90 -0
- bubble/enums.py +65 -0
- bubble/extractor.py +829 -0
- bubble/formatters.py +887 -0
- bubble/integrations/__init__.py +92 -0
- bubble/integrations/base.py +98 -0
- bubble/integrations/cli_scripts/__init__.py +49 -0
- bubble/integrations/cli_scripts/cli.py +108 -0
- bubble/integrations/cli_scripts/detector.py +149 -0
- bubble/integrations/django/__init__.py +63 -0
- bubble/integrations/django/cli.py +111 -0
- bubble/integrations/django/detector.py +331 -0
- bubble/integrations/django/semantics.py +40 -0
- bubble/integrations/fastapi/__init__.py +57 -0
- bubble/integrations/fastapi/cli.py +110 -0
- bubble/integrations/fastapi/detector.py +176 -0
- bubble/integrations/fastapi/semantics.py +14 -0
- bubble/integrations/flask/__init__.py +57 -0
- bubble/integrations/flask/cli.py +110 -0
- bubble/integrations/flask/detector.py +191 -0
- bubble/integrations/flask/semantics.py +19 -0
- bubble/integrations/formatters.py +268 -0
- bubble/integrations/generic/__init__.py +13 -0
- bubble/integrations/generic/config.py +106 -0
- bubble/integrations/generic/detector.py +346 -0
- bubble/integrations/generic/frameworks.py +145 -0
- bubble/integrations/models.py +68 -0
- bubble/integrations/queries.py +481 -0
- bubble/loader.py +118 -0
- bubble/models.py +397 -0
- bubble/propagation.py +737 -0
- bubble/protocols.py +104 -0
- bubble/queries.py +627 -0
- bubble/results.py +211 -0
- bubble/stubs.py +89 -0
- bubble/timing.py +144 -0
- bubble_analysis-0.2.0.dist-info/METADATA +264 -0
- bubble_analysis-0.2.0.dist-info/RECORD +46 -0
- bubble_analysis-0.2.0.dist-info/WHEEL +5 -0
- bubble_analysis-0.2.0.dist-info/entry_points.txt +2 -0
- bubble_analysis-0.2.0.dist-info/licenses/LICENSE +21 -0
- bubble_analysis-0.2.0.dist-info/top_level.txt +1 -0
bubble/__init__.py
ADDED
bubble/cache.py
ADDED
@@ -0,0 +1,207 @@
"""File-level caching for extraction results."""

from __future__ import annotations

import hashlib
import sqlite3
from dataclasses import asdict
from pathlib import Path
from typing import TYPE_CHECKING, Any

import msgpack  # type: ignore[import-untyped]

from bubble import __version__
from bubble.models import (
    CallSite,
    CatchSite,
    ClassDef,
    Entrypoint,
    FunctionDef,
    GlobalHandler,
    ImportInfo,
    RaiseSite,
)

if TYPE_CHECKING:
    from bubble.extractor import FileExtraction

CACHE_VERSION = "3"
CACHE_FILENAME = "cache.sqlite"


class FileCache:
    """SQLite-backed cache for file extraction results."""

    def __init__(self, cache_dir: Path) -> None:
        self.cache_dir = cache_dir
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self.db_path = cache_dir / CACHE_FILENAME
        self.db = self._open_db()

    def _open_db(self) -> sqlite3.Connection:
        db = sqlite3.connect(self.db_path, check_same_thread=False)
        db.execute("PRAGMA journal_mode=WAL")
        db.execute("PRAGMA synchronous=NORMAL")

        db.executescript("""
            CREATE TABLE IF NOT EXISTS cache_meta (
                key TEXT PRIMARY KEY,
                value TEXT
            );
            CREATE TABLE IF NOT EXISTS file_cache (
                file_path TEXT PRIMARY KEY,
                mtime_ns INTEGER,
                size INTEGER,
                content_hash TEXT,
                extraction BLOB
            );
            CREATE INDEX IF NOT EXISTS idx_file_cache_mtime ON file_cache(mtime_ns);
        """)

        if not self._validate_version(db):
            self._clear(db)
            self._set_version(db)

        return db

    def _validate_version(self, db: sqlite3.Connection) -> bool:
        row = db.execute("SELECT value FROM cache_meta WHERE key = 'version'").fetchone()
        if row is None or row[0] != CACHE_VERSION:
            return False

        row = db.execute("SELECT value FROM cache_meta WHERE key = 'flow_version'").fetchone()
        if row is None or row[0] != __version__:
            return False

        return True

    def _set_version(self, db: sqlite3.Connection) -> None:
        db.execute(
            "INSERT OR REPLACE INTO cache_meta (key, value) VALUES ('version', ?)",
            (CACHE_VERSION,),
        )
        db.execute(
            "INSERT OR REPLACE INTO cache_meta (key, value) VALUES ('flow_version', ?)",
            (__version__,),
        )
        db.commit()

    def _clear(self, db: sqlite3.Connection) -> None:
        db.execute("DELETE FROM file_cache")
        db.execute("DELETE FROM cache_meta")
        db.commit()

    def get(self, file_path: Path) -> FileExtraction | None:
        """Get cached extraction if still valid."""
        try:
            stat = file_path.stat()
        except OSError:
            return None

        row = self.db.execute(
            "SELECT mtime_ns, size, extraction FROM file_cache WHERE file_path = ?",
            (str(file_path),),
        ).fetchone()

        if row is None:
            return None

        cached_mtime, cached_size, extraction_blob = row
        if stat.st_mtime_ns != cached_mtime or stat.st_size != cached_size:
            return None

        return self._deserialize(extraction_blob)

    def put(self, file_path: Path, extraction: FileExtraction) -> None:
        """Cache an extraction result."""
        try:
            stat = file_path.stat()
            content_hash = hashlib.sha256(file_path.read_bytes()).hexdigest()
        except OSError:
            return

        self.db.execute(
            """INSERT OR REPLACE INTO file_cache
            (file_path, mtime_ns, size, content_hash, extraction)
            VALUES (?, ?, ?, ?, ?)""",
            (
                str(file_path),
                stat.st_mtime_ns,
                stat.st_size,
                content_hash,
                self._serialize(extraction),
            ),
        )
        self.db.commit()

    def _serialize(self, extraction: FileExtraction) -> bytes:
        def class_to_dict(c: ClassDef) -> dict[str, Any]:
            d = asdict(c)
            d["abstract_methods"] = list(c.abstract_methods)
            return d

        data = {
            "functions": [asdict(f) for f in extraction.functions],
            "classes": [class_to_dict(c) for c in extraction.classes],
            "raise_sites": [asdict(r) for r in extraction.raise_sites],
            "catch_sites": [asdict(c) for c in extraction.catch_sites],
            "call_sites": [asdict(c) for c in extraction.call_sites],
            "imports": [asdict(i) for i in extraction.imports],
            "entrypoints": [asdict(e) for e in extraction.entrypoints],
            "global_handlers": [asdict(g) for g in extraction.global_handlers],
            "import_map": extraction.import_map,
            "return_types": extraction.return_types,
            "detected_frameworks": list(extraction.detected_frameworks),
        }
        return msgpack.packb(data)  # type: ignore[no-any-return]

    def _deserialize(self, blob: bytes) -> FileExtraction:
        """Deserialize msgpack blob to FileExtraction. Types from msgpack are dynamic."""
        from bubble.extractor import FileExtraction as FE

        raw: Any = msgpack.unpackb(blob)
        data: dict[str, list[dict[str, Any]] | dict[str, str]] = raw
        result = FE()

        funcs: list[dict[str, Any]] = data["functions"]  # type: ignore[assignment]
        result.functions = [FunctionDef(**f) for f in funcs]

        classes: list[dict[str, Any]] = data["classes"]  # type: ignore[assignment]
        for c in classes:
            if "abstract_methods" in c:
                c["abstract_methods"] = set(c["abstract_methods"])
        result.classes = [ClassDef(**c) for c in classes]

        raises: list[dict[str, Any]] = data["raise_sites"]  # type: ignore[assignment]
        result.raise_sites = [RaiseSite(**r) for r in raises]

        catches: list[dict[str, Any]] = data["catch_sites"]  # type: ignore[assignment]
        result.catch_sites = [CatchSite(**c) for c in catches]

        calls: list[dict[str, Any]] = data["call_sites"]  # type: ignore[assignment]
        result.call_sites = [CallSite(**c) for c in calls]

        imports: list[dict[str, Any]] = data["imports"]  # type: ignore[assignment]
        result.imports = [ImportInfo(**i) for i in imports]

        eps: list[dict[str, Any]] = data["entrypoints"]  # type: ignore[assignment]
        result.entrypoints = [Entrypoint(**e) for e in eps]

        handlers: list[dict[str, Any]] = data["global_handlers"]  # type: ignore[assignment]
        result.global_handlers = [GlobalHandler(**g) for g in handlers]

        result.import_map = data["import_map"]  # type: ignore[assignment]
        result.return_types = data["return_types"]  # type: ignore[assignment]
        result.detected_frameworks = set(data.get("detected_frameworks", []))  # type: ignore[arg-type]

        return result

    def stats(self) -> dict[str, int]:
        """Return cache statistics."""
        count = self.db.execute("SELECT COUNT(*) FROM file_cache").fetchone()[0]
        size = self.db_path.stat().st_size if self.db_path.exists() else 0
        return {"file_count": count, "size_bytes": size}

    def close(self) -> None:
        """Close the database connection."""
        self.db.close()
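For orientation, here is a minimal, illustrative sketch of the get/put round trip exposed by FileCache. It is not part of the package: it assumes the wheel is installed, uses an empty FileExtraction() as a placeholder payload (the same no-argument construction _deserialize relies on), and keys the cache on the script's own file.

# Illustrative sketch (not part of the package).
from pathlib import Path

from bubble.cache import FileCache
from bubble.extractor import FileExtraction

cache = FileCache(Path(".flow"))          # opens/creates .flow/cache.sqlite in WAL mode

target = Path(__file__)                   # any existing source file can serve as a key
if (hit := cache.get(target)) is None:    # None if missing, stale, or unreadable
    hit = FileExtraction()                # placeholder; real extractions come from extractor.py
    cache.put(target, hit)                # records mtime_ns, size, sha256, and the msgpack blob

print(cache.stats())                      # e.g. {'file_count': 1, 'size_bytes': ...}
cache.close()

Note that invalidation is based on mtime_ns and size only; the sha256 content hash is stored but not consulted by get().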
bubble/cli.py
ADDED
@@ -0,0 +1,470 @@
"""Command-line interface for bubble analysis.

This module handles argument parsing only. Business logic lives in queries.py,
output formatting lives in formatters.py.
"""

from pathlib import Path
from typing import Annotated

import typer
from rich.console import Console

from bubble import formatters, queries, timing
from bubble.enums import OutputFormat, ResolutionMode
from bubble.extractor import extract_from_directory
from bubble.models import ProgramModel

HELP_TEXT = """Exception flow analysis for Python codebases.

**Quick start:**
```
bubble flask audit         # Check Flask routes for escaping exceptions
bubble fastapi audit       # Check FastAPI routes for escaping exceptions
bubble cli audit           # Check CLI scripts for escaping exceptions
bubble escapes <function>  # Deep dive into one function
```

**Typical workflow:**
```
bubble flask entrypoints   # See what Flask routes exist
bubble flask audit         # Find which have uncaught exceptions
bubble escapes <function>  # Investigate a specific one
bubble trace <function>    # Visualize the call tree
```

**Core commands (framework-agnostic):**
```
bubble raises <Exception>  # Where is this raised?
bubble escapes <function>  # What can escape from this function?
bubble callers <function>  # Who calls this?
bubble catches <Exception> # Where is this caught?
bubble trace <function>    # Call tree visualization
bubble exceptions          # Exception hierarchy
bubble stats               # Codebase statistics
```

**Framework-specific commands:**
```
bubble flask audit/entrypoints/routes-to
bubble fastapi audit/entrypoints/routes-to
bubble cli audit/entrypoints/scripts-to
```

**All commands support:** `-f json` for structured output
"""

app = typer.Typer(
    name="bubble",
    help=HELP_TEXT,
    no_args_is_help=True,
    rich_markup_mode="markdown",
)

console = Console()


@app.callback()
def main_callback(
    enable_timing: Annotated[
        bool,
        typer.Option(
            "--timing",
            help="Show timing breakdown for performance analysis",
        ),
    ] = False,
) -> None:
    """Flow: Exception flow analysis for Python codebases."""
    if enable_timing:
        timing.enable(console)


def _register_integration_subcommands() -> None:
    """Register integration CLI subcommands."""
    from bubble.integrations import get_registered_integrations, load_builtin_integrations

    load_builtin_integrations()

    for integration in get_registered_integrations():
        app.add_typer(integration.cli_app, name=integration.name)


_register_integration_subcommands()


def build_model(directory: Path, use_cache: bool = True) -> ProgramModel:
    """Build the program model from a directory."""
    with console.status(f"[bold blue]Analyzing[/bold blue] {directory.name}/..."):
        return extract_from_directory(directory, use_cache=use_cache)


@app.command()
def raises(
    exception_type: Annotated[str, typer.Argument(help="Exception type to search for")],
    directory: Annotated[
        Path, typer.Option("--directory", "-d", help="Directory to analyze")
    ] = Path("."),
    include_subclasses: Annotated[
        bool, typer.Option("--include-subclasses", "-s", help="Include subclasses")
    ] = False,
    output_format: Annotated[str, typer.Option("--format", "-f", help="Output format")] = "text",
    no_cache: Annotated[bool, typer.Option("--no-cache", help="Disable caching")] = False,
) -> None:
    """Find all places where an exception is raised."""
    directory = directory.resolve()
    model = build_model(directory, use_cache=not no_cache)
    result = queries.find_raises(model, exception_type, include_subclasses)
    formatters.raises(result, OutputFormat(output_format), directory, console)


@app.command()
def exceptions(
    directory: Annotated[
        Path, typer.Option("--directory", "-d", help="Directory to analyze")
    ] = Path("."),
    output_format: Annotated[str, typer.Option("--format", "-f", help="Output format")] = "text",
    no_cache: Annotated[bool, typer.Option("--no-cache", help="Disable caching")] = False,
) -> None:
    """List the exception hierarchy in the codebase."""
    directory = directory.resolve()
    model = build_model(directory, use_cache=not no_cache)
    result = queries.find_exceptions(model)
    formatters.exceptions(result, OutputFormat(output_format), directory, console)


@app.command()
def stats(
    directory: Annotated[
        Path, typer.Option("--directory", "-d", help="Directory to analyze")
    ] = Path("."),
    output_format: Annotated[str, typer.Option("--format", "-f", help="Output format")] = "text",
    no_cache: Annotated[bool, typer.Option("--no-cache", help="Disable caching")] = False,
) -> None:
    """Show statistics about the codebase."""
    directory = directory.resolve()
    model = build_model(directory, use_cache=not no_cache)
    result = queries.get_stats(model)
    formatters.stats(result, OutputFormat(output_format), console)


@app.command()
def callers(
    function_name: Annotated[str, typer.Argument(help="Function name to find callers of")],
    directory: Annotated[
        Path, typer.Option("--directory", "-d", help="Directory to analyze")
    ] = Path("."),
    output_format: Annotated[str, typer.Option("--format", "-f", help="Output format")] = "text",
    show_resolution: Annotated[
        bool, typer.Option("--show-resolution", "-r", help="Show resolution details")
    ] = False,
    no_cache: Annotated[bool, typer.Option("--no-cache", help="Disable caching")] = False,
) -> None:
    """Find all places where a function is called."""
    directory = directory.resolve()
    model = build_model(directory, use_cache=not no_cache)
    result = queries.find_callers(model, function_name)
    formatters.callers(result, OutputFormat(output_format), directory, console, show_resolution)


@app.command()
def escapes(
    function_name: Annotated[str, typer.Argument(help="Function or route to analyze")],
    directory: Annotated[
        Path, typer.Option("--directory", "-d", help="Directory to analyze")
    ] = Path("."),
    output_format: Annotated[str, typer.Option("--format", "-f", help="Output format")] = "text",
    no_cache: Annotated[bool, typer.Option("--no-cache", help="Disable caching")] = False,
    strict: Annotated[
        bool, typer.Option("--strict", help="High precision - only resolved calls")
    ] = False,
    aggressive: Annotated[
        bool, typer.Option("--aggressive", help="High recall - include fuzzy matches")
    ] = False,
) -> None:
    """Show which exceptions can escape from a function.

    This is the core (framework-agnostic) version. For framework-aware auditing,
    use the integration commands (e.g., flow flask audit).
    """
    from bubble.config import load_config

    directory = directory.resolve()
    config = load_config(directory)

    if strict:
        resolution_mode = ResolutionMode.STRICT
    elif aggressive:
        resolution_mode = ResolutionMode.AGGRESSIVE
    else:
        resolution_mode = ResolutionMode(config.resolution_mode)

    model = build_model(directory, use_cache=not no_cache)
    result = queries.find_escapes(model, function_name, resolution_mode=resolution_mode)
    formatters.escapes(result, OutputFormat(output_format), directory, console)


@app.command()
def catches(
    exception_type: Annotated[str, typer.Argument(help="Exception type to search for")],
    directory: Annotated[
        Path, typer.Option("--directory", "-d", help="Directory to analyze")
    ] = Path("."),
    include_subclasses: Annotated[
        bool, typer.Option("--include-subclasses", "-s", help="Include subclasses")
    ] = False,
    output_format: Annotated[str, typer.Option("--format", "-f", help="Output format")] = "text",
    no_cache: Annotated[bool, typer.Option("--no-cache", help="Disable caching")] = False,
) -> None:
    """Find all places where an exception type is caught."""
    directory = directory.resolve()
    model = build_model(directory, use_cache=not no_cache)
    result = queries.find_catches(model, exception_type, include_subclasses)
    formatters.catches(result, OutputFormat(output_format), directory, console)


@app.command()
def cache(
    action: Annotated[str, typer.Argument(help="Action: clear or stats")],
    directory: Annotated[
        Path, typer.Option("--directory", "-d", help="Directory to manage cache for")
    ] = Path("."),
) -> None:
    """Manage the extraction cache."""
    from bubble.cache import FileCache

    directory = directory.resolve()
    cache_dir = directory / ".flow"
    cache_file = cache_dir / "cache.sqlite"

    if action == "clear":
        if cache_file.exists():
            cache_file.unlink()
            console.print("[green]Cache cleared[/green]")
        else:
            console.print("[yellow]No cache to clear[/yellow]")

    elif action == "stats":
        if not cache_file.exists():
            console.print("[yellow]No cache exists[/yellow]")
            return

        fc = FileCache(cache_dir)
        stats_data = fc.stats()
        fc.close()
        formatters.cache_stats(stats_data["file_count"], stats_data["size_bytes"], console)

    else:
        console.print(f"[red]Unknown action: {action}[/red]")
        console.print("Valid actions: clear, stats")
        raise typer.Exit(1)


@app.command()
def trace(
    function_name: Annotated[str, typer.Argument(help="Function or route to trace")],
    directory: Annotated[
        Path, typer.Option("--directory", "-d", help="Directory to analyze")
    ] = Path("."),
    depth: Annotated[int, typer.Option("--depth", help="Maximum call depth")] = 10,
    show_all: Annotated[
        bool, typer.Option("--all", "-a", help="Show all calls, not just exception-raising paths")
    ] = False,
    output_format: Annotated[str, typer.Option("--format", "-f", help="Output format")] = "text",
    no_cache: Annotated[bool, typer.Option("--no-cache", help="Disable caching")] = False,
) -> None:
    """Visualize exception flow as a call tree."""
    directory = directory.resolve()
    model = build_model(directory, use_cache=not no_cache)
    result = queries.trace_function(model, function_name, depth, show_all)
    formatters.trace(result, OutputFormat(output_format), directory, console)


@app.command()
def subclasses(
    class_name: Annotated[str, typer.Argument(help="Base class name to find subclasses of")],
    directory: Annotated[
        Path, typer.Option("--directory", "-d", help="Directory to analyze")
    ] = Path("."),
    output_format: Annotated[str, typer.Option("--format", "-f", help="Output format")] = "text",
    no_cache: Annotated[bool, typer.Option("--no-cache", help="Disable caching")] = False,
) -> None:
    """Show all subclasses of a class and their abstract method implementations."""
    directory = directory.resolve()
    model = build_model(directory, use_cache=not no_cache)
    result = queries.find_subclasses(model, class_name)
    formatters.subclasses(result, OutputFormat(output_format), directory, console)


@app.command()
def init(
    directory: Annotated[
        Path, typer.Option("--directory", "-d", help="Directory to initialize")
    ] = Path("."),
) -> None:
    """Initialize .flow/ directory with detector templates."""
    directory = directory.resolve()
    flow_dir = directory / ".flow"
    detectors_dir = flow_dir / "detectors"

    if flow_dir.exists():
        console.print(f"[yellow].flow/ directory already exists at {flow_dir}[/yellow]")
        console.print("[dim]Delete it first if you want to reinitialize.[/dim]")
        raise typer.Exit(1)

    console.print(f"\n[bold]Initializing flow analysis for {directory.name}/[/bold]\n")

    with console.status("[bold blue]Analyzing codebase patterns...[/bold blue]"):
        model = build_model(directory, use_cache=False)

    result = queries.get_init_info(model, directory.name)
    formatters.init_result(result, console)

    flow_dir.mkdir(parents=True, exist_ok=True)
    detectors_dir.mkdir(parents=True, exist_ok=True)

    config_content = f"""# Flow analysis configuration for {directory.name}
version: "0.1"

# Frameworks detected (used for default detectors)
frameworks:
{chr(10).join(f" - {f.lower()}" for f in result.frameworks_detected) if result.frameworks_detected else " # none detected"}

# Directories to exclude from analysis
exclude:
 - __pycache__
 - .venv
 - venv
 - site-packages
 - node_modules
 - .git
 - dist
 - build
 - tests
 - test

# Base exception classes to track (add your custom exceptions here)
exception_bases:
 - Exception
"""

    (flow_dir / "config.yaml").write_text(config_content)
    console.print(" [green]Created[/green] .flow/config.yaml")

    example_detector = '''"""Example custom detector for project-specific patterns."""

from bubble.protocols import EntrypointDetector
from bubble.models import Entrypoint


class ExampleCeleryTaskDetector(EntrypointDetector):
    """Example: Detects Celery task decorators as entrypoints."""

    def detect(self, source: str, file_path: str) -> list[Entrypoint]:
        return []
'''

    (detectors_dir / "_example.py").write_text(example_detector)
    console.print(" [green]Created[/green] .flow/detectors/_example.py")

    readme_content = """# Custom Detectors

Create Python files here to detect project-specific patterns.
See _example.py for a template.
"""

    (detectors_dir / "README.md").write_text(readme_content)
    console.print(" [green]Created[/green] .flow/detectors/README.md")

    console.print()
    console.print("[bold green]Initialization complete![/bold green]")
    console.print()
    console.print("[dim]Next steps:[/dim]")
    console.print(" 1. Review .flow/config.yaml")
    console.print(
        " 2. Run 'flow flask entrypoints' or 'flow fastapi entrypoints' to verify detection"
    )
    console.print()


@app.command()
def stubs(
    action: Annotated[str, typer.Argument(help="Action: list, init, or validate")],
    library: Annotated[str | None, typer.Argument(help="Library name for init action")] = None,
    directory: Annotated[Path, typer.Option("--directory", "-d", help="Project directory")] = Path(
        "."
    ),
) -> None:
    """Manage exception stubs for external libraries."""
    import shutil

    from bubble.stubs import load_stubs, validate_stub_file

    directory = directory.resolve()
    builtin_dir = Path(__file__).parent / "stubs"
    user_dir = directory / ".flow" / "stubs"

    if action == "list":
        stub_library = load_stubs(directory)
        if not stub_library.stubs:
            console.print("[yellow]No stubs loaded[/yellow]")
            console.print()
            console.print("[dim]Built-in stubs available:[/dim]")
            if builtin_dir.exists():
                for yaml_file in sorted(builtin_dir.glob("*.yaml")):
                    console.print(f" - {yaml_file.stem}")
            return

        console.print("\n[bold]Loaded exception stubs:[/bold]\n")
        for module, functions in sorted(stub_library.stubs.items()):
            exc_count = sum(len(excs) for excs in functions.values())
            console.print(
                f" [cyan]{module}[/cyan]: {len(functions)} functions, {exc_count} exceptions"
            )
        console.print()

    elif action == "init":
        if not library:
            console.print("[red]Library name required for init action[/red]")
            console.print("[dim]Example: flow stubs init requests[/dim]")
            raise typer.Exit(1)

        source_file = builtin_dir / f"{library}.yaml"
        if not source_file.exists():
            console.print(f"[red]No built-in stub for '{library}'[/red]")
            console.print("[dim]Available stubs:[/dim]")
            if builtin_dir.exists():
                for yaml_file in sorted(builtin_dir.glob("*.yaml")):
                    console.print(f" - {yaml_file.stem}")
            raise typer.Exit(1)

        user_dir.mkdir(parents=True, exist_ok=True)
        dest_file = user_dir / f"{library}.yaml"
        shutil.copy(source_file, dest_file)
        console.print(f"[green]Copied {library}.yaml to .flow/stubs/[/green]")
        console.print("[dim]Edit this file to customize exception declarations.[/dim]")

    elif action == "validate":
        errors_found = False
        for stub_dir in [builtin_dir, user_dir]:
            if stub_dir.exists():
                for yaml_file in stub_dir.glob("*.yaml"):
                    errors = validate_stub_file(yaml_file)
                    if errors:
                        errors_found = True
                        console.print(f"[red]Errors in {yaml_file.name}:[/red]")
                        for error in errors:
                            console.print(f" - {error}")
                    else:
                        console.print(f"[green]v[/green] {yaml_file.name}")

        if not errors_found:
            console.print("\n[green]All stub files are valid[/green]")

    else:
        console.print(f"[red]Unknown action: {action}[/red]")
        console.print("Valid actions: list, init, validate")
        raise typer.Exit(1)


if __name__ == "__main__":
    app()
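Each subcommand above is thin glue over the same three calls: build the model, run a query, format the result. As a rough illustrative sketch (the function name "create_user" is hypothetical, and this assumes the wheel is installed with the signatures exactly as wired in the commands above), the escapes flow can also be driven without Typer:

# Illustrative sketch (not part of the package).
from pathlib import Path

from rich.console import Console

from bubble import formatters, queries
from bubble.enums import OutputFormat, ResolutionMode
from bubble.extractor import extract_from_directory

directory = Path(".").resolve()
model = extract_from_directory(directory, use_cache=True)   # same call build_model() wraps
result = queries.find_escapes(model, "create_user", resolution_mode=ResolutionMode.STRICT)
formatters.escapes(result, OutputFormat("json"), directory, Console())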