fissionpy 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fissionpy/__init__.py ADDED
@@ -0,0 +1,3 @@
1
+ """Python 单体文件裂变拆分工具——基于 LibCST 将过大的单体文件按模块相关性拆分为多个模块文件。"""
2
+
3
+ __version__ = "0.1.0"
fissionpy/__main__.py ADDED
@@ -0,0 +1,6 @@
1
"""Entry point so the package can be run as ``python -m fissionpy``."""

from fissionpy.cli.app import app

if __name__ == "__main__":
    app()
File without changes
@@ -0,0 +1,393 @@
1
+ """SQLite database layer for fissionpy project-level code fission tool."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import hashlib
6
+ import json
7
+ import sqlite3
8
+ from typing import Any
9
+
10
+
11
# DDL for the fission database: tracked files, their top-level symbols,
# symbol-to-symbol dependency edges, per-file import statements, and the
# migration/import-update progress tables. Child tables cascade-delete with
# their parent rows (enforced only when PRAGMA foreign_keys is ON).
_SCHEMA = """\
CREATE TABLE IF NOT EXISTS files (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    path TEXT NOT NULL UNIQUE,
    hash TEXT NOT NULL,
    last_parsed REAL NOT NULL,
    status TEXT NOT NULL DEFAULT 'pending'
);

CREATE TABLE IF NOT EXISTS symbols (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    file_id INTEGER NOT NULL REFERENCES files(id) ON DELETE CASCADE,
    name TEXT NOT NULL,
    kind TEXT NOT NULL,
    line_start INTEGER NOT NULL,
    line_end INTEGER NOT NULL,
    source_text TEXT NOT NULL,
    UNIQUE(file_id, name, line_start)
);

CREATE TABLE IF NOT EXISTS dependencies (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    source_id INTEGER NOT NULL REFERENCES symbols(id) ON DELETE CASCADE,
    target_id INTEGER NOT NULL REFERENCES symbols(id) ON DELETE CASCADE,
    dep_type TEXT NOT NULL DEFAULT 'call',
    cross_file INTEGER NOT NULL DEFAULT 0
);

CREATE TABLE IF NOT EXISTS file_imports (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    source_file_id INTEGER NOT NULL REFERENCES files(id) ON DELETE CASCADE,
    import_type TEXT NOT NULL,
    module_path TEXT NOT NULL,
    imported_names TEXT NOT NULL,
    aliases TEXT,
    is_star_import INTEGER NOT NULL DEFAULT 0,
    line_number INTEGER NOT NULL
);

CREATE TABLE IF NOT EXISTS migration_progress (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    symbol_id INTEGER NOT NULL REFERENCES symbols(id) ON DELETE CASCADE,
    target_module TEXT NOT NULL,
    status TEXT NOT NULL DEFAULT 'pending',
    started_at REAL,
    completed_at REAL,
    UNIQUE(symbol_id, target_module)
);

CREATE TABLE IF NOT EXISTS import_updates (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    target_file_id INTEGER NOT NULL REFERENCES files(id) ON DELETE CASCADE,
    old_module_path TEXT NOT NULL,
    new_module_path TEXT NOT NULL,
    symbol_name TEXT NOT NULL,
    alias TEXT,
    status TEXT NOT NULL DEFAULT 'pending',
    applied_at REAL
);
"""
71
+
72
# Secondary indexes covering the lookup patterns used by this module:
# per-file scans, name lookups, dependency-endpoint joins, and status filters.
_INDEXES = """\
CREATE INDEX IF NOT EXISTS idx_symbols_file_id ON symbols(file_id);
CREATE INDEX IF NOT EXISTS idx_symbols_name ON symbols(name);
CREATE INDEX IF NOT EXISTS idx_dependencies_source_id ON dependencies(source_id);
CREATE INDEX IF NOT EXISTS idx_dependencies_target_id ON dependencies(target_id);
CREATE INDEX IF NOT EXISTS idx_dependencies_dep_type ON dependencies(dep_type);
CREATE INDEX IF NOT EXISTS idx_dependencies_cross_file ON dependencies(cross_file);
CREATE INDEX IF NOT EXISTS idx_file_imports_source_file_id ON file_imports(source_file_id);
CREATE INDEX IF NOT EXISTS idx_file_imports_module_path ON file_imports(module_path);
CREATE INDEX IF NOT EXISTS idx_migration_progress_symbol_id ON migration_progress(symbol_id);
CREATE INDEX IF NOT EXISTS idx_migration_progress_status ON migration_progress(status);
CREATE INDEX IF NOT EXISTS idx_import_updates_target_file_id ON import_updates(target_file_id);
CREATE INDEX IF NOT EXISTS idx_import_updates_status ON import_updates(status);
CREATE INDEX IF NOT EXISTS idx_files_status ON files(status);
"""
87
+
88
+
89
def get_connection(db_path: str) -> sqlite3.Connection:
    """Return a connection with foreign-key enforcement and dict-like rows."""
    connection = sqlite3.connect(db_path)
    # sqlite3.Row gives name-based column access used throughout this module.
    connection.row_factory = sqlite3.Row
    # Foreign keys are off by default in SQLite; enable cascade deletes.
    connection.execute("PRAGMA foreign_keys = ON")
    return connection
95
+
96
+
97
def init_db(db_path: str) -> None:
    """Create all tables and indexes, then close the connection."""
    conn = get_connection(db_path)
    try:
        for script in (_SCHEMA, _INDEXES):
            conn.executescript(script)
        conn.commit()
    finally:
        # Always release the connection, even if DDL execution fails.
        conn.close()
106
+
107
+
108
def get_or_create_file(conn: sqlite3.Connection, path: str, file_hash: str) -> int:
    """Return the ``files.id`` for *path*, inserting a new row if needed.

    For an existing path the stored hash and ``last_parsed`` timestamp are
    refreshed and the status is reset to ``'pending'``.

    Fixes vs. the previous version: the ``import time`` was buried after the
    early-return path; the update branch used SQL ``strftime`` (integer
    seconds) while the insert used ``time.time()`` (float) — both paths now
    record the same ``time.time()`` value; the unused ``hash`` column was
    dropped from the SELECT.
    """
    import time

    now = time.time()
    row = conn.execute(
        "SELECT id FROM files WHERE path = ?", (path,)
    ).fetchone()
    if row is not None:
        file_id: int = row["id"]
        conn.execute(
            "UPDATE files SET hash = ?, last_parsed = ?, status = 'pending' WHERE id = ?",
            (file_hash, now, file_id),
        )
        return file_id
    cur = conn.execute(
        "INSERT INTO files (path, hash, last_parsed, status) VALUES (?, ?, ?, 'pending')",
        (path, file_hash, now),
    )
    return cur.lastrowid  # type: ignore[return-value]
126
+
127
+
128
def clear_file_data(conn: sqlite3.Connection, file_id: int) -> None:
    """Delete symbols, dependencies, file_imports, and migration_progress for a file."""
    ids = [row["id"] for row in conn.execute(
        "SELECT id FROM symbols WHERE file_id = ?", (file_id,)
    )]
    if ids:
        marks = ",".join("?" * len(ids))
        # Remove edges touching any of this file's symbols from either end.
        conn.execute(
            f"DELETE FROM dependencies WHERE source_id IN ({marks}) OR target_id IN ({marks})",
            ids * 2,
        )
        conn.execute(
            f"DELETE FROM migration_progress WHERE symbol_id IN ({marks})",
            ids,
        )
    conn.execute("DELETE FROM file_imports WHERE source_file_id = ?", (file_id,))
    conn.execute("DELETE FROM symbols WHERE file_id = ?", (file_id,))
146
+
147
+
148
def insert_symbol(
    conn: sqlite3.Connection,
    file_id: int,
    name: str,
    kind: str,
    line_start: int,
    line_end: int,
    source_text: str,
) -> int:
    """Insert one row into ``symbols`` and return its new id."""
    values = (file_id, name, kind, line_start, line_end, source_text)
    cursor = conn.execute(
        "INSERT INTO symbols (file_id, name, kind, line_start, line_end, source_text) VALUES (?, ?, ?, ?, ?, ?)",
        values,
    )
    return cursor.lastrowid  # type: ignore[return-value]
163
+
164
+
165
def insert_dependency(
    conn: sqlite3.Connection,
    source_id: int,
    target_id: int,
    dep_type: str,
    cross_file: int = 0,
) -> int:
    """Insert one row into ``dependencies`` and return its new id."""
    cursor = conn.execute(
        "INSERT INTO dependencies (source_id, target_id, dep_type, cross_file) VALUES (?, ?, ?, ?)",
        (source_id, target_id, dep_type, cross_file),
    )
    new_id: int = cursor.lastrowid  # type: ignore[assignment]
    return new_id
178
+
179
+
180
def insert_file_import(
    conn: sqlite3.Connection,
    source_file_id: int,
    import_type: str,
    module_path: str,
    imported_names: str,
    aliases: str | None,
    is_star_import: int,
    line_number: int,
) -> int:
    """Insert one row into ``file_imports`` and return its new id."""
    row = (
        source_file_id,
        import_type,
        module_path,
        imported_names,
        aliases,
        is_star_import,
        line_number,
    )
    cursor = conn.execute(
        "INSERT INTO file_imports (source_file_id, import_type, module_path, imported_names, aliases, is_star_import, line_number) VALUES (?, ?, ?, ?, ?, ?, ?)",
        row,
    )
    return cursor.lastrowid  # type: ignore[return-value]
196
+
197
+
198
+ def _row_to_dict(row: sqlite3.Row | None) -> dict[str, Any] | None:
199
+ """Convert a Row to a dict, returning None if row is None."""
200
+ if row is None:
201
+ return None
202
+ return dict(row)
203
+
204
+
205
def get_symbols_by_file(conn: sqlite3.Connection, file_id: int) -> list[dict[str, Any]]:
    """Return every symbol row belonging to *file_id*, each as a dict."""
    cursor = conn.execute("SELECT * FROM symbols WHERE file_id = ?", (file_id,))
    return [dict(row) for row in cursor.fetchall()]
211
+
212
+
213
def get_dependencies_by_file(conn: sqlite3.Connection, file_id: int) -> list[dict[str, Any]]:
    """Return all dependencies where source or target symbol belongs to the given file."""
    query = """SELECT d.* FROM dependencies d
           JOIN symbols s ON d.source_id = s.id
           JOIN symbols t ON d.target_id = t.id
           WHERE s.file_id = ? OR t.file_id = ?"""
    return [dict(row) for row in conn.execute(query, (file_id, file_id)).fetchall()]
223
+
224
+
225
def get_file_imports_by_file(conn: sqlite3.Connection, file_id: int) -> list[dict[str, Any]]:
    """Return every file_imports row recorded for *file_id*, each as a dict."""
    cursor = conn.execute(
        "SELECT * FROM file_imports WHERE source_file_id = ?", (file_id,)
    )
    return [dict(row) for row in cursor]
231
+
232
+
233
def get_symbol_by_name(conn: sqlite3.Connection, file_id: int, name: str) -> dict[str, Any] | None:
    """Return the first symbol matching name in the given file, or None."""
    match = conn.execute(
        "SELECT * FROM symbols WHERE file_id = ? AND name = ? LIMIT 1",
        (file_id, name),
    ).fetchone()
    if match is None:
        return None
    return dict(match)
240
+
241
+
242
def get_file_by_path(conn: sqlite3.Connection, path: str) -> dict[str, Any] | None:
    """Return the files row stored under *path* as a dict, or None."""
    found = conn.execute(
        "SELECT * FROM files WHERE path = ?", (path,)
    ).fetchone()
    if found is None:
        return None
    return dict(found)
248
+
249
+
250
def compute_file_hash(file_path: str) -> str:
    """Compute the SHA256 hex digest of a file's contents, streamed in 8 KiB chunks."""
    digest = hashlib.sha256()
    with open(file_path, "rb") as handle:
        while chunk := handle.read(8192):
            digest.update(chunk)
    return digest.hexdigest()
257
+
258
+
259
def save_analysis(
    conn: sqlite3.Connection,
    file_path: str,
    file_hash: str,
    symbols: list[dict[str, Any]],
    dependencies: list[dict[str, Any]],
    file_imports: list[dict[str, Any]],
    force: bool = False,
) -> int:
    """Persist a full analysis for one file.

    Upserts the file row, wipes any previously stored data for it, then
    inserts the supplied symbols, dependencies, and file imports. When
    *force* is False and the stored hash already equals *file_hash*, nothing
    is rewritten and the existing file id is returned immediately.
    """
    cached = get_file_by_path(conn, file_path)
    if cached is not None and not force and cached["hash"] == file_hash:
        return cached["id"]

    file_id = get_or_create_file(conn, file_path, file_hash)
    clear_file_data(conn, file_id)

    # Map the caller-supplied (old) symbol ids to the freshly inserted ids
    # so dependency endpoints can be remapped below.
    id_remap: dict[int, int] = {}
    for record in symbols:
        inserted_id = insert_symbol(
            conn,
            file_id,
            record["name"],
            record["kind"],
            record["line_start"],
            record["line_end"],
            record["source_text"],
        )
        previous_id = record.get("id")
        if previous_id is not None:
            id_remap[previous_id] = inserted_id

    for edge in dependencies:
        raw_src = edge.get("source_id")
        raw_dst = edge.get("target_id")
        # Skip edges with a missing endpoint; pass through ids that were not
        # part of this file's remap (e.g. symbols in other files).
        if raw_src is None or raw_dst is None:
            continue
        insert_dependency(
            conn,
            id_remap.get(raw_src, raw_src),
            id_remap.get(raw_dst, raw_dst),
            edge["dep_type"],
            edge.get("cross_file", 0),
        )

    for entry in file_imports:
        insert_file_import(
            conn,
            file_id,
            entry["import_type"],
            entry["module_path"],
            entry["imported_names"],
            entry.get("aliases"),
            entry.get("is_star_import", 0),
            entry["line_number"],
        )

    conn.commit()
    return file_id
317
+
318
+
319
def mark_file_deleted(conn: sqlite3.Connection, file_id: int) -> None:
    """Flag the given file row as removed and commit immediately."""
    statement = "UPDATE files SET status = 'deleted' WHERE id = ?"
    conn.execute(statement, (file_id,))
    conn.commit()
323
+
324
+
325
def get_all_symbols_across_project(conn: sqlite3.Connection) -> list[dict[str, Any]]:
    """Return all symbols from files whose status is 'parsed', with their file path."""
    query = """SELECT s.*, f.path AS file_path FROM symbols s
           JOIN files f ON s.file_id = f.id
           WHERE f.status = 'parsed'"""
    return [dict(row) for row in conn.execute(query).fetchall()]
333
+
334
+
335
def get_files_importing_symbol(
    conn: sqlite3.Connection,
    symbol_name: str,
    source_file_id: int,
) -> list[dict[str, Any]]:
    """Find all file_imports that reference a symbol from a specific source file.

    Matches file_imports where the module_path corresponds to the source file
    and the symbol_name appears in the imported_names JSON array or is a star import.
    """
    source_file = conn.execute(
        "SELECT path FROM files WHERE id = ?", (source_file_id,)
    ).fetchone()
    if source_file is None:
        # Unknown file id: nothing can import from it.
        return []

    # Imported lazily to avoid a circular import between the db layer and
    # the path helpers (NOTE(review): presumed reason — confirm).
    from fissionpy.common.paths import file_path_to_module_path
    from fissionpy.common.paths import find_project_root

    project_root = find_project_root(source_file["path"])
    if project_root is None:
        module_path = ""
    else:
        module_path = file_path_to_module_path(source_file["path"], project_root)

    # Build every module-path spelling an importer might have used:
    # the raw path form, the dotted form, and every dotted suffix
    # (e.g. "a.b.c" also matches "b.c" and "c" for relative-style imports).
    python_import = module_path.replace("/", ".")
    import_candidates = {module_path, python_import}
    for candidate in list(import_candidates):
        parts = candidate.replace("/", ".").split(".")
        for i in range(1, len(parts)):
            import_candidates.add(".".join(parts[i:]))
    placeholders = ",".join("?" * len(import_candidates))
    # imported_names is stored as a JSON array, so a quoted-name LIKE is a
    # cheap pre-filter; exact membership is re-checked with json.loads below.
    like_pattern = f'%"{symbol_name}"%'
    # Also match absolute paths that merely end with the dotted module path.
    module_suffix_like = f"%.{python_import}" if python_import else None
    extra_sql = ""
    extra_params: list[str] = []
    if module_suffix_like and module_suffix_like not in import_candidates:
        extra_sql = f" OR fi.module_path LIKE ?"
        extra_params = [module_suffix_like]
    rows = conn.execute(
        f"""SELECT fi.*, f.path AS source_file_path FROM file_imports fi
        JOIN files f ON fi.source_file_id = f.id
        WHERE (fi.module_path IN ({placeholders}){extra_sql})
          AND (fi.is_star_import = 1
               OR fi.imported_names LIKE ?)""",
        list(import_candidates) + extra_params + [like_pattern],
    ).fetchall()
    results = []
    for r in rows:
        d = dict(r)
        if not d["is_star_import"]:
            # Confirm the LIKE hit is a real member of the JSON name list;
            # unparseable/odd values are treated as "does not import it".
            try:
                names = json.loads(d["imported_names"])
            except (json.JSONDecodeError, TypeError):
                names = []
            if symbol_name not in names:
                continue
        results.append(d)
    return results
@@ -0,0 +1,212 @@
1
+ """Intra-file dependency analysis using LibCST ScopeProvider."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import dataclasses
6
+ from pathlib import Path
7
+
8
+ import libcst as cst
9
+ import libcst.metadata as cst_meta
10
+
11
+
12
@dataclasses.dataclass
class Dependency:
    """A dependency relationship between two top-level symbols in the same file."""

    # Name of the top-level symbol that uses the target.
    source_name: str
    # Name of the top-level symbol being depended on.
    target_name: str
    # Relationship kind: "call", "decorator", "inherit", or "annotation"
    # (the values emitted by analyze_dependencies in this module).
    dep_type: str
19
+
20
+
21
def _name_from_node(node: cst.BaseExpression) -> str | None:
    """Extract a simple name string from a CST expression node.

    Returns the value of a bare ``Name``; for an ``Attribute`` whose base is
    a ``Name`` (e.g. ``mod.attr``), returns the base name; otherwise None.
    """
    if isinstance(node, cst.Name):
        return node.value
    if isinstance(node, cst.Attribute) and isinstance(node.value, cst.Name):
        return node.value.value
    return None
29
+
30
+
31
def _collect_names_from_annotation(node: cst.Annotation | cst.BaseExpression) -> set[str]:
    """Recursively collect all Name references from a type annotation."""
    found: set[str] = set()

    def visit(expr: cst.BaseExpression) -> None:
        # Simple name: List, MyClass, ...
        if isinstance(expr, cst.Name):
            found.add(expr.value)
        # Dotted name: record only the base (e.g. "typing" of typing.List).
        elif isinstance(expr, cst.Attribute):
            if isinstance(expr.value, cst.Name):
                found.add(expr.value.value)
        # Generic subscript: Outer[Inner, ...] — recurse into both sides.
        elif isinstance(expr, cst.Subscript):
            visit(expr.value)
            for element in expr.slice:
                if isinstance(element, cst.SubscriptElement) and isinstance(element.slice, cst.Index):
                    visit(element.slice.value)
        # PEP 604 unions: A | B.
        elif isinstance(expr, cst.BinaryOperation):
            visit(expr.left)
            visit(expr.right)

    visit(node.annotation if isinstance(node, cst.Annotation) else node)
    return found
55
+
56
+
57
def _find_global_scope(scopes: dict[cst.CSTNode, cst_meta.Scope]) -> cst_meta.GlobalScope | None:
    """Locate the GlobalScope instance in the resolved scope map, if any."""
    return next(
        (scope for scope in scopes.values() if isinstance(scope, cst_meta.GlobalScope)),
        None,
    )
63
+
64
+
65
def _collect_param_annotations(func: cst.FunctionDef) -> set[str]:
    """Collect all name references from a function's parameter annotations."""
    names: set[str] = set()
    params = func.params
    # Positional, positional-only, and keyword-only parameters all carry
    # annotations the same way; handle them uniformly.
    ordinary = list(params.params) + list(params.posonly_params) + list(params.kwonly_params)
    for param in ordinary:
        if param.annotation is not None:
            names |= _collect_names_from_annotation(param.annotation)
    # *args may be a bare star (ParamStar), so check it is a real Param.
    star = params.star_arg
    if isinstance(star, cst.Param) and star.annotation is not None:
        names |= _collect_names_from_annotation(star.annotation)
    kwarg = params.star_kwarg
    if kwarg is not None and kwarg.annotation is not None:
        names |= _collect_names_from_annotation(kwarg.annotation)
    return names
83
+
84
+
85
def _build_func_scope_map(
    global_scope: cst_meta.GlobalScope,
    scopes: dict[cst.CSTNode, cst_meta.Scope],
) -> dict[int, str]:
    """Map FunctionScope/ClassScope id to the function/class name.

    Uses the fact that FunctionDef.params maps to its FunctionScope
    in the scopes dict, and the assignment node carries the name.
    """
    result: dict[int, str] = {}
    for a in global_scope.assignments:
        if isinstance(a.node, cst.FunctionDef):
            # The Parameters node of a FunctionDef resolves to the function's
            # own FunctionScope in the ScopeProvider output.
            params_scope = scopes.get(a.node.params)
            if params_scope is not None:
                result[id(params_scope)] = a.name
        elif isinstance(a.node, cst.ClassDef):
            # ClassDef has no single node keyed to its ClassScope, so probe
            # the class body's direct children until one resolves to a
            # ClassScope; the first hit identifies the class's scope.
            # (ClassDef.body is normally an IndentedBlock; the isinstance
            # check also tolerates a plain list of statements.)
            body = a.node.body
            children = body if isinstance(body, list) else getattr(body, 'body', [])
            for child in children:
                child_scope = scopes.get(child)
                if child_scope is not None and isinstance(child_scope, cst_meta.ClassScope):
                    result[id(child_scope)] = a.name
                    break
    return result
109
+
110
+
111
def _resolve_source_name(
    scope: cst_meta.Scope,
    func_scope_map: dict[int, str],
    global_scope: cst_meta.GlobalScope,
) -> str | None:
    """Walk up the scope chain to the owning top-level function/class name.

    Returns None for references made directly at module level or from a
    scope whose chain never hits a mapped top-level definition.
    """
    node = scope
    while node is not None:
        if isinstance(node, cst_meta.GlobalScope):
            return None
        owner = func_scope_map.get(id(node))
        if owner is not None:
            return owner
        node = getattr(node, 'parent', None)
    return None
124
+
125
+
126
def analyze_dependencies(source_code: str, top_level_names: set[str]) -> list[Dependency]:
    """Analyze intra-file dependencies between top-level symbols.

    Phase 1: Direct AST traversal for decorators, base classes, and annotations.
    Phase 2: Scope-based access resolution for calls and references.

    Only names present in *top_level_names* participate; the result is a
    sorted, de-duplicated list of Dependency records.
    """
    module = cst.parse_module(source_code)
    wrapper = cst_meta.MetadataWrapper(module)
    scopes = wrapper.resolve(cst_meta.ScopeProvider)

    global_scope = _find_global_scope(scopes)
    if global_scope is None:
        # Degenerate module (e.g. empty source): nothing to analyze.
        return []

    # Accumulate (source, target, dep_type) triples; the set de-duplicates.
    deps: set[tuple[str, str, str]] = set()

    # ---- Phase 1: syntactic relationships visible on top-level statements.
    for stmt in module.body:
        source_name: str | None = None

        if isinstance(stmt, cst.FunctionDef):
            source_name = stmt.name.value
            for decorator in stmt.decorators:
                name = _name_from_node(decorator.decorator)
                if name and name in top_level_names:
                    deps.add((source_name, name, "decorator"))
            # Return annotation plus every parameter annotation.
            if stmt.returns is not None:
                for ann_name in _collect_names_from_annotation(stmt.returns):
                    if ann_name in top_level_names:
                        deps.add((source_name, ann_name, "annotation"))
            for ann_name in _collect_param_annotations(stmt):
                if ann_name in top_level_names:
                    deps.add((source_name, ann_name, "annotation"))

        elif isinstance(stmt, cst.ClassDef):
            source_name = stmt.name.value
            for decorator in stmt.decorators:
                name = _name_from_node(decorator.decorator)
                if name and name in top_level_names:
                    deps.add((source_name, name, "decorator"))
            for base in stmt.bases:
                name = _name_from_node(base.value)
                if name and name in top_level_names:
                    deps.add((source_name, name, "inherit"))
                # Base produced by a call, e.g. class C(make_base(...)): ...
                if isinstance(base.value, cst.Call) and isinstance(base.value.func, cst.Name):
                    if base.value.func.value in top_level_names:
                        deps.add((source_name, base.value.func.value, "inherit"))

        elif isinstance(stmt, cst.SimpleStatementLine):
            # Top-level annotated assignments: x: SomeType = ...
            for node in stmt.body:
                if isinstance(node, cst.AnnAssign) and isinstance(node.target, cst.Name):
                    source_name = node.target.value
                    if node.annotation is not None:
                        for ann_name in _collect_names_from_annotation(node.annotation):
                            if ann_name in top_level_names:
                                deps.add((source_name, ann_name, "annotation"))

    # ---- Phase 2: scope-resolved accesses (calls and other references).
    func_scope_map = _build_func_scope_map(global_scope, scopes)

    for a in global_scope.assignments:
        target_name = a.name
        if target_name not in top_level_names:
            continue
        for ref in a.references:
            source_name = _resolve_source_name(ref.scope, func_scope_map, global_scope)
            # Skip module-level references and self-references.
            if source_name is None or source_name == target_name:
                continue
            # Phase 1 already classified this pair more specifically; a
            # decorator/inherit/annotation edge wins over a generic one.
            if (source_name, target_name, "decorator") in deps:
                continue
            if (source_name, target_name, "inherit") in deps:
                continue
            if (source_name, target_name, "annotation") in deps:
                continue
            # NOTE(review): relies on Access.is_type_hint / Access.is_annotation
            # attributes of libcst's scope metadata — confirm they exist in the
            # pinned libcst version.
            if ref.is_type_hint or ref.is_annotation:
                deps.add((source_name, target_name, "annotation"))
            else:
                deps.add((source_name, target_name, "call"))

    return [
        Dependency(source_name=s, target_name=t, dep_type=d)
        for s, t, d in sorted(deps)
    ]
207
+
208
+
209
def analyze_file_dependencies(file_path: str, top_level_names: set[str]) -> list[Dependency]:
    """Read a Python file (UTF-8) and analyze its intra-file dependencies."""
    return analyze_dependencies(
        Path(file_path).read_text(encoding="utf-8"),
        top_level_names,
    )