kopipasta 0.34.0-py3-none-any.whl → 0.36.0-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.


This version of kopipasta might be problematic.

kopipasta/cache.py CHANGED
@@ -6,32 +6,35 @@ from typing import List, Tuple
 # Define FileTuple for type hinting
 FileTuple = Tuple[str, bool, List[str] | None, str]
 
+
 def get_cache_file_path() -> Path:
     """Gets the cross-platform path to the cache file for the last selection."""
     cache_dir = Path.home() / ".cache" / "kopipasta"
     cache_dir.mkdir(parents=True, exist_ok=True)
     return cache_dir / "last_selection.json"
 
+
 def save_selection_to_cache(files_to_include: List[FileTuple]):
     """Saves the list of selected file relative paths to the cache."""
     cache_file = get_cache_file_path()
     relative_paths = sorted([os.path.relpath(f[0]) for f in files_to_include])
     try:
-        with open(cache_file, 'w', encoding='utf-8') as f:
+        with open(cache_file, "w", encoding="utf-8") as f:
             json.dump(relative_paths, f, indent=2)
     except IOError as e:
         print(f"\nWarning: Could not save selection to cache: {e}")
 
+
 def load_selection_from_cache() -> List[str]:
     """Loads the list of selected files from the cache file."""
     cache_file = get_cache_file_path()
     if not cache_file.exists():
         return []
     try:
-        with open(cache_file, 'r', encoding='utf-8') as f:
+        with open(cache_file, "r", encoding="utf-8") as f:
             paths = json.load(f)
         # Filter out paths that no longer exist
         return [p for p in paths if os.path.exists(p)]
     except (IOError, json.JSONDecodeError) as e:
         print(f"\nWarning: Could not load previous selection from cache: {e}")
-        return []
+        return []
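
The cache module above is a small save/load round-trip keyed on relative paths. A minimal usage sketch, assuming a hypothetical selection tuple; only the first tuple element (the path) is read by these functions, the other FileTuple fields are placeholders:

    from kopipasta.cache import save_selection_to_cache, load_selection_from_cache

    # Hypothetical FileTuple values; only the path is used by the cache code.
    selection = [("src/main.py", False, None, "full")]
    save_selection_to_cache(selection)      # writes ~/.cache/kopipasta/last_selection.json
    previous = load_selection_from_cache()  # e.g. ["src/main.py"], filtered to paths that still exist
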
kopipasta/file.py CHANGED
@@ -1,14 +1,14 @@
 import fnmatch
 import os
-from typing import List, Optional, Tuple
+from typing import List, Optional, Tuple, Set
 from pathlib import Path
 
 FileTuple = Tuple[str, bool, Optional[List[str]], str]
 
-# --- Cache for .gitignore patterns ---
-# Key: Directory path
-# Value: List of patterns
+# --- Caches ---
 _gitignore_cache: dict[str, list[str]] = {}
+_is_ignored_cache: dict[str, bool] = {}
+
 
 def _read_gitignore_patterns(gitignore_path: str) -> list[str]:
     """Reads patterns from a single .gitignore file and caches them."""
@@ -19,31 +19,88 @@ def _read_gitignore_patterns(gitignore_path: str) -> list[str]:
         return []
     patterns = []
     try:
-        with open(gitignore_path, 'r', encoding='utf-8') as f:
+        with open(gitignore_path, "r", encoding="utf-8") as f:
             for line in f:
                 stripped_line = line.strip()
-                if stripped_line and not stripped_line.startswith('#'):
+                if stripped_line and not stripped_line.startswith("#"):
                     patterns.append(stripped_line)
     except IOError:
         pass
     _gitignore_cache[gitignore_path] = patterns
     return patterns
 
-def is_ignored(path: str, default_ignore_patterns: list[str], project_root: Optional[str] = None) -> bool:
+
+def is_ignored(
+    path: str, default_ignore_patterns: list[str], project_root: Optional[str] = None
+) -> bool:
     """
-    Checks if a path should be ignored based on default patterns and .gitignore files.
-    Searches for .gitignore from the path's location up to the project_root.
+    Checks if a path should be ignored by splitting patterns into fast (basename)
+    and slow (full path) checks, with heavy caching and optimized inner loops.
     """
     path_abs = os.path.abspath(path)
+    if path_abs in _is_ignored_cache:
+        return _is_ignored_cache[path_abs]
+
+    parent_dir = os.path.dirname(path_abs)
+    if parent_dir != path_abs and _is_ignored_cache.get(parent_dir, False):
+        _is_ignored_cache[path_abs] = True
+        return True
+
     if project_root is None:
         project_root = os.getcwd()
     project_root_abs = os.path.abspath(project_root)
 
-    # --- Step 1: Gather all patterns from all relevant .gitignore files ---
-    all_patterns = set(default_ignore_patterns)
+    basename_patterns, path_patterns = get_all_patterns(
+        default_ignore_patterns, path_abs, project_root_abs
+    )
+
+    # --- Step 1: Fast check for basename patterns ---
+    path_basename = os.path.basename(path_abs)
+    for pattern in basename_patterns:
+        if fnmatch.fnmatch(path_basename, pattern):
+            _is_ignored_cache[path_abs] = True
+            return True
+
+    # --- Step 2: Optimized nested check for path patterns ---
+    try:
+        path_rel_to_root = os.path.relpath(path_abs, project_root_abs)
+    except ValueError:
+        _is_ignored_cache[path_abs] = False
+        return False
+
+    # Pre-calculate all path prefixes to check, avoiding re-joins in the loop.
+    path_parts = Path(path_rel_to_root).parts
+    path_prefixes = [os.path.join(*path_parts[:i + 1]) for i in range(1, len(path_parts) + 1)]
+
+    # Pre-process patterns to remove trailing slashes once.
+    processed_path_patterns = [p.rstrip("/") for p in path_patterns]
 
-    # Determine the directory to start searching for .gitignore files
-    search_start_dir = path_abs if os.path.isdir(path_abs) else os.path.dirname(path_abs)
+    for prefix in path_prefixes:
+        for pattern in processed_path_patterns:
+            if fnmatch.fnmatch(prefix, pattern):
+                _is_ignored_cache[path_abs] = True
+                return True
+
+    _is_ignored_cache[path_abs] = False
+    return False
+
+def get_all_patterns(default_ignore_patterns, path_abs, project_root_abs) -> Tuple[Set[str], Set[str]]:
+    """
+    Gathers all applicable ignore patterns, splitting them into two sets
+    for optimized checking: one for basenames, one for full paths.
+    """
+    basename_patterns = set()
+    path_patterns = set()
+
+    for p in default_ignore_patterns:
+        if "/" in p:
+            path_patterns.add(p)
+        else:
+            basename_patterns.add(p)
+
+    search_start_dir = (
+        path_abs if os.path.isdir(path_abs) else os.path.dirname(path_abs)
+    )
 
     current_dir = search_start_dir
     while True:
@@ -52,78 +109,56 @@ def is_ignored(path: str, default_ignore_patterns: list[str], project_root: Opti
 
         if patterns_from_file:
             gitignore_dir_rel = os.path.relpath(current_dir, project_root_abs)
-            if gitignore_dir_rel == '.': gitignore_dir_rel = ''
+            if gitignore_dir_rel == ".":
+                gitignore_dir_rel = ""
 
             for p in patterns_from_file:
-                # Patterns with a '/' are relative to the .gitignore file's location.
-                # We construct a new pattern relative to the project root.
-                if '/' in p:
-                    all_patterns.add(os.path.join(gitignore_dir_rel, p.lstrip('/')))
+                if "/" in p:
+                    # Path patterns are relative to the .gitignore file's location
+                    path_patterns.add(os.path.join(gitignore_dir_rel, p.lstrip("/")))
                 else:
-                    # Patterns without a '/' (e.g., `*.log`) can match anywhere.
-                    all_patterns.add(p)
+                    basename_patterns.add(p)
 
-        if not current_dir.startswith(project_root_abs) or current_dir == project_root_abs:
+        if (
+            not current_dir.startswith(project_root_abs)
+            or current_dir == project_root_abs
+        ):
             break
         parent = os.path.dirname(current_dir)
-        if parent == current_dir: break
+        if parent == current_dir:
+            break
        current_dir = parent
+    return basename_patterns, path_patterns
 
-    # --- Step 2: Check the path and its parents against the patterns ---
-    try:
-        path_rel_to_root = os.path.relpath(path_abs, project_root_abs)
-    except ValueError:
-        return False # Path is outside the project root
-
-    path_parts = Path(path_rel_to_root).parts
-
-    for pattern in all_patterns:
-        # Check against basename for simple wildcards (e.g., `*.log`, `__pycache__`)
-        # This is a primary matching mechanism.
-        if fnmatch.fnmatch(os.path.basename(path_abs), pattern):
-            return True
-
-        # Check the full path and its parent directories against the pattern.
-        # This handles directory ignores (`node_modules/`) and specific path ignores (`src/*.tmp`).
-        for i in range(len(path_parts)):
-            current_check_path = os.path.join(*path_parts[:i+1])
-
-            # Handle directory patterns like `node_modules/`
-            if pattern.endswith('/'):
-                if fnmatch.fnmatch(current_check_path, pattern.rstrip('/')):
-                    return True
-            # Handle full path patterns
-            else:
-                if fnmatch.fnmatch(current_check_path, pattern):
-                    return True
-
-    return False
 
 def read_file_contents(file_path):
     try:
-        with open(file_path, 'r') as file:
+        with open(file_path, "r") as file:
             return file.read()
     except Exception as e:
         print(f"Error reading {file_path}: {e}")
         return ""
 
+
 def is_binary(file_path):
     try:
-        with open(file_path, 'rb') as file:
+        with open(file_path, "rb") as file:
             chunk = file.read(1024)
-            if b'\0' in chunk:
+            if b"\0" in chunk:
                 return True
-            if file_path.lower().endswith(('.json', '.csv')):
+            if file_path.lower().endswith((".json", ".csv")):
                 return False
             return False
     except IOError:
         return False
 
+
 def get_human_readable_size(size):
-    for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
+    for unit in ["B", "KB", "MB", "GB", "TB"]:
         if size < 1024.0:
             return f"{size:.2f} {unit}"
         size /= 1024.0
 
+
 def is_large_file(file_path, threshold=102400):
     return os.path.getsize(file_path) > threshold
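
The reworked is_ignored splits patterns into basename checks and path-prefix checks, as its new docstring describes. A standalone sketch of that split with made-up patterns (not the library's own API; project-relative paths assumed):

    import fnmatch
    import os

    basename_patterns = {"*.log", "__pycache__"}   # patterns without "/"
    path_patterns = {"build/*", "node_modules"}    # contained "/", trailing slash already stripped

    def ignored(rel_path: str) -> bool:
        # Fast check: match the basename only.
        if any(fnmatch.fnmatch(os.path.basename(rel_path), p) for p in basename_patterns):
            return True
        # Slow check: match every path prefix relative to the project root.
        parts = rel_path.split(os.sep)
        prefixes = [os.sep.join(parts[:i]) for i in range(1, len(parts) + 1)]
        return any(fnmatch.fnmatch(pre, p) for pre in prefixes for p in path_patterns)

    print(ignored("src/debug.log"))     # True, basename matches "*.log"
    print(ignored("build/out/app.js"))  # True, prefix "build/out" matches "build/*"
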
@@ -12,7 +12,10 @@ _tsconfig_configs_cache: Dict[str, Tuple[Optional[str], Dict[str, List[str]]]] =
 
 # --- TypeScript Alias and Import Resolution ---
 
-def find_relevant_tsconfig_path(file_path_abs: str, project_root_abs: str) -> Optional[str]:
+
+def find_relevant_tsconfig_path(
+    file_path_abs: str, project_root_abs: str
+) -> Optional[str]:
     """
     Finds the most relevant tsconfig.json by searching upwards from the file's directory,
     stopping at project_root_abs.
@@ -21,17 +24,23 @@ def find_relevant_tsconfig_path(file_path_abs: str, project_root_abs: str) -> Op
     current_dir = os.path.dirname(os.path.normpath(file_path_abs))
     project_root_abs_norm = os.path.normpath(project_root_abs)
 
-    while current_dir.startswith(project_root_abs_norm) and len(current_dir) >= len(project_root_abs_norm):
+    while current_dir.startswith(project_root_abs_norm) and len(current_dir) >= len(
+        project_root_abs_norm
+    ):
         potential_tsconfig = os.path.join(current_dir, "tsconfig.json")
         if os.path.isfile(potential_tsconfig):
             return os.path.normpath(potential_tsconfig)
 
         try:
-            variant_tsconfigs = sorted([
-                f for f in os.listdir(current_dir)
-                if f.startswith("tsconfig.") and f.endswith(".json") and
-                os.path.isfile(os.path.join(current_dir, f))
-            ])
+            variant_tsconfigs = sorted(
+                [
+                    f
+                    for f in os.listdir(current_dir)
+                    if f.startswith("tsconfig.")
+                    and f.endswith(".json")
+                    and os.path.isfile(os.path.join(current_dir, f))
+                ]
+            )
             if variant_tsconfigs:
                 return os.path.normpath(os.path.join(current_dir, variant_tsconfigs[0]))
         except OSError:
@@ -39,7 +48,7 @@ def find_relevant_tsconfig_path(file_path_abs: str, project_root_abs: str) -> Op
 
         if current_dir == project_root_abs_norm:
             break
-
+
         parent_dir = os.path.dirname(current_dir)
         if parent_dir == current_dir:
             break
@@ -47,7 +56,9 @@ def find_relevant_tsconfig_path(file_path_abs: str, project_root_abs: str) -> Op
     return None
 
 
-def load_tsconfig_config(tsconfig_path_abs: str) -> Tuple[Optional[str], Dict[str, List[str]]]:
+def load_tsconfig_config(
+    tsconfig_path_abs: str,
+) -> Tuple[Optional[str], Dict[str, List[str]]]:
     """
     Loads baseUrl and paths from a specific tsconfig.json.
     Caches results.
@@ -59,21 +70,25 @@ def load_tsconfig_config(tsconfig_path_abs: str) -> Tuple[Optional[str], Dict[st
     if not os.path.isfile(tsconfig_path_abs):
         _tsconfig_configs_cache[tsconfig_path_abs] = (None, {})
         return None, {}
-
+
     try:
-        with open(tsconfig_path_abs, 'r', encoding='utf-8') as f:
+        with open(tsconfig_path_abs, "r", encoding="utf-8") as f:
             content = f.read()
-        content = re.sub(r"//.*?\n", "\n", content)
+        content = re.sub(r"//.*?\n", "\n", content)
         content = re.sub(r"/\*.*?\*/", "", content, flags=re.DOTALL)
         config = json.loads(content)
-
+
         compiler_options = config.get("compilerOptions", {})
         tsconfig_dir = os.path.dirname(tsconfig_path_abs)
-        base_url_from_config = compiler_options.get("baseUrl", ".")
-        abs_base_url = os.path.normpath(os.path.join(tsconfig_dir, base_url_from_config))
-
+        base_url_from_config = compiler_options.get("baseUrl", ".")
+        abs_base_url = os.path.normpath(
+            os.path.join(tsconfig_dir, base_url_from_config)
+        )
+
         paths = compiler_options.get("paths", {})
-        processed_paths = {key: (val if isinstance(val, list) else [val]) for key, val in paths.items()}
+        processed_paths = {
+            key: (val if isinstance(val, list) else [val]) for key, val in paths.items()
+        }
 
         # print(f"DEBUG: Loaded config from {os.path.relpath(tsconfig_path_abs)}: effective abs_baseUrl='{abs_base_url}', {len(processed_paths)} path alias(es).")
         _tsconfig_configs_cache[tsconfig_path_abs] = (abs_base_url, processed_paths)
@@ -88,19 +103,21 @@ def _probe_ts_path_candidates(candidate_base_path_abs: str) -> Optional[str]:
     """
     Given a candidate base absolute path, tries to find a corresponding file.
     """
-    possible_extensions = ['.ts', '.tsx', '.js', '.jsx', '.json']
-
+    possible_extensions = [".ts", ".tsx", ".js", ".jsx", ".json"]
+
     if os.path.isfile(candidate_base_path_abs):
         return candidate_base_path_abs
 
     stem, original_ext = os.path.splitext(candidate_base_path_abs)
-    base_for_ext_check = stem if original_ext.lower() in possible_extensions else candidate_base_path_abs
+    base_for_ext_check = (
+        stem if original_ext.lower() in possible_extensions else candidate_base_path_abs
+    )
 
     for ext in possible_extensions:
         path_with_ext = base_for_ext_check + ext
         if os.path.isfile(path_with_ext):
             return path_with_ext
-
+
     if os.path.isdir(base_for_ext_check):
         for ext in possible_extensions:
             index_file_path = os.path.join(base_for_ext_check, "index" + ext)
@@ -110,10 +127,10 @@ def _probe_ts_path_candidates(candidate_base_path_abs: str) -> Optional[str]:
 
 
 def resolve_ts_import_path(
-    import_str: str,
-    current_file_dir_abs: str,
-    abs_base_url: Optional[str],
-    alias_map: Dict[str, List[str]]
+    import_str: str,
+    current_file_dir_abs: str,
+    abs_base_url: Optional[str],
+    alias_map: Dict[str, List[str]],
 ) -> Optional[str]:
     """
     Resolves a TypeScript import string to an absolute file path.
@@ -125,26 +142,40 @@ def resolve_ts_import_path(
     for alias_pattern in sorted_alias_keys:
         alias_prefix_pattern = alias_pattern.replace("/*", "")
         if import_str.startswith(alias_prefix_pattern):
-            import_suffix = import_str[len(alias_prefix_pattern):]
+            import_suffix = import_str[len(alias_prefix_pattern) :]
             for mapping_path_template_list in alias_map[alias_pattern]:
-                for mapping_path_template in (mapping_path_template_list if isinstance(mapping_path_template_list, list) else [mapping_path_template_list]):
-                    if "/*" in alias_pattern :
-                        resolved_relative_to_base = mapping_path_template.replace("*", import_suffix, 1)
+                for mapping_path_template in (
+                    mapping_path_template_list
+                    if isinstance(mapping_path_template_list, list)
+                    else [mapping_path_template_list]
+                ):
+                    if "/*" in alias_pattern:
+                        resolved_relative_to_base = mapping_path_template.replace(
+                            "*", import_suffix, 1
+                        )
                     else:
                         resolved_relative_to_base = mapping_path_template
                     if abs_base_url:
-                        abs_candidate = os.path.normpath(os.path.join(abs_base_url, resolved_relative_to_base))
+                        abs_candidate = os.path.normpath(
+                            os.path.join(abs_base_url, resolved_relative_to_base)
+                        )
                         candidate_targets_abs.append(abs_candidate)
                     else:
-                        print(f"Warning: TS Alias '{alias_pattern}' used, but no abs_base_url for context of '{current_file_dir_abs}'.")
+                        print(
+                            f"Warning: TS Alias '{alias_pattern}' used, but no abs_base_url for context of '{current_file_dir_abs}'."
+                        )
             if candidate_targets_abs:
                 alias_matched_and_resolved = True
                 break
 
-    if not alias_matched_and_resolved and import_str.startswith('.'):
+    if not alias_matched_and_resolved and import_str.startswith("."):
         abs_candidate = os.path.normpath(os.path.join(current_file_dir_abs, import_str))
         candidate_targets_abs.append(abs_candidate)
-    elif not alias_matched_and_resolved and abs_base_url and not import_str.startswith('.'):
+    elif (
+        not alias_matched_and_resolved
+        and abs_base_url
+        and not import_str.startswith(".")
+    ):
         abs_candidate = os.path.normpath(os.path.join(abs_base_url, import_str))
         candidate_targets_abs.append(abs_candidate)
 
@@ -156,19 +187,19 @@ def resolve_ts_import_path(
 
 
 def parse_typescript_imports(
-    file_content: str,
-    file_path_abs: str,
-    project_root_abs: str
+    file_content: str, file_path_abs: str, project_root_abs: str
 ) -> Set[str]:
     resolved_imports_abs_paths = set()
-    relevant_tsconfig_abs_path = find_relevant_tsconfig_path(file_path_abs, project_root_abs)
-
+    relevant_tsconfig_abs_path = find_relevant_tsconfig_path(
+        file_path_abs, project_root_abs
+    )
+
     abs_base_url, alias_map = None, {}
     if relevant_tsconfig_abs_path:
         abs_base_url, alias_map = load_tsconfig_config(relevant_tsconfig_abs_path)
     else:
         # print(f"Warning: No tsconfig.json found for {os.path.relpath(file_path_abs, project_root_abs)}. Import resolution might be limited.")
-        abs_base_url = project_root_abs
+        abs_base_url = project_root_abs
 
     import_regex = re.compile(
         r"""
@@ -178,31 +209,40 @@ def parse_typescript_imports(
         |require\s*\(\s*['"`]([^'"\n`]+?)['"`]\s*\)
         |import\s*\(\s*['"`]([^'"\n`]+?)['"`]\s*\)
         """,
-        re.VERBOSE | re.MULTILINE
+        re.VERBOSE | re.MULTILINE,
     )
-
+
     current_file_dir_abs = os.path.dirname(file_path_abs)
 
     for match in import_regex.finditer(file_content):
         import_str_candidate = next((g for g in match.groups() if g is not None), None)
         if import_str_candidate:
             is_likely_external = (
-                not import_str_candidate.startswith(('.', '/')) and
-                not any(import_str_candidate.startswith(alias_pattern.replace("/*", "")) for alias_pattern in alias_map) and
-                not (abs_base_url and os.path.exists(os.path.join(abs_base_url, import_str_candidate))) and
-                (import_str_candidate.count('/') == 0 or (import_str_candidate.startswith('@') and import_str_candidate.count('/') == 1)) and
-                '.' not in import_str_candidate.split('/')[0]
+                not import_str_candidate.startswith((".", "/"))
+                and not any(
+                    import_str_candidate.startswith(alias_pattern.replace("/*", ""))
+                    for alias_pattern in alias_map
+                )
+                and not (
+                    abs_base_url
+                    and os.path.exists(os.path.join(abs_base_url, import_str_candidate))
+                )
+                and (
+                    import_str_candidate.count("/") == 0
+                    or (
+                        import_str_candidate.startswith("@")
+                        and import_str_candidate.count("/") == 1
+                    )
+                )
+                and "." not in import_str_candidate.split("/")[0]
             )
             if is_likely_external:
                 continue
 
             resolved_abs_path = resolve_ts_import_path(
-                import_str_candidate,
-                current_file_dir_abs,
-                abs_base_url,
-                alias_map
+                import_str_candidate, current_file_dir_abs, abs_base_url, alias_map
             )
-
+
             if resolved_abs_path:
                 norm_resolved_path = os.path.normpath(resolved_abs_path)
                 if norm_resolved_path.startswith(os.path.normpath(project_root_abs)):
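
The alias branch in resolve_ts_import_path is easier to read in isolation. A reduced sketch of the same idea with invented tsconfig values ("@app/*" mapped to "src/*"); the names and paths here are illustrative only:

    import os

    abs_base_url = "/repo"              # compilerOptions.baseUrl, already made absolute
    alias_map = {"@app/*": ["src/*"]}   # compilerOptions.paths

    def resolve_alias(import_str: str) -> list[str]:
        for pattern, targets in alias_map.items():
            prefix = pattern.replace("/*", "")
            if import_str.startswith(prefix):
                suffix = import_str[len(prefix):]
                return [
                    os.path.normpath(os.path.join(abs_base_url, t.replace("*", suffix, 1)))
                    for t in targets
                ]
        return []

    print(resolve_alias("@app/components/Button"))
    # ['/repo/src/components/Button']; extension probing (".ts", ".tsx", ...) happens afterwards
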
@@ -212,11 +252,12 @@ parse_typescript_imports(
 
 # --- Python Import Resolution ---
 
+
 def resolve_python_import(
-    module_name_parts: List[str],
-    current_file_dir_abs: str,
-    project_root_abs: str,
-    level: int
+    module_name_parts: List[str],
+    current_file_dir_abs: str,
+    project_root_abs: str,
+    level: int,
 ) -> Optional[str]:
     base_path_to_search = ""
     if level > 0:
@@ -228,11 +269,11 @@ def resolve_python_import(
 
     candidate_rel_path = os.path.join(*module_name_parts)
     potential_abs_path = os.path.join(base_path_to_search, candidate_rel_path)
-
+
     py_file = potential_abs_path + ".py"
     if os.path.isfile(py_file):
         return os.path.normpath(py_file)
-
+
     init_file = os.path.join(potential_abs_path, "__init__.py")
     if os.path.isdir(potential_abs_path) and os.path.isfile(init_file):
         return os.path.normpath(init_file)
@@ -250,10 +291,12 @@ def resolve_python_import(
     return None
 
 
-def parse_python_imports(file_content: str, file_path_abs: str, project_root_abs: str) -> Set[str]:
+def parse_python_imports(
+    file_content: str, file_path_abs: str, project_root_abs: str
+) -> Set[str]:
     resolved_imports = set()
     current_file_dir_abs = os.path.dirname(file_path_abs)
-
+
     try:
         tree = ast.parse(file_content, filename=file_path_abs)
     except SyntaxError:
@@ -263,22 +306,51 @@ def parse_python_imports(file_content: str, file_path_abs: str, project_root_abs
     for node in ast.walk(tree):
         if isinstance(node, ast.Import):
             for alias in node.names:
-                module_parts = alias.name.split('.')
-                resolved = resolve_python_import(module_parts, current_file_dir_abs, project_root_abs, level=0)
-                if resolved and os.path.exists(resolved) and os.path.normpath(resolved).startswith(os.path.normpath(project_root_abs)):
+                module_parts = alias.name.split(".")
+                resolved = resolve_python_import(
+                    module_parts, current_file_dir_abs, project_root_abs, level=0
+                )
+                if (
+                    resolved
+                    and os.path.exists(resolved)
+                    and os.path.normpath(resolved).startswith(
+                        os.path.normpath(project_root_abs)
+                    )
+                ):
                     resolved_imports.add(os.path.normpath(resolved))
         elif isinstance(node, ast.ImportFrom):
             level_to_resolve = node.level
             if node.module:
-                module_parts = node.module.split('.')
-                resolved = resolve_python_import(module_parts, current_file_dir_abs, project_root_abs, level_to_resolve)
-                if resolved and os.path.exists(resolved) and os.path.normpath(resolved).startswith(os.path.normpath(project_root_abs)):
+                module_parts = node.module.split(".")
+                resolved = resolve_python_import(
+                    module_parts,
+                    current_file_dir_abs,
+                    project_root_abs,
+                    level_to_resolve,
+                )
+                if (
+                    resolved
+                    and os.path.exists(resolved)
+                    and os.path.normpath(resolved).startswith(
+                        os.path.normpath(project_root_abs)
+                    )
+                ):
                     resolved_imports.add(os.path.normpath(resolved))
             else:
                 for alias in node.names:
-                    item_name_parts = alias.name.split('.')
-                    resolved = resolve_python_import(item_name_parts, current_file_dir_abs, project_root_abs, level=level_to_resolve)
-                    if resolved and os.path.exists(resolved) and os.path.normpath(resolved).startswith(os.path.normpath(project_root_abs)):
+                    item_name_parts = alias.name.split(".")
+                    resolved = resolve_python_import(
+                        item_name_parts,
+                        current_file_dir_abs,
+                        project_root_abs,
+                        level=level_to_resolve,
+                    )
+                    if (
+                        resolved
+                        and os.path.exists(resolved)
+                        and os.path.normpath(resolved).startswith(
+                            os.path.normpath(project_root_abs)
+                        )
+                    ):
                         resolved_imports.add(os.path.normpath(resolved))
     return resolved_imports
-
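
For orientation, parse_python_imports walks the AST and hands dotted module names plus the relative-import level to resolve_python_import. A minimal sketch of that walk over made-up source text, printing instead of resolving:

    import ast

    source = "import os\nfrom . import cache\nfrom kopipasta.file import is_ignored\n"
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, ast.Import):
            for alias in node.names:
                print("import", alias.name.split("."), "level", 0)
        elif isinstance(node, ast.ImportFrom):
            # node.module is None for bare relative imports like "from . import cache"
            module_parts = node.module.split(".") if node.module else []
            print("from", module_parts, "level", node.level)
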