kopipasta 0.34.0__tar.gz → 0.36.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of kopipasta might be problematic. Click here for more details.
- {kopipasta-0.34.0/kopipasta.egg-info → kopipasta-0.36.0}/PKG-INFO +1 -1
- {kopipasta-0.34.0 → kopipasta-0.36.0}/kopipasta/cache.py +6 -3
- kopipasta-0.36.0/kopipasta/file.py +164 -0
- {kopipasta-0.34.0 → kopipasta-0.36.0}/kopipasta/import_parser.py +141 -69
- {kopipasta-0.34.0 → kopipasta-0.36.0}/kopipasta/main.py +611 -302
- {kopipasta-0.34.0 → kopipasta-0.36.0}/kopipasta/prompt.py +50 -39
- {kopipasta-0.34.0 → kopipasta-0.36.0}/kopipasta/tree_selector.py +314 -170
- {kopipasta-0.34.0 → kopipasta-0.36.0/kopipasta.egg-info}/PKG-INFO +1 -1
- {kopipasta-0.34.0 → kopipasta-0.36.0}/setup.py +2 -2
- {kopipasta-0.34.0 → kopipasta-0.36.0}/tests/test_file.py +7 -5
- kopipasta-0.36.0/tests/test_tree_selector.py +118 -0
- kopipasta-0.34.0/kopipasta/file.py +0 -129
- kopipasta-0.34.0/tests/test_tree_selector.py +0 -47
- {kopipasta-0.34.0 → kopipasta-0.36.0}/LICENSE +0 -0
- {kopipasta-0.34.0 → kopipasta-0.36.0}/MANIFEST.in +0 -0
- {kopipasta-0.34.0 → kopipasta-0.36.0}/README.md +0 -0
- {kopipasta-0.34.0 → kopipasta-0.36.0}/kopipasta/__init__.py +0 -0
- {kopipasta-0.34.0 → kopipasta-0.36.0}/kopipasta.egg-info/SOURCES.txt +0 -0
- {kopipasta-0.34.0 → kopipasta-0.36.0}/kopipasta.egg-info/dependency_links.txt +0 -0
- {kopipasta-0.34.0 → kopipasta-0.36.0}/kopipasta.egg-info/entry_points.txt +0 -0
- {kopipasta-0.34.0 → kopipasta-0.36.0}/kopipasta.egg-info/requires.txt +0 -0
- {kopipasta-0.34.0 → kopipasta-0.36.0}/kopipasta.egg-info/top_level.txt +0 -0
- {kopipasta-0.34.0 → kopipasta-0.36.0}/requirements.txt +0 -0
- {kopipasta-0.34.0 → kopipasta-0.36.0}/setup.cfg +0 -0
|
@@ -6,32 +6,35 @@ from typing import List, Tuple
|
|
|
6
6
|
# Define FileTuple for type hinting
|
|
7
7
|
FileTuple = Tuple[str, bool, List[str] | None, str]
|
|
8
8
|
|
|
9
|
+
|
|
9
10
|
def get_cache_file_path() -> Path:
    """Return the cross-platform location of the last-selection cache file.

    Ensures the parent directory (~/.cache/kopipasta) exists before returning.
    """
    cache_directory = Path.home() / ".cache" / "kopipasta"
    cache_directory.mkdir(parents=True, exist_ok=True)
    return cache_directory / "last_selection.json"
|
|
14
15
|
|
|
16
|
+
|
|
15
17
|
def save_selection_to_cache(files_to_include: List[FileTuple]):
    """Persist the relative paths of the selected files to the cache file.

    Failures to write are reported as a warning rather than raised.
    """
    paths = sorted(os.path.relpath(entry[0]) for entry in files_to_include)
    target = get_cache_file_path()
    try:
        with open(target, "w", encoding="utf-8") as handle:
            json.dump(paths, handle, indent=2)
    except IOError as e:
        print(f"\nWarning: Could not save selection to cache: {e}")
|
|
24
26
|
|
|
27
|
+
|
|
25
28
|
def load_selection_from_cache() -> List[str]:
    """Return the previously selected file paths, or [] when unavailable.

    Failures to read or parse the cache are reported as a warning.
    """
    cache_path = get_cache_file_path()
    if not cache_path.exists():
        return []
    try:
        with open(cache_path, "r", encoding="utf-8") as handle:
            stored = json.load(handle)
    except (IOError, json.JSONDecodeError) as e:
        print(f"\nWarning: Could not load previous selection from cache: {e}")
        return []
    # Drop entries whose files have disappeared since the last run.
    return [entry for entry in stored if os.path.exists(entry)]
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
import fnmatch
|
|
2
|
+
import os
|
|
3
|
+
from typing import List, Optional, Tuple, Set
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
|
|
6
|
+
FileTuple = Tuple[str, bool, Optional[List[str]], str]
|
|
7
|
+
|
|
8
|
+
# --- Caches ---
|
|
9
|
+
_gitignore_cache: dict[str, list[str]] = {}
|
|
10
|
+
_is_ignored_cache: dict[str, bool] = {}
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def _read_gitignore_patterns(gitignore_path: str) -> list[str]:
    """Reads patterns from a single .gitignore file and caches them.

    Blank lines, comments, and negation patterns ("!...") are skipped:
    the matcher in this module supports only positive ignore patterns,
    so keeping a negation as a positive pattern would invert its intent.
    Returns [] (and caches it) when the file is missing or unreadable.
    """
    cached = _gitignore_cache.get(gitignore_path)
    if cached is not None:
        return cached
    patterns: list[str] = []
    if os.path.isfile(gitignore_path):
        try:
            with open(gitignore_path, "r", encoding="utf-8") as f:
                for line in f:
                    stripped_line = line.strip()
                    if (
                        stripped_line
                        and not stripped_line.startswith("#")
                        and not stripped_line.startswith("!")
                    ):
                        patterns.append(stripped_line)
        except IOError:
            # Best-effort: an unreadable .gitignore is treated as empty.
            pass
    _gitignore_cache[gitignore_path] = patterns
    return patterns
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def is_ignored(
    path: str, default_ignore_patterns: list[str], project_root: Optional[str] = None
) -> bool:
    """
    Checks if a path should be ignored by splitting patterns into fast (basename)
    and slow (full path) checks, with heavy caching and optimized inner loops.
    """
    path_abs = os.path.abspath(path)
    if path_abs in _is_ignored_cache:
        return _is_ignored_cache[path_abs]

    # Anything inside an already-ignored directory is ignored too.
    parent_dir = os.path.dirname(path_abs)
    if parent_dir != path_abs and _is_ignored_cache.get(parent_dir, False):
        _is_ignored_cache[path_abs] = True
        return True

    if project_root is None:
        project_root = os.getcwd()
    project_root_abs = os.path.abspath(project_root)

    basename_patterns, path_patterns = get_all_patterns(
        default_ignore_patterns, path_abs, project_root_abs
    )

    # --- Step 1: Fast check for basename patterns ---
    path_basename = os.path.basename(path_abs)
    for pattern in basename_patterns:
        if fnmatch.fnmatch(path_basename, pattern):
            _is_ignored_cache[path_abs] = True
            return True

    # --- Step 2: Optimized nested check for path patterns ---
    try:
        path_rel_to_root = os.path.relpath(path_abs, project_root_abs)
    except ValueError:
        # e.g. different drives on Windows: outside the project, not ignored.
        _is_ignored_cache[path_abs] = False
        return False

    # Pre-calculate all path prefixes to check, avoiding re-joins in the loop.
    # Slices are parts[:1], parts[:2], ... so the single top-level component is
    # included (the previous parts[:i + 1] slicing skipped it and produced the
    # full path twice, so a root-level pattern like "build/" never matched a
    # nested file unless the directory itself had been checked first).
    path_parts = Path(path_rel_to_root).parts
    path_prefixes = [
        os.path.join(*path_parts[:i]) for i in range(1, len(path_parts) + 1)
    ]

    # Pre-process patterns to remove trailing slashes once.
    processed_path_patterns = [p.rstrip("/") for p in path_patterns]

    for prefix in path_prefixes:
        for pattern in processed_path_patterns:
            if fnmatch.fnmatch(prefix, pattern):
                _is_ignored_cache[path_abs] = True
                return True

    _is_ignored_cache[path_abs] = False
    return False
|
|
86
|
+
|
|
87
|
+
def get_all_patterns(default_ignore_patterns, path_abs, project_root_abs) -> Tuple[Set[str], Set[str]]:
    """
    Gathers all applicable ignore patterns, splitting them into two sets
    for optimized checking: one for basenames, one for full paths.
    """
    basename_patterns: Set[str] = set()
    path_patterns: Set[str] = set()

    # Defaults: anything containing "/" is a path pattern, the rest match
    # basenames only.
    for p in default_ignore_patterns:
        (path_patterns if "/" in p else basename_patterns).add(p)

    # Walk from the file's directory up to the project root, collecting
    # patterns from every .gitignore encountered along the way.
    current_dir = path_abs if os.path.isdir(path_abs) else os.path.dirname(path_abs)
    while True:
        file_patterns = _read_gitignore_patterns(
            os.path.join(current_dir, ".gitignore")
        )
        if file_patterns:
            rel_dir = os.path.relpath(current_dir, project_root_abs)
            if rel_dir == ".":
                rel_dir = ""
            for p in file_patterns:
                if "/" in p:
                    # Path patterns are relative to the .gitignore file's location
                    path_patterns.add(os.path.join(rel_dir, p.lstrip("/")))
                else:
                    basename_patterns.add(p)

        outside_root = not current_dir.startswith(project_root_abs)
        at_root = current_dir == project_root_abs
        if outside_root or at_root:
            break
        parent = os.path.dirname(current_dir)
        if parent == current_dir:
            break
        current_dir = parent

    return basename_patterns, path_patterns
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
def read_file_contents(file_path):
    """Return the text content of file_path, or "" if it cannot be read.

    Errors are reported to stdout rather than raised, so callers can
    treat an unreadable file as simply empty.
    """
    try:
        # Read as UTF-8 explicitly, matching the encoding used elsewhere in
        # this module, instead of the platform-dependent default encoding.
        with open(file_path, "r", encoding="utf-8") as file:
            return file.read()
    except Exception as e:
        print(f"Error reading {file_path}: {e}")
        return ""
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def is_binary(file_path):
    """Heuristically detect whether file_path is a binary file.

    Reads the first 1 KB and reports binary if it contains a NUL byte.
    Unreadable files are treated as non-binary (best effort). The former
    special-case for .json/.csv extensions was dead code: both branches
    after the NUL check returned False.
    """
    try:
        with open(file_path, "rb") as file:
            # A NUL byte in the leading chunk is a strong binary indicator.
            return b"\0" in file.read(1024)
    except IOError:
        return False
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
def get_human_readable_size(size):
    """Format a byte count as a human-readable string (e.g. '1.50 KB').

    Falls back to petabytes for sizes of 1024 TB or more; the previous
    implementation fell off the end of the loop and returned None there.
    """
    for unit in ["B", "KB", "MB", "GB", "TB"]:
        if size < 1024.0:
            return f"{size:.2f} {unit}"
        size /= 1024.0
    return f"{size:.2f} PB"
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def is_large_file(file_path, threshold=102400):
    """Return True when the file's size on disk exceeds threshold bytes."""
    size_on_disk = os.path.getsize(file_path)
    return threshold < size_on_disk
|
|
@@ -12,7 +12,10 @@ _tsconfig_configs_cache: Dict[str, Tuple[Optional[str], Dict[str, List[str]]]] =
|
|
|
12
12
|
|
|
13
13
|
# --- TypeScript Alias and Import Resolution ---
|
|
14
14
|
|
|
15
|
-
|
|
15
|
+
|
|
16
|
+
def find_relevant_tsconfig_path(
|
|
17
|
+
file_path_abs: str, project_root_abs: str
|
|
18
|
+
) -> Optional[str]:
|
|
16
19
|
"""
|
|
17
20
|
Finds the most relevant tsconfig.json by searching upwards from the file's directory,
|
|
18
21
|
stopping at project_root_abs.
|
|
@@ -21,17 +24,23 @@ def find_relevant_tsconfig_path(file_path_abs: str, project_root_abs: str) -> Op
|
|
|
21
24
|
current_dir = os.path.dirname(os.path.normpath(file_path_abs))
|
|
22
25
|
project_root_abs_norm = os.path.normpath(project_root_abs)
|
|
23
26
|
|
|
24
|
-
while current_dir.startswith(project_root_abs_norm) and len(current_dir) >= len(
|
|
27
|
+
while current_dir.startswith(project_root_abs_norm) and len(current_dir) >= len(
|
|
28
|
+
project_root_abs_norm
|
|
29
|
+
):
|
|
25
30
|
potential_tsconfig = os.path.join(current_dir, "tsconfig.json")
|
|
26
31
|
if os.path.isfile(potential_tsconfig):
|
|
27
32
|
return os.path.normpath(potential_tsconfig)
|
|
28
33
|
|
|
29
34
|
try:
|
|
30
|
-
variant_tsconfigs = sorted(
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
+
variant_tsconfigs = sorted(
|
|
36
|
+
[
|
|
37
|
+
f
|
|
38
|
+
for f in os.listdir(current_dir)
|
|
39
|
+
if f.startswith("tsconfig.")
|
|
40
|
+
and f.endswith(".json")
|
|
41
|
+
and os.path.isfile(os.path.join(current_dir, f))
|
|
42
|
+
]
|
|
43
|
+
)
|
|
35
44
|
if variant_tsconfigs:
|
|
36
45
|
return os.path.normpath(os.path.join(current_dir, variant_tsconfigs[0]))
|
|
37
46
|
except OSError:
|
|
@@ -39,7 +48,7 @@ def find_relevant_tsconfig_path(file_path_abs: str, project_root_abs: str) -> Op
|
|
|
39
48
|
|
|
40
49
|
if current_dir == project_root_abs_norm:
|
|
41
50
|
break
|
|
42
|
-
|
|
51
|
+
|
|
43
52
|
parent_dir = os.path.dirname(current_dir)
|
|
44
53
|
if parent_dir == current_dir:
|
|
45
54
|
break
|
|
@@ -47,7 +56,9 @@ def find_relevant_tsconfig_path(file_path_abs: str, project_root_abs: str) -> Op
|
|
|
47
56
|
return None
|
|
48
57
|
|
|
49
58
|
|
|
50
|
-
def load_tsconfig_config(
|
|
59
|
+
def load_tsconfig_config(
|
|
60
|
+
tsconfig_path_abs: str,
|
|
61
|
+
) -> Tuple[Optional[str], Dict[str, List[str]]]:
|
|
51
62
|
"""
|
|
52
63
|
Loads baseUrl and paths from a specific tsconfig.json.
|
|
53
64
|
Caches results.
|
|
@@ -59,21 +70,25 @@ def load_tsconfig_config(tsconfig_path_abs: str) -> Tuple[Optional[str], Dict[st
|
|
|
59
70
|
if not os.path.isfile(tsconfig_path_abs):
|
|
60
71
|
_tsconfig_configs_cache[tsconfig_path_abs] = (None, {})
|
|
61
72
|
return None, {}
|
|
62
|
-
|
|
73
|
+
|
|
63
74
|
try:
|
|
64
|
-
with open(tsconfig_path_abs,
|
|
75
|
+
with open(tsconfig_path_abs, "r", encoding="utf-8") as f:
|
|
65
76
|
content = f.read()
|
|
66
|
-
content = re.sub(r"//.*?\n", "\n", content)
|
|
77
|
+
content = re.sub(r"//.*?\n", "\n", content)
|
|
67
78
|
content = re.sub(r"/\*.*?\*/", "", content, flags=re.DOTALL)
|
|
68
79
|
config = json.loads(content)
|
|
69
|
-
|
|
80
|
+
|
|
70
81
|
compiler_options = config.get("compilerOptions", {})
|
|
71
82
|
tsconfig_dir = os.path.dirname(tsconfig_path_abs)
|
|
72
|
-
base_url_from_config = compiler_options.get("baseUrl", ".")
|
|
73
|
-
abs_base_url = os.path.normpath(
|
|
74
|
-
|
|
83
|
+
base_url_from_config = compiler_options.get("baseUrl", ".")
|
|
84
|
+
abs_base_url = os.path.normpath(
|
|
85
|
+
os.path.join(tsconfig_dir, base_url_from_config)
|
|
86
|
+
)
|
|
87
|
+
|
|
75
88
|
paths = compiler_options.get("paths", {})
|
|
76
|
-
processed_paths = {
|
|
89
|
+
processed_paths = {
|
|
90
|
+
key: (val if isinstance(val, list) else [val]) for key, val in paths.items()
|
|
91
|
+
}
|
|
77
92
|
|
|
78
93
|
# print(f"DEBUG: Loaded config from {os.path.relpath(tsconfig_path_abs)}: effective abs_baseUrl='{abs_base_url}', {len(processed_paths)} path alias(es).")
|
|
79
94
|
_tsconfig_configs_cache[tsconfig_path_abs] = (abs_base_url, processed_paths)
|
|
@@ -88,19 +103,21 @@ def _probe_ts_path_candidates(candidate_base_path_abs: str) -> Optional[str]:
|
|
|
88
103
|
"""
|
|
89
104
|
Given a candidate base absolute path, tries to find a corresponding file.
|
|
90
105
|
"""
|
|
91
|
-
possible_extensions = [
|
|
92
|
-
|
|
106
|
+
possible_extensions = [".ts", ".tsx", ".js", ".jsx", ".json"]
|
|
107
|
+
|
|
93
108
|
if os.path.isfile(candidate_base_path_abs):
|
|
94
109
|
return candidate_base_path_abs
|
|
95
110
|
|
|
96
111
|
stem, original_ext = os.path.splitext(candidate_base_path_abs)
|
|
97
|
-
base_for_ext_check =
|
|
112
|
+
base_for_ext_check = (
|
|
113
|
+
stem if original_ext.lower() in possible_extensions else candidate_base_path_abs
|
|
114
|
+
)
|
|
98
115
|
|
|
99
116
|
for ext in possible_extensions:
|
|
100
117
|
path_with_ext = base_for_ext_check + ext
|
|
101
118
|
if os.path.isfile(path_with_ext):
|
|
102
119
|
return path_with_ext
|
|
103
|
-
|
|
120
|
+
|
|
104
121
|
if os.path.isdir(base_for_ext_check):
|
|
105
122
|
for ext in possible_extensions:
|
|
106
123
|
index_file_path = os.path.join(base_for_ext_check, "index" + ext)
|
|
@@ -110,10 +127,10 @@ def _probe_ts_path_candidates(candidate_base_path_abs: str) -> Optional[str]:
|
|
|
110
127
|
|
|
111
128
|
|
|
112
129
|
def resolve_ts_import_path(
|
|
113
|
-
import_str: str,
|
|
114
|
-
current_file_dir_abs: str,
|
|
115
|
-
abs_base_url: Optional[str],
|
|
116
|
-
alias_map: Dict[str, List[str]]
|
|
130
|
+
import_str: str,
|
|
131
|
+
current_file_dir_abs: str,
|
|
132
|
+
abs_base_url: Optional[str],
|
|
133
|
+
alias_map: Dict[str, List[str]],
|
|
117
134
|
) -> Optional[str]:
|
|
118
135
|
"""
|
|
119
136
|
Resolves a TypeScript import string to an absolute file path.
|
|
@@ -125,26 +142,40 @@ def resolve_ts_import_path(
|
|
|
125
142
|
for alias_pattern in sorted_alias_keys:
|
|
126
143
|
alias_prefix_pattern = alias_pattern.replace("/*", "")
|
|
127
144
|
if import_str.startswith(alias_prefix_pattern):
|
|
128
|
-
import_suffix = import_str[len(alias_prefix_pattern):]
|
|
145
|
+
import_suffix = import_str[len(alias_prefix_pattern) :]
|
|
129
146
|
for mapping_path_template_list in alias_map[alias_pattern]:
|
|
130
|
-
for mapping_path_template in (
|
|
131
|
-
|
|
132
|
-
|
|
147
|
+
for mapping_path_template in (
|
|
148
|
+
mapping_path_template_list
|
|
149
|
+
if isinstance(mapping_path_template_list, list)
|
|
150
|
+
else [mapping_path_template_list]
|
|
151
|
+
):
|
|
152
|
+
if "/*" in alias_pattern:
|
|
153
|
+
resolved_relative_to_base = mapping_path_template.replace(
|
|
154
|
+
"*", import_suffix, 1
|
|
155
|
+
)
|
|
133
156
|
else:
|
|
134
157
|
resolved_relative_to_base = mapping_path_template
|
|
135
158
|
if abs_base_url:
|
|
136
|
-
abs_candidate = os.path.normpath(
|
|
159
|
+
abs_candidate = os.path.normpath(
|
|
160
|
+
os.path.join(abs_base_url, resolved_relative_to_base)
|
|
161
|
+
)
|
|
137
162
|
candidate_targets_abs.append(abs_candidate)
|
|
138
163
|
else:
|
|
139
|
-
print(
|
|
164
|
+
print(
|
|
165
|
+
f"Warning: TS Alias '{alias_pattern}' used, but no abs_base_url for context of '{current_file_dir_abs}'."
|
|
166
|
+
)
|
|
140
167
|
if candidate_targets_abs:
|
|
141
168
|
alias_matched_and_resolved = True
|
|
142
169
|
break
|
|
143
170
|
|
|
144
|
-
if not alias_matched_and_resolved and import_str.startswith(
|
|
171
|
+
if not alias_matched_and_resolved and import_str.startswith("."):
|
|
145
172
|
abs_candidate = os.path.normpath(os.path.join(current_file_dir_abs, import_str))
|
|
146
173
|
candidate_targets_abs.append(abs_candidate)
|
|
147
|
-
elif
|
|
174
|
+
elif (
|
|
175
|
+
not alias_matched_and_resolved
|
|
176
|
+
and abs_base_url
|
|
177
|
+
and not import_str.startswith(".")
|
|
178
|
+
):
|
|
148
179
|
abs_candidate = os.path.normpath(os.path.join(abs_base_url, import_str))
|
|
149
180
|
candidate_targets_abs.append(abs_candidate)
|
|
150
181
|
|
|
@@ -156,19 +187,19 @@ def resolve_ts_import_path(
|
|
|
156
187
|
|
|
157
188
|
|
|
158
189
|
def parse_typescript_imports(
|
|
159
|
-
file_content: str,
|
|
160
|
-
file_path_abs: str,
|
|
161
|
-
project_root_abs: str
|
|
190
|
+
file_content: str, file_path_abs: str, project_root_abs: str
|
|
162
191
|
) -> Set[str]:
|
|
163
192
|
resolved_imports_abs_paths = set()
|
|
164
|
-
relevant_tsconfig_abs_path = find_relevant_tsconfig_path(
|
|
165
|
-
|
|
193
|
+
relevant_tsconfig_abs_path = find_relevant_tsconfig_path(
|
|
194
|
+
file_path_abs, project_root_abs
|
|
195
|
+
)
|
|
196
|
+
|
|
166
197
|
abs_base_url, alias_map = None, {}
|
|
167
198
|
if relevant_tsconfig_abs_path:
|
|
168
199
|
abs_base_url, alias_map = load_tsconfig_config(relevant_tsconfig_abs_path)
|
|
169
200
|
else:
|
|
170
201
|
# print(f"Warning: No tsconfig.json found for {os.path.relpath(file_path_abs, project_root_abs)}. Import resolution might be limited.")
|
|
171
|
-
abs_base_url = project_root_abs
|
|
202
|
+
abs_base_url = project_root_abs
|
|
172
203
|
|
|
173
204
|
import_regex = re.compile(
|
|
174
205
|
r"""
|
|
@@ -178,31 +209,40 @@ def parse_typescript_imports(
|
|
|
178
209
|
|require\s*\(\s*['"`]([^'"\n`]+?)['"`]\s*\)
|
|
179
210
|
|import\s*\(\s*['"`]([^'"\n`]+?)['"`]\s*\)
|
|
180
211
|
""",
|
|
181
|
-
re.VERBOSE | re.MULTILINE
|
|
212
|
+
re.VERBOSE | re.MULTILINE,
|
|
182
213
|
)
|
|
183
|
-
|
|
214
|
+
|
|
184
215
|
current_file_dir_abs = os.path.dirname(file_path_abs)
|
|
185
216
|
|
|
186
217
|
for match in import_regex.finditer(file_content):
|
|
187
218
|
import_str_candidate = next((g for g in match.groups() if g is not None), None)
|
|
188
219
|
if import_str_candidate:
|
|
189
220
|
is_likely_external = (
|
|
190
|
-
not import_str_candidate.startswith((
|
|
191
|
-
not any(
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
221
|
+
not import_str_candidate.startswith((".", "/"))
|
|
222
|
+
and not any(
|
|
223
|
+
import_str_candidate.startswith(alias_pattern.replace("/*", ""))
|
|
224
|
+
for alias_pattern in alias_map
|
|
225
|
+
)
|
|
226
|
+
and not (
|
|
227
|
+
abs_base_url
|
|
228
|
+
and os.path.exists(os.path.join(abs_base_url, import_str_candidate))
|
|
229
|
+
)
|
|
230
|
+
and (
|
|
231
|
+
import_str_candidate.count("/") == 0
|
|
232
|
+
or (
|
|
233
|
+
import_str_candidate.startswith("@")
|
|
234
|
+
and import_str_candidate.count("/") == 1
|
|
235
|
+
)
|
|
236
|
+
)
|
|
237
|
+
and "." not in import_str_candidate.split("/")[0]
|
|
195
238
|
)
|
|
196
239
|
if is_likely_external:
|
|
197
240
|
continue
|
|
198
241
|
|
|
199
242
|
resolved_abs_path = resolve_ts_import_path(
|
|
200
|
-
import_str_candidate,
|
|
201
|
-
current_file_dir_abs,
|
|
202
|
-
abs_base_url,
|
|
203
|
-
alias_map
|
|
243
|
+
import_str_candidate, current_file_dir_abs, abs_base_url, alias_map
|
|
204
244
|
)
|
|
205
|
-
|
|
245
|
+
|
|
206
246
|
if resolved_abs_path:
|
|
207
247
|
norm_resolved_path = os.path.normpath(resolved_abs_path)
|
|
208
248
|
if norm_resolved_path.startswith(os.path.normpath(project_root_abs)):
|
|
@@ -212,11 +252,12 @@ def parse_typescript_imports(
|
|
|
212
252
|
|
|
213
253
|
# --- Python Import Resolution ---
|
|
214
254
|
|
|
255
|
+
|
|
215
256
|
def resolve_python_import(
|
|
216
|
-
module_name_parts: List[str],
|
|
217
|
-
current_file_dir_abs: str,
|
|
218
|
-
project_root_abs: str,
|
|
219
|
-
level: int
|
|
257
|
+
module_name_parts: List[str],
|
|
258
|
+
current_file_dir_abs: str,
|
|
259
|
+
project_root_abs: str,
|
|
260
|
+
level: int,
|
|
220
261
|
) -> Optional[str]:
|
|
221
262
|
base_path_to_search = ""
|
|
222
263
|
if level > 0:
|
|
@@ -228,11 +269,11 @@ def resolve_python_import(
|
|
|
228
269
|
|
|
229
270
|
candidate_rel_path = os.path.join(*module_name_parts)
|
|
230
271
|
potential_abs_path = os.path.join(base_path_to_search, candidate_rel_path)
|
|
231
|
-
|
|
272
|
+
|
|
232
273
|
py_file = potential_abs_path + ".py"
|
|
233
274
|
if os.path.isfile(py_file):
|
|
234
275
|
return os.path.normpath(py_file)
|
|
235
|
-
|
|
276
|
+
|
|
236
277
|
init_file = os.path.join(potential_abs_path, "__init__.py")
|
|
237
278
|
if os.path.isdir(potential_abs_path) and os.path.isfile(init_file):
|
|
238
279
|
return os.path.normpath(init_file)
|
|
@@ -250,10 +291,12 @@ def resolve_python_import(
|
|
|
250
291
|
return None
|
|
251
292
|
|
|
252
293
|
|
|
253
|
-
def parse_python_imports(
|
|
294
|
+
def parse_python_imports(
|
|
295
|
+
file_content: str, file_path_abs: str, project_root_abs: str
|
|
296
|
+
) -> Set[str]:
|
|
254
297
|
resolved_imports = set()
|
|
255
298
|
current_file_dir_abs = os.path.dirname(file_path_abs)
|
|
256
|
-
|
|
299
|
+
|
|
257
300
|
try:
|
|
258
301
|
tree = ast.parse(file_content, filename=file_path_abs)
|
|
259
302
|
except SyntaxError:
|
|
@@ -263,22 +306,51 @@ def parse_python_imports(file_content: str, file_path_abs: str, project_root_abs
|
|
|
263
306
|
for node in ast.walk(tree):
|
|
264
307
|
if isinstance(node, ast.Import):
|
|
265
308
|
for alias in node.names:
|
|
266
|
-
module_parts = alias.name.split(
|
|
267
|
-
resolved = resolve_python_import(
|
|
268
|
-
|
|
309
|
+
module_parts = alias.name.split(".")
|
|
310
|
+
resolved = resolve_python_import(
|
|
311
|
+
module_parts, current_file_dir_abs, project_root_abs, level=0
|
|
312
|
+
)
|
|
313
|
+
if (
|
|
314
|
+
resolved
|
|
315
|
+
and os.path.exists(resolved)
|
|
316
|
+
and os.path.normpath(resolved).startswith(
|
|
317
|
+
os.path.normpath(project_root_abs)
|
|
318
|
+
)
|
|
319
|
+
):
|
|
269
320
|
resolved_imports.add(os.path.normpath(resolved))
|
|
270
321
|
elif isinstance(node, ast.ImportFrom):
|
|
271
322
|
level_to_resolve = node.level
|
|
272
323
|
if node.module:
|
|
273
|
-
module_parts = node.module.split(
|
|
274
|
-
resolved = resolve_python_import(
|
|
275
|
-
|
|
324
|
+
module_parts = node.module.split(".")
|
|
325
|
+
resolved = resolve_python_import(
|
|
326
|
+
module_parts,
|
|
327
|
+
current_file_dir_abs,
|
|
328
|
+
project_root_abs,
|
|
329
|
+
level_to_resolve,
|
|
330
|
+
)
|
|
331
|
+
if (
|
|
332
|
+
resolved
|
|
333
|
+
and os.path.exists(resolved)
|
|
334
|
+
and os.path.normpath(resolved).startswith(
|
|
335
|
+
os.path.normpath(project_root_abs)
|
|
336
|
+
)
|
|
337
|
+
):
|
|
276
338
|
resolved_imports.add(os.path.normpath(resolved))
|
|
277
339
|
else:
|
|
278
340
|
for alias in node.names:
|
|
279
|
-
item_name_parts = alias.name.split(
|
|
280
|
-
resolved = resolve_python_import(
|
|
281
|
-
|
|
341
|
+
item_name_parts = alias.name.split(".")
|
|
342
|
+
resolved = resolve_python_import(
|
|
343
|
+
item_name_parts,
|
|
344
|
+
current_file_dir_abs,
|
|
345
|
+
project_root_abs,
|
|
346
|
+
level=level_to_resolve,
|
|
347
|
+
)
|
|
348
|
+
if (
|
|
349
|
+
resolved
|
|
350
|
+
and os.path.exists(resolved)
|
|
351
|
+
and os.path.normpath(resolved).startswith(
|
|
352
|
+
os.path.normpath(project_root_abs)
|
|
353
|
+
)
|
|
354
|
+
):
|
|
282
355
|
resolved_imports.add(os.path.normpath(resolved))
|
|
283
356
|
return resolved_imports
|
|
284
|
-
|