kopipasta 0.25.0__tar.gz → 0.26.0__tar.gz
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of kopipasta might be problematic. Click here for more details.
- {kopipasta-0.25.0/kopipasta.egg-info → kopipasta-0.26.0}/PKG-INFO +1 -1
- kopipasta-0.26.0/kopipasta/import_parser.py +286 -0
- {kopipasta-0.25.0 → kopipasta-0.26.0}/kopipasta/main.py +41 -23
- {kopipasta-0.25.0 → kopipasta-0.26.0/kopipasta.egg-info}/PKG-INFO +1 -1
- {kopipasta-0.25.0 → kopipasta-0.26.0}/kopipasta.egg-info/SOURCES.txt +1 -0
- {kopipasta-0.25.0 → kopipasta-0.26.0}/setup.py +1 -1
- {kopipasta-0.25.0 → kopipasta-0.26.0}/LICENSE +0 -0
- {kopipasta-0.25.0 → kopipasta-0.26.0}/MANIFEST.in +0 -0
- {kopipasta-0.25.0 → kopipasta-0.26.0}/README.md +0 -0
- {kopipasta-0.25.0 → kopipasta-0.26.0}/kopipasta/__init__.py +0 -0
- {kopipasta-0.25.0 → kopipasta-0.26.0}/kopipasta.egg-info/dependency_links.txt +0 -0
- {kopipasta-0.25.0 → kopipasta-0.26.0}/kopipasta.egg-info/entry_points.txt +0 -0
- {kopipasta-0.25.0 → kopipasta-0.26.0}/kopipasta.egg-info/requires.txt +0 -0
- {kopipasta-0.25.0 → kopipasta-0.26.0}/kopipasta.egg-info/top_level.txt +0 -0
- {kopipasta-0.25.0 → kopipasta-0.26.0}/requirements.txt +0 -0
- {kopipasta-0.25.0 → kopipasta-0.26.0}/setup.cfg +0 -0
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
# kopipasta/import_parser.py
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import re
|
|
5
|
+
import json
|
|
6
|
+
import ast
|
|
7
|
+
from typing import Dict, List, Optional, Set, Tuple
|
|
8
|
+
|
|
9
|
+
# --- Global Cache for tsconfig.json data ---
|
|
10
|
+
# Key: absolute path to tsconfig.json file
|
|
11
|
+
# Value: Tuple (absolute_base_url: Optional[str], alias_paths_map: Dict[str, List[str]])
|
|
12
|
+
_tsconfig_configs_cache: Dict[str, Tuple[Optional[str], Dict[str, List[str]]]] = {}
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
# --- TypeScript Alias and Import Resolution ---
|
|
16
|
+
|
|
17
|
+
def find_relevant_tsconfig_path(file_path_abs: str, project_root_abs: str) -> Optional[str]:
    """Find the most relevant tsconfig.json for a source file.

    Walks upwards from the file's directory towards project_root_abs
    (inclusive). In each directory it prefers a plain 'tsconfig.json';
    otherwise it falls back to the alphabetically first 'tsconfig.*.json'
    variant (e.g. tsconfig.app.json).

    Returns the normalized absolute path of the config file, or None if
    no config is found within the project root.
    """
    current_dir = os.path.dirname(os.path.normpath(file_path_abs))
    project_root_abs_norm = os.path.normpath(project_root_abs)

    def _inside_root(directory: str) -> bool:
        # A raw startswith() would wrongly accept sibling directories such
        # as '/repo-backup' for root '/repo'; require a path-separator
        # boundary (or exact equality) instead.
        return (directory == project_root_abs_norm
                or directory.startswith(project_root_abs_norm + os.sep))

    while _inside_root(current_dir):
        potential_tsconfig = os.path.join(current_dir, "tsconfig.json")
        if os.path.isfile(potential_tsconfig):
            return os.path.normpath(potential_tsconfig)

        try:
            variant_tsconfigs = sorted(
                f for f in os.listdir(current_dir)
                if f.startswith("tsconfig.") and f.endswith(".json")
                and os.path.isfile(os.path.join(current_dir, f))
            )
            if variant_tsconfigs:
                return os.path.normpath(os.path.join(current_dir, variant_tsconfigs[0]))
        except OSError:
            # Directory may be unreadable or missing; keep walking upwards.
            pass

        if current_dir == project_root_abs_norm:
            break

        parent_dir = os.path.dirname(current_dir)
        if parent_dir == current_dir:  # filesystem root reached
            break
        current_dir = parent_dir
    return None
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def load_tsconfig_config(tsconfig_path_abs: str) -> Tuple[Optional[str], Dict[str, List[str]]]:
    """Load compilerOptions.baseUrl and compilerOptions.paths from a tsconfig.

    tsconfig.json is JSONC: it may contain //-comments, /* */-comments and
    trailing commas, all of which json.loads rejects, so these are stripped
    before parsing. Results are cached per absolute path in
    _tsconfig_configs_cache.

    Returns (absolute_base_url, paths_map); (None, {}) on any failure.
    """
    if tsconfig_path_abs in _tsconfig_configs_cache:
        return _tsconfig_configs_cache[tsconfig_path_abs]

    if not os.path.isfile(tsconfig_path_abs):
        _tsconfig_configs_cache[tsconfig_path_abs] = (None, {})
        return None, {}

    try:
        with open(tsconfig_path_abs, 'r', encoding='utf-8') as f:
            content = f.read()
        # Strip // line comments, including one on the last line without a
        # trailing newline. '(?<!:)' keeps URLs such as "https://..." intact.
        content = re.sub(r"(?<!:)//[^\n]*", "", content)
        content = re.sub(r"/\*.*?\*/", "", content, flags=re.DOTALL)
        # Remove trailing commas, which tsconfig tolerates but JSON forbids.
        content = re.sub(r",\s*([}\]])", r"\1", content)
        config = json.loads(content)

        compiler_options = config.get("compilerOptions", {})
        tsconfig_dir = os.path.dirname(tsconfig_path_abs)
        # baseUrl defaults to the tsconfig's own directory.
        base_url_from_config = compiler_options.get("baseUrl", ".")
        abs_base_url = os.path.normpath(os.path.join(tsconfig_dir, base_url_from_config))

        paths = compiler_options.get("paths", {})
        # Normalize every alias mapping to a list of target templates.
        processed_paths = {key: (val if isinstance(val, list) else [val]) for key, val in paths.items()}

        _tsconfig_configs_cache[tsconfig_path_abs] = (abs_base_url, processed_paths)
        return abs_base_url, processed_paths
    except Exception as e:
        # Best-effort: a broken tsconfig must not abort the whole scan.
        print(f"Warning: Could not parse {os.path.relpath(tsconfig_path_abs)}: {e}")
        _tsconfig_configs_cache[tsconfig_path_abs] = (None, {})
        return None, {}
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def _probe_ts_path_candidates(candidate_base_path_abs: str) -> Optional[str]:
|
|
90
|
+
"""
|
|
91
|
+
Given a candidate base absolute path, tries to find a corresponding file.
|
|
92
|
+
"""
|
|
93
|
+
possible_extensions = ['.ts', '.tsx', '.js', '.jsx', '.json']
|
|
94
|
+
|
|
95
|
+
if os.path.isfile(candidate_base_path_abs):
|
|
96
|
+
return candidate_base_path_abs
|
|
97
|
+
|
|
98
|
+
stem, original_ext = os.path.splitext(candidate_base_path_abs)
|
|
99
|
+
base_for_ext_check = stem if original_ext.lower() in possible_extensions else candidate_base_path_abs
|
|
100
|
+
|
|
101
|
+
for ext in possible_extensions:
|
|
102
|
+
path_with_ext = base_for_ext_check + ext
|
|
103
|
+
if os.path.isfile(path_with_ext):
|
|
104
|
+
return path_with_ext
|
|
105
|
+
|
|
106
|
+
if os.path.isdir(base_for_ext_check):
|
|
107
|
+
for ext in possible_extensions:
|
|
108
|
+
index_file_path = os.path.join(base_for_ext_check, "index" + ext)
|
|
109
|
+
if os.path.isfile(index_file_path):
|
|
110
|
+
return index_file_path
|
|
111
|
+
return None
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def resolve_ts_import_path(
    import_str: str,
    current_file_dir_abs: str,
    abs_base_url: Optional[str],
    alias_map: Dict[str, List[str]]
) -> Optional[str]:
    """Resolve a TypeScript import specifier to an absolute file path.

    Resolution order mirrors the TypeScript compiler's:
    1. tsconfig 'paths' aliases (longest pattern first),
    2. relative imports ('./x', '../x') against the importing file's dir,
    3. bare specifiers against baseUrl.

    Returns the absolute path of the first existing candidate (via
    _probe_ts_path_candidates), or None.
    """
    candidate_targets_abs: List[str] = []
    # Longest pattern first so '@app/core/*' wins over '@app/*'.
    sorted_alias_keys = sorted(alias_map.keys(), key=len, reverse=True)
    alias_matched_and_resolved = False

    for alias_pattern in sorted_alias_keys:
        is_wildcard = "/*" in alias_pattern
        if is_wildcard:
            # '@app/*' must match '@app/foo' but NOT '@apple/foo': keep the
            # separator as part of the matched prefix.
            alias_prefix = alias_pattern.replace("/*", "/")
            matched = import_str.startswith(alias_prefix)
            import_suffix = import_str[len(alias_prefix):]
        else:
            # Non-wildcard aliases are exact-match only, per TS semantics.
            matched = import_str == alias_pattern
            import_suffix = ""
        if not matched:
            continue

        targets = alias_map[alias_pattern]
        for mapping_path_template in (targets if isinstance(targets, list) else [targets]):
            if is_wildcard:
                resolved_relative_to_base = mapping_path_template.replace("*", import_suffix, 1)
            else:
                resolved_relative_to_base = mapping_path_template
            if abs_base_url:
                abs_candidate = os.path.normpath(os.path.join(abs_base_url, resolved_relative_to_base))
                candidate_targets_abs.append(abs_candidate)
            else:
                print(f"Warning: TS Alias '{alias_pattern}' used, but no abs_base_url for context of '{current_file_dir_abs}'.")
        if candidate_targets_abs:
            alias_matched_and_resolved = True
            break

    if not alias_matched_and_resolved and import_str.startswith('.'):
        # Relative import: resolve against the importing file's directory.
        candidate_targets_abs.append(os.path.normpath(os.path.join(current_file_dir_abs, import_str)))
    elif not alias_matched_and_resolved and abs_base_url and not import_str.startswith('.'):
        # Bare specifier: try it under baseUrl.
        candidate_targets_abs.append(os.path.normpath(os.path.join(abs_base_url, import_str)))

    for cand_abs_path in candidate_targets_abs:
        resolved_file = _probe_ts_path_candidates(cand_abs_path)
        if resolved_file:
            return resolved_file
    return None
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def parse_typescript_imports(
    file_content: str,
    file_path_abs: str,
    project_root_abs: str
) -> Set[str]:
    """Extract and resolve local TS/JS imports from a file's content.

    Loads baseUrl/paths context from the nearest tsconfig.json (falling
    back to the project root as baseUrl when none exists), skips
    specifiers that look like external packages, and returns the set of
    resolved absolute paths lying inside project_root_abs.
    """
    resolved_imports_abs_paths: Set[str] = set()
    relevant_tsconfig_abs_path = find_relevant_tsconfig_path(file_path_abs, project_root_abs)

    if relevant_tsconfig_abs_path:
        abs_base_url, alias_map = load_tsconfig_config(relevant_tsconfig_abs_path)
    else:
        # No tsconfig found: treat the project root as baseUrl so bare
        # specifiers can still resolve; no aliases are available.
        abs_base_url, alias_map = project_root_abs, {}

    # Matches ES 'import'/'export ... from', side-effect imports,
    # require(...) calls and dynamic import(...) calls.
    import_regex = re.compile(
        r"""
        (?:import|export)
        (?:\s+(?:type\s+)?(?:[\w*{}\s,\[\]:\."'`-]+)\s+from)?
        \s*['"`]([^'"\n`]+?)['"`]
        |require\s*\(\s*['"`]([^'"\n`]+?)['"`]\s*\)
        |import\s*\(\s*['"`]([^'"\n`]+?)['"`]\s*\)
        """,
        re.VERBOSE | re.MULTILINE
    )

    current_file_dir_abs = os.path.dirname(file_path_abs)
    project_root_norm = os.path.normpath(project_root_abs)

    for match in import_regex.finditer(file_content):
        specifier = next((g for g in match.groups() if g is not None), None)
        if not specifier:
            continue

        # Heuristic: bare one-segment names (or '@scope/pkg') that match no
        # alias and do not exist under baseUrl are external packages.
        looks_external = (
            not specifier.startswith(('.', '/')) and
            not any(specifier.startswith(alias_pattern.replace("/*", "")) for alias_pattern in alias_map) and
            not (abs_base_url and os.path.exists(os.path.join(abs_base_url, specifier))) and
            (specifier.count('/') == 0 or (specifier.startswith('@') and specifier.count('/') == 1)) and
            '.' not in specifier.split('/')[0]
        )
        if looks_external:
            continue

        resolved_abs_path = resolve_ts_import_path(
            specifier, current_file_dir_abs, abs_base_url, alias_map
        )
        if resolved_abs_path:
            norm_resolved_path = os.path.normpath(resolved_abs_path)
            if norm_resolved_path.startswith(project_root_norm):
                resolved_imports_abs_paths.add(norm_resolved_path)
    return resolved_imports_abs_paths
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
# --- Python Import Resolution ---
|
|
216
|
+
|
|
217
|
+
def resolve_python_import(
    module_name_parts: List[str],
    current_file_dir_abs: str,
    project_root_abs: str,
    level: int
) -> Optional[str]:
    """Resolve a Python module reference to a file inside the project.

    Args:
        module_name_parts: dotted module path split on '.', e.g.
            ['pkg', 'mod'] for 'pkg.mod'.
        current_file_dir_abs: directory of the importing file (used for
            relative imports).
        project_root_abs: absolute project root (used for absolute imports).
        level: relative-import level as reported by ast.ImportFrom
            (0 = absolute; 1 = current package, 2 = one package up, ...).

    Returns the normalized absolute path of the matching module file or
    package __init__.py, or None if nothing exists on disk.
    """

    def _probe(base_dir: str) -> Optional[str]:
        # A dotted module maps either to '<base>/<parts>.py' or to a
        # package directory '<base>/<parts>/' containing '__init__.py'.
        target = os.path.join(base_dir, *module_name_parts)
        module_file = target + ".py"
        if os.path.isfile(module_file):
            return os.path.normpath(module_file)
        init_file = os.path.join(target, "__init__.py")
        if os.path.isdir(target) and os.path.isfile(init_file):
            return os.path.normpath(init_file)
        return None

    if level > 0:
        # Relative import: level 1 resolves against the importing file's
        # own package directory; each extra level climbs one directory up.
        base_path_to_search = current_file_dir_abs
        for _ in range(level - 1):
            base_path_to_search = os.path.dirname(base_path_to_search)
        return _probe(base_path_to_search)

    # Absolute import: try the project root first, then a conventional
    # 'src/' layout as a fallback.
    resolved = _probe(project_root_abs)
    if resolved:
        return resolved
    src_base_path = os.path.join(project_root_abs, "src")
    if os.path.isdir(src_base_path):
        return _probe(src_base_path)
    return None
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
def parse_python_imports(file_content: str, file_path_abs: str, project_root_abs: str) -> Set[str]:
    """Extract and resolve local Python imports from a file's content.

    Parses the source with ast (returning an empty set on syntax errors)
    and resolves both 'import a.b' statements and 'from x import y'
    statements (including relative levels) to absolute file paths.
    Only paths that exist and lie inside project_root_abs are kept.
    """
    resolved_imports: Set[str] = set()
    current_file_dir_abs = os.path.dirname(file_path_abs)
    project_root_norm = os.path.normpath(project_root_abs)

    def _add_if_local(resolved: Optional[str]) -> None:
        # Keep only resolutions that exist and live inside the project root.
        # Require equality or a separator-delimited prefix so a sibling
        # directory like '/repo-extra' never matches root '/repo'.
        if not resolved or not os.path.exists(resolved):
            return
        norm = os.path.normpath(resolved)
        if norm == project_root_norm or norm.startswith(project_root_norm + os.sep):
            resolved_imports.add(norm)

    try:
        tree = ast.parse(file_content, filename=file_path_abs)
    except SyntaxError:
        # Unparsable files simply contribute no imports.
        return resolved_imports

    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            for alias in node.names:
                _add_if_local(resolve_python_import(
                    alias.name.split('.'), current_file_dir_abs, project_root_abs, level=0))
        elif isinstance(node, ast.ImportFrom):
            if node.module:
                _add_if_local(resolve_python_import(
                    node.module.split('.'), current_file_dir_abs, project_root_abs, node.level))
            else:
                # 'from . import x' / 'from .. import y': each imported name
                # may itself be a sibling module or package.
                for alias in node.names:
                    _add_if_local(resolve_python_import(
                        alias.name.split('.'), current_file_dir_abs, project_root_abs, level=node.level))
    return resolved_imports
|
|
286
|
+
|
|
@@ -763,29 +763,47 @@ def generate_prompt_template(files_to_include: List[FileTuple], ignore_patterns:
|
|
|
763
763
|
prompt += "\n\n"
|
|
764
764
|
prompt += "## Instructions for Achieving the Task\n\n"
|
|
765
765
|
analysis_text = (
|
|
766
|
-
"
|
|
767
|
-
"
|
|
768
|
-
"
|
|
769
|
-
"
|
|
770
|
-
"
|
|
771
|
-
"
|
|
772
|
-
"
|
|
773
|
-
"
|
|
774
|
-
"
|
|
775
|
-
"
|
|
776
|
-
"
|
|
777
|
-
"
|
|
778
|
-
"
|
|
779
|
-
"
|
|
780
|
-
"
|
|
781
|
-
"
|
|
782
|
-
"
|
|
783
|
-
"
|
|
784
|
-
"
|
|
785
|
-
"
|
|
786
|
-
"
|
|
787
|
-
"
|
|
788
|
-
"
|
|
766
|
+
"### Core Principles\n"
|
|
767
|
+
"- Mark uncertainties with [UNCERTAIN] and request clarification instead of guessing\n"
|
|
768
|
+
"- Request missing files immediately - never write placeholder code\n"
|
|
769
|
+
"- After 3 failed attempts, summarize learnings and request specific diagnostic info\n"
|
|
770
|
+
"- Ask me to test code, external services, APIs, databases, and integrations frequently\n\n"
|
|
771
|
+
"### Development Workflow\n"
|
|
772
|
+
"**Phase 1: E2E Draft** - Build complete solution incrementally. I may comment but won't test until draft is complete.\n"
|
|
773
|
+
"- Signal completion: [E2E DRAFT COMPLETE]\n"
|
|
774
|
+
"- If fundamental blockers found, pause and discuss\n\n"
|
|
775
|
+
"**Phase 2: Final Review** - When I request consolidation:\n"
|
|
776
|
+
"- Provide complete, copy-paste ready code (no diffs)\n"
|
|
777
|
+
"- Group related changes together\n"
|
|
778
|
+
"- Order files by dependency\n\n"
|
|
779
|
+
"### Task Execution\n\n"
|
|
780
|
+
"1. **Understand**: Rephrase task, identify missing files, mark assumptions ([UNCERTAIN]/[MISSING]/[CONFIRMED])\n\n"
|
|
781
|
+
"2. **Plan**: Provide 2-3 approaches when feasible, identify risks, list required files\n\n"
|
|
782
|
+
"3. **Implement**: Small increments, track attempts with learnings:\n"
|
|
783
|
+
" ```\n"
|
|
784
|
+
" 1.[FAILED] X→Y error (learned: not type issue)\n"
|
|
785
|
+
" 2.[FAILED] Z→same (learned: runtime error)\n"
|
|
786
|
+
" 3.[CURRENT] Need: full stack trace + value of param\n"
|
|
787
|
+
" ```\n\n"
|
|
788
|
+
"4. **Present Code**: \n"
|
|
789
|
+
" - **Phase 1 (Incremental)**:\n"
|
|
790
|
+
" - Small changes: Show only modified lines with context\n"
|
|
791
|
+
" - Large changes: Show changed functions/classes with `// ... existing code ...` for unchanged parts\n"
|
|
792
|
+
" - Always specify filename at start of code block\n"
|
|
793
|
+
" - **Phase 2 (Consolidation)**: Show complete final code for easy copying\n"
|
|
794
|
+
" - **Missing files**: Never write placeholders:\n"
|
|
795
|
+
" ```\n"
|
|
796
|
+
" # MISSING: utils.py - need process_data implementation\n"
|
|
797
|
+
" # REQUEST: Please provide utils.py\n"
|
|
798
|
+
" ```\n\n"
|
|
799
|
+
"5. **Debug**: Include strategic outputs, request specific diagnostics, admit uncertainty early\n\n"
|
|
800
|
+
"### You Have Permission To\n"
|
|
801
|
+
"- Request any file shown in tree but not provided\n"
|
|
802
|
+
"- Ask me to run code and share outputs\n"
|
|
803
|
+
"- Test external dependencies: APIs, databases, services, integration points\n"
|
|
804
|
+
"- Request specific diagnostic information\n"
|
|
805
|
+
"- Suggest pausing when blocked\n"
|
|
806
|
+
"- Ask me to verify assumptions about external systems\n"
|
|
789
807
|
)
|
|
790
808
|
prompt += analysis_text
|
|
791
809
|
return prompt, cursor_position
|
|
@@ -10,7 +10,7 @@ with open("requirements.txt", "r", encoding="utf-8") as f:
|
|
|
10
10
|
|
|
11
11
|
setup(
|
|
12
12
|
name="kopipasta",
|
|
13
|
-
version="0.
|
|
13
|
+
version="0.26.0",
|
|
14
14
|
author="Mikko Korpela",
|
|
15
15
|
author_email="mikko.korpela@gmail.com",
|
|
16
16
|
description="A CLI tool to generate prompts with project structure and file contents",
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|