maketool-0.8.2-py3-none-any.whl → maketool-0.8.12-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
maketool/refscan.py CHANGED
@@ -1,46 +1,31 @@
  #!/usr/bin/env python3
- """maketool-refscan
-
- Two heuristic reports (single command, minimal args):
-
- 1) MISSING (used but not installed)
-    - If an entry script is provided (Sublime {file}) and it's a .py file,
-      AST-scans it and any locally-resolvable sibling modules for imports.
-    - Excludes stdlib, local modules/packages, and imports mapped to installed pip dists.
-    - Uses a local-roots importability check to reduce false positives.
-
- 2) UNUSED files (no filename/path tokens found)
-    - Scans the project directory (derived from Sublime {file} or current directory)
-    - Shows ONLY unused candidates
-    - Groups by extension, separated by a blank line
-    - No group headers
+ """
+ maketool-refscan

- Expected input (same style as refscan):
-     refscan "C:\\full\\path\\to\\current_file.py"  # Sublime passes {file}
-     refscan                                        # scans current directory; missing-imports skipped
+ Usage:
+     maketool-refscan <entry_file>

- Notes:
-   - Missing-imports requires Python 3.8+ for importlib.metadata.packages_distributions().
+ Reports:
+     1) UNUSED files: filename/path tokens not found in scanned text sources
+     2) MISSING imports: imports used by entry (and local-resolved siblings) but not installed
  """

  from __future__ import annotations

  import ast
  import sys
- from collections import defaultdict
- from dataclasses import dataclass
  from pathlib import Path
- from typing import Dict, Iterable, List, Optional, Set, Tuple
+ from dataclasses import dataclass
+ from typing import Dict, Iterable, List, Optional, Set

  try:
      from importlib.metadata import packages_distributions  # py 3.8+
  except Exception:
      packages_distributions = None  # type: ignore

-
- # ======================================================
- # Refscan config (unused-file token scan)
- # ======================================================
+ # ----------------------------
+ # Refscan settings
+ # ----------------------------

  DEFAULT_IGNORE_DIRS = {
      ".git", ".hg", ".svn",
@@ -70,10 +55,17 @@ DEFAULT_SKIP_CONTENT_EXTS = {
  class Candidate:
      path: Path
      rel: str
-     tokens: Tuple[str, ...]
+     tokens: tuple[str, ...]
+

+ def usage_exit(msg: str = "") -> None:
+     if msg:
+         print(msg, file=sys.stderr)
+     print("Usage: maketool-refscan <entry_file>", file=sys.stderr)
+     raise SystemExit(2)

- def iter_files(root: Path, ignore_dirs: Set[str]) -> Iterable[Path]:
+
+ def iter_files(root: Path, ignore_dirs: set[str]) -> Iterable[Path]:
      for p in root.rglob("*"):
          if p.is_dir():
              continue
@@ -88,23 +80,23 @@ def safe_read_text(p: Path) -> Optional[str]:
      except Exception:
          return None

-
  def norm(s: str) -> str:
      return s.replace("\\", "/").lower()


- def build_candidates(root: Path, files: List[Path]) -> List[Candidate]:
-     out: List[Candidate] = []
+ def build_candidates(root: Path, files: list[Path]) -> list[Candidate]:
+     out: list[Candidate] = []
      for p in files:
          rel = str(p.relative_to(root))
          rel_norm = norm(rel)
          base = p.name.lower()
          stem = p.stem.lower()
+
          tokens = {
-             base,
-             stem,
-             rel_norm,
-             rel_norm.replace("/", "\\"),
+             base,                          # rat.ico
+             stem,                          # common
+             rel_norm,                      # icons/rat.ico
+             rel_norm.replace("/", "\\"),   # icons\rat.ico
          }
          out.append(Candidate(p, rel, tuple(t for t in tokens if t)))
      return out
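The tokens built above are the whole matching heuristic: each project file contributes its basename, stem, and both slash variants of its root-relative path, and the file later counts as referenced if any of those tokens appears as a substring of another file's lowercased text. A minimal standalone sketch of that check (the asset path and the haystack line are invented for illustration, not taken from the package):

    from pathlib import Path

    def candidate_tokens(rel_path: str) -> set[str]:
        # Tokens for one project file, given its path relative to the project root.
        p = Path(rel_path)
        rel_norm = rel_path.replace("\\", "/").lower()
        variants = {p.name.lower(), p.stem.lower(), rel_norm, rel_norm.replace("/", "\\")}
        return {t for t in variants if t}

    tokens = candidate_tokens("icons/rat.ico")                       # hypothetical asset
    haystack = 'self.setWindowIcon(QIcon("icons/rat.ico"))'.lower()  # hypothetical source line

    # Substring test, as in the reference loop below: one hit marks the file as used.
    print(any(tok in haystack for tok in tokens))  # True -> not reported as unused

Note that the stem token ("rat" here) matches very loosely by design; the heuristic errs toward treating files as used rather than over-reporting them as unused.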
@@ -117,50 +109,24 @@ def is_text_source(p: Path) -> bool:
      return ext in DEFAULT_SCAN_EXTS


- def print_unused_files_grouped(root: Path) -> None:
-     files = sorted(iter_files(root, DEFAULT_IGNORE_DIRS))
-     candidates = build_candidates(root, files)
-     text_sources = [p for p in files if is_text_source(p)]
-
-     references: Dict[str, List[str]] = {c.rel: [] for c in candidates}
-
-     for src in text_sources:
-         text = safe_read_text(src)
-         if not text:
-             continue
-         hay = text.lower()
-         src_rel = str(src.relative_to(root))
-
-         for c in candidates:
-             if src == c.path:
-                 continue
-             if any(tok in hay for tok in c.tokens):
-                 references[c.rel].append(src_rel)
-
-     unused = [c for c in candidates if not references[c.rel]]
-
-     # group by extension (no headers, blank line between groups)
-     groups: Dict[str, List[str]] = defaultdict(list)
-     for c in unused:
-         ext = c.path.suffix.lower() or ""
-         groups[ext].append(c.rel)
-
-     title = "UNUSED files (no filename/path tokens found)"
+ def print_group(title: str, rows: list[tuple[str, list[str]]]) -> None:
      print(title)
      print("-" * len(title))
+     for rel, refs in rows:
+         is_py = rel.lower().endswith(".py")
+         name = f"[PY] {rel}" if is_py else rel

-     first = True
-     for ext in sorted(groups):
-         # if not first: print()  # group spacer
-         first = False
-         for rel in sorted(groups[ext]):
-             print(rel)
+         if refs:
+             preview = ", ".join(refs[:3]) + (" ..." if len(refs) > 3 else "")
+             print(f"{name} <-- {preview}")
+         else:
+             print(name)
      print()


- # ======================================================
- # Missing-imports report (integrated from report.py)
- # ======================================================
+ # ----------------------------
+ # Missing-import report (merged from report.py)
+ # ----------------------------

  def stdlib_names() -> Set[str]:
      names = set(getattr(sys, "stdlib_module_names", set()) or set())
@@ -188,10 +154,6 @@ def resolve_local_module(module_name: str, base_dir: Path) -> Optional[Path]:


  def imported_top_level_packages(entry_file: Path) -> Set[str]:
-     """
-     Parse entry_file and recursively parse local imports that resolve within the
-     importing file's directory (conservative, no sys.path emulation).
-     """
      visited: Set[Path] = set()
      found: Set[str] = set()

@@ -237,6 +199,7 @@ def _index_local_modules_under(root: Path) -> Set[str]:

      for init_file in root.glob("*/__init__.py"):
          found.add(init_file.parent.name)
+
      for py in root.glob("*.py"):
          found.add(py.stem)

@@ -244,13 +207,9 @@ def _index_local_modules_under(root: Path) -> Set[str]:


  def index_local_modules(project_root: Path) -> Set[str]:
-     """
-     Index local top-level import names from:
-       - project_root
-       - common source roots under it if present
-     (No extra args to keep refscan UX minimal.)
-     """
      roots: List[Path] = [project_root]
+
+     # common extra roots (optional)
      for name in ["src", "lib", "app", "python", "package", "packages"]:
          p = project_root / name
          if p.exists() and p.is_dir():
@@ -259,17 +218,17 @@ def index_local_modules(project_root: Path) -> Set[str]:
      locals_found: Set[str] = set()
      for r in roots:
          locals_found |= _index_local_modules_under(r)
+
      return locals_found


- def try_importable_as_local(name: str, project_root: Path, extra_roots: List[Path]) -> bool:
+ def try_importable_as_local(name: str, project_root: Path) -> bool:
      added: List[str] = []
      try:
-         for p in [project_root, *extra_roots]:
-             sp = str(p)
-             if sp and sp not in sys.path:
-                 sys.path.insert(0, sp)
-                 added.append(sp)
+         sp = str(project_root)
+         if sp and sp not in sys.path:
+             sys.path.insert(0, sp)
+             added.append(sp)
          __import__(name)
          return True
      except Exception:
@@ -289,13 +248,7 @@ def compute_missing_used_imports(
      import_to_dists: Dict[str, List[str]],
      project_root: Path,
  ) -> Set[str]:
-     """
-     Missing = not stdlib, not local, not mapped to installed dists, and not importable
-     when project roots are on sys.path.
-     """
      missing: Set[str] = set()
-     extra_roots: List[Path] = []  # keep refscan input simple; no extra flags
-
      for name in used_import_names:
          if not name or name in std:
              continue
@@ -303,34 +256,32 @@ def compute_missing_used_imports(
              continue
          if import_to_dists.get(name):
              continue
-         if try_importable_as_local(name, project_root, extra_roots):
+         if try_importable_as_local(name, project_root):
              continue
          try:
              __import__(name)
              continue
          except Exception:
              missing.add(name)
-
      return missing

+ def print_unused_grouped_by_ext(unused_rels: list[str]) -> None:
+     # Sort by: extension (lower), then full path (lower)
+     def key(rel: str):
+         ext = Path(rel).suffix.lower()
+         return (ext, rel.lower())

- def print_missing_imports(entry: Path) -> None:
+     for rel in sorted(unused_rels, key=key):
+         print(rel)

-     title = "MISSING (used but not installed):"
-     print(title)
-     print("-" * len(title))
+ def run_missing_imports_report(entry: Path, project_root: Path) -> None:
+     print("MISSING (used but not installed):")
+     print("------------------------------")

      if packages_distributions is None:
-         print("(skipped: Python 3.8+ required for packages_distributions())")
-         print()
-         return
-
-     if not entry.exists() or entry.suffix.lower() != ".py":
-         print("(skipped: entry is not a .py file)")
-         print()
+         print("(skipped: Python 3.8+ required for importlib.metadata.packages_distributions())\n")
          return

-     project_root = entry.parent  # matches your earlier behavior (reliable + simple)
      std = stdlib_names()
      local_index = index_local_modules(project_root)
      used_import_names = imported_top_level_packages(entry)
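For context, the decision chain that compute_missing_used_imports applies to each imported top-level name runs stdlib → local index → installed-distribution mapping → import probe, and only names that fail every step are reported. A simplified standalone sketch of that order (the names and the local index are hypothetical, and the project-root sys.path handling of try_importable_as_local is collapsed into the final import attempt):

    import sys

    try:
        from importlib.metadata import packages_distributions
    except Exception:  # mirrors the module's guarded import
        packages_distributions = None

    def classify(name: str, local_index: set[str]) -> str:
        std = set(getattr(sys, "stdlib_module_names", set()) or set())
        if name in std:
            return "stdlib"
        if name in local_index:
            return "local module/package"
        if packages_distributions and packages_distributions().get(name):
            return "installed distribution"
        try:
            __import__(name)  # final probe: importable by any other means?
            return "importable"
        except Exception:
            return "MISSING"

    print(classify("json", {"helpers"}))                       # stdlib (or "importable" on older Pythons)
    print(classify("helpers", {"helpers"}))                     # local module/package
    print(classify("surely_not_installed_pkg", {"helpers"}))    # MISSING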
@@ -344,52 +295,66 @@ def print_missing_imports(entry: Path) -> None:
          project_root=project_root,
      )

-     if missing:
-         for m in sorted(missing):
-             print(f"{m}")
-     # else:
-     #     print("(none)")
-
+     if not missing: return
+
+     for m in sorted(missing):
+         print(m)
      print()


- # ======================================================
- # Main (refscan UX)
- # ======================================================
+ # ----------------------------
+ # Main
+ # ----------------------------

  def main() -> int:
+     if len(sys.argv) != 2:
+         usage_exit()

-     # refscan input: optional positional path (Sublime {file}); otherwise CWD
-     entry: Optional[Path] = None
-     root = Path.cwd().resolve()
+     entry = Path(sys.argv[1]).expanduser()
+     if not entry.exists():
+         usage_exit(f"Entry file not found: {entry}")

-     if len(sys.argv) >= 2:
-         p = Path(sys.argv[1]).expanduser()
-         try:
-             p = p.resolve()
-         except Exception:
-             pass
+     entry = entry.resolve()
+     root = entry.parent

-         # If Sublime passes a file, scan its parent dir; also use it as entry if .py
-         if p.suffix:
-             root = p.parent
-             if p.suffix.lower() == ".py":
-                 entry = p
-         else:
-             # If user passes a directory, use it as root; no entry inferred
-             if p.exists() and p.is_dir():
-                 root = p
+     ignore_dirs = set(DEFAULT_IGNORE_DIRS)
+
+     files = sorted(iter_files(root, ignore_dirs))
+     candidates = build_candidates(root, files)
+     text_sources = [p for p in files if is_text_source(p)]

+     references: Dict[str, List[str]] = {c.rel: [] for c in candidates}
+
+     for src in text_sources:
+         src_rel = str(src.relative_to(root))
+         text = safe_read_text(src)
+         if not text:
+             continue
+         hay = text.lower()
+
+         for c in candidates:
+             if src == c.path:
+                 continue
+             if src == entry:
+                 continue
+             if any(tok in hay for tok in c.tokens):
+                 references[c.rel].append(src_rel)

+     unused_rows: list[tuple[str, List[str]]] = []
+     for c in candidates:
+         refs = references[c.rel]
+         if not refs:
+             unused_rows.append((c.rel, refs))

-     print_unused_files_grouped(root)
+ print_group("2UNUSED files (no filename/path tokens found)", unused_rows)

-     if entry is not None:
-         print_missing_imports(entry)
+     # Second report: missing imports (based on entry .py)
+     if entry.suffix.lower() == ".py":
+         run_missing_imports_report(entry=entry, project_root=root)
      else:
          print("MISSING (used but not installed):")
-         print(" (skipped: no entry .py provided)")
-         print()
+         print("------------------------------")
+         print("(skipped: entry is not a .py file)\n")

      return 0

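Taken together, main() now accepts exactly one argument, treats the entry file's parent directory as the project root, prints the unused-file report, and runs the missing-import report only when the entry is a .py file. A hedged sketch of driving that flow programmatically, equivalent to the maketool-refscan <entry_file> usage from the module docstring (the entry path below is a placeholder):

    import sys
    from maketool import refscan

    # Placeholder path; a non-existent file exits with the usage message (SystemExit(2)).
    sys.argv = ["maketool-refscan", "path/to/entry.py"]
    raise SystemExit(refscan.main())

When the wheel is installed, the maketool-refscan command described in the docstring runs this same flow.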
maketool-0.8.2.dist-info/METADATA → maketool-0.8.12.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: maketool
- Version: 0.8.2
+ Version: 0.8.12
  Summary: Python Automation tool for building PySide6 UI and PyInstaller EXE.
  Author-email: Alan Lilly <panofish@gmail.com>
  Requires-Python: >=3.7
maketool-0.8.12.dist-info/RECORD ADDED
@@ -0,0 +1,12 @@
+ maketool/__init__.py,sha256=in3uaJRClTGaMIQ6sg725m8so86U7gBr1QfCErxuLAk,172
+ maketool/build.py,sha256=-x45Nq-FZyemyAmh1gZlZ0c_JcSUePsbViLCeZezb4o,3921
+ maketool/clean.py,sha256=pJYb-kt23_HaOrckv8Kft09HKl9m4_Lt1Tf15HQCmvE,3245
+ maketool/compile.py,sha256=n1mzoyU5hlOjHVn2ytCsZVEvN6hta8-VHyR25UGc1CQ,14496
+ maketool/refscan.py,sha256=YpNrujP85iOPjfwg9OdUc9IFDqqZTjMsYYo_ceEZN_g,10089
+ maketool/run.py,sha256=UO6O7IaSl8XBqPEsC0CaTt65Or0m3_wQFvWZUnY4J00,2399
+ maketool/sublime.py,sha256=Ah_Y3tT7ifpUh_pGugY7hqM4SQ0UE-OPLam2hZZtar0,9116
+ maketool-0.8.12.dist-info/METADATA,sha256=iOaC-rrw1kvOpB0ByOLHCdYpcM3DMrEshbxf75F5J5A,4845
+ maketool-0.8.12.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ maketool-0.8.12.dist-info/entry_points.txt,sha256=hYswW0t7b9YacCLg0-4zAI1CKKjeOH1LJfi34Z8lAPc,248
+ maketool-0.8.12.dist-info/top_level.txt,sha256=e7JbT3AdVc2AJstJ1xW1hcwhwfFQ6U8QRdFzaFQe6sQ,9
+ maketool-0.8.12.dist-info/RECORD,,
maketool-0.8.2.dist-info/RECORD DELETED
@@ -1,12 +0,0 @@
- maketool/__init__.py,sha256=in3uaJRClTGaMIQ6sg725m8so86U7gBr1QfCErxuLAk,172
- maketool/build.py,sha256=-x45Nq-FZyemyAmh1gZlZ0c_JcSUePsbViLCeZezb4o,3921
- maketool/clean.py,sha256=pJYb-kt23_HaOrckv8Kft09HKl9m4_Lt1Tf15HQCmvE,3245
- maketool/compile.py,sha256=n1mzoyU5hlOjHVn2ytCsZVEvN6hta8-VHyR25UGc1CQ,14496
- maketool/refscan.py,sha256=-Mtje3zA0App0JONENGf_F5mNMQ7ItSDss_ClYV21oA,11545
- maketool/run.py,sha256=UO6O7IaSl8XBqPEsC0CaTt65Or0m3_wQFvWZUnY4J00,2399
- maketool/sublime.py,sha256=Ah_Y3tT7ifpUh_pGugY7hqM4SQ0UE-OPLam2hZZtar0,9116
- maketool-0.8.2.dist-info/METADATA,sha256=DelUlpdHMgeeKvyd1D54yWic-I6tsQ3TKhat5Je04fA,4844
- maketool-0.8.2.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
- maketool-0.8.2.dist-info/entry_points.txt,sha256=hYswW0t7b9YacCLg0-4zAI1CKKjeOH1LJfi34Z8lAPc,248
- maketool-0.8.2.dist-info/top_level.txt,sha256=e7JbT3AdVc2AJstJ1xW1hcwhwfFQ6U8QRdFzaFQe6sQ,9
- maketool-0.8.2.dist-info/RECORD,,