pysfi 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pysfi-0.1.5.dist-info/METADATA +107 -0
- pysfi-0.1.5.dist-info/RECORD +19 -0
- pysfi-0.1.5.dist-info/WHEEL +4 -0
- pysfi-0.1.5.dist-info/entry_points.txt +11 -0
- sfi/__init__.py +3 -0
- sfi/alarmclock/__init__.py +0 -0
- sfi/alarmclock/alarmclock.py +367 -0
- sfi/bumpversion/__init__.py +3 -0
- sfi/bumpversion/bumpversion.py +535 -0
- sfi/embedinstall/embedinstall.py +418 -0
- sfi/filedate/__init__.py +0 -0
- sfi/filedate/filedate.py +112 -0
- sfi/makepython/__init__.py +0 -0
- sfi/makepython/makepython.py +310 -0
- sfi/projectparse/projectparse.py +152 -0
- sfi/pyloadergen/pyloadergen.py +995 -0
- sfi/pypacker/fspacker.py +91 -0
- sfi/taskkill/taskkill.py +236 -0
- sfi/which/which.py +74 -0
sfi/embedinstall/embedinstall.py
ADDED

@@ -0,0 +1,418 @@

"""Download Python embeddable package to a specific directory."""

from __future__ import annotations

import argparse
import logging
import platform
import shutil
import time
import zipfile
from pathlib import Path
from typing import Final
from urllib.error import HTTPError, URLError
from urllib.request import Request, urlopen

# Architecture mapping (keys uppercased to match platform.machine().upper())
ARCH_MAP: Final = {
    "AMD64": "amd64",
    "X86_64": "amd64",
    "X86": "amd64",
    "I686": "amd64",
    "I386": "amd64",
    "ARM64": "arm64",
    "AARCH64": "arm64",
}

USER_AGENT: Final = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"

logging.basicConfig(level=logging.INFO, format="%(message)s")

cwd = Path.cwd()
logger = logging.getLogger(__name__)

def get_system_arch(manual_arch: str | None = None) -> str:
    """Get system architecture for Python embeddable package."""
    if manual_arch:
        logger.debug(f"Using manual architecture: {manual_arch}")
        return manual_arch

    machine = platform.machine().upper()

    logger.debug(f"System machine: {machine}")

    if machine in ARCH_MAP:
        arch = ARCH_MAP[machine]
        logger.debug(f"Detected architecture: {arch}")
        return arch

    logger.warning(f"Unknown architecture: {machine}, defaulting to amd64")
    return "amd64"

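A quick illustration of the detection above (the results depend on the host; on 64-bit Windows, platform.machine() reports "AMD64"):

>>> get_system_arch()         # on a 64-bit Windows host
'amd64'
>>> get_system_arch("arm64")  # an explicit argument bypasses detection
'arm64'
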
def get_download_url(base_url_template: str, version: str, arch: str) -> str:
    """Get download URL with correct architecture."""
    return base_url_template.format(version=version, arch=arch)


def get_official_url_template(arch: str) -> str:
    """Get official URL template based on architecture."""
    return f"https://www.python.org/ftp/python/{{version}}/python-{{version}}-embed-{arch}.zip"

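Filled in, the official template resolves to the python.org download layout, for example:

>>> get_official_url_template("amd64").format(version="3.13.1")
'https://www.python.org/ftp/python/3.13.1/python-3.13.1-embed-amd64.zip'
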
def get_latest_patch_version(major_minor: str, arch: str, timeout: int = 5) -> str:
    """Get the latest patch version for a given major.minor version.

    Args:
        major_minor: Major.minor version (e.g., '3.13') or full version (e.g., '3.13.1')
        arch: Architecture (amd64 or arm64)
        timeout: Request timeout in seconds

    Returns:
        Full version string (e.g., '3.13.1') or original if already complete
    """
    # Check if version is already complete (has 3 or more parts)
    version_parts = major_minor.split(".")
    if len(version_parts) >= 3:
        logger.debug(f"Version already complete: {major_minor}, skipping auto-completion")
        return major_minor

    # Check if version format is valid (expected 'major.minor', e.g. '3.13')
    if "." not in major_minor or len(version_parts) != 2:
        logger.debug(f"Unrecognized version format: {major_minor}, using as-is")
        return major_minor

    logger.info(f"Checking latest version for {major_minor}...")

    base_url = get_official_url_template(arch)
    major, minor = major_minor.split(".")

    # Try from highest patch number down to 0
    for patch in range(20, -1, -1):
        version = f"{major}.{minor}.{patch}"
        test_url = base_url.format(version=version, arch=arch)

        req = Request(test_url, headers={"User-Agent": USER_AGENT}, method="HEAD")
        try:
            with urlopen(req, timeout=timeout) as response:
                if response.status == 200:
                    logger.info(f"Latest version found: {version}")
                    return version
        except HTTPError as e:
            if e.code == 404:
                logger.debug(f"Version {version} not found")
            else:
                logger.debug(f"Version {version}: HTTP {e.code}")
        except (URLError, TimeoutError):
            logger.debug(f"Version {version}: connection failed")

    logger.warning(f"Could not find any patch version for {major_minor}, using as-is")
    return major_minor

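A sketch of the auto-completion in use; the resolved patch number depends on what python.org hosts at the time, so the value shown is illustrative:

version = get_latest_patch_version("3.12", "amd64")  # HEAD-probes 3.12.20 down to 3.12.0
# e.g. version == "3.12.8" if that is the newest patch with an embeddable zip
assert get_latest_patch_version("3.8.10", "amd64") == "3.8.10"  # already complete: returned unchanged
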
def get_mirror_url_templates(arch: str) -> list[str]:
    """Get mirror URL templates based on architecture."""
    return [
        f"https://mirrors.huaweicloud.com/python/{{version}}/python-{{version}}-embed-{arch}.zip",
        f"https://mirrors.aliyun.com/python-release/{{version}}/python-{{version}}-embed-{arch}.zip",
        f"https://mirrors.tuna.tsinghua.edu.cn/python/{{version}}/python-{{version}}-embed-{arch}.zip",
        f"https://mirrors.pku.edu.cn/python/{{version}}/python-{{version}}-embed-{arch}.zip",
    ]

def test_url_speed(url: str, timeout: int = 5) -> float:
    """Test URL speed by making a HEAD request and measuring response time.

    Falls back to GET request if HEAD is not supported.
    """
    start_time = time.time()

    req = Request(url, headers={"User-Agent": USER_AGENT}, method="HEAD")
    try:
        with urlopen(req, timeout=timeout) as response:
            if response.status == 200:
                elapsed = time.time() - start_time
                logger.debug(f"URL {url}: {elapsed:.3f}s")
                return elapsed
    except HTTPError as e:
        # Some servers don't support HEAD, fall back to GET with a range request
        if e.code in (405, 403):
            req = Request(
                url,
                headers={"User-Agent": USER_AGENT, "Range": "bytes=0-1023"},
                method="GET",
            )
            try:
                with urlopen(req, timeout=timeout) as response:
                    if response.status in (206, 200):
                        elapsed = time.time() - start_time
                        logger.debug(f"URL {url}: {elapsed:.3f}s (GET fallback)")
                        return elapsed
            except (URLError, TimeoutError):
                logger.debug(f"URL {url}: failed (GET fallback)")
        else:
            logger.debug(f"URL {url}: failed (HTTP {e.code})")
    except (URLError, TimeoutError) as e:
        logger.debug(f"URL {url}: failed ({type(e).__name__})")

    return float("inf")

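A sketch of the probe in isolation; float("inf") marks an unreachable URL, so it sorts last:

speed = test_url_speed("https://www.python.org/ftp/python/3.8.10/python-3.8.10-embed-amd64.zip")
if speed == float("inf"):
    logger.warning("URL unreachable within the timeout")
else:
    logger.info(f"HEAD responded in {speed:.3f}s")
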
def find_available_urls(version: str, arch: str, timeout: int = 5) -> list[str]:
    """Find all available URLs for downloading Python embeddable package, sorted by speed."""
    official_url = get_official_url_template(arch).format(version=version)
    mirror_templates = get_mirror_url_templates(arch)
    mirror_urls = [template.format(version=version) for template in mirror_templates]
    urls_to_test = [*mirror_urls, official_url]

    logger.debug("Testing mirror speeds...")

    speed_results = []
    for url in urls_to_test:
        speed = test_url_speed(url, timeout)
        if speed != float("inf"):
            speed_results.append((speed, url))
            logger.debug(f"  ✓ {url} ({speed:.3f}s)")
        else:
            logger.debug(f"  ✗ {url} (failed)")

    if not speed_results:
        logger.warning("All mirrors failed, falling back to official URL")
        return [official_url]

    # Sort candidates by measured response time, fastest first
    speed_results.sort(key=lambda x: x[0])
    available_urls = [url for _, url in speed_results]

    logger.debug(f"Available URLs (sorted by speed): {len(available_urls)} found")
    for i, url in enumerate(available_urls, 1):
        logger.debug(f"  {i}. {url}")

    return available_urls

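The resulting order puts the fastest responder first (URLs and ordering illustrative):

urls = find_available_urls("3.8.10", "amd64")
# e.g. ["https://mirrors.tuna.tsinghua.edu.cn/python/3.8.10/python-3.8.10-embed-amd64.zip",
#       "https://www.python.org/ftp/python/3.8.10/python-3.8.10-embed-amd64.zip"]
# unreachable mirrors are dropped entirely
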
def download_with_progress(url: str, dest_path: Path) -> None:
    """Download file with progress bar and integrity check."""
    logger.info(f"Downloading from: {url}")

    req = Request(url, headers={"User-Agent": USER_AGENT})
    try:
        with urlopen(req) as response:
            total_size = int(response.headers.get("content-length", 0))
            downloaded = 0
            last_progress = 0

            dest_path.parent.mkdir(parents=True, exist_ok=True)

            with dest_path.open("wb") as f:
                while True:
                    chunk = response.read(65536)  # Larger chunk size for better performance
                    if not chunk:
                        break
                    f.write(chunk)
                    downloaded += len(chunk)

                    if total_size > 0:
                        progress = (downloaded / total_size) * 100
                        # Update progress when percentage changes (every 1%)
                        if int(progress) > last_progress or downloaded == total_size:
                            logger.info(
                                f"Progress: {downloaded / 1024 / 1024:.2f} MB / {total_size / 1024 / 1024:.2f} MB ({progress:.1f}%)"
                            )
                            last_progress = int(progress)

            # Verify downloaded file size
            if total_size > 0 and downloaded != total_size:
                raise URLError(f"Incomplete download: expected {total_size} bytes, got {downloaded} bytes")

            logger.info(f"Download completed: {downloaded / 1024 / 1024:.2f} MB")
    except URLError as e:
        logger.error(f"Failed to download {url}: {e}")
        raise

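A minimal sketch of calling the downloader directly (the path is illustrative); it raises URLError on network failure or a size mismatch:

cache = Path(".cache/embedinstall/python-3.8.10-embed-amd64.zip")
download_with_progress(
    "https://www.python.org/ftp/python/3.8.10/python-3.8.10-embed-amd64.zip",
    cache,
)
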
def extract_zip(zip_path: Path, dest_dir: Path) -> None:
    """Extract zip file to destination directory using optimized shutil method."""
    logger.info(f"Extracting to: {dest_dir}")

    dest_dir.mkdir(parents=True, exist_ok=True)

    # Use shutil.unpack_archive for better performance (uses optimized C implementation)
    shutil.unpack_archive(str(zip_path), str(dest_dir), "zip")

    # Count extracted files for logging
    with zipfile.ZipFile(zip_path, "r") as zip_ref:
        member_count = len(zip_ref.namelist())
    logger.info(f"Extracted {member_count} files")

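For reference, a roughly equivalent extraction using zipfile directly with the same parameters (a sketch only; the module itself uses the shutil call above):

with zipfile.ZipFile(zip_path) as zf:
    zf.extractall(dest_dir)
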
def install_embed_python(
    target_dir: Path,
    version: str,
    cache_dir: Path,
    offline: bool,
    keep_cache: bool,
    skip_speed_test: bool,
    arch: str,
    timeout: int = 5,
) -> bool:
    """Install Python embeddable package to target directory."""
    version_filename = f"python-{version}-embed-{arch}.zip"
    cache_file = cache_dir / version_filename

    if cache_file.exists():
        logger.debug(f"Using cached file: {cache_file}")
    elif offline:
        logger.error(f"Offline mode: no cached file found for version {version}")
        return False
    else:
        if skip_speed_test:
            url = get_official_url_template(arch).format(version=version)
            logger.info(f"Skipping speed test, using official URL: {url}")
            urls = [url]
        else:
            urls = find_available_urls(version, arch, timeout)

        download_success = False
        for i, url in enumerate(urls, 1):
            try:
                logger.info(f"Attempting download {i}/{len(urls)}: {url}")
                download_with_progress(url, cache_file)
                download_success = True
                break
            except URLError as e:
                logger.warning(f"Download failed from {url}: {e}")
                if i < len(urls):
                    logger.info("Retrying with next fastest URL...")
                else:
                    logger.error(
                        f"Failed to download Python {version} ({arch}) from all available URLs. "
                        "Please check your internet connection and version."
                    )
                    return False

        if not download_success:
            return False

    try:
        extract_zip(cache_file, target_dir)
    except zipfile.BadZipFile:
        logger.error(f"Invalid zip file: {cache_file}")
        if not keep_cache:
            cache_file.unlink()
        return False
    except Exception as e:
        logger.error(f"Failed to extract: {e}")
        return False

    if not keep_cache and not offline:
        logger.debug(f"Cleaning up cache: {cache_file}")
        cache_file.unlink()

    python_exe = target_dir / "python.exe"
    if python_exe.exists():
        logger.info(f"Successfully installed Python {version} ({arch}) embeddable package to: {target_dir}")
        logger.info(f"Python executable: {python_exe}")
        return True

    logger.warning(f"Installation completed but python.exe not found at: {python_exe}")
    return True

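A hedged sketch of driving the installer programmatically, mirroring what main() does below (paths and values are illustrative):

ok = install_embed_python(
    target_dir=Path("runtime"),
    version="3.8.10",
    cache_dir=Path(".cache/embedinstall"),
    offline=False,
    keep_cache=True,
    skip_speed_test=False,
    arch=get_system_arch(),
    timeout=5,
)
if not ok:
    raise SystemExit(1)
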
def main() -> None:
    parser = argparse.ArgumentParser(
        prog="embedinstall",
        description="Download and install Python embeddable package to a specific directory.",
    )
    parser.add_argument(
        "--directory",
        "-D",
        type=str,
        default=str(cwd / "runtime"),
        help="Directory to install Python embeddable package (default: ./runtime)",
    )
    parser.add_argument("--debug", "-d", action="store_true", help="Enable debug mode")
    parser.add_argument(
        "--offline",
        "-o",
        action="store_true",
        help="Offline mode (use cached files only)",
    )
    parser.add_argument(
        "--version",
        "-v",
        type=str,
        default="3.8.10",
        help="Python version to install (default: 3.8.10)",
    )
    parser.add_argument(
        "--cache-dir",
        "-C",
        type=str,
        default=str(cwd / ".cache" / "embedinstall"),
        help="Cache directory for downloaded files",
    )
    parser.add_argument(
        "--keep-cache",
        "-k",
        action="store_true",
        default=True,
        help="Keep downloaded cache files",
    )
    parser.add_argument(
        "--skip-speed-test",
        "-s",
        action="store_true",
        help="Skip speed test and use official URL directly",
    )
    parser.add_argument(
        "--arch",
        "-a",
        type=str,
        help="Manually specify architecture (amd64, arm64). Default: auto-detect",
    )
    parser.add_argument(
        "--timeout",
        "-t",
        type=int,
        default=5,
        help="Timeout in seconds for URL speed test (default: 5)",
    )

    args = parser.parse_args()

    if args.debug:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    target_dir = Path(args.directory)
    cache_dir = Path(args.cache_dir)

    t0 = time.perf_counter()
    arch = get_system_arch(args.arch)

    # Auto-complete version if only major.minor is provided
    version = get_latest_patch_version(args.version, arch, args.timeout)
    if "." not in version:
        return

    logger.info(f"Installing Python {version} ({arch}) to: {target_dir}")
    logger.info(f"Cache directory: {cache_dir}")

    success = install_embed_python(
        target_dir=target_dir,
        version=version,
        cache_dir=cache_dir,
        offline=args.offline,
        keep_cache=args.keep_cache,
        skip_speed_test=args.skip_speed_test,
        arch=arch,
        timeout=args.timeout,
    )

    if not success:
        raise SystemExit(1)

    logger.info(f"Installation completed in {time.perf_counter() - t0:.4f} seconds")
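Assuming the wheel's entry_points.txt maps a console script to this main() under the name embedinstall (matching the parser's prog), hypothetical invocations:

$ embedinstall --directory runtime --version 3.12 --debug
$ embedinstall -o -v 3.8.10    # offline: use a previously cached zip
$ embedinstall -s -a arm64     # skip the speed test, force arm64
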
sfi/filedate/__init__.py
ADDED

File without changes

sfi/filedate/filedate.py
ADDED

@@ -0,0 +1,112 @@
"""Remove file date prefix and replace with creation/modification date."""

from __future__ import annotations

import argparse
import concurrent.futures
import logging
import re
import time
from functools import lru_cache
from pathlib import Path

DETECT_SEPARATORS: str = "-_#.~"
SEP: str = "_"
DATE_PATTERN = re.compile(r"(20|19)\d{2}((0[1-9])|(1[012]))((0[1-9])|([12]\d)|(3[01]))")

logging.basicConfig(level=logging.INFO, format="%(message)s")
logger = logging.getLogger(__name__)

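DATE_PATTERN matches an eight-digit YYYYMMDD date in the 1900s or 2000s with basic month/day range checks; a quick illustration:

>>> bool(DATE_PATTERN.search("20240131_report"))
True
>>> bool(DATE_PATTERN.search("20241341_report"))  # month 13 / day 41 rejected
False
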
def rename_file(target_path: Path) -> None:
    """Rename file using time marker."""
    filestem = remove_date_prefix(target_path.stem)
    stat = target_path.stat()
    modified, created = stat.st_mtime, stat.st_ctime
    time_mark = time.strftime(
        "%Y%m%d",
        time.localtime(max(modified, created)),
    )
    dst_path = target_path.with_name(
        f"{time_mark}{SEP}{filestem}{target_path.suffix}",
    )

    if dst_path == target_path:
        logger.warning(f"{target_path} is the same as {dst_path}, skipping.")
        return

    base_stem = dst_path.stem
    sequence = 1
    while dst_path.exists() and sequence < 100:
        logger.warning(f"{dst_path} already exists, adding unique suffix.")
        # Build each candidate from the original stem so suffixes don't stack up
        dst_path = dst_path.with_name(
            f"{base_stem}({sequence}){dst_path.suffix}",
        )
        sequence += 1

    try:
        target_path.rename(dst_path)
    except Exception:
        logger.error(f"Rename failed: {target_path} -> {dst_path}")
    else:
        logger.info(f"Rename: {target_path} -> {dst_path}")

@lru_cache(maxsize=1024)
def remove_date_prefix(filestem: str) -> str:
    """Remove date prefix from filename."""
    logger.debug(f"Removing date prefix from: {filestem}")

    match = DATE_PATTERN.search(filestem)
    if not match:
        logger.debug(f"No date prefix found: {filestem}")
        return filestem
    b, e = match.start(), match.end()
    if b >= 1 and filestem[b - 1] in DETECT_SEPARATORS:
        filestem = filestem[: b - 1] + filestem[e:]
    elif e < len(filestem) and filestem[e] in DETECT_SEPARATORS:
        filestem = filestem[:b] + filestem[e + 1 :]
    else:
        # Date has no adjacent separator: stop here rather than recursing
        # forever on an unchanged stem
        return filestem

    logger.debug(f"Removed date prefix: {filestem}")
    return remove_date_prefix(filestem)

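Worked examples of the stripping behavior above, using the separators in DETECT_SEPARATORS:

>>> remove_date_prefix("20230101_notes")
'notes'
>>> remove_date_prefix("notes-20230101")
'notes'
>>> remove_date_prefix("20230101")  # bare date with no separator is left alone
'20230101'
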
def get_filtered_path(target_paths: list[str]) -> list[Path]:
    """Get filtered path list."""
    logger.debug(f"Getting filtered path list: {target_paths}")

    converted_paths = [Path(t) for t in target_paths]
    valid_paths = [p for p in converted_paths if p.exists()]
    missing_paths = list(set(converted_paths) - set(valid_paths))

    if missing_paths:
        logger.warning(f"Files not found: {missing_paths}, skipped.")

    logger.debug(f"Filtered path list: {valid_paths}")
    return valid_paths

def main() -> None:
    parser = argparse.ArgumentParser(
        prog="filedate", description="Remove file date prefix and replace with creation/modification date."
    )
    parser.add_argument("targets", type=str, nargs="+", help="List of input files")
    parser.add_argument("--debug", "-d", action="store_true", help="Enable debug mode")
    args = parser.parse_args()

    if args.debug:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    if not args.targets:
        logger.error("Please provide file list.")
        return

    valid_paths = get_filtered_path(args.targets)
    if not valid_paths:
        logger.error("No valid files to process.")
        return

    t0 = time.perf_counter()
    with concurrent.futures.ThreadPoolExecutor(max_workers=8) as executor:
        # Drain the iterator so worker exceptions surface instead of being dropped
        list(executor.map(rename_file, valid_paths))
    logger.info(f"Done in {time.perf_counter() - t0:.4f}s")
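Assuming the console script for this module is named filedate (matching the parser's prog), hypothetical invocations:

$ filedate 20200101_report.docx notes-19991231.txt
$ filedate --debug ./photos/*.jpg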