py2ls 0.2.4.26__py3-none-any.whl → 0.2.4.28__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
py2ls/.git/index
CHANGED
Binary file
|
py2ls/ips.py
CHANGED
@@ -17,7 +17,11 @@ import warnings
 warnings.simplefilter("ignore", category=pd.errors.SettingWithCopyWarning)
 warnings.filterwarnings("ignore", category=pd.errors.PerformanceWarning)
 warnings.filterwarnings("ignore")
-
+import os
+import shutil
+import logging
+from pathlib import Path
+from datetime import datetime
 
 def run_once_within(duration=60,reverse=False): # default 60s
     import time
@@ -874,6 +878,19 @@ def counter(list_, verbose=True):
     # print(f"Return a list of the n most common elements:\n{c.most_common()}")
     # print(f"Compute the sum of the counts:\n{c.total()}")
 
+def dict2df(dict_, fill=None):
+    len_max = 0
+    for key, value in dict_.items():
+        # each value needs to be a list
+        if isinstance(value, list):
+            pass
+        # get the max_length
+        len_max = len(value) if len(value) > len_max else len_max
+    # pad every list up to the max length
+    for key, value in dict_.items():
+        value.extend([fill] * (len_max - len(value)))
+        dict_[key] = value
+    return pd.DataFrame.from_dict(dict_)
 
 def str2time(time_str, fmt="24"):
     """
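A minimal usage sketch of the new dict2df helper (hypothetical data; assumes the py2ls.ips module imports cleanly):

from py2ls import ips

# columns of unequal length: "b" is padded with the `fill` value up to 3 rows
df = ips.dict2df({"a": [1, 2, 3], "b": [4]}, fill=None)
print(df)   # column "b" -> 4, None, None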
@@ -1322,7 +1339,7 @@ def docx2pdf(dir_docx, dir_pdf=None):
     convert(dir_docx)
 
 
-def img2pdf(dir_img, kind="jpeg", page=None, dir_save=None, page_size="a4", dpi=300):
+def img2pdf(dir_img, kind=None, page=None, dir_save=None, page_size="a4", dpi=300):
     import img2pdf as image2pdf
 
     def mm_to_point(size):
@@ -1331,7 +1348,8 @@ def img2pdf(dir_img, kind="jpeg", page=None, dir_save=None, page_size="a4", dpi=
     def set_dpi(x):
         dpix = dpiy = x
         return image2pdf.get_fixed_dpi_layout_fun((dpix, dpiy))
-
+    if kind is None:
+        _, kind = os.path.splitext(dir_img)
     if not kind.startswith("."):
         kind = "." + kind
     if dir_save is None:
@@ -1354,8 +1372,10 @@ def img2pdf(dir_img, kind="jpeg", page=None, dir_save=None, page_size="a4", dpi=
                 continue
             imgs.append(path)
     else:
-        imgs = [
-
+        imgs = [
+            # os.path.isdir(dir_img),
+            dir_img]
+        print(imgs)
     if page_size:
         if isinstance(page_size, str):
             pdf_in_mm = mm_to_point(paper_size(page_size))
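With the new default kind=None, img2pdf infers the image format from the file extension. A standalone sketch of that inference step (placeholder file name):

import os

dir_img = "scan_page1.png"            # placeholder input path
_, kind = os.path.splitext(dir_img)   # -> ".png"
if not kind.startswith("."):
    kind = "." + kind
print(kind)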
@@ -3205,50 +3225,453 @@ def isa(content, kind):
|
|
3205
3225
|
return False
|
3206
3226
|
|
3207
3227
|
|
3208
|
-
|
3228
|
+
def get_os(full=False, verbose=False):
|
3229
|
+
"""Collects comprehensive system information.
|
3230
|
+
full(bool): True, get more detailed info
|
3231
|
+
verbose(bool): True, print it
|
3232
|
+
usage:
|
3233
|
+
info = get_os(full=True, verbose=False)
|
3234
|
+
"""
|
3235
|
+
import sys
|
3236
|
+
import platform
|
3237
|
+
import psutil
|
3238
|
+
import GPUtil
|
3239
|
+
import socket
|
3240
|
+
import uuid
|
3241
|
+
import cpuinfo
|
3242
|
+
import os
|
3243
|
+
import subprocess
|
3244
|
+
from datetime import datetime, timedelta
|
3245
|
+
from collections import defaultdict
|
3246
|
+
|
3247
|
+
def get_os_type():
|
3248
|
+
os_name = sys.platform
|
3249
|
+
if "dar" in os_name:
|
3250
|
+
return "macOS"
|
3251
|
+
else:
|
3252
|
+
if "win" in os_name:
|
3253
|
+
return "Windows"
|
3254
|
+
elif "linux" in os_name:
|
3255
|
+
return "Linux"
|
3256
|
+
else:
|
3257
|
+
print(f"{os_name}, returned 'None'")
|
3258
|
+
return None
|
3209
3259
|
|
3260
|
+
def get_os_info():
|
3261
|
+
"""Get the detailed OS name, version, and other platform-specific details."""
|
3210
3262
|
|
3211
|
-
def
|
3212
|
-
|
3213
|
-
|
3214
|
-
|
3215
|
-
|
3216
|
-
|
3217
|
-
|
3218
|
-
|
3219
|
-
|
3263
|
+
def get_mac_os_info():
|
3264
|
+
"""Get detailed macOS version and product name."""
|
3265
|
+
try:
|
3266
|
+
sw_vers = subprocess.check_output(["sw_vers"]).decode("utf-8")
|
3267
|
+
product_name = (
|
3268
|
+
[
|
3269
|
+
line
|
3270
|
+
for line in sw_vers.split("\n")
|
3271
|
+
if line.startswith("ProductName")
|
3272
|
+
][0]
|
3273
|
+
.split(":")[1]
|
3274
|
+
.strip()
|
3275
|
+
)
|
3276
|
+
product_version = (
|
3277
|
+
[
|
3278
|
+
line
|
3279
|
+
for line in sw_vers.split("\n")
|
3280
|
+
if line.startswith("ProductVersion")
|
3281
|
+
][0]
|
3282
|
+
.split(":")[1]
|
3283
|
+
.strip()
|
3284
|
+
)
|
3285
|
+
build_version = (
|
3286
|
+
[
|
3287
|
+
line
|
3288
|
+
for line in sw_vers.split("\n")
|
3289
|
+
if line.startswith("BuildVersion")
|
3290
|
+
][0]
|
3291
|
+
.split(":")[1]
|
3292
|
+
.strip()
|
3293
|
+
)
|
3294
|
+
|
3295
|
+
# Return the formatted macOS name, version, and build
|
3296
|
+
return f"{product_name} {product_version} (Build {build_version})"
|
3297
|
+
except Exception as e:
|
3298
|
+
return f"Error retrieving macOS name: {str(e)}"
|
3299
|
+
|
3300
|
+
def get_windows_info():
|
3301
|
+
"""Get detailed Windows version and edition."""
|
3302
|
+
try:
|
3303
|
+
# Get basic Windows version using platform
|
3304
|
+
windows_version = platform.version()
|
3305
|
+
release = platform.release()
|
3306
|
+
version = platform.win32_ver()[0]
|
3307
|
+
|
3308
|
+
# Additional information using Windows-specific system commands
|
3309
|
+
edition_command = "wmic os get caption"
|
3310
|
+
edition = (
|
3311
|
+
subprocess.check_output(edition_command, shell=True)
|
3312
|
+
.decode("utf-8")
|
3313
|
+
.strip()
|
3314
|
+
.split("\n")[1]
|
3315
|
+
)
|
3316
|
+
|
3317
|
+
# Return Windows information
|
3318
|
+
return f"Windows {version} {release} ({edition})"
|
3319
|
+
except Exception as e:
|
3320
|
+
return f"Error retrieving Windows information: {str(e)}"
|
3321
|
+
|
3322
|
+
def get_linux_info():
|
3323
|
+
"""Get detailed Linux version and distribution info."""
|
3324
|
+
try:
|
3325
|
+
# Check /etc/os-release for modern Linux distros
|
3326
|
+
with open("/etc/os-release") as f:
|
3327
|
+
os_info = f.readlines()
|
3328
|
+
|
3329
|
+
os_name = (
|
3330
|
+
next(line for line in os_info if line.startswith("NAME"))
|
3331
|
+
.split("=")[1]
|
3332
|
+
.strip()
|
3333
|
+
.replace('"', "")
|
3334
|
+
)
|
3335
|
+
os_version = (
|
3336
|
+
next(line for line in os_info if line.startswith("VERSION"))
|
3337
|
+
.split("=")[1]
|
3338
|
+
.strip()
|
3339
|
+
.replace('"', "")
|
3340
|
+
)
|
3341
|
+
|
3342
|
+
# For additional info, check for the package manager (e.g., apt, dnf)
|
3343
|
+
package_manager = "Unknown"
|
3344
|
+
if os.path.exists("/usr/bin/apt"):
|
3345
|
+
package_manager = "APT (Debian/Ubuntu)"
|
3346
|
+
elif os.path.exists("/usr/bin/dnf"):
|
3347
|
+
package_manager = "DNF (Fedora/RHEL)"
|
3348
|
+
|
3349
|
+
# Return Linux distribution, version, and package manager
|
3350
|
+
return f"{os_name} {os_version} (Package Manager: {package_manager})"
|
3351
|
+
except Exception as e:
|
3352
|
+
return f"Error retrieving Linux information: {str(e)}"
|
3353
|
+
|
3354
|
+
os_name = platform.system()
|
3355
|
+
|
3356
|
+
if os_name == "Darwin":
|
3357
|
+
return get_mac_os_info()
|
3358
|
+
elif os_name == "Windows":
|
3359
|
+
return get_windows_info()
|
3360
|
+
elif os_name == "Linux":
|
3361
|
+
return get_linux_info()
|
3220
3362
|
else:
|
3221
|
-
|
3222
|
-
|
3363
|
+
return f"Unknown OS: {os_name} {platform.release()}"
|
3364
|
+
|
3365
|
+
def get_os_name_and_version():
|
3366
|
+
os_name = platform.system()
|
3367
|
+
if os_name == "Darwin":
|
3368
|
+
try:
|
3369
|
+
# Run 'sw_vers' command to get macOS details like "macOS Sequoia"
|
3370
|
+
sw_vers = subprocess.check_output(["sw_vers"]).decode("utf-8")
|
3371
|
+
product_name = (
|
3372
|
+
[
|
3373
|
+
line
|
3374
|
+
for line in sw_vers.split("\n")
|
3375
|
+
if line.startswith("ProductName")
|
3376
|
+
][0]
|
3377
|
+
.split(":")[1]
|
3378
|
+
.strip()
|
3379
|
+
)
|
3380
|
+
product_version = (
|
3381
|
+
[
|
3382
|
+
line
|
3383
|
+
for line in sw_vers.split("\n")
|
3384
|
+
if line.startswith("ProductVersion")
|
3385
|
+
][0]
|
3386
|
+
.split(":")[1]
|
3387
|
+
.strip()
|
3388
|
+
)
|
3389
|
+
|
3390
|
+
# Return the formatted macOS name and version
|
3391
|
+
return f"{product_name} {product_version}"
|
3392
|
+
|
3393
|
+
except Exception as e:
|
3394
|
+
return f"Error retrieving macOS name: {str(e)}"
|
3395
|
+
|
3396
|
+
# For Windows, we use platform to get the OS name and version
|
3397
|
+
elif os_name == "Windows":
|
3398
|
+
os_version = platform.version()
|
3399
|
+
return f"Windows {os_version}"
|
3400
|
+
|
3401
|
+
# For Linux, check for distribution info using platform and os-release file
|
3402
|
+
elif os_name == "Linux":
|
3403
|
+
try:
|
3404
|
+
# Try to read Linux distribution info from '/etc/os-release'
|
3405
|
+
with open("/etc/os-release") as f:
|
3406
|
+
os_info = f.readlines()
|
3407
|
+
|
3408
|
+
# Find fields like NAME and VERSION
|
3409
|
+
os_name = (
|
3410
|
+
next(line for line in os_info if line.startswith("NAME"))
|
3411
|
+
.split("=")[1]
|
3412
|
+
.strip()
|
3413
|
+
.replace('"', "")
|
3414
|
+
)
|
3415
|
+
os_version = (
|
3416
|
+
next(line for line in os_info if line.startswith("VERSION"))
|
3417
|
+
.split("=")[1]
|
3418
|
+
.strip()
|
3419
|
+
.replace('"', "")
|
3420
|
+
)
|
3421
|
+
return f"{os_name} {os_version}"
|
3422
|
+
|
3423
|
+
except Exception as e:
|
3424
|
+
return f"Error retrieving Linux name: {str(e)}"
|
3425
|
+
|
3426
|
+
# Default fallback (for unknown OS or edge cases)
|
3427
|
+
return f"{os_name} {platform.release()}"
|
3428
|
+
|
3429
|
+
def get_system_uptime():
|
3430
|
+
"""Returns system uptime as a human-readable string."""
|
3431
|
+
boot_time = datetime.fromtimestamp(psutil.boot_time())
|
3432
|
+
uptime = datetime.now() - boot_time
|
3433
|
+
return str(uptime).split(".")[0] # Remove microseconds
|
3434
|
+
|
3435
|
+
def get_active_processes(limit=10):
|
3436
|
+
processes = []
|
3437
|
+
for proc in psutil.process_iter(
|
3438
|
+
["pid", "name", "cpu_percent", "memory_percent"]
|
3439
|
+
):
|
3440
|
+
try:
|
3441
|
+
processes.append(proc.info)
|
3442
|
+
except psutil.NoSuchProcess:
|
3443
|
+
pass
|
3444
|
+
# Handle NoneType values by treating them as 0
|
3445
|
+
processes.sort(key=lambda x: x["cpu_percent"] or 0, reverse=True)
|
3446
|
+
return processes[:limit]
|
3447
|
+
|
3448
|
+
def get_virtual_environment_info():
|
3449
|
+
"""Checks if the script is running in a virtual environment and returns details."""
|
3450
|
+
try:
|
3451
|
+
# Check if running in a virtual environment
|
3452
|
+
if hasattr(sys, "real_prefix") or (
|
3453
|
+
hasattr(sys, "base_prefix") and sys.base_prefix != sys.prefix
|
3454
|
+
):
|
3455
|
+
return {
|
3456
|
+
"Virtual Environment": sys.prefix,
|
3457
|
+
"Site-Packages Path": os.path.join(
|
3458
|
+
sys.prefix,
|
3459
|
+
"lib",
|
3460
|
+
"python{}/site-packages".format(sys.version_info.major),
|
3461
|
+
),
|
3462
|
+
}
|
3463
|
+
else:
|
3464
|
+
return {"Virtual Environment": "Not in a virtual environment"}
|
3465
|
+
except Exception as e:
|
3466
|
+
return {"Error": str(e)}
|
3467
|
+
|
3468
|
+
def get_temperatures():
|
3469
|
+
"""Returns temperature sensor readings."""
|
3470
|
+
try:
|
3471
|
+
return psutil.sensors_temperatures(fahrenheit=False)
|
3472
|
+
except AttributeError:
|
3473
|
+
return {"Error": "Temperature sensors not available"}
|
3474
|
+
|
3475
|
+
def get_battery_status():
|
3476
|
+
"""Returns battery status."""
|
3477
|
+
battery = psutil.sensors_battery()
|
3478
|
+
if battery:
|
3479
|
+
time_left = (
|
3480
|
+
str(timedelta(seconds=battery.secsleft))
|
3481
|
+
if battery.secsleft != psutil.POWER_TIME_UNLIMITED
|
3482
|
+
else "Charging/Unlimited"
|
3483
|
+
)
|
3484
|
+
return {
|
3485
|
+
"Percentage": battery.percent,
|
3486
|
+
"Plugged In": battery.power_plugged,
|
3487
|
+
"Time Left": time_left,
|
3488
|
+
}
|
3489
|
+
return {"Status": "No battery detected"}
|
3490
|
+
|
3491
|
+
def get_disk_io():
|
3492
|
+
"""Returns disk I/O statistics."""
|
3493
|
+
disk_io = psutil.disk_io_counters()
|
3494
|
+
return {
|
3495
|
+
"Read (GB)": disk_io.read_bytes / (1024**3),
|
3496
|
+
"Write (GB)": disk_io.write_bytes / (1024**3),
|
3497
|
+
"Read Count": disk_io.read_count,
|
3498
|
+
"Write Count": disk_io.write_count,
|
3499
|
+
}
|
3500
|
+
|
3501
|
+
def get_network_io():
|
3502
|
+
"""Returns network I/O statistics."""
|
3503
|
+
net_io = psutil.net_io_counters()
|
3504
|
+
return {
|
3505
|
+
"Bytes Sent (GB)": net_io.bytes_sent / (1024**3),
|
3506
|
+
"Bytes Received (GB)": net_io.bytes_recv / (1024**3),
|
3507
|
+
"Packets Sent": net_io.packets_sent,
|
3508
|
+
"Packets Received": net_io.packets_recv,
|
3509
|
+
}
|
3510
|
+
|
3511
|
+
def run_shell_command(command):
|
3512
|
+
"""Runs a shell command and returns its output."""
|
3513
|
+
try:
|
3514
|
+
result = subprocess.run(
|
3515
|
+
command,
|
3516
|
+
shell=True,
|
3517
|
+
stdout=subprocess.PIPE,
|
3518
|
+
stderr=subprocess.PIPE,
|
3519
|
+
text=True,
|
3520
|
+
)
|
3521
|
+
return (
|
3522
|
+
result.stdout.strip()
|
3523
|
+
if result.returncode == 0
|
3524
|
+
else result.stderr.strip()
|
3525
|
+
)
|
3526
|
+
except Exception as e:
|
3527
|
+
return f"Error running command: {e}"
|
3528
|
+
|
3529
|
+
system_info = {
|
3530
|
+
"timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
|
3531
|
+
"os": get_os_type(),
|
3532
|
+
"system": {
|
3533
|
+
"os": get_os_info(),
|
3534
|
+
"platform": f"{platform.system()} {platform.release()}",
|
3535
|
+
"version": platform.version(),
|
3536
|
+
"machine": platform.machine(),
|
3537
|
+
"processor": platform.processor(),
|
3538
|
+
"architecture": platform.architecture()[0],
|
3539
|
+
"hostname": socket.gethostname(),
|
3540
|
+
"ip address": socket.gethostbyname(socket.gethostname()),
|
3541
|
+
"mac address": ":".join(
|
3542
|
+
["{:02x}".format((uuid.getnode() >> i) & 0xFF) for i in range(0, 48, 8)]
|
3543
|
+
),
|
3544
|
+
"cpu brand": cpuinfo.get_cpu_info().get("brand_raw", "Unknown"),
|
3545
|
+
"python version": platform.python_version(),
|
3546
|
+
"uptime": get_system_uptime(),
|
3547
|
+
},
|
3548
|
+
"cpu": {
|
3549
|
+
"physical cores": psutil.cpu_count(logical=False),
|
3550
|
+
"logical cores": psutil.cpu_count(logical=True),
|
3551
|
+
"max frequency (MHz)": psutil.cpu_freq().max,
|
3552
|
+
"min frequency (MHz)": psutil.cpu_freq().min,
|
3553
|
+
"current frequency (MHz)": psutil.cpu_freq().current,
|
3554
|
+
"usage per core (%)": psutil.cpu_percent(percpu=True),
|
3555
|
+
"total cpu Usage (%)": psutil.cpu_percent(),
|
3556
|
+
"load average (1m, 5m, 15m)": (
|
3557
|
+
os.getloadavg() if hasattr(os, "getloadavg") else "N/A"
|
3558
|
+
),
|
3559
|
+
},
|
3560
|
+
"memory": {
|
3561
|
+
"total memory (GB)": psutil.virtual_memory().total / (1024**3),
|
3562
|
+
"available memory (GB)": psutil.virtual_memory().available / (1024**3),
|
3563
|
+
"used memory (GB)": psutil.virtual_memory().used / (1024**3),
|
3564
|
+
"memory usage (%)": psutil.virtual_memory().percent,
|
3565
|
+
"swap total (GB)": psutil.swap_memory().total / (1024**3),
|
3566
|
+
"swap free (GB)": psutil.swap_memory().free / (1024**3),
|
3567
|
+
"swap used (GB)": psutil.swap_memory().used / (1024**3),
|
3568
|
+
"swap usage (%)": psutil.swap_memory().percent,
|
3569
|
+
},
|
3570
|
+
"disk": {},
|
3571
|
+
"disk io": get_disk_io(),
|
3572
|
+
"network": {},
|
3573
|
+
"network io": get_network_io(),
|
3574
|
+
"gpu": [],
|
3575
|
+
"temperatures": get_temperatures(),
|
3576
|
+
"battery": get_battery_status(),
|
3577
|
+
"active processes": get_active_processes(),
|
3578
|
+
"environment": {
|
3579
|
+
"user": os.getenv("USER", "Unknown"),
|
3580
|
+
"environment variables": dict(os.environ),
|
3581
|
+
"virtual environment info": get_virtual_environment_info(), # Virtual env details
|
3582
|
+
"docker running": os.path.exists("/.dockerenv"), # Check for Docker
|
3583
|
+
"shell": os.environ.get("SHELL", "Unknown"),
|
3584
|
+
"default terminal": run_shell_command("echo $TERM"),
|
3585
|
+
"kernel version": platform.uname().release,
|
3586
|
+
"virtualization type": run_shell_command("systemd-detect-virt"),
|
3587
|
+
},
|
3588
|
+
"additional info": {
|
3589
|
+
"Shell": os.environ.get("SHELL", "Unknown"),
|
3590
|
+
"default terminal": run_shell_command("echo $TERM"),
|
3591
|
+
"kernel version": platform.uname().release,
|
3592
|
+
"virtualization type": run_shell_command("systemd-detect-virt"),
|
3593
|
+
"running in docker": os.path.exists("/.dockerenv"),
|
3594
|
+
},
|
3595
|
+
}
|
3596
|
+
|
3597
|
+
# Disk Information
|
3598
|
+
for partition in psutil.disk_partitions():
|
3599
|
+
try:
|
3600
|
+
usage = psutil.disk_usage(partition.mountpoint)
|
3601
|
+
system_info["disk"][partition.device] = {
|
3602
|
+
"mountpoint": partition.mountpoint,
|
3603
|
+
"file system type": partition.fstype,
|
3604
|
+
"total size (GB)": usage.total / (1024**3),
|
3605
|
+
"used (GB)": usage.used / (1024**3),
|
3606
|
+
"free (GB)": usage.free / (1024**3),
|
3607
|
+
"usage (%)": usage.percent,
|
3608
|
+
}
|
3609
|
+
except PermissionError:
|
3610
|
+
system_info["disk"][partition.device] = "Permission Denied"
|
3611
|
+
|
3612
|
+
# Network Information
|
3613
|
+
if_addrs = psutil.net_if_addrs()
|
3614
|
+
for interface_name, interface_addresses in if_addrs.items():
|
3615
|
+
system_info["network"][interface_name] = []
|
3616
|
+
for address in interface_addresses:
|
3617
|
+
if str(address.family) == "AddressFamily.AF_INET":
|
3618
|
+
system_info["network"][interface_name].append(
|
3619
|
+
{
|
3620
|
+
"ip address": address.address,
|
3621
|
+
"netmask": address.netmask,
|
3622
|
+
"broadcast ip": address.broadcast,
|
3623
|
+
}
|
3624
|
+
)
|
3625
|
+
elif str(address.family) == "AddressFamily.AF_PACKET":
|
3626
|
+
system_info["network"][interface_name].append(
|
3627
|
+
{
|
3628
|
+
"mac address": address.address,
|
3629
|
+
"netmask": address.netmask,
|
3630
|
+
"broadcast mac": address.broadcast,
|
3631
|
+
}
|
3632
|
+
)
|
3223
3633
|
|
3634
|
+
# GPU Information
|
3635
|
+
gpus = GPUtil.getGPUs()
|
3636
|
+
for gpu in gpus:
|
3637
|
+
gpu_info = {
|
3638
|
+
"name": gpu.name,
|
3639
|
+
"load (%)": gpu.load * 100,
|
3640
|
+
"free memory (MB)": gpu.memoryFree,
|
3641
|
+
"used memory (MB)": gpu.memoryUsed,
|
3642
|
+
"total memory (MB)": gpu.memoryTotal,
|
3643
|
+
"driver version": gpu.driver,
|
3644
|
+
"temperature (°C)": gpu.temperature,
|
3645
|
+
}
|
3646
|
+
if hasattr(gpu, "powerDraw"):
|
3647
|
+
gpu_info["Power Draw (W)"] = gpu.powerDraw
|
3648
|
+
if hasattr(gpu, "powerLimit"):
|
3649
|
+
gpu_info["Power Limit (W)"] = gpu.powerLimit
|
3650
|
+
system_info["gpu"].append(gpu_info)
|
3651
|
+
|
3652
|
+
res = system_info if full else get_os_type()
|
3653
|
+
if verbose:
|
3654
|
+
try:
|
3655
|
+
preview(res)
|
3656
|
+
except Exception as e:
|
3657
|
+
print(e)
|
3658
|
+
return res
|
3224
3659
|
|
3660
|
+
import re
|
3661
|
+
import stat
|
3662
|
+
import platform
|
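A hypothetical call of the new get_os() helper added above (it needs psutil, GPUtil and py-cpuinfo installed; the key names follow the dict built in this diff):

from py2ls import ips

info = ips.get_os(full=True, verbose=False)          # full report as a nested dict
print(info["system"]["os"], info["cpu"]["logical cores"])
print(ips.get_os())                                  # short form: just "macOS" / "Windows" / "Linux"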
3225
3663
|
def listdir(
|
3226
3664
|
rootdir,
|
3227
3665
|
kind=None,
|
3228
3666
|
sort_by="name",
|
3229
3667
|
ascending=True,
|
3230
|
-
contains=None
|
3668
|
+
contains=None,# filter filenames using re
|
3669
|
+
booster=False,# walk in subfolders
|
3670
|
+
hidden=False, # Include hidden files/folders
|
3231
3671
|
orient="list",
|
3232
|
-
output="df", # 'list','dict','records','index','series'
|
3672
|
+
output="df", # "df", 'list','dict','records','index','series'
|
3233
3673
|
verbose=True,
|
3234
|
-
):
|
3235
|
-
if kind is None:
|
3236
|
-
ls = os.listdir(rootdir)
|
3237
|
-
ls = [f for f in ls if not f.startswith(".") and not f.startswith("~")]
|
3238
|
-
if verbose:
|
3239
|
-
if len(ls)>20:
|
3240
|
-
print(ls[:20])
|
3241
|
-
else:
|
3242
|
-
print(ls)
|
3243
|
-
df_all = pd.DataFrame(
|
3244
|
-
{
|
3245
|
-
"fname": ls,
|
3246
|
-
"fpath": [os.path.join(rootdir, i) for i in ls],
|
3247
|
-
}
|
3248
|
-
)
|
3249
|
-
if verbose:
|
3250
|
-
display(df_all.head())
|
3251
|
-
return df_all
|
3674
|
+
):
|
3252
3675
|
if isinstance(kind, list):
|
3253
3676
|
f_ = []
|
3254
3677
|
for kind_ in kind:
|
@@ -3258,56 +3681,106 @@ def listdir(
|
|
3258
3681
|
sort_by=sort_by,
|
3259
3682
|
ascending=ascending,
|
3260
3683
|
contains=contains,
|
3684
|
+
booster=booster,# walk in subfolders
|
3685
|
+
hidden=hidden,
|
3261
3686
|
orient=orient,
|
3262
3687
|
output=output,
|
3688
|
+
verbose=verbose,
|
3263
3689
|
)
|
3264
3690
|
f_.append(f_tmp)
|
3265
3691
|
if f_:
|
3266
3692
|
return pd.concat(f_, ignore_index=True)
|
3267
|
-
|
3268
|
-
|
3269
|
-
|
3270
|
-
|
3271
|
-
|
3272
|
-
|
3273
|
-
|
3274
|
-
|
3275
|
-
|
3276
|
-
|
3277
|
-
|
3278
|
-
|
3279
|
-
|
3280
|
-
|
3281
|
-
|
3282
|
-
|
3283
|
-
|
3284
|
-
|
3285
|
-
|
3286
|
-
|
3287
|
-
|
3288
|
-
|
3289
|
-
|
3290
|
-
if
|
3693
|
+
if kind is not None:
|
3694
|
+
if not kind.startswith("."):
|
3695
|
+
kind = "." + kind
|
3696
|
+
fd = [".fd", ".fld", ".fol", ".fd", ".folder"]
|
3697
|
+
i = 0
|
3698
|
+
f = {
|
3699
|
+
"name": [],
|
3700
|
+
'kind':[],
|
3701
|
+
"length": [],
|
3702
|
+
"basename":[],
|
3703
|
+
"path": [],
|
3704
|
+
"created_time": [],
|
3705
|
+
"modified_time": [],
|
3706
|
+
"last_open_time": [],
|
3707
|
+
"size": [],
|
3708
|
+
"permission":[],
|
3709
|
+
"owner":[],
|
3710
|
+
"rootdir":[],
|
3711
|
+
"fname": [],
|
3712
|
+
"fpath": [],
|
3713
|
+
}
|
3714
|
+
for dirpath, dirnames, ls in os.walk(rootdir):
|
3715
|
+
if not hidden:
|
3716
|
+
dirnames[:] = [d for d in dirnames if not d.startswith(".")]
|
3717
|
+
ls = [i for i in ls if not i.startswith(".")]
|
3718
|
+
for dirname in dirnames:
|
3719
|
+
if contains and not re.search(contains, dirname):
|
3291
3720
|
continue
|
3292
|
-
|
3293
|
-
|
3294
|
-
|
3295
|
-
os.
|
3296
|
-
|
3297
|
-
|
3298
|
-
|
3299
|
-
|
3300
|
-
|
3301
|
-
return flist(fpath, contains=contains)
|
3302
|
-
else: # match the exact file extension
|
3303
|
-
if not is_folder and not is_file:
|
3304
|
-
continue
|
3721
|
+
dirname_path = os.path.join(dirpath, dirname)
|
3722
|
+
fpath = os.path.join(os.path.dirname(dirname_path), dirname)
|
3723
|
+
try:
|
3724
|
+
stats_file = os.stat(fpath)
|
3725
|
+
except Exception as e:
|
3726
|
+
print(e)
|
3727
|
+
continue
|
3728
|
+
filename, file_extension = os.path.splitext(dirname)
|
3729
|
+
file_extension = file_extension if file_extension!='' else None
|
3305
3730
|
f["name"].append(filename)
|
3731
|
+
f['kind'].append(file_extension)
|
3306
3732
|
f["length"].append(len(filename))
|
3307
|
-
f["
|
3733
|
+
f["size"].append(round(os.path.getsize(fpath) / 1024 / 1024, 3))
|
3734
|
+
f['basename'].append(os.path.basename(dirname_path))
|
3735
|
+
f["path"].append(os.path.join(os.path.dirname(dirname_path), dirname))
|
3736
|
+
f["created_time"].append(
|
3737
|
+
pd.to_datetime(os.path.getctime(dirname_path), unit="s")
|
3738
|
+
)
|
3739
|
+
f["modified_time"].append(
|
3740
|
+
pd.to_datetime(os.path.getmtime(dirname_path), unit="s")
|
3741
|
+
)
|
3742
|
+
f["last_open_time"].append(
|
3743
|
+
pd.to_datetime(os.path.getatime(dirname_path), unit="s")
|
3744
|
+
)
|
3745
|
+
f["permission"].append(stat.filemode(stats_file.st_mode)),
|
3746
|
+
f["owner"].append(os.getlogin() if platform.system() != "Windows" else "N/A"),
|
3747
|
+
f["rootdir"].append(dirpath)
|
3748
|
+
f["fname"].append(filename) # will be removed
|
3749
|
+
f["fpath"].append(fpath) # will be removed
|
3750
|
+
i += 1
|
3751
|
+
for item in ls:
|
3752
|
+
if contains and not re.search(contains, item):
|
3753
|
+
continue
|
3754
|
+
item_path = os.path.join(dirpath, item)
|
3308
3755
|
fpath = os.path.join(os.path.dirname(item_path), item)
|
3309
|
-
|
3756
|
+
try:
|
3757
|
+
stats_file = os.stat(fpath)
|
3758
|
+
except Exception as e:
|
3759
|
+
print(e)
|
3760
|
+
continue
|
3761
|
+
filename, file_extension = os.path.splitext(item)
|
3762
|
+
if kind is not None:
|
3763
|
+
if not kind.startswith("."):
|
3764
|
+
kind = "." + kind
|
3765
|
+
is_folder = kind.lower() in fd and os.path.isdir(item_path)
|
3766
|
+
is_file = kind.lower() in file_extension.lower() and (
|
3767
|
+
os.path.isfile(item_path)
|
3768
|
+
)
|
3769
|
+
if kind in [".doc", ".img", ".zip"]: # 选择大的类别
|
3770
|
+
if kind != ".folder" and not isa(item_path, kind):
|
3771
|
+
continue
|
3772
|
+
elif kind in [".all"]:
|
3773
|
+
return flist(fpath, contains=contains)
|
3774
|
+
else: # 精确到文件的后缀
|
3775
|
+
if not is_folder and not is_file:
|
3776
|
+
continue
|
3777
|
+
file_extension = file_extension if file_extension!='' else None
|
3778
|
+
f["name"].append(filename)
|
3779
|
+
f['kind'].append(file_extension)
|
3780
|
+
f["length"].append(len(filename))
|
3310
3781
|
f["size"].append(round(os.path.getsize(fpath) / 1024 / 1024, 3))
|
3782
|
+
f['basename'].append(os.path.basename(item_path))
|
3783
|
+
f["path"].append(os.path.join(os.path.dirname(item_path), item))
|
3311
3784
|
f["created_time"].append(
|
3312
3785
|
pd.to_datetime(os.path.getctime(item_path), unit="s")
|
3313
3786
|
)
|
@@ -3317,26 +3790,22 @@ def listdir(
             f["last_open_time"].append(
                 pd.to_datetime(os.path.getatime(item_path), unit="s")
             )
+            f["permission"].append(stat.filemode(stats_file.st_mode)),
+            f["owner"].append(os.getlogin() if platform.system() != "Windows" else "N/A"),
             f["fname"].append(filename) # will be removed
             f["fpath"].append(fpath) # will be removed
-            f[
+            f["rootdir"].append(dirpath)
             i += 1
 
         f["num"] = i
-    f["rootdir"] = rootdir
         f["os"] = get_os() # os.uname().machine
-
-
-
-        rootdir
-    )
-    )
-
+        if not booster: # go deeper subfolders
+            break
+    #* convert to pd.DataFrame
     f = pd.DataFrame(f)
-
-
-
-
+    f=f[["basename","name","kind","length","size","num","path","created_time",
+        "modified_time","last_open_time","rootdir",
+        "fname","fpath","permission","owner","os",]]
     if "nam" in sort_by.lower():
         f = sort_kind(f, by="name", ascending=ascending)
     elif "crea" in sort_by.lower():
@@ -3349,10 +3818,10 @@ def listdir(
     if "df" in output:
         if verbose:
             display(f.head())
+            print(f"shape: {f.shape}")
         return f
     else:
         from box import Box
-
         if "l" in orient.lower(): # list # default
             res_output = Box(f.to_dict(orient="list"))
             return res_output
@@ -3365,12 +3834,6 @@ def listdir(
         if "se" in orient.lower(): # records
             return Box(f.to_dict(orient="series"))
 
-
-# Example usage:
-# result = listdir('your_root_directory')
-# print(result)
-# df=listdir("/", contains='sss',sort_by='name',ascending=False)
-# print(df.fname.to_list(),"\n",df.fpath.to_list())
 def listfunc(lib_name, opt="call"):
     if opt == "call":
         funcs = [func for func in dir(lib_name) if callable(getattr(lib_name, func))]
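Since the old inline usage comments were removed, a hypothetical call of the reworked listdir (path and regex are placeholders; assumes the helper is imported from py2ls.ips):

from py2ls.ips import listdir

df = listdir(
    "/path/to/data",        # placeholder rootdir
    kind=".csv",            # extension filter
    contains=r"2024",       # regex filter on names
    booster=True,           # new: walk into subfolders
    hidden=False,           # new: skip dot-files and dot-folders
    sort_by="created_time",
    output="df",
)
print(df[["name", "kind", "size", "path"]].head())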
@@ -3382,7 +3845,101 @@ def listfunc(lib_name, opt="call"):
|
|
3382
3845
|
def func_list(lib_name, opt="call"):
|
3383
3846
|
return list_func(lib_name, opt=opt)
|
3384
3847
|
|
3848
|
+
def copy(src, dst, overwrite=False):
|
3849
|
+
"""Copy a file from src to dst."""
|
3850
|
+
try:
|
3851
|
+
dir_par_dst = os.path.dirname(dst)
|
3852
|
+
if not os.path.isdir(dir_par_dst):
|
3853
|
+
mkdir(dir_par_dst)
|
3854
|
+
print(dir_par_dst)
|
3855
|
+
src = Path(src)
|
3856
|
+
dst = Path(dst)
|
3857
|
+
if not src.is_dir():
|
3858
|
+
if dst.is_dir():
|
3859
|
+
dst = dst / src.name
|
3860
|
+
|
3861
|
+
if dst.exists():
|
3862
|
+
if overwrite:
|
3863
|
+
dst.unlink()
|
3864
|
+
else:
|
3865
|
+
dst = dst.with_name(f"{dst.stem}_{datetime.now().strftime('_%H%M%S')}{dst.suffix}")
|
3866
|
+
shutil.copy(src, dst)
|
3867
|
+
print(f"\n Done! copy to {dst}\n")
|
3868
|
+
else:
|
3869
|
+
dst = dst/src.name
|
3870
|
+
if dst.exists():
|
3871
|
+
if overwrite:
|
3872
|
+
shutil.rmtree(dst) # Remove existing directory
|
3873
|
+
else:
|
3874
|
+
dst = dst.with_name(f"{dst.stem}_{datetime.now().strftime('%H%M%S')}")
|
3875
|
+
shutil.copytree(src, dst)
|
3876
|
+
print(f"\n Done! copy to {dst}\n")
|
3877
|
+
|
3878
|
+
except Exception as e:
|
3879
|
+
logging.error(f"Failed {e}")
|
3880
|
+
|
3881
|
+
def cut(src, dst, overwrite=False):
|
3882
|
+
return move(src=src, dst=dst, overwrite=overwrite)
|
3385
3883
|
|
3884
|
+
def move(src, dst, overwrite=False):
|
3885
|
+
try:
|
3886
|
+
dir_par_dst = os.path.dirname(dst)
|
3887
|
+
if not os.path.isdir(dir_par_dst):
|
3888
|
+
mkdir(dir_par_dst)
|
3889
|
+
src = Path(src)
|
3890
|
+
dst = Path(dst)
|
3891
|
+
if dst.is_dir():
|
3892
|
+
dst = dst / src.name
|
3893
|
+
if dst.exists():
|
3894
|
+
if overwrite:
|
3895
|
+
# dst.unlink() # Delete the existing file
|
3896
|
+
pass
|
3897
|
+
else:
|
3898
|
+
dst = dst.with_name(f"{dst.stem}_{datetime.now().strftime('_%H%M%S')}{dst.suffix}")
|
3899
|
+
shutil.move(src, dst)
|
3900
|
+
print(f"\n Done! moved to {dst}\n")
|
3901
|
+
except Exception as e:
|
3902
|
+
logging.error(f"Failed to move file from {src} to {dst}: {e}")
|
3903
|
+
|
3904
|
+
def delete(fpath):
|
3905
|
+
"""Delete a file/folder."""
|
3906
|
+
try:
|
3907
|
+
fpath = Path(fpath)
|
3908
|
+
if not fpath.is_dir(): # file
|
3909
|
+
if fpath.exists():
|
3910
|
+
fpath.unlink()
|
3911
|
+
print(f"\n Done! delete {fpath}\n")
|
3912
|
+
else:
|
3913
|
+
print(f"File '{fpath}' does not exist.")
|
3914
|
+
else:#folder
|
3915
|
+
if fpath.exists():
|
3916
|
+
shutil.rmtree(fpath) # Remove existing directory
|
3917
|
+
print(f"\n Done! delete {fpath}\n")
|
3918
|
+
else:
|
3919
|
+
print(f"Folder '{fpath}' does not exist.")
|
3920
|
+
except Exception as e:
|
3921
|
+
logging.error(f"Failed to delete {fpath}: {e}")
|
3922
|
+
def rename(fpath, dst, smart=True):
|
3923
|
+
"""Rename a file or folder."""
|
3924
|
+
try:
|
3925
|
+
src_kind,dst_kind = None,None
|
3926
|
+
if smart:
|
3927
|
+
dir_name_src=os.path.dirname(fpath)
|
3928
|
+
dir_name_dst=os.path.dirname(dst)
|
3929
|
+
src_kind=os.path.splitext(fpath)[1]
|
3930
|
+
dst_kind=os.path.splitext(dst)[1]
|
3931
|
+
if dir_name_dst!=dir_name_src:
|
3932
|
+
dst=os.path.join(dir_name_src,dst)
|
3933
|
+
if dst_kind is not None and src_kind is not None:
|
3934
|
+
if dst_kind!=src_kind:
|
3935
|
+
dst=dst + src_kind
|
3936
|
+
if os.path.exists(fpath):
|
3937
|
+
os.rename(fpath,dst)
|
3938
|
+
print(f"Done! rename to {dst}")
|
3939
|
+
else:
|
3940
|
+
print(f"Failed: {fpath} does not exist.")
|
3941
|
+
except Exception as e:
|
3942
|
+
logging.error(f"Failed to rename {fpath} to {dst}: {e}")
|
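Hypothetical calls for the new file-management helpers added above (all paths are placeholders):

from py2ls import ips

ips.copy("/tmp/report.pdf", "/tmp/backup/")              # name clash -> timestamped copy instead of overwrite
ips.move("/tmp/report.pdf", "/tmp/archive/", overwrite=True)
ips.rename("/tmp/archive/report.pdf", "report_final")    # smart=True keeps the folder and the ".pdf" suffix
ips.delete("/tmp/backup")                                # removes a file or a whole folder tree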
3386
3943
|
def mkdir_nest(fpath: str) -> str:
|
3387
3944
|
"""
|
3388
3945
|
Create nested directories based on the provided file path.
|
@@ -3401,7 +3958,9 @@ def mkdir_nest(fpath: str) -> str:
     dir_parts = fpath.split(f_slash)  # Split the path by the OS-specific separator
 
     # Start creating directories from the root to the desired path
-
+    root_dir = os.path.splitdrive(fpath)[0]  # Get the root drive on Windows (e.g., 'C:')
+    current_path = root_dir if root_dir else f_slash  # Start from the root directory or POSIX '/'
+
     for part in dir_parts:
         if part:
             current_path = os.path.join(current_path, part)
@@ -3425,10 +3984,13 @@ def mkdir(pardir: str = None, chdir: str | list = None, overwrite=False):
     Returns:
     - str: The path of the created directory or an error message.
     """
-
     rootdir = []
+    pardir= mkdir_nest(pardir)
     if chdir is None:
-        return
+        return pardir
+    else:
+        pass
+    print(pardir)
     if isinstance(chdir, str):
         chdir = [chdir]
     chdir = list(set(chdir))
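Hypothetical calls showing the changed mkdir return values (paths are placeholders):

from py2ls import ips

root = ips.mkdir("/tmp/project")                          # now returns the created parent directory
subs = ips.mkdir("/tmp/project", chdir=["data", "figs"])  # creates and returns the subfolder path(s)
print(root, subs)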
@@ -3466,7 +4028,7 @@ def mkdir(pardir: str = None, chdir: str | list = None, overwrite=False):
     # Dir is the main output, if only one dir, then str type is inconvenient
     if len(rootdir) == 1:
         rootdir = rootdir[0]
-
+        rootdir = rootdir + stype if not rootdir.endswith(stype) else rootdir
 
     return rootdir
 
@@ -3865,6 +4427,114 @@ def apply_filter(img, *args):
|
|
3865
4427
|
)
|
3866
4428
|
return img.filter(supported_filters[filter_name])
|
3867
4429
|
|
4430
|
+
def detect_angle(image, by="median", template=None):
|
4431
|
+
"""Detect the angle of rotation using various methods."""
|
4432
|
+
from sklearn.decomposition import PCA
|
4433
|
+
from skimage import transform, feature, filters, measure
|
4434
|
+
from skimage.color import rgb2gray
|
4435
|
+
from scipy.fftpack import fftshift, fft2
|
4436
|
+
import numpy as np
|
4437
|
+
import cv2
|
4438
|
+
# Convert to grayscale
|
4439
|
+
gray_image = rgb2gray(image)
|
4440
|
+
|
4441
|
+
# Detect edges using Canny edge detector
|
4442
|
+
edges = feature.canny(gray_image, sigma=2)
|
4443
|
+
|
4444
|
+
# Use Hough transform to detect lines
|
4445
|
+
lines = transform.probabilistic_hough_line(edges)
|
4446
|
+
|
4447
|
+
if not lines and any(["me" in by, "pca" in by]):
|
4448
|
+
print("No lines detected. Adjust the edge detection parameters.")
|
4449
|
+
return 0
|
4450
|
+
|
4451
|
+
# Hough Transform-based angle detection (Median/Mean)
|
4452
|
+
if "me" in by:
|
4453
|
+
angles = []
|
4454
|
+
for line in lines:
|
4455
|
+
(x0, y0), (x1, y1) = line
|
4456
|
+
angle = np.arctan2(y1 - y0, x1 - x0) * 180 / np.pi
|
4457
|
+
if 80 < abs(angle) < 100:
|
4458
|
+
angles.append(angle)
|
4459
|
+
if not angles:
|
4460
|
+
return 0
|
4461
|
+
if "di" in by:
|
4462
|
+
median_angle = np.median(angles)
|
4463
|
+
rotation_angle = (
|
4464
|
+
90 - median_angle if median_angle > 0 else -90 - median_angle
|
4465
|
+
)
|
4466
|
+
|
4467
|
+
return rotation_angle
|
4468
|
+
else:
|
4469
|
+
mean_angle = np.mean(angles)
|
4470
|
+
rotation_angle = 90 - mean_angle if mean_angle > 0 else -90 - mean_angle
|
4471
|
+
|
4472
|
+
return rotation_angle
|
4473
|
+
|
4474
|
+
# PCA-based angle detection
|
4475
|
+
elif "pca" in by:
|
4476
|
+
y, x = np.nonzero(edges)
|
4477
|
+
if len(x) == 0:
|
4478
|
+
return 0
|
4479
|
+
pca = PCA(n_components=2)
|
4480
|
+
pca.fit(np.vstack((x, y)).T)
|
4481
|
+
angle = np.arctan2(pca.components_[0, 1], pca.components_[0, 0]) * 180 / np.pi
|
4482
|
+
return angle
|
4483
|
+
|
4484
|
+
# Gradient Orientation-based angle detection
|
4485
|
+
elif "gra" in by:
|
4486
|
+
gx, gy = np.gradient(gray_image)
|
4487
|
+
angles = np.arctan2(gy, gx) * 180 / np.pi
|
4488
|
+
hist, bin_edges = np.histogram(angles, bins=360, range=(-180, 180))
|
4489
|
+
return bin_edges[np.argmax(hist)]
|
4490
|
+
|
4491
|
+
# Template Matching-based angle detection
|
4492
|
+
elif "temp" in by:
|
4493
|
+
if template is None:
|
4494
|
+
# Automatically extract a template from the center of the image
|
4495
|
+
height, width = gray_image.shape
|
4496
|
+
center_x, center_y = width // 2, height // 2
|
4497
|
+
size = (
|
4498
|
+
min(height, width) // 4
|
4499
|
+
) # Size of the template as a fraction of image size
|
4500
|
+
template = gray_image[
|
4501
|
+
center_y - size : center_y + size, center_x - size : center_x + size
|
4502
|
+
]
|
4503
|
+
best_angle = None
|
4504
|
+
best_corr = -1
|
4505
|
+
for angle in range(0, 180, 1): # Checking every degree
|
4506
|
+
rotated_template = transform.rotate(template, angle)
|
4507
|
+
res = cv2.matchTemplate(gray_image, rotated_template, cv2.TM_CCOEFF)
|
4508
|
+
_, max_val, _, _ = cv2.minMaxLoc(res)
|
4509
|
+
if max_val > best_corr:
|
4510
|
+
best_corr = max_val
|
4511
|
+
best_angle = angle
|
4512
|
+
return best_angle
|
4513
|
+
|
4514
|
+
# Image Moments-based angle detection
|
4515
|
+
elif "mo" in by:
|
4516
|
+
moments = measure.moments_central(gray_image)
|
4517
|
+
angle = (
|
4518
|
+
0.5
|
4519
|
+
* np.arctan2(2 * moments[1, 1], moments[0, 2] - moments[2, 0])
|
4520
|
+
* 180
|
4521
|
+
/ np.pi
|
4522
|
+
)
|
4523
|
+
return angle
|
4524
|
+
|
4525
|
+
# Fourier Transform-based angle detection
|
4526
|
+
elif "fft" in by:
|
4527
|
+
f = fft2(gray_image)
|
4528
|
+
fshift = fftshift(f)
|
4529
|
+
magnitude_spectrum = np.log(np.abs(fshift) + 1)
|
4530
|
+
rows, cols = magnitude_spectrum.shape
|
4531
|
+
r, c = np.unravel_index(np.argmax(magnitude_spectrum), (rows, cols))
|
4532
|
+
angle = np.arctan2(r - rows // 2, c - cols // 2) * 180 / np.pi
|
4533
|
+
return angle
|
4534
|
+
|
4535
|
+
else:
|
4536
|
+
print(f"Unknown method {by}")
|
4537
|
+
return 0
|
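A hypothetical deskew sketch built on the new detect_angle helper (placeholder file name; assumes scikit-image is installed and that the function is imported from py2ls.ips):

from skimage import io, transform
from py2ls.ips import detect_angle   # assumption: imported directly from ips

img = io.imread("scanned_page.png")               # placeholder input image (RGB scan)
angle = detect_angle(img, by="median")            # default Hough-line / median strategy
print(f"estimated rotation: {angle:.1f} deg")
deskewed = transform.rotate(img, angle, resize=True)   # apply the correction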
3868
4538
|
|
3869
4539
|
def imgsets(img, **kwargs):
|
3870
4540
|
"""
|
py2ls/netfinder.py
CHANGED
@@ -1608,3 +1608,191 @@ def ai(*args, **kwargs):
|
|
1608
1608
|
if len(args) == 1 and isinstance(args[0], str):
|
1609
1609
|
kwargs["query"] = args[0]
|
1610
1610
|
return echo(**kwargs)
|
1611
|
+
|
1612
|
+
|
1613
|
+
#! get_ip()
|
1614
|
+
def get_ip(ip=None):
|
1615
|
+
"""
|
1616
|
+
Usage:
|
1617
|
+
from py2ls import netfinder as nt
|
1618
|
+
ip = nt.get_ip()
|
1619
|
+
"""
|
1620
|
+
|
1621
|
+
import requests
|
1622
|
+
import time
|
1623
|
+
import logging
|
1624
|
+
from datetime import datetime, timedelta
|
1625
|
+
|
1626
|
+
# Set up logging configuration
|
1627
|
+
logging.basicConfig(
|
1628
|
+
level=logging.INFO,
|
1629
|
+
format="%(asctime)s - %(levelname)s - %(message)s",
|
1630
|
+
handlers=[
|
1631
|
+
logging.StreamHandler(),
|
1632
|
+
logging.FileHandler("public_ip_log.log"), # Log to a file
|
1633
|
+
],
|
1634
|
+
)
|
1635
|
+
|
1636
|
+
cache = {}
|
1637
|
+
|
1638
|
+
# Function to fetch IP addresses synchronously
|
1639
|
+
def fetch_ip(url, retries, timeout, headers):
|
1640
|
+
"""
|
1641
|
+
Synchronous function to fetch the IP address with retries.
|
1642
|
+
"""
|
1643
|
+
for attempt in range(retries):
|
1644
|
+
try:
|
1645
|
+
response = requests.get(url, timeout=timeout, headers=headers)
|
1646
|
+
response.raise_for_status()
|
1647
|
+
return response.json()
|
1648
|
+
except requests.RequestException as e:
|
1649
|
+
logging.error(f"Attempt {attempt + 1} failed: {e}")
|
1650
|
+
if attempt < retries - 1:
|
1651
|
+
time.sleep(2**attempt) # Exponential backoff
|
1652
|
+
else:
|
1653
|
+
logging.error("Max retries reached.")
|
1654
|
+
return {"error": f"Error fetching IP: {e}"}
|
1655
|
+
except requests.Timeout:
|
1656
|
+
logging.error("Request timed out")
|
1657
|
+
time.sleep(2**attempt)
|
1658
|
+
return {"error": "Failed to fetch IP after retries"}
|
1659
|
+
|
1660
|
+
# Function to fetch geolocation synchronously
|
1661
|
+
def fetch_geolocation(url, retries, timeout, headers):
|
1662
|
+
"""
|
1663
|
+
Synchronous function to fetch geolocation data by IP address.
|
1664
|
+
"""
|
1665
|
+
for attempt in range(retries):
|
1666
|
+
try:
|
1667
|
+
response = requests.get(url, timeout=timeout, headers=headers)
|
1668
|
+
response.raise_for_status()
|
1669
|
+
return response.json()
|
1670
|
+
except requests.RequestException as e:
|
1671
|
+
logging.error(f"Geolocation request attempt {attempt + 1} failed: {e}")
|
1672
|
+
if attempt < retries - 1:
|
1673
|
+
time.sleep(2**attempt) # Exponential backoff
|
1674
|
+
else:
|
1675
|
+
logging.error("Max retries reached.")
|
1676
|
+
return {"error": f"Error fetching geolocation: {e}"}
|
1677
|
+
except requests.Timeout:
|
1678
|
+
logging.error("Geolocation request timed out")
|
1679
|
+
time.sleep(2**attempt)
|
1680
|
+
return {"error": "Failed to fetch geolocation after retries"}
|
1681
|
+
|
1682
|
+
# Main function to get public IP and geolocation
|
1683
|
+
def get_public_ip(
|
1684
|
+
ip4=True,
|
1685
|
+
ip6=True,
|
1686
|
+
verbose=True,
|
1687
|
+
retries=3,
|
1688
|
+
timeout=5,
|
1689
|
+
geolocation=True,
|
1690
|
+
headers=None,
|
1691
|
+
cache_duration=5,
|
1692
|
+
):
|
1693
|
+
"""
|
1694
|
+
Synchronously fetches public IPv4 and IPv6 addresses, along with optional geolocation info.
|
1695
|
+
"""
|
1696
|
+
# Use the cache if it's still valid
|
1697
|
+
cache_key_ip4 = "public_ip4"
|
1698
|
+
cache_key_ip6 = "public_ip6"
|
1699
|
+
cache_key_geolocation = "geolocation"
|
1700
|
+
|
1701
|
+
if (
|
1702
|
+
cache
|
1703
|
+
and cache_key_ip4 in cache
|
1704
|
+
and datetime.now() < cache[cache_key_ip4]["expires"]
|
1705
|
+
):
|
1706
|
+
logging.info("Cache hit for IPv4, using cached data.")
|
1707
|
+
ip4_data = cache[cache_key_ip4]["data"]
|
1708
|
+
else:
|
1709
|
+
ip4_data = None
|
1710
|
+
|
1711
|
+
if (
|
1712
|
+
cache
|
1713
|
+
and cache_key_ip6 in cache
|
1714
|
+
and datetime.now() < cache[cache_key_ip6]["expires"]
|
1715
|
+
):
|
1716
|
+
logging.info("Cache hit for IPv6, using cached data.")
|
1717
|
+
ip6_data = cache[cache_key_ip6]["data"]
|
1718
|
+
else:
|
1719
|
+
ip6_data = None
|
1720
|
+
|
1721
|
+
if (
|
1722
|
+
cache
|
1723
|
+
and cache_key_geolocation in cache
|
1724
|
+
and datetime.now() < cache[cache_key_geolocation]["expires"]
|
1725
|
+
):
|
1726
|
+
logging.info("Cache hit for Geolocation, using cached data.")
|
1727
|
+
geolocation_data = cache[cache_key_geolocation]["data"]
|
1728
|
+
else:
|
1729
|
+
geolocation_data = None
|
1730
|
+
|
1731
|
+
# Fetch IPv4 if requested
|
1732
|
+
if ip4 and not ip4_data:
|
1733
|
+
logging.info("Fetching IPv4...")
|
1734
|
+
ip4_data = fetch_ip(
|
1735
|
+
"https://api.ipify.org?format=json", retries, timeout, headers
|
1736
|
+
)
|
1737
|
+
cache[cache_key_ip4] = {
|
1738
|
+
"data": ip4_data,
|
1739
|
+
"expires": datetime.now() + timedelta(minutes=cache_duration),
|
1740
|
+
}
|
1741
|
+
|
1742
|
+
# Fetch IPv6 if requested
|
1743
|
+
if ip6 and not ip6_data:
|
1744
|
+
logging.info("Fetching IPv6...")
|
1745
|
+
ip6_data = fetch_ip(
|
1746
|
+
"https://api6.ipify.org?format=json", retries, timeout, headers
|
1747
|
+
)
|
1748
|
+
cache[cache_key_ip6] = {
|
1749
|
+
"data": ip6_data,
|
1750
|
+
"expires": datetime.now() + timedelta(minutes=cache_duration),
|
1751
|
+
}
|
1752
|
+
|
1753
|
+
# Fetch geolocation if requested
|
1754
|
+
if geolocation and not geolocation_data:
|
1755
|
+
logging.info("Fetching Geolocation...")
|
1756
|
+
geolocation_data = fetch_geolocation(
|
1757
|
+
"https://ipinfo.io/json", retries, timeout, headers
|
1758
|
+
)
|
1759
|
+
cache[cache_key_geolocation] = {
|
1760
|
+
"data": geolocation_data,
|
1761
|
+
"expires": datetime.now() + timedelta(minutes=cache_duration),
|
1762
|
+
}
|
1763
|
+
|
1764
|
+
# Prepare the results
|
1765
|
+
ip_info = {
|
1766
|
+
"ip4": ip4_data.get("ip") if ip4_data else "N/A",
|
1767
|
+
"ip6": ip6_data.get("ip") if ip6_data else "N/A",
|
1768
|
+
"geolocation": geolocation_data if geolocation_data else "N/A",
|
1769
|
+
}
|
1770
|
+
|
1771
|
+
# Verbose output if requested
|
1772
|
+
if verbose:
|
1773
|
+
print(f"Public IPv4: {ip_info['ip4']}")
|
1774
|
+
print(f"Public IPv6: {ip_info['ip6']}")
|
1775
|
+
print(f"Geolocation: {ip_info['geolocation']}")
|
1776
|
+
|
1777
|
+
return ip_info
|
1778
|
+
|
1779
|
+
# Function to get geolocation data by IP
|
1780
|
+
def get_geolocation_by_ip(ip, retries=3, timeout=5, headers=None):
|
1781
|
+
"""
|
1782
|
+
Fetches geolocation data for a given IP address.
|
1783
|
+
"""
|
1784
|
+
url = f"https://ipinfo.io/{ip}/json"
|
1785
|
+
geolocation_data = fetch_geolocation(url, retries, timeout, headers)
|
1786
|
+
return geolocation_data
|
1787
|
+
#! here starting get_ip()
|
1788
|
+
headers = {"User-Agent": user_agent()}
|
1789
|
+
if ip is None:
|
1790
|
+
try:
|
1791
|
+
ip_data = get_public_ip(headers=headers, verbose=True)
|
1792
|
+
except Exception as e:
|
1793
|
+
print(e)
|
1794
|
+
ip_data = None
|
1795
|
+
return ip_data
|
1796
|
+
else:
|
1797
|
+
geolocation_data = get_geolocation_by_ip(ip, headers=headers)
|
1798
|
+
return geolocation_data
|
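Hypothetical usage of the new get_ip() helper (the example address is arbitrary):

from py2ls import netfinder as nt

info = nt.get_ip()               # no argument: public IPv4/IPv6 plus geolocation
print(info["ip4"], info["ip6"])
geo = nt.get_ip("8.8.8.8")       # with an address: geolocation lookup for that IP
print(geo)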
py2ls/ocr.py
CHANGED
@@ -486,6 +486,18 @@ def preprocess_img(
 
     return img_preprocessed
 
+def convert_image_to_bytes(image):
+    """
+    Convert a CV2 or numpy image to bytes for ddddocr.
+    """
+    import io
+    # Convert OpenCV image (numpy array) to PIL image
+    if isinstance(image, np.ndarray):
+        image = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
+    # Save PIL image to a byte stream
+    img_byte_arr = io.BytesIO()
+    image.save(img_byte_arr, format='PNG')
+    return img_byte_arr.getvalue()
 
 def text_postprocess(
     text,
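A minimal sketch of the conversion used for ddddocr input (placeholder image path; assumes the helper is imported from py2ls.ocr):

import cv2
from py2ls.ocr import convert_image_to_bytes   # assumption: imported directly

img = cv2.imread("captcha.png")                # placeholder path; BGR numpy array
img_bytes = convert_image_to_bytes(img)        # PNG-encoded bytes, the format ddddocr expects
print(type(img_bytes), len(img_bytes))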
@@ -604,10 +616,11 @@ def get_text(
     """
     )
 
-    models = ["easyocr", "paddleocr", "pytesseract"]
+    models = ["easyocr", "paddleocr", "pytesseract","ddddocr"]
     model = strcmp(model, models)[0]
     lang = lang_auto_detect(lang, model)
     if isinstance(image, str):
+        dir_img=image
         image = cv2.imread(image)
 
     # Ensure lang is always a list
@@ -705,9 +718,10 @@ def get_text(
         ) # PaddleOCR supports only one language at a time
         result = ocr.ocr(image_process, **kwargs)
         detections = []
-
-
-
+        if result[0] is not None:
+            for line in result[0]:
+                bbox, (text, score) = line
+                detections.append((bbox, text, score))
         if postprocess is None:
             postprocess = dict(
                 spell_check=True,
@@ -787,7 +801,49 @@ def get_text(
         else:
             # by default, return all detection info
             return detections
+    elif "ddddocr" in model.lower():
+        import ddddocr
+
+        ocr = ddddocr.DdddOcr(det=False, ocr=True)
+        image_bytes = convert_image_to_bytes(image_process)
+
+        results = ocr.classification(image_bytes) # Text extraction
+
+        # Optional: Perform detection for bounding boxes
+        detections = []
+        if kwargs.get("det", False):
+            det_ocr = ddddocr.DdddOcr(det=True)
+            det_results = det_ocr.detect(image_bytes)
+            for box in det_results:
+                top_left = (box[0], box[1])
+                bottom_right = (box[2], box[3])
+                detections.append((top_left, bottom_right))
 
+        if postprocess is None:
+            postprocess = dict(
+                spell_check=True,
+                clean=True,
+                filter=dict(min_length=2),
+                pattern=None,
+                merge=True,
+            )
+        text_corr = []
+        [
+            text_corr.extend(text_postprocess(text, **postprocess))
+            for _, text, _ in detections
+        ]
+        # Visualization
+        if show:
+            if ax is None:
+                ax = plt.gca()
+            image_vis = image.copy()
+            if detections:
+                for top_left, bottom_right in detections:
+                    cv2.rectangle(image_vis, top_left, bottom_right, box_color, 2)
+            image_vis = cv2.cvtColor(image_vis, cmap)
+            ax.imshow(image_vis)
+            ax.axis("off")
+        return detections
     else: # "pytesseract"
         if ax is None:
             ax = plt.gca()
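A hypothetical call exercising the newly supported ddddocr backend (requires the ddddocr package; the file name is a placeholder):

from py2ls import ocr

res = ocr.get_text("captcha.png", model="ddddocr", show=False)
print(res)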
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: py2ls
-Version: 0.2.4.26
+Version: 0.2.4.28
 Summary: py(thon)2(too)ls
 Author: Jianfeng
 Author-email: Jianfeng.Liu0413@gmail.com
@@ -18,6 +18,7 @@ Provides-Extra: extr
 Requires-Dist: CacheControl (>=0.13.1)
 Requires-Dist: Cython (>=3.0.10)
 Requires-Dist: Deprecated (>=1.2.14)
+Requires-Dist: GPUtil (>=1.4.0)
 Requires-Dist: Jinja2 (>=3.1.4)
 Requires-Dist: Markdown (>=3.6)
 Requires-Dist: MarkupSafe (>=2.1.5)
@@ -25,7 +26,7 @@ Requires-Dist: PyMatting (>=1.1.12)
 Requires-Dist: PyOpenGL (>=3.1.6)
 Requires-Dist: PyPDF2 (>=3.0.1)
 Requires-Dist: PyQt5 (>=5.15.11)
-Requires-Dist: PyQt5-Qt5 (>=5.15.
+Requires-Dist: PyQt5-Qt5 (>=5.15.0)
 Requires-Dist: PyQt5_sip (>=12.15.0)
 Requires-Dist: PyQtWebEngine (>=5.15.7)
 Requires-Dist: PyQtWebEngine-Qt5 (>=5.15.14)
@@ -37,7 +38,6 @@ Requires-Dist: SciencePlots (>=2.1.1)
 Requires-Dist: XlsxWriter (>=3.2.0)
 Requires-Dist: asciitree (>=0.3.3)
 Requires-Dist: asttokens (>=2.4.1)
-Requires-Dist: attrs (>=23.2.0)
 Requires-Dist: autogluon (>=1.2)
 Requires-Dist: beautifulsoup4 (>=4.12.3)
 Requires-Dist: bleach (>=6.1.0)
@@ -18,7 +18,7 @@ py2ls/.git/hooks/pre-receive.sample,sha256=pMPSuce7P9jRRBwxvU7nGlldZrRPz0ndsxAlI
 py2ls/.git/hooks/prepare-commit-msg.sample,sha256=6d3KpBif3dJe2X_Ix4nsp7bKFjkLI5KuMnbwyOGqRhk,1492
 py2ls/.git/hooks/push-to-checkout.sample,sha256=pT0HQXmLKHxt16-mSu5HPzBeZdP0lGO7nXQI7DsSv18,2783
 py2ls/.git/hooks/update.sample,sha256=jV8vqD4QPPCLV-qmdSHfkZT0XL28s32lKtWGCXoU0QY,3650
-py2ls/.git/index,sha256=
+py2ls/.git/index,sha256=1-0l4HpWQFAVRMijggz6CNt4WGBxyGaAzgKzr57Td2I,4232
 py2ls/.git/info/exclude,sha256=ZnH-g7egfIky7okWTR8nk7IxgFjri5jcXAbuClo7DsE,240
 py2ls/.git/logs/HEAD,sha256=8ID7WuAe_TlO9g-ARxhIJYdgdL3u3m7-1qrOanaIUlA,3535
 py2ls/.git/logs/refs/heads/main,sha256=8ID7WuAe_TlO9g-ARxhIJYdgdL3u3m7-1qrOanaIUlA,3535
@@ -240,18 +240,18 @@ py2ls/export_requirements.py,sha256=x2WgUF0jYKz9GfA1MVKN-MdsM-oQ8yUeC6Ua8oCymio,
 py2ls/fetch_update.py,sha256=9LXj661GpCEFII2wx_99aINYctDiHni6DOruDs_fdt8,4752
 py2ls/freqanalysis.py,sha256=F4218VSPbgL5tnngh6xNCYuNnfR-F_QjECUUxrPYZss,32594
 py2ls/ich2ls.py,sha256=3E9R8oVpyYZXH5PiIQgT3CN5NxLe4Dwtm2LwaeacE6I,21381
-py2ls/ips.py,sha256=
+py2ls/ips.py,sha256=ZAwkOilTrfQ3zkUiMQbidrcFs-hZoWmXH60KGHkh530,342899
 py2ls/ml2ls.py,sha256=kIk-ZnDdJGd-fw9GPIFf1r4jtrw5hgvBpRnYNoL1U8I,209494
 py2ls/mol.py,sha256=AZnHzarIk_MjueKdChqn1V6e4tUle3X1NnHSFA6n3Nw,10645
-py2ls/netfinder.py,sha256=
+py2ls/netfinder.py,sha256=UfsruqlFwUOZQx4mO7P7-UiRJqcxcT0WN3QRLv22o74,64059
 py2ls/nl2ls.py,sha256=UEIdok-OamFZFIvvz_PdZenu085zteMdaJd9mLu3F-s,11485
-py2ls/ocr.py,sha256=
+py2ls/ocr.py,sha256=CmG2GUBorz4q1aaq5TkQ7bKn3iueQJ9JKrPTzloGqlY,33447
 py2ls/plot.py,sha256=HcOtaSwaz2tQT-diA-_r46BFIYM_N1LFBCj-HUUsWgY,229795
 py2ls/setuptools-70.1.0-py3-none-any.whl,sha256=2bi3cUVal8ip86s0SOvgspteEF8SKLukECi-EWmFomc,882588
 py2ls/sleep_events_detectors.py,sha256=bQA3HJqv5qnYKJJEIhCyhlDtkXQfIzqksnD0YRXso68,52145
 py2ls/stats.py,sha256=qBn2rJmNa_QLLUqjwYqXUlGzqmW94sgA1bxJU2FC3r0,39175
 py2ls/translator.py,sha256=77Tp_GjmiiwFbEIJD_q3VYpQ43XL9ZeJo6Mhl44mvh8,34284
 py2ls/wb_detector.py,sha256=7y6TmBUj9exCZeIgBAJ_9hwuhkDh1x_-yg4dvNY1_GQ,6284
-py2ls-0.2.4.
-py2ls-0.2.4.
-py2ls-0.2.4.
+py2ls-0.2.4.28.dist-info/METADATA,sha256=DmVK7j9FD4wjYNSx3WoUaGlhwf0VSFNIu6Vlewfy5R0,20215
+py2ls-0.2.4.28.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+py2ls-0.2.4.28.dist-info/RECORD,,
File without changes
|