py2ls 0.2.4.26__py3-none-any.whl → 0.2.4.27__py3-none-any.whl
py2ls/.git/index
CHANGED
Binary file
py2ls/ips.py
CHANGED
@@ -17,7 +17,11 @@ import warnings
 warnings.simplefilter("ignore", category=pd.errors.SettingWithCopyWarning)
 warnings.filterwarnings("ignore", category=pd.errors.PerformanceWarning)
 warnings.filterwarnings("ignore")
-
+import os
+import shutil
+import logging
+from pathlib import Path
+from datetime import datetime

 def run_once_within(duration=60,reverse=False): # default 60s
     import time
@@ -874,6 +878,19 @@ def counter(list_, verbose=True):
     # print(f"Return a list of the n most common elements:\n{c.most_common()}")
     # print(f"Compute the sum of the counts:\n{c.total()}")

+def dict2df(dict_, fill=None):
+    len_max = 0
+    for key, value in dict_.items():
+        # each value must be a list
+        if isinstance(value, list):
+            pass
+        # get the max_length
+        len_max = len(value) if len(value) > len_max else len_max
+    # pad every list to the same length
+    for key, value in dict_.items():
+        value.extend([fill] * (len_max - len(value)))
+        dict_[key] = value
+    return pd.DataFrame.from_dict(dict_)

 def str2time(time_str, fmt="24"):
     """
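A minimal usage sketch for the dict2df helper added above; the sample dict is illustrative, and note that the helper pads the input lists in place before building the DataFrame:

from py2ls.ips import dict2df

d = {"a": [1, 2, 3], "b": [4]}     # lists of unequal length
df = dict2df(d, fill=None)         # "b" is padded with None up to the longest list
print(df)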
@@ -1322,7 +1339,7 @@ def docx2pdf(dir_docx, dir_pdf=None):
         convert(dir_docx)


-def img2pdf(dir_img, kind="jpeg", page=None, dir_save=None, page_size="a4", dpi=300):
+def img2pdf(dir_img, kind=None, page=None, dir_save=None, page_size="a4", dpi=300):
     import img2pdf as image2pdf

     def mm_to_point(size):
@@ -1331,7 +1348,8 @@ def img2pdf(dir_img, kind="jpeg", page=None, dir_save=None, page_size="a4", dpi=300):
     def set_dpi(x):
         dpix = dpiy = x
         return image2pdf.get_fixed_dpi_layout_fun((dpix, dpiy))
-
+    if kind is None:
+        _, kind = os.path.splitext(dir_img)
     if not kind.startswith("."):
         kind = "." + kind
     if dir_save is None:
@@ -1354,8 +1372,10 @@ def img2pdf(dir_img, kind="jpeg", page=None, dir_save=None, page_size="a4", dpi=300):
                 continue
             imgs.append(path)
     else:
-        imgs = [
-
+        imgs = [
+            # os.path.isdir(dir_img),
+            dir_img]
+        print(imgs)
     if page_size:
         if isinstance(page_size, str):
             pdf_in_mm = mm_to_point(paper_size(page_size))
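With the change above, kind defaults to None and is inferred from the extension of dir_img via os.path.splitext. A hedged sketch of the assumed call (the file names are hypothetical, and dir_save is assumed here to be the output PDF path):

from py2ls.ips import img2pdf

# no kind argument needed any more; ".png" is picked up from the input path
img2pdf("scan_page1.png", dir_save="scan_page1.pdf", page_size="a4", dpi=300)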
@@ -3205,21 +3225,437 @@ def isa(content, kind):
         return False


-
+def get_os(full=False, verbose=False):
+    """Collects comprehensive system information.
+    full(bool): True, get more detailed info
+    verbose(bool): True, print it
+    usage:
+        info = get_os(full=True, verbose=False)
+    """
+    import sys
+    import platform
+    import psutil
+    import GPUtil
+    import socket
+    import uuid
+    import cpuinfo
+    import os
+    import subprocess
+    from datetime import datetime, timedelta
+    from collections import defaultdict

+    def get_os_type():
+        os_name = sys.platform
+        if "dar" in os_name:
+            return "macOS"
+        else:
+            if "win" in os_name:
+                return "Windows"
+            elif "linux" in os_name:
+                return "Linux"
+            else:
+                print(f"{os_name}, returned 'None'")
+                return None

-def
-
-
-
-
-
-
-
-
+    def get_os_info():
+        """Get the detailed OS name, version, and other platform-specific details."""
+
+        def get_mac_os_info():
+            """Get detailed macOS version and product name."""
+            try:
+                sw_vers = subprocess.check_output(["sw_vers"]).decode("utf-8")
+                product_name = (
+                    [
+                        line
+                        for line in sw_vers.split("\n")
+                        if line.startswith("ProductName")
+                    ][0]
+                    .split(":")[1]
+                    .strip()
+                )
+                product_version = (
+                    [
+                        line
+                        for line in sw_vers.split("\n")
+                        if line.startswith("ProductVersion")
+                    ][0]
+                    .split(":")[1]
+                    .strip()
+                )
+                build_version = (
+                    [
+                        line
+                        for line in sw_vers.split("\n")
+                        if line.startswith("BuildVersion")
+                    ][0]
+                    .split(":")[1]
+                    .strip()
+                )
+
+                # Return the formatted macOS name, version, and build
+                return f"{product_name} {product_version} (Build {build_version})"
+            except Exception as e:
+                return f"Error retrieving macOS name: {str(e)}"
+
+        def get_windows_info():
+            """Get detailed Windows version and edition."""
+            try:
+                # Get basic Windows version using platform
+                windows_version = platform.version()
+                release = platform.release()
+                version = platform.win32_ver()[0]
+
+                # Additional information using Windows-specific system commands
+                edition_command = "wmic os get caption"
+                edition = (
+                    subprocess.check_output(edition_command, shell=True)
+                    .decode("utf-8")
+                    .strip()
+                    .split("\n")[1]
+                )
+
+                # Return Windows information
+                return f"Windows {version} {release} ({edition})"
+            except Exception as e:
+                return f"Error retrieving Windows information: {str(e)}"
+
+        def get_linux_info():
+            """Get detailed Linux version and distribution info."""
+            try:
+                # Check /etc/os-release for modern Linux distros
+                with open("/etc/os-release") as f:
+                    os_info = f.readlines()
+
+                os_name = (
+                    next(line for line in os_info if line.startswith("NAME"))
+                    .split("=")[1]
+                    .strip()
+                    .replace('"', "")
+                )
+                os_version = (
+                    next(line for line in os_info if line.startswith("VERSION"))
+                    .split("=")[1]
+                    .strip()
+                    .replace('"', "")
+                )
+
+                # For additional info, check for the package manager (e.g., apt, dnf)
+                package_manager = "Unknown"
+                if os.path.exists("/usr/bin/apt"):
+                    package_manager = "APT (Debian/Ubuntu)"
+                elif os.path.exists("/usr/bin/dnf"):
+                    package_manager = "DNF (Fedora/RHEL)"
+
+                # Return Linux distribution, version, and package manager
+                return f"{os_name} {os_version} (Package Manager: {package_manager})"
+            except Exception as e:
+                return f"Error retrieving Linux information: {str(e)}"
+
+        os_name = platform.system()
+
+        if os_name == "Darwin":
+            return get_mac_os_info()
+        elif os_name == "Windows":
+            return get_windows_info()
+        elif os_name == "Linux":
+            return get_linux_info()
         else:
-
-
+            return f"Unknown OS: {os_name} {platform.release()}"
+
+    def get_os_name_and_version():
+        os_name = platform.system()
+        if os_name == "Darwin":
+            try:
+                # Run 'sw_vers' command to get macOS details like "macOS Sequoia"
+                sw_vers = subprocess.check_output(["sw_vers"]).decode("utf-8")
+                product_name = (
+                    [
+                        line
+                        for line in sw_vers.split("\n")
+                        if line.startswith("ProductName")
+                    ][0]
+                    .split(":")[1]
+                    .strip()
+                )
+                product_version = (
+                    [
+                        line
+                        for line in sw_vers.split("\n")
+                        if line.startswith("ProductVersion")
+                    ][0]
+                    .split(":")[1]
+                    .strip()
+                )
+
+                # Return the formatted macOS name and version
+                return f"{product_name} {product_version}"
+
+            except Exception as e:
+                return f"Error retrieving macOS name: {str(e)}"
+
+        # For Windows, we use platform to get the OS name and version
+        elif os_name == "Windows":
+            os_version = platform.version()
+            return f"Windows {os_version}"
+
+        # For Linux, check for distribution info using platform and os-release file
+        elif os_name == "Linux":
+            try:
+                # Try to read Linux distribution info from '/etc/os-release'
+                with open("/etc/os-release") as f:
+                    os_info = f.readlines()
+
+                # Find fields like NAME and VERSION
+                os_name = (
+                    next(line for line in os_info if line.startswith("NAME"))
+                    .split("=")[1]
+                    .strip()
+                    .replace('"', "")
+                )
+                os_version = (
+                    next(line for line in os_info if line.startswith("VERSION"))
+                    .split("=")[1]
+                    .strip()
+                    .replace('"', "")
+                )
+                return f"{os_name} {os_version}"
+
+            except Exception as e:
+                return f"Error retrieving Linux name: {str(e)}"
+
+        # Default fallback (for unknown OS or edge cases)
+        return f"{os_name} {platform.release()}"
+
+    def get_system_uptime():
+        """Returns system uptime as a human-readable string."""
+        boot_time = datetime.fromtimestamp(psutil.boot_time())
+        uptime = datetime.now() - boot_time
+        return str(uptime).split(".")[0]  # Remove microseconds
+
+    def get_active_processes(limit=10):
+        processes = []
+        for proc in psutil.process_iter(
+            ["pid", "name", "cpu_percent", "memory_percent"]
+        ):
+            try:
+                processes.append(proc.info)
+            except psutil.NoSuchProcess:
+                pass
+        # Handle NoneType values by treating them as 0
+        processes.sort(key=lambda x: x["cpu_percent"] or 0, reverse=True)
+        return processes[:limit]
+
+    def get_virtual_environment_info():
+        """Checks if the script is running in a virtual environment and returns details."""
+        try:
+            # Check if running in a virtual environment
+            if hasattr(sys, "real_prefix") or (
+                hasattr(sys, "base_prefix") and sys.base_prefix != sys.prefix
+            ):
+                return {
+                    "Virtual Environment": sys.prefix,
+                    "Site-Packages Path": os.path.join(
+                        sys.prefix,
+                        "lib",
+                        "python{}/site-packages".format(sys.version_info.major),
+                    ),
+                }
+            else:
+                return {"Virtual Environment": "Not in a virtual environment"}
+        except Exception as e:
+            return {"Error": str(e)}
+
+    def get_temperatures():
+        """Returns temperature sensor readings."""
+        try:
+            return psutil.sensors_temperatures(fahrenheit=False)
+        except AttributeError:
+            return {"Error": "Temperature sensors not available"}
+
+    def get_battery_status():
+        """Returns battery status."""
+        battery = psutil.sensors_battery()
+        if battery:
+            time_left = (
+                str(timedelta(seconds=battery.secsleft))
+                if battery.secsleft != psutil.POWER_TIME_UNLIMITED
+                else "Charging/Unlimited"
+            )
+            return {
+                "Percentage": battery.percent,
+                "Plugged In": battery.power_plugged,
+                "Time Left": time_left,
+            }
+        return {"Status": "No battery detected"}
+
+    def get_disk_io():
+        """Returns disk I/O statistics."""
+        disk_io = psutil.disk_io_counters()
+        return {
+            "Read (GB)": disk_io.read_bytes / (1024**3),
+            "Write (GB)": disk_io.write_bytes / (1024**3),
+            "Read Count": disk_io.read_count,
+            "Write Count": disk_io.write_count,
+        }
+
+    def get_network_io():
+        """Returns network I/O statistics."""
+        net_io = psutil.net_io_counters()
+        return {
+            "Bytes Sent (GB)": net_io.bytes_sent / (1024**3),
+            "Bytes Received (GB)": net_io.bytes_recv / (1024**3),
+            "Packets Sent": net_io.packets_sent,
+            "Packets Received": net_io.packets_recv,
+        }
+
+    def run_shell_command(command):
+        """Runs a shell command and returns its output."""
+        try:
+            result = subprocess.run(
+                command,
+                shell=True,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                text=True,
+            )
+            return (
+                result.stdout.strip()
+                if result.returncode == 0
+                else result.stderr.strip()
+            )
+        except Exception as e:
+            return f"Error running command: {e}"
+
+    system_info = {
+        "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
+        "os": get_os_type(),
+        "system": {
+            "os": get_os_info(),
+            "platform": f"{platform.system()} {platform.release()}",
+            "version": platform.version(),
+            "machine": platform.machine(),
+            "processor": platform.processor(),
+            "architecture": platform.architecture()[0],
+            "hostname": socket.gethostname(),
+            "ip address": socket.gethostbyname(socket.gethostname()),
+            "mac address": ":".join(
+                ["{:02x}".format((uuid.getnode() >> i) & 0xFF) for i in range(0, 48, 8)]
+            ),
+            "cpu brand": cpuinfo.get_cpu_info().get("brand_raw", "Unknown"),
+            "python version": platform.python_version(),
+            "uptime": get_system_uptime(),
+        },
+        "cpu": {
+            "physical cores": psutil.cpu_count(logical=False),
+            "logical cores": psutil.cpu_count(logical=True),
+            "max frequency (MHz)": psutil.cpu_freq().max,
+            "min frequency (MHz)": psutil.cpu_freq().min,
+            "current frequency (MHz)": psutil.cpu_freq().current,
+            "usage per core (%)": psutil.cpu_percent(percpu=True),
+            "total cpu Usage (%)": psutil.cpu_percent(),
+            "load average (1m, 5m, 15m)": (
+                os.getloadavg() if hasattr(os, "getloadavg") else "N/A"
+            ),
+        },
+        "memory": {
+            "total memory (GB)": psutil.virtual_memory().total / (1024**3),
+            "available memory (GB)": psutil.virtual_memory().available / (1024**3),
+            "used memory (GB)": psutil.virtual_memory().used / (1024**3),
+            "memory usage (%)": psutil.virtual_memory().percent,
+            "swap total (GB)": psutil.swap_memory().total / (1024**3),
+            "swap free (GB)": psutil.swap_memory().free / (1024**3),
+            "swap used (GB)": psutil.swap_memory().used / (1024**3),
+            "swap usage (%)": psutil.swap_memory().percent,
+        },
+        "disk": {},
+        "disk io": get_disk_io(),
+        "network": {},
+        "network io": get_network_io(),
+        "gpu": [],
+        "temperatures": get_temperatures(),
+        "battery": get_battery_status(),
+        "active processes": get_active_processes(),
+        "environment": {
+            "user": os.getenv("USER", "Unknown"),
+            "environment variables": dict(os.environ),
+            "virtual environment info": get_virtual_environment_info(),  # Virtual env details
+            "docker running": os.path.exists("/.dockerenv"),  # Check for Docker
+            "shell": os.environ.get("SHELL", "Unknown"),
+            "default terminal": run_shell_command("echo $TERM"),
+            "kernel version": platform.uname().release,
+            "virtualization type": run_shell_command("systemd-detect-virt"),
+        },
+        "additional info": {
+            "Shell": os.environ.get("SHELL", "Unknown"),
+            "default terminal": run_shell_command("echo $TERM"),
+            "kernel version": platform.uname().release,
+            "virtualization type": run_shell_command("systemd-detect-virt"),
+            "running in docker": os.path.exists("/.dockerenv"),
+        },
+    }
+
+    # Disk Information
+    for partition in psutil.disk_partitions():
+        try:
+            usage = psutil.disk_usage(partition.mountpoint)
+            system_info["disk"][partition.device] = {
+                "mountpoint": partition.mountpoint,
+                "file system type": partition.fstype,
+                "total size (GB)": usage.total / (1024**3),
+                "used (GB)": usage.used / (1024**3),
+                "free (GB)": usage.free / (1024**3),
+                "usage (%)": usage.percent,
+            }
+        except PermissionError:
+            system_info["disk"][partition.device] = "Permission Denied"
+
+    # Network Information
+    if_addrs = psutil.net_if_addrs()
+    for interface_name, interface_addresses in if_addrs.items():
+        system_info["network"][interface_name] = []
+        for address in interface_addresses:
+            if str(address.family) == "AddressFamily.AF_INET":
+                system_info["network"][interface_name].append(
+                    {
+                        "ip address": address.address,
+                        "netmask": address.netmask,
+                        "broadcast ip": address.broadcast,
+                    }
+                )
+            elif str(address.family) == "AddressFamily.AF_PACKET":
+                system_info["network"][interface_name].append(
+                    {
+                        "mac address": address.address,
+                        "netmask": address.netmask,
+                        "broadcast mac": address.broadcast,
+                    }
+                )
+
+    # GPU Information
+    gpus = GPUtil.getGPUs()
+    for gpu in gpus:
+        gpu_info = {
+            "name": gpu.name,
+            "load (%)": gpu.load * 100,
+            "free memory (MB)": gpu.memoryFree,
+            "used memory (MB)": gpu.memoryUsed,
+            "total memory (MB)": gpu.memoryTotal,
+            "driver version": gpu.driver,
+            "temperature (°C)": gpu.temperature,
+        }
+        if hasattr(gpu, "powerDraw"):
+            gpu_info["Power Draw (W)"] = gpu.powerDraw
+        if hasattr(gpu, "powerLimit"):
+            gpu_info["Power Limit (W)"] = gpu.powerLimit
+        system_info["gpu"].append(gpu_info)
+
+    res = system_info if full else get_os_type()
+    if verbose:
+        try:
+            preview(res)
+        except Exception as e:
+            print(e)
+    return res


 def listdir(
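A short usage sketch of the new get_os, following its own docstring; the keys used below come from the system_info dict built above. Note that full=True requires psutil, GPUtil and py-cpuinfo to be installed (GPUtil is added to the package requirements further down):

from py2ls.ips import get_os

print(get_os())                           # short form: just the OS type, e.g. "macOS"
info = get_os(full=True, verbose=False)   # full system_info dict
print(info["system"]["hostname"], info["cpu"]["logical cores"])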
@@ -3242,8 +3678,9 @@ def listdir(
         print(ls)
     df_all = pd.DataFrame(
         {
-            "
-            "
+            "name": ls,
+            "path": [os.path.join(rootdir, i) for i in ls],
+            "kind": [os.path.splitext(i)[1] for i in ls]
         }
     )
     if verbose:
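A hedged sketch of the listdir change above (the folder is hypothetical, and it is assumed that listdir returns the DataFrame it builds): the result now carries name, path and kind (file extension) columns:

from py2ls.ips import listdir

df = listdir("./data")
print(df[["name", "kind"]].head())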
@@ -3382,7 +3819,94 @@ def listfunc(lib_name, opt="call"):
 def func_list(lib_name, opt="call"):
     return list_func(lib_name, opt=opt)

+def copy(src, dst, overwrite=False):
+    """Copy a file from src to dst."""
+    try:
+        src = Path(src)
+        dst = Path(dst)
+        if not src.is_dir():
+            if dst.is_dir():
+                dst = dst / src.name
+
+            if dst.exists():
+                if overwrite:
+                    dst.unlink()
+                else:
+                    dst = dst.with_name(f"{dst.stem}_{datetime.now().strftime('_%H%M%S')}{dst.suffix}")
+            shutil.copy(src, dst)
+            print(f"\n Done! copy to {dst}\n")
+        else:
+            dst = dst / src.name
+            if dst.exists():
+                if overwrite:
+                    shutil.rmtree(dst)  # Remove existing directory
+                else:
+                    dst = dst.with_name(f"{dst.stem}_{datetime.now().strftime('%H%M%S')}")
+            shutil.copytree(src, dst)
+            print(f"\n Done! copy to {dst}\n")
+
+    except Exception as e:
+        logging.error(f"Failed {e}")
+
+def move(src, dst, overwrite=False):
+    return cut(src=src, dst=dst, overwrite=overwrite)

+def cut(src, dst, overwrite=False):
+    try:
+        src = Path(src)
+        dst = Path(dst)
+        if dst.is_dir():
+            dst = dst / src.name
+        if dst.exists():
+            if overwrite:
+                # dst.unlink()  # Delete the existing file
+                pass
+            else:
+                dst = dst.with_name(f"{dst.stem}_{datetime.now().strftime('_%H%M%S')}{dst.suffix}")
+        shutil.move(src, dst)
+        print(f"\n Done! moved to {dst}\n")
+    except Exception as e:
+        logging.error(f"Failed to move file from {src} to {dst}: {e}")
+
+def delete(fpath):
+    """Delete a file/folder."""
+    try:
+        fpath = Path(fpath)
+        if not fpath.is_dir():  # file
+            if fpath.exists():
+                fpath.unlink()
+                print(f"\n Done! delete {fpath}\n")
+            else:
+                print(f"File '{fpath}' does not exist.")
+        else:  # folder
+            if fpath.exists():
+                shutil.rmtree(fpath)  # Remove existing directory
+                print(f"\n Done! delete {fpath}\n")
+            else:
+                print(f"Folder '{fpath}' does not exist.")
+    except Exception as e:
+        logging.error(f"Failed to delete {fpath}: {e}")
+def rename(fpath, dst, smart=True):
+    """Rename a file or folder."""
+    try:
+        src_kind, dst_kind = None, None
+        if smart:
+            dir_name_src = os.path.dirname(fpath)
+            dir_name_dst = os.path.dirname(dst)
+            src_kind = os.path.splitext(fpath)[1]
+            dst_kind = os.path.splitext(dst)[1]
+            if dir_name_dst != dir_name_src:
+                dst = os.path.join(dir_name_src, dst)
+            if dst_kind is not None and src_kind is not None:
+                if dst_kind != src_kind:
+                    dst = dst + src_kind
+        if os.path.exists(fpath):
+            os.rename(fpath, dst)
+            print(f"Done! rename to {dst}")
+        else:
+            print(f"Failed: {fpath} does not exist.")
+    except Exception as e:
+        logging.error(f"Failed to rename {fpath} to {dst}: {e}")
 def mkdir_nest(fpath: str) -> str:
     """
     Create nested directories based on the provided file path.
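A hedged sketch of the new file helpers (all paths are hypothetical). When overwrite=False and the target already exists, copy and cut fall back to a timestamped name rather than overwriting:

from py2ls.ips import copy, move, delete, rename

copy("report.docx", "backup/")                # file into folder, keeps the name
move("report.docx", "archive/", overwrite=True)
rename("archive/report.docx", "report_v2")    # smart=True keeps the folder and the .docx suffix
delete("backup/report.docx")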
@@ -3401,7 +3925,9 @@ def mkdir_nest(fpath: str) -> str:
     dir_parts = fpath.split(f_slash)  # Split the path by the OS-specific separator

     # Start creating directories from the root to the desired path
-
+    root_dir = os.path.splitdrive(fpath)[0]  # Get the root drive on Windows (e.g., 'C:')
+    current_path = root_dir if root_dir else f_slash  # Start from the root directory or POSIX '/'
+
     for part in dir_parts:
         if part:
             current_path = os.path.join(current_path, part)
@@ -3425,10 +3951,13 @@ def mkdir(pardir: str = None, chdir: str | list = None, overwrite=False):
     Returns:
     - str: The path of the created directory or an error message.
     """
-
     rootdir = []
+    pardir = mkdir_nest(pardir)
     if chdir is None:
-        return
+        return pardir
+    else:
+        pass
+    print(pardir)
     if isinstance(chdir, str):
         chdir = [chdir]
     chdir = list(set(chdir))
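With the change above, mkdir(pardir) without chdir now normalizes pardir through mkdir_nest and returns it instead of returning None; a minimal sketch with a hypothetical path:

from py2ls.ips import mkdir

results_dir = mkdir("./results")   # directory is created if missing and its path is returned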
@@ -3466,7 +3995,7 @@ def mkdir(pardir: str = None, chdir: str | list = None, overwrite=False):
     # Dir is the main output, if only one dir, then str type is inconvenient
     if len(rootdir) == 1:
         rootdir = rootdir[0]
-
+        rootdir = rootdir + stype if not rootdir.endswith(stype) else rootdir

     return rootdir

@@ -3865,6 +4394,114 @@ def apply_filter(img, *args):
     )
     return img.filter(supported_filters[filter_name])

+def detect_angle(image, by="median", template=None):
+    """Detect the angle of rotation using various methods."""
+    from sklearn.decomposition import PCA
+    from skimage import transform, feature, filters, measure
+    from skimage.color import rgb2gray
+    from scipy.fftpack import fftshift, fft2
+    import numpy as np
+    import cv2
+    # Convert to grayscale
+    gray_image = rgb2gray(image)
+
+    # Detect edges using Canny edge detector
+    edges = feature.canny(gray_image, sigma=2)
+
+    # Use Hough transform to detect lines
+    lines = transform.probabilistic_hough_line(edges)
+
+    if not lines and any(["me" in by, "pca" in by]):
+        print("No lines detected. Adjust the edge detection parameters.")
+        return 0
+
+    # Hough Transform-based angle detection (Median/Mean)
+    if "me" in by:
+        angles = []
+        for line in lines:
+            (x0, y0), (x1, y1) = line
+            angle = np.arctan2(y1 - y0, x1 - x0) * 180 / np.pi
+            if 80 < abs(angle) < 100:
+                angles.append(angle)
+        if not angles:
+            return 0
+        if "di" in by:
+            median_angle = np.median(angles)
+            rotation_angle = (
+                90 - median_angle if median_angle > 0 else -90 - median_angle
+            )
+
+            return rotation_angle
+        else:
+            mean_angle = np.mean(angles)
+            rotation_angle = 90 - mean_angle if mean_angle > 0 else -90 - mean_angle
+
+            return rotation_angle
+
+    # PCA-based angle detection
+    elif "pca" in by:
+        y, x = np.nonzero(edges)
+        if len(x) == 0:
+            return 0
+        pca = PCA(n_components=2)
+        pca.fit(np.vstack((x, y)).T)
+        angle = np.arctan2(pca.components_[0, 1], pca.components_[0, 0]) * 180 / np.pi
+        return angle
+
+    # Gradient Orientation-based angle detection
+    elif "gra" in by:
+        gx, gy = np.gradient(gray_image)
+        angles = np.arctan2(gy, gx) * 180 / np.pi
+        hist, bin_edges = np.histogram(angles, bins=360, range=(-180, 180))
+        return bin_edges[np.argmax(hist)]
+
+    # Template Matching-based angle detection
+    elif "temp" in by:
+        if template is None:
+            # Automatically extract a template from the center of the image
+            height, width = gray_image.shape
+            center_x, center_y = width // 2, height // 2
+            size = (
+                min(height, width) // 4
+            )  # Size of the template as a fraction of image size
+            template = gray_image[
+                center_y - size : center_y + size, center_x - size : center_x + size
+            ]
+        best_angle = None
+        best_corr = -1
+        for angle in range(0, 180, 1):  # Checking every degree
+            rotated_template = transform.rotate(template, angle)
+            res = cv2.matchTemplate(gray_image, rotated_template, cv2.TM_CCOEFF)
+            _, max_val, _, _ = cv2.minMaxLoc(res)
+            if max_val > best_corr:
+                best_corr = max_val
+                best_angle = angle
+        return best_angle
+
+    # Image Moments-based angle detection
+    elif "mo" in by:
+        moments = measure.moments_central(gray_image)
+        angle = (
+            0.5
+            * np.arctan2(2 * moments[1, 1], moments[0, 2] - moments[2, 0])
+            * 180
+            / np.pi
+        )
+        return angle
+
+    # Fourier Transform-based angle detection
+    elif "fft" in by:
+        f = fft2(gray_image)
+        fshift = fftshift(f)
+        magnitude_spectrum = np.log(np.abs(fshift) + 1)
+        rows, cols = magnitude_spectrum.shape
+        r, c = np.unravel_index(np.argmax(magnitude_spectrum), (rows, cols))
+        angle = np.arctan2(r - rows // 2, c - cols // 2) * 180 / np.pi
+        return angle
+
+    else:
+        print(f"Unknown method {by}")
+        return 0

 def imgsets(img, **kwargs):
     """
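A hedged sketch of calling the new detect_angle with a few of its "by" modes; the image file is hypothetical and is assumed to be an RGB array, since the function starts with rgb2gray:

from skimage import io
from py2ls.ips import detect_angle

img = io.imread("scanned_page.png")
print(detect_angle(img, by="median"))   # Hough-line median (the default)
print(detect_angle(img, by="pca"))      # principal axis of the edge pixels
print(detect_angle(img, by="fft"))      # dominant orientation in the frequency spectrum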
py2ls/netfinder.py
CHANGED
@@ -1608,3 +1608,191 @@ def ai(*args, **kwargs):
     if len(args) == 1 and isinstance(args[0], str):
         kwargs["query"] = args[0]
     return echo(**kwargs)
+
+
+#! get_ip()
+def get_ip(ip=None):
+    """
+    Usage:
+        from py2ls import netfinder as nt
+        ip = nt.get_ip()
+    """
+
+    import requests
+    import time
+    import logging
+    from datetime import datetime, timedelta
+
+    # Set up logging configuration
+    logging.basicConfig(
+        level=logging.INFO,
+        format="%(asctime)s - %(levelname)s - %(message)s",
+        handlers=[
+            logging.StreamHandler(),
+            logging.FileHandler("public_ip_log.log"),  # Log to a file
+        ],
+    )
+
+    cache = {}
+
+    # Function to fetch IP addresses synchronously
+    def fetch_ip(url, retries, timeout, headers):
+        """
+        Synchronous function to fetch the IP address with retries.
+        """
+        for attempt in range(retries):
+            try:
+                response = requests.get(url, timeout=timeout, headers=headers)
+                response.raise_for_status()
+                return response.json()
+            except requests.RequestException as e:
+                logging.error(f"Attempt {attempt + 1} failed: {e}")
+                if attempt < retries - 1:
+                    time.sleep(2**attempt)  # Exponential backoff
+                else:
+                    logging.error("Max retries reached.")
+                    return {"error": f"Error fetching IP: {e}"}
+            except requests.Timeout:
+                logging.error("Request timed out")
+                time.sleep(2**attempt)
+        return {"error": "Failed to fetch IP after retries"}
+
+    # Function to fetch geolocation synchronously
+    def fetch_geolocation(url, retries, timeout, headers):
+        """
+        Synchronous function to fetch geolocation data by IP address.
+        """
+        for attempt in range(retries):
+            try:
+                response = requests.get(url, timeout=timeout, headers=headers)
+                response.raise_for_status()
+                return response.json()
+            except requests.RequestException as e:
+                logging.error(f"Geolocation request attempt {attempt + 1} failed: {e}")
+                if attempt < retries - 1:
+                    time.sleep(2**attempt)  # Exponential backoff
+                else:
+                    logging.error("Max retries reached.")
+                    return {"error": f"Error fetching geolocation: {e}"}
+            except requests.Timeout:
+                logging.error("Geolocation request timed out")
+                time.sleep(2**attempt)
+        return {"error": "Failed to fetch geolocation after retries"}
+
+    # Main function to get public IP and geolocation
+    def get_public_ip(
+        ip4=True,
+        ip6=True,
+        verbose=True,
+        retries=3,
+        timeout=5,
+        geolocation=True,
+        headers=None,
+        cache_duration=5,
+    ):
+        """
+        Synchronously fetches public IPv4 and IPv6 addresses, along with optional geolocation info.
+        """
+        # Use the cache if it's still valid
+        cache_key_ip4 = "public_ip4"
+        cache_key_ip6 = "public_ip6"
+        cache_key_geolocation = "geolocation"
+
+        if (
+            cache
+            and cache_key_ip4 in cache
+            and datetime.now() < cache[cache_key_ip4]["expires"]
+        ):
+            logging.info("Cache hit for IPv4, using cached data.")
+            ip4_data = cache[cache_key_ip4]["data"]
+        else:
+            ip4_data = None
+
+        if (
+            cache
+            and cache_key_ip6 in cache
+            and datetime.now() < cache[cache_key_ip6]["expires"]
+        ):
+            logging.info("Cache hit for IPv6, using cached data.")
+            ip6_data = cache[cache_key_ip6]["data"]
+        else:
+            ip6_data = None
+
+        if (
+            cache
+            and cache_key_geolocation in cache
+            and datetime.now() < cache[cache_key_geolocation]["expires"]
+        ):
+            logging.info("Cache hit for Geolocation, using cached data.")
+            geolocation_data = cache[cache_key_geolocation]["data"]
+        else:
+            geolocation_data = None
+
+        # Fetch IPv4 if requested
+        if ip4 and not ip4_data:
+            logging.info("Fetching IPv4...")
+            ip4_data = fetch_ip(
+                "https://api.ipify.org?format=json", retries, timeout, headers
+            )
+            cache[cache_key_ip4] = {
+                "data": ip4_data,
+                "expires": datetime.now() + timedelta(minutes=cache_duration),
+            }
+
+        # Fetch IPv6 if requested
+        if ip6 and not ip6_data:
+            logging.info("Fetching IPv6...")
+            ip6_data = fetch_ip(
+                "https://api6.ipify.org?format=json", retries, timeout, headers
+            )
+            cache[cache_key_ip6] = {
+                "data": ip6_data,
+                "expires": datetime.now() + timedelta(minutes=cache_duration),
+            }
+
+        # Fetch geolocation if requested
+        if geolocation and not geolocation_data:
+            logging.info("Fetching Geolocation...")
+            geolocation_data = fetch_geolocation(
+                "https://ipinfo.io/json", retries, timeout, headers
+            )
+            cache[cache_key_geolocation] = {
+                "data": geolocation_data,
+                "expires": datetime.now() + timedelta(minutes=cache_duration),
+            }
+
+        # Prepare the results
+        ip_info = {
+            "ip4": ip4_data.get("ip") if ip4_data else "N/A",
+            "ip6": ip6_data.get("ip") if ip6_data else "N/A",
+            "geolocation": geolocation_data if geolocation_data else "N/A",
+        }
+
+        # Verbose output if requested
+        if verbose:
+            print(f"Public IPv4: {ip_info['ip4']}")
+            print(f"Public IPv6: {ip_info['ip6']}")
+            print(f"Geolocation: {ip_info['geolocation']}")
+
+        return ip_info
+
+    # Function to get geolocation data by IP
+    def get_geolocation_by_ip(ip, retries=3, timeout=5, headers=None):
+        """
+        Fetches geolocation data for a given IP address.
+        """
+        url = f"https://ipinfo.io/{ip}/json"
+        geolocation_data = fetch_geolocation(url, retries, timeout, headers)
+        return geolocation_data
+    #! here starting get_ip()
+    headers = {"User-Agent": user_agent()}
+    if ip is None:
+        try:
+            ip_data = get_public_ip(headers=headers, verbose=True)
+        except Exception as e:
+            print(e)
+            ip_data = None
+        return ip_data
+    else:
+        geolocation_data = get_geolocation_by_ip(ip, headers=headers)
+        return geolocation_data
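A short usage sketch for the new netfinder.get_ip, following its own docstring; the explicit IP below is just an example value:

from py2ls import netfinder as nt

info = nt.get_ip()            # no argument: public IPv4/IPv6 plus ipinfo.io geolocation
geo = nt.get_ip("8.8.8.8")    # with an IP: only the geolocation lookup for that address
print(info["ip4"], geo)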
py2ls/ocr.py
CHANGED
@@ -486,6 +486,18 @@ def preprocess_img(

     return img_preprocessed

+def convert_image_to_bytes(image):
+    """
+    Convert a CV2 or numpy image to bytes for ddddocr.
+    """
+    import io
+    # Convert OpenCV image (numpy array) to PIL image
+    if isinstance(image, np.ndarray):
+        image = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
+    # Save PIL image to a byte stream
+    img_byte_arr = io.BytesIO()
+    image.save(img_byte_arr, format='PNG')
+    return img_byte_arr.getvalue()

 def text_postprocess(
     text,
@@ -604,10 +616,11 @@ def get_text(
     """
     )

-    models = ["easyocr", "paddleocr", "pytesseract"]
+    models = ["easyocr", "paddleocr", "pytesseract", "ddddocr"]
     model = strcmp(model, models)[0]
     lang = lang_auto_detect(lang, model)
     if isinstance(image, str):
+        dir_img = image
         image = cv2.imread(image)

     # Ensure lang is always a list
@@ -705,9 +718,10 @@ def get_text(
         )  # PaddleOCR supports only one language at a time
         result = ocr.ocr(image_process, **kwargs)
         detections = []
-
-
-
+        if result[0] is not None:
+            for line in result[0]:
+                bbox, (text, score) = line
+                detections.append((bbox, text, score))
         if postprocess is None:
             postprocess = dict(
                 spell_check=True,
@@ -787,7 +801,49 @@ def get_text(
         else:
             # by default, return all detection info
             return detections
+    elif "ddddocr" in model.lower():
+        import ddddocr
+
+        ocr = ddddocr.DdddOcr(det=False, ocr=True)
+        image_bytes = convert_image_to_bytes(image_process)
+
+        results = ocr.classification(image_bytes)  # Text extraction
+
+        # Optional: Perform detection for bounding boxes
+        detections = []
+        if kwargs.get("det", False):
+            det_ocr = ddddocr.DdddOcr(det=True)
+            det_results = det_ocr.detect(image_bytes)
+            for box in det_results:
+                top_left = (box[0], box[1])
+                bottom_right = (box[2], box[3])
+                detections.append((top_left, bottom_right))

+        if postprocess is None:
+            postprocess = dict(
+                spell_check=True,
+                clean=True,
+                filter=dict(min_length=2),
+                pattern=None,
+                merge=True,
+            )
+        text_corr = []
+        [
+            text_corr.extend(text_postprocess(text, **postprocess))
+            for _, text, _ in detections
+        ]
+        # Visualization
+        if show:
+            if ax is None:
+                ax = plt.gca()
+            image_vis = image.copy()
+            if detections:
+                for top_left, bottom_right in detections:
+                    cv2.rectangle(image_vis, top_left, bottom_right, box_color, 2)
+            image_vis = cv2.cvtColor(image_vis, cmap)
+            ax.imshow(image_vis)
+            ax.axis("off")
+        return detections
     else:  # "pytesseract"
         if ax is None:
             ax = plt.gca()
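A hedged sketch of selecting the new ddddocr backend through get_text; the file name is hypothetical, and det=True is forwarded to the branch above, which returns the detection list (empty unless detection is requested):

from py2ls import ocr

boxes = ocr.get_text("captcha.png", model="ddddocr", show=False, det=True)
print(boxes)   # list of (top_left, bottom_right) boxes from ddddocr's detector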
py2ls-0.2.4.27.dist-info/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: py2ls
-Version: 0.2.4.26
+Version: 0.2.4.27
 Summary: py(thon)2(too)ls
 Author: Jianfeng
 Author-email: Jianfeng.Liu0413@gmail.com
@@ -18,6 +18,7 @@ Provides-Extra: extr
 Requires-Dist: CacheControl (>=0.13.1)
 Requires-Dist: Cython (>=3.0.10)
 Requires-Dist: Deprecated (>=1.2.14)
+Requires-Dist: GPUtil (>=1.4.0)
 Requires-Dist: Jinja2 (>=3.1.4)
 Requires-Dist: Markdown (>=3.6)
 Requires-Dist: MarkupSafe (>=2.1.5)
py2ls-0.2.4.27.dist-info/RECORD
CHANGED
@@ -18,7 +18,7 @@ py2ls/.git/hooks/pre-receive.sample,sha256=pMPSuce7P9jRRBwxvU7nGlldZrRPz0ndsxAlI
 py2ls/.git/hooks/prepare-commit-msg.sample,sha256=6d3KpBif3dJe2X_Ix4nsp7bKFjkLI5KuMnbwyOGqRhk,1492
 py2ls/.git/hooks/push-to-checkout.sample,sha256=pT0HQXmLKHxt16-mSu5HPzBeZdP0lGO7nXQI7DsSv18,2783
 py2ls/.git/hooks/update.sample,sha256=jV8vqD4QPPCLV-qmdSHfkZT0XL28s32lKtWGCXoU0QY,3650
-py2ls/.git/index,sha256=
+py2ls/.git/index,sha256=X6N4wzd4ONwHaOyemlvaqQkoXIroIn7A7Q7RgUgXpaI,4232
 py2ls/.git/info/exclude,sha256=ZnH-g7egfIky7okWTR8nk7IxgFjri5jcXAbuClo7DsE,240
 py2ls/.git/logs/HEAD,sha256=8ID7WuAe_TlO9g-ARxhIJYdgdL3u3m7-1qrOanaIUlA,3535
 py2ls/.git/logs/refs/heads/main,sha256=8ID7WuAe_TlO9g-ARxhIJYdgdL3u3m7-1qrOanaIUlA,3535
@@ -240,18 +240,18 @@ py2ls/export_requirements.py,sha256=x2WgUF0jYKz9GfA1MVKN-MdsM-oQ8yUeC6Ua8oCymio,
 py2ls/fetch_update.py,sha256=9LXj661GpCEFII2wx_99aINYctDiHni6DOruDs_fdt8,4752
 py2ls/freqanalysis.py,sha256=F4218VSPbgL5tnngh6xNCYuNnfR-F_QjECUUxrPYZss,32594
 py2ls/ich2ls.py,sha256=3E9R8oVpyYZXH5PiIQgT3CN5NxLe4Dwtm2LwaeacE6I,21381
-py2ls/ips.py,sha256=
+py2ls/ips.py,sha256=255SRYfnZXRDQ2NTyMV3V7fqsQRKyF1qJXfmqC1Il9Q,340739
 py2ls/ml2ls.py,sha256=kIk-ZnDdJGd-fw9GPIFf1r4jtrw5hgvBpRnYNoL1U8I,209494
 py2ls/mol.py,sha256=AZnHzarIk_MjueKdChqn1V6e4tUle3X1NnHSFA6n3Nw,10645
-py2ls/netfinder.py,sha256=
+py2ls/netfinder.py,sha256=UfsruqlFwUOZQx4mO7P7-UiRJqcxcT0WN3QRLv22o74,64059
 py2ls/nl2ls.py,sha256=UEIdok-OamFZFIvvz_PdZenu085zteMdaJd9mLu3F-s,11485
-py2ls/ocr.py,sha256=
+py2ls/ocr.py,sha256=CmG2GUBorz4q1aaq5TkQ7bKn3iueQJ9JKrPTzloGqlY,33447
 py2ls/plot.py,sha256=HcOtaSwaz2tQT-diA-_r46BFIYM_N1LFBCj-HUUsWgY,229795
 py2ls/setuptools-70.1.0-py3-none-any.whl,sha256=2bi3cUVal8ip86s0SOvgspteEF8SKLukECi-EWmFomc,882588
 py2ls/sleep_events_detectors.py,sha256=bQA3HJqv5qnYKJJEIhCyhlDtkXQfIzqksnD0YRXso68,52145
 py2ls/stats.py,sha256=qBn2rJmNa_QLLUqjwYqXUlGzqmW94sgA1bxJU2FC3r0,39175
 py2ls/translator.py,sha256=77Tp_GjmiiwFbEIJD_q3VYpQ43XL9ZeJo6Mhl44mvh8,34284
 py2ls/wb_detector.py,sha256=7y6TmBUj9exCZeIgBAJ_9hwuhkDh1x_-yg4dvNY1_GQ,6284
-py2ls-0.2.4.
-py2ls-0.2.4.
-py2ls-0.2.4.
+py2ls-0.2.4.27.dist-info/METADATA,sha256=HWrS9YqO12ydb_7jNR9CtR_a-HCyGqE9nmSX2xvnPdk,20248
+py2ls-0.2.4.27.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+py2ls-0.2.4.27.dist-info/RECORD,,
File without changes