py2ls 0.2.4.33__py3-none-any.whl → 0.2.4.35__py3-none-any.whl

py2ls/ips.py CHANGED
@@ -26,6 +26,22 @@ import re
  import stat
  import platform

+ # only for backup these scripts
+ def backup(
+ src="/Users/macjianfeng/Dropbox/github/python/py2ls/.venv/lib/python3.12/site-packages/py2ls/",
+ tar="/Users/macjianfeng/Dropbox/github/python/py2ls/py2ls/",
+ kind="py",
+ overwrite=True,
+ reverse=False,
+ verbose=False
+ ):
+ if reverse:
+ src, tar = tar,src
+ print(f"reversed")
+ f = listdir(src, kind, verbose=verbose)
+ [copy(i, tar, overwrite=overwrite, verbose=verbose) for i in f.path]
+ print(f"all files are copied from {os.path.basename(src)} to {tar}") if verbose else None
+
  def run_once_within(duration=60, reverse=False): # default 60s
  import time

@@ -2683,13 +2699,41 @@ def fload(fpath, kind=None, **kwargs):
  def load_excel(fpath, **kwargs):
  engine = kwargs.get("engine", "openpyxl")
  verbose = kwargs.pop("verbose", False)
- if run_once_within(reverse=True):
- use_pd("read_excel", verbose=verbose)
- df = pd.read_excel(fpath, engine=engine, **kwargs)
+ password=kwargs.pop("password",None)
+
+ if not password:
+ if run_once_within(reverse=True):
+ use_pd("read_excel", verbose=verbose)
+ df = pd.read_excel(fpath, engine=engine, **kwargs)
+ try:
+ meta = pd.ExcelFile(fpath)
+ print(f"n_sheet={len(meta.sheet_names)},\t'sheetname = 0 (default)':")
+ [print(f"{i}:\t{i_}") for i, i_ in enumerate(meta.sheet_names)]
+ except:
+ pass
+ return df
+ #* needs a password?
+ import msoffcrypto # pip install msoffcrypto-tool
+ from io import BytesIO
+
+ # Open the encrypted Excel file
+ with open(fpath, 'rb') as f:
+ try:
+ office_file = msoffcrypto.OfficeFile(f)
+ office_file.load_key(password=password) # Provide the password
+ decrypted = BytesIO()
+ office_file.decrypt(decrypted)
+ except:
+ office_file = msoffcrypto.OfficeFile(f)
+ office_file.load_key(password=depass(password)) # Provide the password
+ decrypted = BytesIO()
+ office_file.decrypt(decrypted)
+ decrypted.seek(0)
+ df = pd.read_excel(decrypted, engine=engine, **kwargs)
  try:
- meata = pd.ExcelFile(fpath)
- print(f"n_sheet={len(meata.sheet_names)},\t'sheetname = 0 (default)':")
- [print(f"{i}:\t{i_}") for i, i_ in enumerate(meata.sheet_names)]
+ meta = pd.ExcelFile(fpath)
+ print(f"n_sheet={len(meta.sheet_names)},\t'sheetname = 0 (default)':")
+ [print(f"{i}:\t{i_}") for i, i_ in enumerate(meta.sheet_names)]
  except:
  pass
  return df
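
This hunk adds optional password support to load_excel: when a password is supplied, the workbook is decrypted in memory with msoffcrypto-tool (pip install msoffcrypto-tool) and the decrypted buffer is handed to pandas. A minimal usage sketch, assuming fload forwards keyword arguments (including password) down to load_excel as the code above suggests; file names are illustrative:

    from py2ls.ips import fload

    # unprotected workbook: behaves exactly as before
    df = fload("data.xlsx", sheet_name=0)

    # password-protected workbook: decrypted in memory, then read by pandas
    df = fload("protected.xlsx", password="my-secret", sheet_name=0)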
@@ -3273,11 +3317,16 @@ def fsave(
  df = pd.DataFrame(data)
  df.to_csv(fpath, **kwargs_valid)

- def save_xlsx(fpath, data, **kwargs):
+
+ def save_xlsx(fpath, data, password=None, **kwargs):
+ import msoffcrypto
+ from io import BytesIO
  verbose = kwargs.pop("verbose", False)
  sheet_name = kwargs.pop("sheet_name", "Sheet1")
+
  if run_once_within(reverse=True):
  use_pd("to_excel", verbose=verbose)
+
  if any(kwargs):
  format_excel(df=data, filename=fpath, **kwargs)
  else:
@@ -3302,16 +3351,30 @@ def fsave(
  kwargs.pop(key, None)

  df = pd.DataFrame(data)
- # Check if the file exists, then append the sheet, otherwise create a new file
- try:
- # Use ExcelWriter with append mode if the file exists
- with pd.ExcelWriter(
- fpath, engine="openpyxl", mode="a", if_sheet_exists="new"
- ) as writer:
- df.to_excel(writer, sheet_name=sheet_name, index=False, **kwargs)
- except FileNotFoundError:
- # If file doesn't exist, create a new one
- df.to_excel(fpath, sheet_name=sheet_name, index=False, **kwargs)
+
+ # Write to Excel without password first
+ temp_file = BytesIO()
+ df.to_excel(temp_file, sheet_name=sheet_name, index=False, engine="xlsxwriter", **kwargs)
+
+ # If a password is provided, encrypt the file
+ if password:
+ temp_file.seek(0)
+ office_file = msoffcrypto.OfficeFile(temp_file)
+ office_file.load_key(password=password) # Provide the password
+
+ # Encrypt and save the file
+ with open(fpath, 'wb') as encrypted_file:
+ office_file.encrypt(encrypted_file)
+ else:
+ # Save the file without encryption if no password is provided
+ try:
+ # Use ExcelWriter with append mode if the file exists
+ with pd.ExcelWriter(fpath, engine="openpyxl", mode="a", if_sheet_exists="new") as writer:
+ df.to_excel(writer, sheet_name=sheet_name, index=False, **kwargs)
+ except FileNotFoundError:
+ # If file doesn't exist, create a new one
+ df.to_excel(fpath, sheet_name=sheet_name, index=False, **kwargs)
+

  def save_ipynb(fpath, data, **kwargs):
  # Split the content by code fences to distinguish between code and markdown
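
save_xlsx now renders the DataFrame to an in-memory buffer first and, when a password is given, encrypts that buffer with msoffcrypto before writing it to disk; without a password it keeps the previous append-or-create behaviour. A hedged usage sketch, assuming fsave dispatches .xlsx paths to save_xlsx and forwards the password keyword (xlsxwriter, openpyxl and msoffcrypto-tool need to be installed):

    import pandas as pd
    from py2ls.ips import fsave

    df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})

    # unencrypted: appends a new sheet if report.xlsx already exists
    fsave("report.xlsx", df, sheet_name="results")

    # encrypted output
    fsave("report_protected.xlsx", df, password="my-secret")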
@@ -4400,13 +4463,13 @@ def func_list(lib_name, opt="call"):
  return list_func(lib_name, opt=opt)


- def copy(src, dst, overwrite=False):
+ def copy(src, dst, overwrite=False, verbose=True):
  """Copy a file from src to dst."""
  try:
  dir_par_dst = os.path.dirname(dst)
  if not os.path.isdir(dir_par_dst):
  mkdir(dir_par_dst)
- print(dir_par_dst)
+ print(dir_par_dst) if verbose else None
  src = Path(src)
  dst = Path(dst)
  if not src.is_dir():
@@ -4421,7 +4484,7 @@ def copy(src, dst, overwrite=False):
  f"{dst.stem}_{datetime.now().strftime('_%H%M%S')}{dst.suffix}"
  )
  shutil.copy(src, dst)
- print(f"\n Done! copy to {dst}\n")
+ print(f"\n Done! copy to {dst}\n") if verbose else None
  else:
  dst = dst / src.name
  if dst.exists():
@@ -4432,7 +4495,7 @@ def copy(src, dst, overwrite=False):
  f"{dst.stem}_{datetime.now().strftime('%H%M%S')}"
  )
  shutil.copytree(src, dst)
- print(f"\n Done! copy to {dst}\n")
+ print(f"\n Done! copy to {dst}\n") if verbose else None

  except Exception as e:
  logging.error(f"Failed {e}")
@@ -4442,7 +4505,7 @@ def cut(src, dst, overwrite=False):
  return move(src=src, dst=dst, overwrite=overwrite)


- def move(src, dst, overwrite=False):
+ def move(src, dst, overwrite=False, verbose=True):
  try:
  dir_par_dst = os.path.dirname(dst)
  if not os.path.isdir(dir_par_dst):
@@ -4460,7 +4523,7 @@ def move(src, dst, overwrite=False):
  f"{dst.stem}_{datetime.now().strftime('_%H%M%S')}{dst.suffix}"
  )
  shutil.move(src, dst)
- print(f"\n Done! moved to {dst}\n")
+ print(f"\n Done! moved to {dst}\n") if verbose else None
  except Exception as e:
  logging.error(f"Failed to move file from {src} to {dst}: {e}")

@@ -4719,6 +4782,19 @@ def figsave(*args, dpi=300):
  plt.savefig(fname, format="emf", dpi=dpi, bbox_inches="tight", pad_inches=0)
  elif ftype.lower() == "fig":
  plt.savefig(fname, format="pdf", bbox_inches="tight", dpi=dpi, pad_inches=0)
+
+ elif ftype.lower() == "ico":
+ # Ensure the image is in a format that can be saved as an icon (e.g., 32x32, 64x64, etc.)
+ if img is None: # If no image is provided, use the matplotlib figure
+ img = plt.figure()
+ img.savefig(fname, dpi=dpi, format="png", bbox_inches="tight")
+ img = Image.open(fname) # Load the saved figure image
+
+ # Resize the image to typical icon sizes and save it as .ico
+ icon_sizes = [(32, 32), (64, 64), (128, 128), (256, 256)]
+ img = img.convert("RGBA") # Ensure it has an alpha channel for transparency
+ img.save(fname, format="ICO", sizes=icon_sizes)
+ print(f"Icon saved @: {fname} with sizes: {icon_sizes}")
  print(f"\nSaved @: dpi={dpi}\n{fname}")
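
The new "ico" branch reuses an img object and PIL's Image from the enclosing figsave scope and writes the icon back to the same fname. A self-contained, Pillow-only sketch of the same idea (render the current figure to PNG, then re-save it as a multi-resolution .ico); file names here are illustrative:

    import matplotlib.pyplot as plt
    from PIL import Image

    plt.plot([0, 1], [0, 1])
    plt.savefig("app_icon.png", dpi=300, bbox_inches="tight")

    icon = Image.open("app_icon.png").convert("RGBA")  # keep an alpha channel
    icon.save("app_icon.ico", format="ICO",
              sizes=[(32, 32), (64, 64), (128, 128), (256, 256)])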
@@ -5497,7 +5573,6 @@ def detect_angle(image, by="median", template=None):
  print(f"Unknown method {by}: supported methods: {methods}")
  return 0

-
  def imgsets(img,
  auto:bool=True,
  size=None,
@@ -5506,7 +5581,9 @@ def imgsets(img,
  show_axis:bool=False,
  plot_:bool=True,
  verbose:bool=False,
- **kwargs):
+ model:str="isnet-general-use",
+ **kwargs,
+ ):
  """
  Apply various enhancements and filters to an image using PIL's ImageEnhance and ImageFilter modules.

@@ -5562,6 +5639,26 @@ def imgsets(img,
  "BOX_BLUR",
  "MEDIAN_FILTER",
  ]
+ # *Rembg is a tool to remove images background.
+ # https://github.com/danielgatis/rembg
+ rem_models = {
+ "u2net": "general use cases.",
+ "u2netp": "A lightweight version of u2net model.",
+ "u2net_human_seg": "human segmentation.",
+ "u2net_cloth_seg": "Cloths Parsing from human portrait. Here clothes are parsed into 3 category: Upper body, Lower body and Full body.",
+ "silueta": "Same as u2net but the size is reduced to 43Mb.",
+ "isnet-general-use": "A new pre-trained model for general use cases.",
+ "isnet-anime": "A high-accuracy segmentation for anime character.",
+ "sam": "any use cases.",
+ "birefnet-general": "general use cases.",
+ "birefnet-general-lite": "A light pre-trained model for general use cases.",
+ "birefnet-portrait": "human portraits.",
+ "birefnet-dis": "dichotomous image segmentation (DIS).",
+ "birefnet-hrsod": "high-resolution salient object detection (HRSOD).",
+ "birefnet-cod": "concealed object detection (COD).",
+ "birefnet-massive": "A pre-trained model with massive dataset.",
+ }
+ models_support_rem=list(rem_models.keys())
  str_usage="""
  imgsets(dir_img, auto=1, color=1.5, plot_=0)
  imgsets(dir_img, color=2)
@@ -5569,6 +5666,7 @@ def imgsets(img,
  imgsets(dir_img, contrast=0, color=1.2, plot_=0)
  imgsets(get_clip(), flip="tb")# flip top and bottom
  imgsets(get_clip(), contrast=1, rm=[100, 5, 2]) #'foreground_threshold', 'background_threshold' and 'erode_structure_size'
+ imgsets(dir_img, rm="birefnet-portrait") # with using custom model
  """
  if run_once_within():
  print(str_usage)
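
imgsets now takes a model keyword (default "isnet-general-use") and a rem_models catalogue, so the rembg session can be chosen either via model= or by passing the model name directly to rm=. A short sketch of both spellings, assuming imgsets returns the processed image as its other branches suggest (the image path is illustrative):

    from py2ls.ips import imgsets

    # pick the background-removal model explicitly, keep rm=True for plain removal
    img_clean = imgsets("portrait.jpg", rm=True, model="birefnet-portrait", plot_=False)

    # or pass the model name straight to rm, as in the usage string above
    img_clean = imgsets("portrait.jpg", rm="birefnet-portrait", plot_=False)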
@@ -5577,28 +5675,7 @@ def imgsets(img,
  # adjust gama value
  inv_gamma = 1.0 / gamma
  lut = [int((i / float(v_max)) ** inv_gamma * int(v_max)) for i in range(int(v_max))]
- return lut #image.point(lut)
-
- def confirm_rembg_models(model_name):
- models_support = [
- "u2net",
- "u2netp",
- "u2net_human_seg",
- "u2net_cloth_seg",
- "silueta",
- "isnet-general-use",
- "isnet-anime",
- "sam",
- ]
- if model_name in models_support:
- print(f"model_name: {model_name}")
- return model_name
- else:
- print(
- f"{model_name} cannot be found, check the name:{models_support}, default('isnet-general-use') has been used"
- )
- return "isnet-general-use"
-
+ return lut #image.point(lut)
  def auto_enhance(img):
  """
  Automatically enhances the image based on its characteristics, including brightness,
@@ -5701,7 +5778,7 @@ def imgsets(img,
  kwargs = {**auto_enhance(img_update), **kwargs}
  params=["sharp","color","contrast","bright","crop","rotate",'size',"resize",
  "thumbnail","cover","contain","filter","fit","pad",
- "rem","rm","back","bg_color","cut",'gamma','flip']
+ "rem","rm","back","bg_color","cut","gamma","flip","booster"]
  for k, value in kwargs.items():
  k = strcmp(k, params)[0] # correct the param name
  if "shar" in k.lower():
@@ -5722,7 +5799,7 @@ def imgsets(img,
  img_update = ImageOps.autocontrast(img_update)
  print("autocontrasted")
  except Exception as e:
- print(f"Failed 'autocontrasted':{e}")
+ print(f"Failed 'auto-contrasted':{e}")
  elif "bri" in k.lower():
  enhancer = ImageEnhance.Brightness(img_update)
  img_update = enhancer.enhance(value)
@@ -5765,9 +5842,14 @@ def imgsets(img,
  img_update = ImageOps.pad(img_update, size=value)
  elif "rem" in k.lower() or "rm" in k.lower() or "back" in k.lower():
  from rembg import remove, new_session
-
+ if verbose:
+ preview(rem_models)
+
+ print(f"supported modles: {models_support_rem}")
+ model=strcmp(model, models_support_rem)[0]
+ session = new_session(model)
  if isinstance(value, bool):
- session = new_session("isnet-general-use")
+ print(f"using model:{model}")
  img_update = remove(img_update, session=session)
  elif value and isinstance(value, (int, float, list)):
  if verbose:
@@ -5779,14 +5861,14 @@ def imgsets(img,
  img_update = remove(
  img_update,
  alpha_matting=True,
- alpha_matting_background_threshold=value,
+ alpha_matting_background_threshold=value, session=session
  )
  elif 2 <= len(value) < 3:
  img_update = remove(
  img_update,
  alpha_matting=True,
  alpha_matting_background_threshold=value[0],
- alpha_matting_foreground_threshold=value[1],
+ alpha_matting_foreground_threshold=value[1], session=session
  )
  elif 3 <= len(value) < 4:
  img_update = remove(
@@ -5794,17 +5876,16 @@ def imgsets(img,
  alpha_matting=True,
  alpha_matting_background_threshold=value[0],
  alpha_matting_foreground_threshold=value[1],
- alpha_matting_erode_size=value[2],
+ alpha_matting_erode_size=value[2], session=session
  )
  elif isinstance(value, tuple): # replace the background color
  if len(value) == 3:
  value += (255,)
- img_update = remove(img_update, bgcolor=value)
+ img_update = remove(img_update, bgcolor=value, session=session)
  elif isinstance(value, str):
- if confirm_rembg_models(value):
- img_update = remove(img_update, session=new_session(value))
- else:
- img_update = remove(img_update)
+ # use custom model
+ print(f"using model:{strcmp(value, models_support_rem)[0]}")
+ img_update = remove(img_update, session=new_session(strcmp(value, models_support_rem)[0]))
  elif "bg" in k.lower() and "color" in k.lower():
  from rembg import remove

@@ -5814,7 +5895,33 @@ def imgsets(img,
  if len(value) == 3:
  value += (255,)
  img_update = remove(img_update, bgcolor=value)
-
+ elif 'boost' in k.lower():
+ import torch
+ from realesrgan import RealESRGANer
+ if verbose:
+ print("Applying Real-ESRGAN for image reconstruction...")
+ if isinstance(value, bool):
+ scale=4
+ elif isinstance(value, (float, int)):
+ scale=value
+ else:
+ scale=4
+
+ # try:
+ device = "cuda" if torch.cuda.is_available() else "cpu"
+ dir_curr_script = os.path.dirname(os.path.abspath(__file__))
+ model_path = dir_curr_script + "/data/RealESRGAN_x4plus.pth"
+ model_RealESRGAN = RealESRGANer(device=device,
+ scale=scale,
+ model_path=model_path,
+ model="RealESRGAN_x4plus"
+ )
+ # https://github.com/xinntao/Real-ESRGAN?tab=readme-ov-file#python-script
+
+ img_update = model_RealESRGAN.enhance(np.array(img_update))[0]
+ # except Exception as e:
+ # print(f"Failed to apply Real-ESRGAN: {e}")
+
  # elif "ga" in k.lower() and "m" in k.lower():
  # img_update = gamma_correction(img_update, gamma=value)
  # Display the image if requested
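
The new booster branch runs Real-ESRGAN super-resolution on the working image, loading the RealESRGAN_x4plus.pth weights that this release bundles under py2ls/data (see the RECORD change at the end). A hedged usage sketch, assuming torch and the realesrgan package are installed and that imgsets returns the processed image as wired above:

    from py2ls.ips import imgsets

    # upscale with the bundled RealESRGAN_x4plus weights (scale defaults to 4)
    img_big = imgsets("photo.jpg", booster=True, plot_=False)

    # or pass a numeric scale factor
    img_big = imgsets("photo.jpg", booster=4, plot_=False)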
@@ -10715,4 +10822,227 @@ def mouse(
  # "Image not found. Ensure the image is visible and parameters are correct."
  # )
  # except Exception as e:
- # print(f"An error occurred: {e}")
+ # print(f"An error occurred: {e}")
+
+
+
+ def py2installer(
+ script_path: str=None,
+ flatform:str="mingw64",
+ output_dir: str = "dist",
+ icon_path: str = None,
+ extra_data: list = None,
+ hidden_imports: list = None,
+ plugins:list=None,
+ use_nuitka: bool = True,
+ onefile: bool = True,
+ console: bool = True,
+ clean_build: bool = False,
+ additional_args: list = None,
+ verbose: bool = True,
+ use_docker: bool = False,
+ docker_image: str = "python:3.12-slim",
+ ):
+ """
+ to package Python scripts into standalone application.
+
+ script_path (str): Path to the Python script to package.
+ output_dir (str): Directory where the executable will be stored.
+ icon_path (str): Path to the .ico file for the executable icon.
+ extra_data (list): List of additional data files or directories in "source:dest" format.
+ hidden_imports (list): List of hidden imports to include.
+ plugins (list): List of plugins imports to include.e.g., 'tk-inter'
+ use_nuitka (bool): Whether to use Nuitka instead of PyInstaller.
+ onefile (bool): If True, produces a single executable file.
+ console (bool): If False, hides the console window (GUI mode).
+ clean_build (bool): If True, cleans previous build and dist directories.
+ additional_args (list): Additional arguments for PyInstaller/Nuitka.
+ verbose (bool): If True, provides detailed logs.
+ use_docker (bool): If True, uses Docker to package the script.
+ docker_image (str): Docker image to use for packaging.
+
+ """
+
+ import os
+ import sys
+ import shutil
+ import subprocess
+ import sys
+ import glob
+ from pathlib import Path
+ if run_once_within():
+ usage_str="""
+ # build locally
+ py2installer(
+ script_path="update_tab.py",
+ output_dir="dist",
+ icon_path="icon4app.ico",
+ extra_data=["dat/*.xlsx:dat"],
+ hidden_imports=["msoffcrypto", "tkinter", "pandas", "numpy"],
+ onefile=True,
+ console=False,
+ clean_build=True,
+ verbose=True,
+ )
+ # build via docker
+ py2installer(
+ "my_script.py",
+ output_dir="dist",
+ onefile=True,
+ clean_build=True,
+ use_docker=True,
+ docker_image="python:3.12-slim"
+ )
+ """
+ print(usage_str)
+ if verbose:
+ return
+ else:
+ pass
+ # Check if the script path exists
+ script_path = Path(script_path)
+ if not script_path.exists():
+ raise FileNotFoundError(f"Script '{script_path}' not found.")
+
+ # Clean build and dist directories if requested
+ if clean_build:
+ for folder in ["build", "dist"]:
+ folder_path = Path(folder)
+ if folder_path.exists():
+ shutil.rmtree(folder_path, ignore_errors=True)
+ # Recreate the folders
+ for folder in ["build", "dist"]:
+ folder_path = Path(folder)
+ folder_path.mkdir(parents=True, exist_ok=True)
+
+
+ if use_docker:
+ # Ensure Docker is installed
+ try:
+ subprocess.run(["docker", "--version"], check=True, capture_output=True, text=True)
+ except FileNotFoundError:
+ raise EnvironmentError("Docker is not installed or not in the PATH.")
+
+ # Prepare Docker volume mappings
+ script_dir = script_path.parent.resolve()
+ dist_path = Path(output_dir).resolve()
+ volumes = [
+ f"{script_dir}:/app:rw",
+ f"{dist_path}:/output:rw",
+ ]
+ docker_cmd = [
+ "docker", "run", "--rm",
+ "-v", volumes[0],
+ "-v", volumes[1],
+ docker_image,
+ "bash", "-c",
+ ]
+
+ # Build the packaging command inside the container
+ cmd = ["nuitka"] if use_nuitka else ["pyinstaller"]
+ if onefile:
+ cmd.append("--onefile")
+ if not console:
+ cmd.append("--windowed")
+ cmd.extend(["--distpath", "/output"])
+ if icon_path:
+ cmd.extend(["--icon", f"/app/{Path(icon_path).name}"])
+ if extra_data:
+ for data in extra_data:
+ cmd.extend(["--add-data", f"/app/{data}"])
+ if hidden_imports:
+ for hidden in hidden_imports:
+ cmd.extend(["--hidden-import", hidden])
+ if additional_args:
+ cmd.extend(additional_args)
+ cmd.append(f"/app/{script_path.name}")
+
+ # Full command to execute inside the container
+ docker_cmd.append(" ".join(cmd))
+
+ if verbose:
+ print(f"Running Docker command: {' '.join(docker_cmd)}")
+
+ # Run Docker command
+ try:
+ subprocess.run(
+ docker_cmd,
+ capture_output=not verbose,
+ text=True,
+ check=True,
+ )
+ except subprocess.CalledProcessError as e:
+ print(f"Error during Docker packaging:\n{e.stderr}", file=sys.stderr)
+ raise
+ else:
+ # Handle local packaging (native build)
+ cmd = ["nuitka"]
+ cmd.append("--standalone") # Make sure to use --standalone for independent environments
+ if 'min' in flatform.lower():
+ cmd.append("--mingw64")
+ if onefile:
+ cmd.append("--onefile")
+ if not console:
+ # cmd.append("--windows-disable-console") # Disabled based on your request
+ pass
+
+ cmd.extend([f"--output-dir={output_dir}"]) # Correct the space issue here
+ if icon_path:
+ icon_path = Path(icon_path)
+ if not icon_path.exists():
+ raise FileNotFoundError(f"Icon file '{icon_path}' not found.")
+ cmd.extend([f"--windows-icon-from-ico={icon_path}"])
+
+ if extra_data:
+ for data in extra_data:
+ if "*" in data:
+ matches = glob.glob(data.split(":")[0])
+ for match in matches:
+ dest = data.split(":")[1]
+ cmd.extend(
+ [
+ "--include-data-file=" if use_nuitka else "--add-data",
+ f"{match}:{dest}",
+ ]
+ )
+ else:
+ cmd.extend(
+ ["--include-data-file=" if use_nuitka else "--add-data", data]
+ )
+
+ if hidden_imports:
+ cmd.extend([f"--nofollow-import-to={','.join(hidden_imports)}"])
+
+ if plugins:
+ for plugin in plugins:
+ cmd.extend([f"--plugin-enable={plugin}"])
+
+ if additional_args:
+ cmd.extend(additional_args)
+
+ # Add the script path (final positional argument)
+ cmd.append(str(script_path))
+
+ # Ensure Windows shell compatibility
+ shell_flag = sys.platform.startswith("win")
+
+ # Run the command
+ if verbose:
+ print(f"Running command: {' '.join(cmd)}")
+ try:
+ result = subprocess.run(
+ cmd,
+ capture_output=not verbose,
+ text=True,
+ shell=shell_flag,
+ check=True,
+ )
+ if verbose:
+ print(result.stdout)
+ except subprocess.CalledProcessError as e:
+ print(f"Error during packaging:\n{e.stderr}", file=sys.stderr)
+ print(" ".join(cmd))
+ raise
+
+ print("\nPackaging complete. Check the output directory for the executable.")
+
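
In the local (non-Docker) branch, py2installer always assembles a Nuitka command: --standalone plus --onefile, --output-dir, --windows-icon-from-ico and --include-data-file as requested, while hidden_imports (despite the name) is mapped to --nofollow-import-to, i.e. those modules are excluded from the build. Roughly the command list produced for the first usage example above (illustrative; the data-file pairs depend on what dat/*.xlsx actually matches):

    cmd = [
        "nuitka", "--standalone",
        "--mingw64",   # from the default flatform="mingw64"
        "--onefile",
        "--output-dir=dist",
        "--windows-icon-from-ico=icon4app.ico",
        # one "--include-data-file=" / "<match>:dat" pair per glob match
        "--nofollow-import-to=msoffcrypto,tkinter,pandas,numpy",
        "update_tab.py",
    ]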
py2ls/netfinder.py CHANGED
@@ -333,32 +333,6 @@ def parse_cookies(cookies_str):

  return cookies_dict

- class FetchSpider(scrapy.Spider):
- name = "fetch_spider"
-
- def __init__(self, url, parser="html.parser", cookies=None, headers=None, *args, **kwargs):
- super(FetchSpider, self).__init__(*args, **kwargs)
- self.start_urls = [url]
- self.cookies = cookies
- self.headers = headers
- self.parser = parser
-
- def start_requests(self):
- for url in self.start_urls:
- yield scrapy.Request(
- url,
- cookies=self.cookies,
- headers=self.headers,
- callback=self.parse
- )
-
- def parse(self, response):
- # Use the desired parser (default: html.parser)
- from bs4 import BeautifulSoup
- soup = BeautifulSoup(response.text, self.parser)
- yield {"content": soup}
-
-
  def fetch_scrapy(
  url,
  parser="html.parser",
@@ -367,7 +341,7 @@ def fetch_scrapy(
  settings=None,
  ):
  """
- Fetches content using Scrapy.
+ Fetches content using Scrapy with proper reactor handling.

  Args:
  url (str): The URL to scrape.
@@ -380,9 +354,10 @@ def fetch_scrapy(
  dict: Parsed content as a dictionary.
  """
  from scrapy.utils.project import get_project_settings
- from scrapy.crawler import CrawlerProcess
+ from scrapy.crawler import CrawlerRunner
  from scrapy.signalmanager import dispatcher
  from scrapy import signals
+ from twisted.internet import reactor, defer
  import scrapy

  # Container for scraped content
@@ -403,19 +378,30 @@ def fetch_scrapy(
  }
  )

- # Initialize and configure Scrapy process
- process = CrawlerProcess(settings=process_settings)
+ # Connect item scraped signal
  dispatcher.connect(handle_item, signal=signals.item_scraped)

- # Start the Scrapy crawl
- process.crawl(
- FetchSpider,
- url=url,
- parser=parser,
- cookies=cookies,
- headers=headers,
- )
- process.start() # Blocks until all crawls are finished
+ # Asynchronous Twisted function
+ @defer.inlineCallbacks
+ def crawl():
+ runner = CrawlerRunner(settings=process_settings)
+ yield runner.crawl(
+ FetchSpider,
+ url=url,
+ parser=parser,
+ cookies=cookies,
+ headers=headers,
+ )
+ reactor.stop()
+
+ # Start the reactor if not already running
+ if not reactor.running:
+ crawl()
+ reactor.run() # Blocks until the crawl finishes
+ else:
+ # Run the crawl if the reactor is already running
+ d = crawl()
+ d.addBoth(lambda _: reactor.stop())

  # Return the first scraped content or None if empty
  return content[0] if content else None
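
This rewrite replaces CrawlerProcess with a CrawlerRunner driven through the Twisted reactor, so the crawl can also be scheduled when a reactor is already running (keeping in mind that a Twisted reactor can only be started once per process). A minimal usage sketch:

    from py2ls.netfinder import fetch_scrapy

    # returns the first scraped item (the parsed page content) or None
    content = fetch_scrapy("https://example.com", parser="html.parser")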
@@ -654,7 +640,11 @@ def fetch_all(
  "COOKIES_ENABLED": True if cookies else False,
  "LOG_LEVEL": "WARNING", # Reduce log verbosity
  }
- content=fetch_scrapy(url, parser=parser, cookies=cookies, headers=headers, settings=settings)
+ content=fetch_scrapy(url,
+ parser=parser,
+ cookies=cookies,
+ headers=headers,
+ settings=settings)
  return parser, content

  except requests.RequestException as e:
py2ls/ocr.py CHANGED
@@ -9,8 +9,6 @@ from py2ls.ips import (
  isa
  )
  import logging
- #logging.getLogger("ppocr").setLevel(logging.ERROR)
- logging.getLogger("ppocr").setLevel(logging.WARNING)

  """
  Optical Character Recognition (OCR)
@@ -563,8 +561,8 @@ def get_text(
  font=cv2.FONT_HERSHEY_SIMPLEX,# draw_box
  fontsize=8,# draw_box
  figsize=[10,10],
- box_color = (0, 255, 0), # draw_box
- fontcolor = (0, 0, 0),# draw_box
+ box_color = (0, 255, 0), # draw_box
+ fontcolor = (116,173,233), # draw_box
  bg_color=(133, 203, 245, 100),# draw_box
  usage=False,
  **kwargs,
@@ -618,7 +616,6 @@ def get_text(
  image = cv2.imread(image)
  elif isa(image,'image'):
  cvt_cmp=False
- print(1)
  image = np.array(image)
  else:
  raise ValueError(f"not support image with {type(image)} type")
@@ -704,6 +701,8 @@ def get_text(
  return detections
  elif "pad" in model.lower():
  from paddleocr import PaddleOCR
+ logging.getLogger("ppocr").setLevel(logging.ERROR)
+
  lang=strcmp(lang, ['ch','en','french','german','korean','japan'])[0]
  ocr = PaddleOCR(
  use_angle_cls=True,
{py2ls-0.2.4.33.dist-info → py2ls-0.2.4.35.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: py2ls
- Version: 0.2.4.33
+ Version: 0.2.4.35
  Summary: py(thon)2(too)ls
  Author: Jianfeng
  Author-email: Jianfeng.Liu0413@gmail.com
@@ -14,7 +14,7 @@ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
- Provides-Extra: extr
+ Provides-Extra: full
  Requires-Dist: CacheControl (>=0.13.1)
  Requires-Dist: Cython (>=3.0.10)
  Requires-Dist: Deprecated (>=1.2.14)
@@ -85,7 +85,7 @@ Requires-Dist: h2 (>=3.2.0)
  Requires-Dist: h5py (>=3.11.0)
  Requires-Dist: hpack (>=3.0.0)
  Requires-Dist: hstspreload (>=2024.7.1)
- Requires-Dist: httpcore (>=0.9.1)
+ Requires-Dist: httpcore (>=0.9.0)
  Requires-Dist: httpx (>=0.13.3)
  Requires-Dist: humanfriendly (>=10.0)
  Requires-Dist: hyperframe (>=5.2.0)
@@ -123,6 +123,7 @@ Requires-Dist: mne (>=1.7.1)
  Requires-Dist: more-itertools (>=10.3.0)
  Requires-Dist: mpmath (>=1.3.0)
  Requires-Dist: msgpack (>=1.0.8)
+ Requires-Dist: msoffcrypto-tool (>=5.4.2)
  Requires-Dist: mtscomp (>=1.0.2)
  Requires-Dist: nbclient (>=0.10.0)
  Requires-Dist: nbconvert (>=7.16.4)
@@ -131,6 +132,7 @@ Requires-Dist: neo (>=0.13.1)
  Requires-Dist: nest-asyncio (>=1.6.0)
  Requires-Dist: networkx (>=3.3)
  Requires-Dist: nltk (>=3.8.1)
+ Requires-Dist: nuitka (>=2.5.9)
  Requires-Dist: numba (>=0.59.1)
  Requires-Dist: numcodecs (>=0.13.0)
  Requires-Dist: numerizer (>=0.2.3)
@@ -139,6 +141,7 @@ Requires-Dist: onnxruntime (>=1.18.1)
  Requires-Dist: opencv-contrib-python (>=4.10.0.84)
  Requires-Dist: opencv-python (>=4.10.0.84)
  Requires-Dist: opencv-python-headless (>=4.10.0.84)
+ Requires-Dist: openpyxl (>=3.1.5)
  Requires-Dist: outcome (>=1.3.0.post0)
  Requires-Dist: packaging (>=24.1)
  Requires-Dist: pandas (>=2.2.2)
@@ -199,6 +202,7 @@ Requires-Dist: rpds-py (>=0.18.1)
  Requires-Dist: scikit-image (>=0.23.2)
  Requires-Dist: scikit-learn (>=1.5.1)
  Requires-Dist: scipy (>=1.14.0)
+ Requires-Dist: scrapy (>=2.12.0)
  Requires-Dist: seaborn (>=0.13.2)
  Requires-Dist: selenium (>=4.23.1)
  Requires-Dist: setuptools (>=70.3.0)
{py2ls-0.2.4.33.dist-info → py2ls-0.2.4.35.dist-info}/RECORD RENAMED
@@ -182,6 +182,7 @@ py2ls/chat.py,sha256=Yr22GoIvoWhpV3m4fdwV_I0Mn77La346_ymSinR-ORA,3793
  py2ls/corr.py,sha256=RbOaJIPLCHJtUm5SFi_4dCJ7VFUPWR0PErfK3K26ad4,18243
  py2ls/correlators.py,sha256=RbOaJIPLCHJtUm5SFi_4dCJ7VFUPWR0PErfK3K26ad4,18243
  py2ls/data/.DS_Store,sha256=0vLN27VSbtJbHNG5DG-oGqs9h_ubWh3xHz-baHnyfck,6148
+ py2ls/data/RealESRGAN_x4plus.pth,sha256=T6DTiQX3WsButJp5UbQmZwAhvjAYJl_RkdISXfnWgvE,67040989
  py2ls/data/db2ls_sql_chtsht.json,sha256=ls9d7Sm8TLeujanWHfHlWhU85Qz1KnAizO_9X3wUH7E,6933
  py2ls/data/docs_links.json,sha256=kXgbbWo0b8bfV4n6iuuUNLnZipIyLzokUO6Lzmf7nO4,101829
  py2ls/data/email/email_html_template.html,sha256=UIg3aixWfdNsvVx-j2dX1M5N3G-6DgrnV1Ya1cLjiUQ,2809
@@ -242,18 +243,18 @@ py2ls/export_requirements.py,sha256=x2WgUF0jYKz9GfA1MVKN-MdsM-oQ8yUeC6Ua8oCymio,
  py2ls/fetch_update.py,sha256=9LXj661GpCEFII2wx_99aINYctDiHni6DOruDs_fdt8,4752
  py2ls/freqanalysis.py,sha256=F4218VSPbgL5tnngh6xNCYuNnfR-F_QjECUUxrPYZss,32594
  py2ls/ich2ls.py,sha256=3E9R8oVpyYZXH5PiIQgT3CN5NxLe4Dwtm2LwaeacE6I,21381
- py2ls/ips.py,sha256=Fap4Iqt3QCq-td6CSz2dPBwI65TLfTRApy2QYFO7TIY,403036
+ py2ls/ips.py,sha256=2tFfYrkRXdR-jwWYK1goE2zhSfG_iHhkDc73gLje6hI,416145
  py2ls/ml2ls.py,sha256=I-JFPdikgEtfQjhv5gBz-QSeorpTJI_Pda_JwkTioBY,209732
  py2ls/mol.py,sha256=AZnHzarIk_MjueKdChqn1V6e4tUle3X1NnHSFA6n3Nw,10645
- py2ls/netfinder.py,sha256=6XZWxFCo5PNOVKdr5qGL_250AoKLfz6CuVmhGkDwkFM,69266
+ py2ls/netfinder.py,sha256=OhqD3S9PuwweL2013D-q4GNP1WvJjuYfZzq5BZgGddE,68980
  py2ls/nl2ls.py,sha256=UEIdok-OamFZFIvvz_PdZenu085zteMdaJd9mLu3F-s,11485
- py2ls/ocr.py,sha256=xNNQd_cHjb4HLSZvRQPHJGPm29de6urZhoOddBKnV8M,33989
+ py2ls/ocr.py,sha256=WDFvx1oVxXjlyyFs2a6pizdu4-jEL5pTFLP960-HbyM,33939
  py2ls/plot.py,sha256=7C1x6KX0Fvmbll4IStIzlNjxLnrRBNSPaLJRgGjF3Ok,239172
  py2ls/setuptools-70.1.0-py3-none-any.whl,sha256=2bi3cUVal8ip86s0SOvgspteEF8SKLukECi-EWmFomc,882588
  py2ls/sleep_events_detectors.py,sha256=bQA3HJqv5qnYKJJEIhCyhlDtkXQfIzqksnD0YRXso68,52145
  py2ls/stats.py,sha256=qBn2rJmNa_QLLUqjwYqXUlGzqmW94sgA1bxJU2FC3r0,39175
  py2ls/translator.py,sha256=77Tp_GjmiiwFbEIJD_q3VYpQ43XL9ZeJo6Mhl44mvh8,34284
  py2ls/wb_detector.py,sha256=7y6TmBUj9exCZeIgBAJ_9hwuhkDh1x_-yg4dvNY1_GQ,6284
- py2ls-0.2.4.33.dist-info/METADATA,sha256=YHx2Yww0F2wy0fPxBKC-pAkpLBq9db2D-McYtW7r8FI,20332
- py2ls-0.2.4.33.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
- py2ls-0.2.4.33.dist-info/RECORD,,
+ py2ls-0.2.4.35.dist-info/METADATA,sha256=j9QC5tXPpOp4VTvG6onwy3BaoPw_ROq0KGPGw2bo8Gk,20473
+ py2ls-0.2.4.35.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+ py2ls-0.2.4.35.dist-info/RECORD,,