py2ls 0.2.6.2__py3-none-any.whl → 0.2.6.3__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
py2ls/ips.py CHANGED
@@ -11,6 +11,7 @@ from datetime import datetime, date, time
11
11
  import re # built-in
12
12
  import stat
13
13
  import platform
14
+ import subprocess
14
15
 
15
16
  from typing import Dict, List, Optional, Union, Any, Tuple, Literal,Callable
16
17
  from regex import X
@@ -32,6 +33,15 @@ except ImportError:
32
33
  pkg_resources = None
33
34
  import glob # built-in
34
35
  import pkg_resources # built-in
36
+ import importlib
37
+ import inspect
38
+ import pkgutil
39
+ import pytest
40
+ try:
41
+ import importlib.metadata as metadata # Python 3.8+
42
+ except ImportError:
43
+ import importlib_metadata as metadata # For older versions via backport
44
+
35
45
  class PkgManager:
36
46
  """
37
47
  PkgManager.uninstall("py2ls")
@@ -102,7 +112,7 @@ class PkgManager:
102
112
  subprocess.run(["pip", "uninstall", "-y", pkg], check=True)
103
113
 
104
114
  if make_log:
105
- log_path = os.path.join(station, f"uninstall_{timestamp}.txt")
115
+ log_path = os.path.join(station, "log_uninstall.txt")
106
116
  with open(log_path, "w") as f:
107
117
  f.write(f"# Uninstallation log created at {timestamp}\n")
108
118
  f.write(f"# Mode: {mode}, Keywords: {kw}\n\n")
@@ -237,7 +247,99 @@ class PkgManager:
237
247
  else:
238
248
  print("Invalid selection. Please try again.")
239
249
  except ValueError:
240
- print("Please enter a valid number.")
250
+ print("Please enter a valid number.")
251
+ @staticmethod
252
+ def listfunc(
253
+ where: Union[str, Any] = None,
254
+ query: Optional[str] = None,
255
+ return_output: bool = False,
256
+ show_all: bool = False,
257
+ include_dunder: bool = False,
258
+ only_defined: bool = True,
259
+ verbose: bool = False
260
+ ) -> Optional[Dict[str, Any]]:
261
+ """
262
+ Recursively list functions defined in a package/module and its submodules.
263
+ If `where=None`, returns the installed pip packages instead.
264
+
265
+ Args:
266
+ where (str or module or None): Module/package to inspect, or None for full pip list.
267
+ query (str): Optional search string for fuzzy matching.
+ return_output (bool): Return the results dict instead of only printing it.
268
+ show_all (bool): Show all callables including those starting with '_'.
269
+ include_dunder (bool): Include dunder (__) methods like __init__.
270
+ only_defined (bool): Only show functions actually defined in the module.
271
+ verbose (bool): Show detailed skip/load error messages.
272
+
273
+ Returns:
274
+ dict or None: Nested dictionary of module names and their function lists (or the pip list); returned only when return_output=True.
275
+ """
276
+ if where is None:
277
+ # Return pip list instead
278
+ print("📦 Installed pip packages:")
279
+ pip_packages = {dist.metadata['Name']: dist.version for dist in metadata.distributions()}
280
+ if query:
281
+ func_OI = strcmp(query, list(pip_packages.keys()))[0]
282
+ print(f" - {func_OI}=={pip_packages[func_OI]}")
283
+ return {func_OI: pip_packages[func_OI]}
284
+ for name, version in sorted(pip_packages.items()):
285
+ print(f" - {name}=={version}")
286
+ return pip_packages
287
+ if isinstance(where, str):
288
+ try:
289
+ mod = importlib.import_module(where)
290
+ except ModuleNotFoundError:
291
+ print(f"Module '{where}' not found.")
292
+ return {}
293
+ else:
294
+ mod = where
295
+
296
+ root_name = mod.__name__
297
+ results = {}
298
+
299
+ def list_functions_in_module(module, module_name) -> List[str]:
300
+ funcs = []
301
+ for name in dir(module):
302
+ attr = getattr(module, name)
303
+ if callable(attr):
304
+ if not show_all:
305
+ if name.startswith("__") and not include_dunder:
306
+ continue
307
+ if name.startswith("_") and not name.startswith("__"):
308
+ continue
309
+ if only_defined and getattr(attr, "__module__", "") != module_name:
310
+ continue
311
+ funcs.append(name)
312
+ if query:
313
+ from difflib import get_close_matches
314
+ funcs = get_close_matches(query, funcs, n=10, cutoff=0.3)
315
+ return sorted(set(funcs))
316
+
317
+ def walk_package(mod) -> Dict[str, Any]:
318
+ subresults = {}
319
+ modname = mod.__name__
320
+ funcs = list_functions_in_module(mod, modname)
321
+ if funcs:
322
+ print(f"\n🎒: {modname}")
323
+ for f in funcs:
324
+ print(f" - {f}")
325
+ subresults[modname] = funcs
326
+
327
+ if hasattr(mod, '__path__'): # If it's a package
328
+ for finder, name, ispkg in pkgutil.walk_packages(mod.__path__, prefix=mod.__name__ + "."):
329
+ try:
330
+ submod = importlib.import_module(name)
331
+ submod_result = walk_package(submod)
332
+ subresults.update(submod_result)
333
+ except pytest.skip.Exception as e:
334
+ if verbose:
335
+ print(f"正常跳过: Skipped test module {name}: {e}")
336
+ except Exception as e:
337
+ if verbose:
338
+ print(f"因错跳过 {name}: {e}")
339
+ return subresults
340
+
341
+ results[root_name] = walk_package(mod)
342
+ return results if return_output else None
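For orientation, a minimal usage sketch of the new PkgManager.listfunc; the module names and query are illustrative, not taken from the diff:

    from py2ls.ips import PkgManager

    PkgManager.listfunc()                             # print the installed pip packages
    PkgManager.listfunc("json", query="load")         # fuzzy-search callables in a module
    tree = PkgManager.listfunc("collections", return_output=True)  # capture the nested dict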
241
343
 
242
344
  def _yaoshi_fernet(mima="mimashigudingde",yan=b"mimashigudingde",verbose=True):
243
345
  import base64
@@ -3361,7 +3463,7 @@ def text2audio(
3361
3463
  sys.exit()
3362
3464
  except SystemExit:
3363
3465
  pass
3364
- elif method.lower() in ["google", "gtts"]:
3466
+ elif method in ["google", "gtts"]:
3365
3467
  from gtts import gTTS
3366
3468
 
3367
3469
  try:
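Dropping .lower() makes the branch case-sensitive, so callers are now expected to pass method names such as "google" or "gtts" already in lowercase. For context, a minimal sketch of the gtts path this condition guards (text and output path are placeholders):

    from gtts import gTTS

    tts = gTTS("hello world", lang="en")  # build the speech object
    tts.save("hello.mp3")                 # write the synthesized audio to disk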
@@ -3392,7 +3494,7 @@ def text2audio(
3392
3494
  print("done")
3393
3495
 
3394
3496
  # from datetime import datetime
3395
- from dateutil import parser
3497
+
3396
3498
  def str2time(
3397
3499
  time_str: str,
3398
3500
  fmt: str = "24",
@@ -4433,15 +4535,52 @@ def paper_size(paper_type_str="a4"):
4433
4535
  if not paper_type:
4434
4536
  paper_type = "a4" # default
4435
4537
  return df[paper_type].tolist()
4538
+
4539
+ def docx2pdf(dir_docx, dir_pdf=None):
4540
+ """
4541
+ Converts .docx to .pdf. Works on Windows using docx2pdf and on Linux/macOS using LibreOffice.
4542
+
4543
+ Parameters:
4544
+ - dir_docx: path to .docx file or directory containing .docx files
4545
+ - dir_pdf: optional output directory; if None, uses same directory as input
4546
+ """
4436
4547
 
4548
+ system = platform.system()
4549
+ is_file = os.path.isfile(dir_docx)
4550
+ is_dir = os.path.isdir(dir_docx)
4437
4551
 
4438
- def docx2pdf(dir_docx, dir_pdf=None):
4439
- from docx2pdf import convert
4552
+ if not is_file and not is_dir:
4553
+ raise FileNotFoundError(f"Input path '{dir_docx}' does not exist.")
4554
+
4555
+ if system == "Windows":
4556
+ try:
4557
+ from docx2pdf import convert
4558
+ except ImportError:
4559
+ raise ImportError("docx2pdf is not installed. Run: pip install docx2pdf")
4560
+
4561
+ convert(dir_docx, dir_pdf) if dir_pdf else convert(dir_docx)
4562
+
4563
+ elif system in {"Linux", "Darwin"}:
4564
+ # Check if libreoffice is available
4565
+ if shutil.which("libreoffice") is None:
4566
+ raise EnvironmentError("LibreOffice is not installed or not in PATH. Install it with 'sudo apt install libreoffice' (Linux) or 'brew install --cask libreoffice' (macOS).")
4567
+
4568
+ # Determine the output directory
4569
+ output_dir = dir_pdf or os.path.dirname(dir_docx) if is_file else dir_docx
4440
4570
 
4441
- if dir_pdf:
4442
- convert(dir_docx, dir_pdf)
4571
+ if is_file:
4572
+ subprocess.run([
4573
+ "libreoffice", "--headless", "--convert-to", "pdf", "--outdir", output_dir, dir_docx
4574
+ ], check=True)
4575
+ elif is_dir:
4576
+ for filename in os.listdir(dir_docx):
4577
+ if filename.lower().endswith(".docx"):
4578
+ full_path = os.path.join(dir_docx, filename)
4579
+ subprocess.run([
4580
+ "libreoffice", "--headless", "--convert-to", "pdf", "--outdir", dir_pdf or dir_docx, full_path
4581
+ ], check=True)
4443
4582
  else:
4444
- convert(dir_docx)
4583
+ raise OSError(f"Unsupported OS: {system}")
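A usage sketch for the rewritten docx2pdf (paths are illustrative):

    # single file: the PDF is written next to the source document
    docx2pdf("/tmp/report.docx")

    # whole folder, with an explicit output directory (needs LibreOffice on Linux/macOS)
    docx2pdf("/tmp/docs", dir_pdf="/tmp/pdfs")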
4445
4584
 
4446
4585
 
4447
4586
  def img2pdf(dir_img, kind=None, page=None, dir_save=None, page_size="a4", dpi=300):
@@ -18666,12 +18805,6 @@ def py2installer(
18666
18805
  docker_image (str): Docker image to use for packaging.
18667
18806
 
18668
18807
  """
18669
-
18670
- import os
18671
- import sys
18672
- import shutil
18673
- import subprocess
18674
- import sys
18675
18808
  import glob
18676
18809
  from pathlib import Path
18677
18810
 
@@ -19139,131 +19272,390 @@ print("Result of padcat(a, b, c):\n", result2)
19139
19272
  return result
19140
19273
 
19141
19274
 
19142
- # ========== memory cleaner ==========
19143
- import gc
19144
- import os
19145
- import sys
19146
- import psutil
19147
- import platform
19148
- import ctypes
19149
- import subprocess
19150
- import warnings
19275
+ # ========== memory cleaner ==========
19276
+ import gc
19277
+ import psutil
19278
+ import weakref
19151
19279
  import time
19280
+ import inspect
19281
+ import tracemalloc
19282
+ from collections import defaultdict
19152
19283
 
19153
19284
  class MemoryOptimizer:
19154
- def __init__(self, verbose: bool = True, aggressive_mode: bool = True):
19285
+ def __init__(self,
19286
+ verbose: bool = True,
19287
+ aggressive_mode: bool = True,
19288
+ track_leaks: bool = False,
19289
+ max_history: int = 100):
19155
19290
  self.verbose = verbose
19156
19291
  self.aggressive_mode = aggressive_mode
19292
+ self.track_leaks = track_leaks
19293
+ self.max_history = max_history
19157
19294
  self.system = platform.system()
19158
19295
  self.process = psutil.Process(os.getpid())
19159
19296
  self.start_time = time.time()
19160
19297
  self.memory_history = []
19298
+ self.leak_tracker = None
19161
19299
 
19300
+ if track_leaks:
19301
+ self._setup_leak_tracking()
19302
+
19303
+ def _setup_leak_tracking(self):
19304
+ self.leak_tracker = {
19305
+ 'snapshots': [],
19306
+ 'diff_stats': [],
19307
+ 'object_types': defaultdict(int),
19308
+ 'suspected_leaks': []
19309
+ }
19310
+ tracemalloc.start(25)
19311
+
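The leak tracker leaves tracemalloc running with 25 frames of traceback per allocation. A standalone sketch of the snapshot comparison this enables, illustrative only and not part of the class:

    import tracemalloc

    tracemalloc.start(25)                              # keep 25 frames per allocation site
    before = tracemalloc.take_snapshot()
    hoard = [bytearray(1024) for _ in range(10_000)]   # simulate growth
    after = tracemalloc.take_snapshot()
    for stat in after.compare_to(before, "lineno")[:3]:
        print(stat)                                    # largest allocation growth by source line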
19162
19312
  def log(self, msg: str, level: str = "INFO"):
19163
19313
  if self.verbose:
19164
19314
  rss = self.process.memory_info().rss / (1024 ** 2)
19165
19315
  elapsed = time.time() - self.start_time
19166
- print(f"[{level}][{elapsed:.2f}s][{rss:.1f}MB] {msg}")
19316
+ caller = inspect.currentframe().f_back.f_code.co_name
19317
+ print(f"[{level}][{elapsed:.2f}s][{rss:.1f}MB][{caller}] {msg}")
19318
+
19319
+ def collect_garbage(self, generations: List[int] = None) -> Dict[str, Any]:
19320
+ self.log("Starting deep garbage collection...")
19321
+ stats = {
19322
+ 'collected': defaultdict(int),
19323
+ 'garbage_cleared': 0,
19324
+ 'freed_mb': 0
19325
+ }
19167
19326
 
19168
- def collect_garbage(self):
19169
- self.log("Performing deep garbage collection...")
19170
- stats = {}
19171
19327
  before_mem = self.process.memory_info().rss
19172
- for gen in reversed(range(3)):
19328
+
19329
+ if self.aggressive_mode:
19330
+ gc.set_threshold(1, 1, 1)
19331
+ gc.set_debug(gc.DEBUG_SAVEALL)
19332
+
19333
+ gens = generations if generations is not None else [2, 1, 0]
19334
+ for gen in gens:
19173
19335
  collected = gc.collect(gen)
19174
- self.log(f"GC Gen {gen}: Collected {collected}")
19336
+ stats['collected'][f'gen_{gen}'] = collected
19337
+ self.log(f"GC Gen {gen}: Collected {collected} objects")
19338
+
19339
+ stats['garbage_cleared'] = len(gc.garbage)
19175
19340
  gc.garbage.clear()
19341
+
19342
+ self._clear_weakref_caches()
19343
+
19176
19344
  after_mem = self.process.memory_info().rss
19177
19345
  stats['freed_mb'] = (before_mem - after_mem) / (1024 ** 2)
19346
+
19178
19347
  return stats
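A usage sketch of the extended collect_garbage (numbers depend on the running process):

    opt = MemoryOptimizer(verbose=False, aggressive_mode=False)
    stats = opt.collect_garbage(generations=[2, 1, 0])
    print(stats["freed_mb"], dict(stats["collected"]))  # MB released and objects per generation

Note that aggressive_mode also enables gc.DEBUG_SAVEALL, which routes unreachable objects into gc.garbage before they are cleared, so the gentler mode is usually the safer choice in long-running processes.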
19179
19348
 
19180
- def clear_frameworks(self):
19349
+ def _clear_weakref_caches(self):
19350
+ self.log("Clearing weak reference caches...")
19351
+ try:
19352
+ for obj in gc.get_objects():
19353
+ if isinstance(obj, weakref.WeakValueDictionary):
19354
+ obj.clear()
19355
+ except Exception as e:
19356
+ self.log(f"Failed to clear weakref caches: {e}", "WARNING")
19357
+
19358
+ def clear_frameworks(self) -> Dict[str, Any]:
19181
19359
  result = {}
19360
+
19361
+ # PyTorch
19182
19362
  try:
19183
19363
  import torch
19184
19364
  if torch.cuda.is_available():
19185
- self.log("Clearing PyTorch cache...")
19365
+ self.log("Clearing PyTorch CUDA cache...")
19186
19366
  torch.cuda.empty_cache()
19187
19367
  torch.cuda.ipc_collect()
19188
- result['pytorch'] = 'cleared'
19368
+ result['pytorch'] = {
19369
+ 'cuda_cache_cleared': True,
19370
+ 'allocated_mb': torch.cuda.memory_allocated() / (1024 ** 2),
19371
+ 'cached_mb': torch.cuda.memory_reserved() / (1024 ** 2)
19372
+ }
19189
19373
  except Exception as e:
19190
19374
  self.log(f"PyTorch skipped: {e}", "WARNING")
19191
-
19375
+ result['pytorch'] = {'error': str(e)}
19376
+
19377
+ # TensorFlow
19192
19378
  try:
19193
19379
  import tensorflow as tf
19194
19380
  self.log("Clearing TensorFlow session...")
19195
19381
  tf.keras.backend.clear_session()
19196
- result['tensorflow'] = 'cleared'
19382
+ result['tensorflow'] = {'session_cleared': True}
19197
19383
  except Exception as e:
19198
19384
  self.log(f"TensorFlow skipped: {e}", "WARNING")
19199
-
19385
+ result['tensorflow'] = {'error': str(e)}
19386
+
19387
+ # OpenCV
19200
19388
  try:
19201
19389
  import cv2
19202
19390
  self.log("Closing OpenCV windows...")
19203
19391
  cv2.destroyAllWindows()
19204
- result['opencv'] = 'cleared'
19205
- except Exception:
19206
- pass
19392
+ result['opencv'] = {'windows_closed': True}
19393
+ except Exception as e:
19394
+ self.log(f"OpenCV skipped: {e}", "WARNING")
19395
+ result['opencv'] = {'error': str(e)}
19207
19396
 
19397
+ # Matplotlib
19208
19398
  try:
19209
19399
  import matplotlib.pyplot as plt
19210
19400
  self.log("Closing matplotlib figures...")
19211
19401
  plt.close('all')
19212
- result['matplotlib'] = 'cleared'
19213
- except Exception:
19214
- pass
19215
-
19216
- return result
19402
+ result['matplotlib'] = {'figures_closed': True}
19403
+ except Exception as e:
19404
+ self.log(f"Matplotlib skipped: {e}", "WARNING")
19405
+ result['matplotlib'] = {'error': str(e)}
19217
19406
 
19218
- def clear_system_caches(self):
19219
- result = {}
19220
- self.log("Attempting full system cache clearance...")
19407
+ # IPython
19221
19408
  try:
19222
- if self.system == "Linux":
19223
- subprocess.run(["sync"], check=True)
19224
- subprocess.run(["sudo", "sh", "-c", "echo 3 > /proc/sys/vm/drop_caches"], check=True)
19225
- result['linux'] = 'caches dropped'
19226
- elif self.system == "Darwin":
19227
- subprocess.run(["sudo", "purge"], check=True)
19228
- result['macos'] = 'purge run'
19229
- elif self.system == "Windows":
19230
- ctypes.windll.psapi.EmptyWorkingSet(-1)
19231
- if self.aggressive_mode:
19232
- ctypes.windll.kernel32.SetProcessWorkingSetSizeEx(
19233
- -1, ctypes.c_size_t(-1), ctypes.c_size_t(-1), ctypes.c_uint(0x1)
19234
- )
19235
- result['windows'] = 'working set emptied'
19409
+ from IPython import get_ipython
19410
+ ipython = get_ipython()
19411
+ if ipython is not None:
19412
+ self.log("Clearing IPython outputs...")
19413
+ ipython.run_line_magic('reset', '-f')
19414
+ result['ipython'] = {'outputs_cleared': True}
19236
19415
  except Exception as e:
19237
- self.log(f"System cache clearing failed: {e}", "ERROR")
19416
+ self.log(f"IPython skipped: {e}", "WARNING")
19417
+ result['ipython'] = {'error': str(e)}
19418
+
19238
19419
  return result
19420
+
19239
19421
 
19240
- def profile(self) -> Dict[str, Any]:
19422
+ def profile(self, deep: bool = False) -> Dict[str, Any]:
19241
19423
  mem = self.process.memory_info()
19242
19424
  vm = psutil.virtual_memory()
19425
+ swap = psutil.swap_memory()
19426
+
19243
19427
  profile = {
19244
- 'rss_mb': mem.rss / (1024 ** 2),
19245
- 'vms_mb': mem.vms / (1024 ** 2),
19246
- 'used_gb': vm.used / (1024 ** 3),
19247
- 'available_gb': vm.available / (1024 ** 3),
19248
- 'percent': vm.percent,
19428
+ 'timestamp': time.time(),
19429
+ 'process': {
19430
+ 'rss_mb': mem.rss / (1024 ** 2),
19431
+ 'vms_mb': mem.vms / (1024 ** 2),
19432
+ },
19433
+ 'system': {
19434
+ 'used_gb': vm.used / (1024 ** 3),
19435
+ 'available_gb': vm.available / (1024 ** 3),
19436
+ 'percent': vm.percent,
19437
+ 'swap_used_gb': swap.used / (1024 ** 3),
19438
+ 'swap_free_gb': swap.free / (1024 ** 3),
19439
+ },
19440
+ 'gc': {
19441
+ 'objects': len(gc.get_objects()),
19442
+ 'garbage': len(gc.garbage),
19443
+ 'thresholds': gc.get_threshold(),
19444
+ }
19249
19445
  }
19446
+
19447
+ if deep:
19448
+ profile['deep'] = self._deep_memory_analysis()
19449
+
19250
19450
  self.memory_history.append(profile)
19451
+ if len(self.memory_history) > self.max_history:
19452
+ self.memory_history.pop(0)
19453
+
19251
19454
  return profile
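A sketch of reading the richer profile payload (keys as defined above):

    snap = MemoryOptimizer(verbose=False).profile(deep=False)
    print(snap["process"]["rss_mb"], snap["system"]["percent"], snap["gc"]["objects"])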
19252
19455
 
19253
- def optimize(self) -> Dict[str, Any]:
19254
- result = {}
19255
- result['before'] = self.profile()
19256
- result['gc'] = self.collect_garbage()
19257
- result['frameworks'] = self.clear_frameworks()
19258
- result['system'] = self.clear_system_caches()
19259
- result['after'] = self.profile()
19260
- saved = result['before']['rss_mb'] - result['after']['rss_mb']
19261
- result['saved_mb'] = saved
19262
- result['saved_percent'] = (saved / result['before']['rss_mb']) * 100 if result['before']['rss_mb'] else 0
19263
- self.log(f"Optimization complete: Saved {saved:.2f} MB ({result['saved_percent']:.1f}%)", "SUCCESS")
19456
+ def _deep_memory_analysis(self) -> Dict[str, Any]:
19457
+ self.log("Performing deep memory analysis...")
19458
+ type_sizes = defaultdict(int)
+ type_counts = defaultdict(int)
+ for obj in gc.get_objects():
+ try:
+ obj_type = type(obj).__name__
+ type_sizes[obj_type] += sys.getsizeof(obj)
+ type_counts[obj_type] += 1
+ except Exception:
+ continue
+
+ top_types = sorted(type_sizes.items(), key=lambda x: x[1], reverse=True)[:10]
+ # expose the keys that detect_leaks() and memory_report() read from the 'deep' profile
+ return {
+ 'top_object_types': top_types,
+ 'object_counts': dict(type_counts),
+ 'estimated_sizes': {k: v / (1024 ** 2) for k, v in type_sizes.items()},
+ }
19468
+
19469
+
19470
+ def detect_leaks(self, min_growth_mb: float = 5.0) -> Optional[Dict[str, Any]]:
19471
+ """
19472
+ Detect potential memory leaks by comparing snapshots.
19473
+
19474
+ Args:
19475
+ min_growth_mb: Minimum growth in MB to consider a leak
19476
+
19477
+ Returns:
19478
+ Leak detection report or None if no leaks detected
19479
+ """
19480
+ if not self.track_leaks or len(self.memory_history) < 2:
19481
+ return None
19482
+
19483
+ current = self.memory_history[-1]
19484
+ previous = self.memory_history[-2]
19485
+
19486
+ growth_mb = current['process']['rss_mb'] - previous['process']['rss_mb']
19487
+ if growth_mb < min_growth_mb:
19488
+ return None
19489
+
19490
+ leak_report = {
19491
+ 'growth_mb': growth_mb,
19492
+ 'time_elapsed': current['timestamp'] - previous['timestamp'],
19493
+ 'suspected_causes': [],
19494
+ }
19495
+
19496
+ # Try to identify potential causes
19497
+ if 'deep' in current and 'deep' in previous:
19498
+ current_counts = current['deep']['object_counts']
19499
+ previous_counts = previous['deep']['object_counts']
19500
+
19501
+ for obj_type, count in current_counts.items():
19502
+ prev_count = previous_counts.get(obj_type, 0)
19503
+ if count > prev_count * 1.5 and count - prev_count > 100:
19504
+ leak_report['suspected_causes'].append({
19505
+ 'type': obj_type,
19506
+ 'growth': count - prev_count,
19507
+ 'percent_growth': ((count - prev_count) / prev_count * 100) if prev_count else float('inf')
19508
+ })
19509
+
19510
+ if leak_report['suspected_causes']:
19511
+ self.leak_tracker['suspected_leaks'].append(leak_report)
19512
+ return leak_report
19513
+
19514
+ return None
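Leak detection needs track_leaks=True and at least two deep profiles in the history; a hedged sketch, where the list of bytearrays exists only to force measurable RSS growth:

    opt = MemoryOptimizer(verbose=False, track_leaks=True)
    opt.profile(deep=True)
    hoard = [bytearray(1024 * 1024) for _ in range(50)]   # roughly 50 MB of growth
    opt.profile(deep=True)
    report = opt.detect_leaks(min_growth_mb=5.0)
    print(report["growth_mb"] if report else "no growth above threshold")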
19515
+
19516
+ def optimize(self, full: bool = True) -> Dict[str, Any]:
19517
+ """
19518
+ Perform comprehensive memory optimization.
19519
+
19520
+ Args:
19521
+ full: Whether to perform all optimization steps
19522
+
19523
+ Returns:
19524
+ Dictionary with optimization results
19525
+ """
19526
+ result = {
19527
+ 'timestamp': time.time(),
19528
+ 'before': self.profile(deep=self.track_leaks),
19529
+ 'steps': {}
19530
+ }
19531
+
19532
+ # Step 1: Garbage collection
19533
+ result['steps']['gc'] = self.collect_garbage()
19534
+
19535
+ # Step 2: Framework-specific memory clearing
19536
+ result['steps']['frameworks'] = self.clear_frameworks()
19537
+
19538
+ # # Step 3: System-level cache clearing
19539
+ # if full:
19540
+ # result['steps']['system'] = self.clear_system_caches()
19541
+
19542
+ # Step 4: Additional aggressive measures
19543
+ if self.aggressive_mode and full:
19544
+ result['steps']['aggressive'] = self._aggressive_optimizations()
19545
+
19546
+ # Final profile and results
19547
+ result['after'] = self.profile(deep=self.track_leaks)
19548
+
19549
+ # Calculate savings
19550
+ saved_mb = result['before']['process']['rss_mb'] - result['after']['process']['rss_mb']
19551
+ result['saved_mb'] = saved_mb
19552
+ result['saved_percent'] = (saved_mb / result['before']['process']['rss_mb']) * 100 if result['before']['process']['rss_mb'] else 0.0
19553
+
19554
+ # Check for leaks if tracking enabled
19555
+ if self.track_leaks:
19556
+ leak_report = self.detect_leaks()
19557
+ if leak_report:
19558
+ result['leak_detected'] = leak_report
19559
+
19560
+ self.log(
19561
+ f"Optimization complete: Saved {saved_mb:.2f} MB "
19562
+ f"({result['saved_percent']:.1f}% reduction)",
19563
+ "SUCCESS"
19564
+ )
19565
+
19264
19566
  return result
19567
+
19568
+ def _aggressive_optimizations(self):
19569
+ self.log("Aggressively clearing known caches...")
19265
19570
 
19571
+ errors = {}
19572
+ try:
19573
+ gc.collect()
19574
+ self.log("Basic garbage collection done.")
19575
+ except Exception as e:
19576
+ errors['gc_collect'] = str(e)
19577
+
19578
+ try:
19579
+ import numpy as np
19580
+ _ = np.empty(0) # trigger allocation to finalize previous arrays
19581
+ except Exception as e:
19582
+ errors['numpy'] = str(e)
19266
19583
 
19267
- def cleaner(verbose: bool = True, aggressive: bool = True) -> Dict[str, Any]:
19268
- optimizer = MemoryOptimizer(verbose=verbose, aggressive_mode=aggressive)
19269
- return optimizer.optimize()
19584
+ try:
19585
+ import pandas as pd
19586
+ _ = pd.DataFrame() # no effect but helps ensure cleanup
19587
+ except Exception as e:
19588
+ errors['pandas'] = str(e)
19589
+
19590
+ return {'status': 'done', 'errors': errors}
19591
+
19592
+ def memory_report(self, detailed: bool = False) -> str:
19593
+ """Generate a comprehensive memory usage report."""
19594
+ current = self.profile(deep=detailed)
19595
+ report = [
19596
+ "="*80,
19597
+ f"Memory Report (PID: {os.getpid()})",
19598
+ "="*80,
19599
+ f"Process RSS: {current['process']['rss_mb']:.1f} MB",
19600
+ f"Process VMS: {current['process']['vms_mb']:.1f} MB",
19601
+ f"System Memory Used: {current['system']['used_gb']:.1f} GB ({current['system']['percent']}%)",
19602
+ f"Available Memory: {current['system']['available_gb']:.1f} GB",
19603
+ f"Swap Used: {current['system']['swap_used_gb']:.1f} GB",
19604
+ f"GC Objects: {current['gc']['objects']:,}",
19605
+ f"GC Garbage: {current['gc']['garbage']:,}",
19606
+ ]
19607
+
19608
+ if detailed and 'deep' in current:
19609
+ report.append("\n[Object Type Breakdown (Top 10)]")
19610
+ sorted_types = sorted(
19611
+ current['deep']['object_counts'].items(),
19612
+ key=lambda x: x[1],
19613
+ reverse=True
19614
+ )[:10]
19615
+
19616
+ for obj_type, count in sorted_types:
19617
+ size_mb = current['deep']['estimated_sizes'].get(obj_type, 0)
19618
+ report.append(f"{obj_type}: {count:,} objects ({size_mb:.2f} MB)")
19619
+
19620
+ if self.track_leaks and self.leak_tracker['suspected_leaks']:
19621
+ report.append("\n[Potential Memory Leaks]")
19622
+ for i, leak in enumerate(self.leak_tracker['suspected_leaks'], 1):
19623
+ report.append(
19624
+ f"Leak {i}: +{leak['growth_mb']:.1f}MB in "
19625
+ f"{leak['time_elapsed']:.1f}s"
19626
+ )
19627
+ for cause in leak['suspected_causes']:
19628
+ report.append(
19629
+ f" - {cause['type']}: +{cause['growth']:,} "
19630
+ f"({cause['percent_growth']:.1f}%)"
19631
+ )
19632
+
19633
+ return "\n".join(report)
19634
+
19635
+
19636
+ def cleaner(
19637
+ verbose: bool = True,
19638
+ aggressive: bool = True,
19639
+ track_leaks: bool = False,
19640
+ full_clean: bool = True,
19641
+ return_output:bool=False
19642
+ ) -> Dict[str, Any]:
19643
+ """
19644
+ Ultimate memory cleaning function with all optimizations.
19645
+
19646
+ Args:
19647
+ verbose: Print detailed progress information
19648
+ aggressive: Use aggressive memory clearing techniques
19649
+ track_leaks: Enable memory leak detection
19650
+ full_clean: Perform all cleaning steps (including system-level)
19651
+
19652
+ Returns:
19653
+ Dictionary with optimization results
19654
+ """
19655
+ optimizer = MemoryOptimizer(
19656
+ verbose=verbose,
19657
+ aggressive_mode=aggressive,
19658
+ track_leaks=track_leaks
19659
+ )
19660
+ output=optimizer.optimize(full=full_clean)
19661
+ return output if return_output else None
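The cleaner wrapper ties the steps together; a usage sketch, assuming it remains a module-level function as in the previous release:

    from py2ls.ips import cleaner

    results = cleaner(verbose=True, aggressive=False, return_output=True)
    print(f"saved {results['saved_mb']:.1f} MB ({results['saved_percent']:.1f}%)")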
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: py2ls
3
- Version: 0.2.6.2
3
+ Version: 0.2.6.3
4
4
  Summary: py(thon)2(too)ls
5
5
  Author: Jianfeng
6
6
  Author-email: Jianfeng.Liu0413@gmail.com
@@ -18,24 +18,25 @@ Provides-Extra: full
18
18
  Provides-Extra: light
19
19
  Provides-Extra: ml
20
20
  Provides-Extra: neuroscience
21
+ Provides-Extra: superlight
21
22
  Requires-Dist: GEOparse (>=2.0.4) ; extra == "full"
22
- Requires-Dist: Pillow (>=11.2.1) ; extra == "full" or extra == "light"
23
- Requires-Dist: PyPDF2 (>=3.0.1) ; extra == "full" or extra == "light"
24
- Requires-Dist: PyYAML (>=6.0.1) ; extra == "full" or extra == "light"
23
+ Requires-Dist: Pillow (>=11.2.1) ; extra == "full" or extra == "superlight" or extra == "light"
24
+ Requires-Dist: PyPDF2 (>=3.0.1) ; extra == "full" or extra == "superlight" or extra == "light"
25
+ Requires-Dist: PyYAML (>=6.0.1) ; extra == "full" or extra == "superlight" or extra == "light"
25
26
  Requires-Dist: SQLAlchemy (>=2.0.37) ; extra == "full"
26
- Requires-Dist: adjustText (>=1.3.0) ; extra == "full" or extra == "light"
27
+ Requires-Dist: adjustText (>=1.3.0) ; extra == "full" or extra == "superlight" or extra == "light"
27
28
  Requires-Dist: autogluon (>=1.2) ; extra == "full" or extra == "ml"
28
29
  Requires-Dist: beautifulsoup4 (>=4.12.3) ; extra == "full" or extra == "light"
29
30
  Requires-Dist: catboost (>=1.2.8) ; extra == "full" or extra == "ml"
30
31
  Requires-Dist: category_encoders (>=2.8.1) ; extra == "full"
31
- Requires-Dist: chardet (>=3.0.4) ; extra == "full" or extra == "light"
32
- Requires-Dist: cryptography (>=45.0.3) ; extra == "full" or extra == "light"
33
- Requires-Dist: cycler (>=0.12.1) ; extra == "full" or extra == "light"
34
- Requires-Dist: docx2pdf (>=0.1.8) ; extra == "full" or extra == "light"
32
+ Requires-Dist: chardet (>=3.0.4) ; extra == "full" or extra == "superlight" or extra == "light"
33
+ Requires-Dist: cryptography (>=45.0.3) ; extra == "full" or extra == "superlight" or extra == "light"
34
+ Requires-Dist: cycler (>=0.12.1) ; extra == "full" or extra == "superlight" or extra == "light"
35
+ Requires-Dist: docx2pdf (>=0.1.8) ; extra == "full" or extra == "superlight" or extra == "light"
35
36
  Requires-Dist: fake-useragent (>=1.5.1) ; extra == "full" or extra == "light"
36
37
  Requires-Dist: folium (>=0.19.6) ; extra == "full"
37
- Requires-Dist: fpdf (>=1.7.2) ; extra == "full" or extra == "light"
38
- Requires-Dist: fuzzywuzzy (>=0.18.0) ; extra == "full" or extra == "light"
38
+ Requires-Dist: fpdf (>=1.7.2) ; extra == "full" or extra == "superlight" or extra == "light"
39
+ Requires-Dist: fuzzywuzzy (>=0.18.0) ; extra == "full" or extra == "superlight" or extra == "light"
39
40
  Requires-Dist: geopy (>=2.4.1) ; extra == "full"
40
41
  Requires-Dist: gseapy (>=1.1.8) ; extra == "full"
41
42
  Requires-Dist: gtts (>=2.5.4) ; extra == "full"
@@ -44,26 +45,26 @@ Requires-Dist: img2pdf (>=0.5.1) ; extra == "full"
44
45
  Requires-Dist: ipython (>=8.26.0) ; extra == "full" or extra == "light"
45
46
  Requires-Dist: langdetect (>=1.0.9) ; extra == "full"
46
47
  Requires-Dist: lightgbm (>=4.6.0) ; extra == "full" or extra == "ml"
47
- Requires-Dist: lxml (>=5.3.0) ; extra == "full" or extra == "light"
48
- Requires-Dist: matplotlib (>=3.9.1) ; extra == "full" or extra == "light"
48
+ Requires-Dist: lxml (>=5.3.0) ; extra == "full" or extra == "superlight" or extra == "light"
49
+ Requires-Dist: matplotlib (>=3.9.1) ; extra == "full" or extra == "superlight" or extra == "light"
49
50
  Requires-Dist: matplotlib-venn (>=1.1.2) ; extra == "full"
50
51
  Requires-Dist: mne (>=1.6.1) ; extra == "neuroscience"
51
- Requires-Dist: msoffcrypto-tool (>=5.4.2) ; extra == "full" or extra == "light"
52
+ Requires-Dist: msoffcrypto-tool (>=5.4.2) ; extra == "full" or extra == "superlight" or extra == "light"
52
53
  Requires-Dist: neo (>=0.13.1) ; extra == "neuroscience"
53
- Requires-Dist: networkx (>=3.3) ; extra == "full" or extra == "light"
54
+ Requires-Dist: networkx (>=3.3) ; extra == "full" or extra == "superlight" or extra == "light"
54
55
  Requires-Dist: nltk (>=3.8.1) ; extra == "full" or extra == "light"
55
- Requires-Dist: numerizer (>=0.2.3) ; extra == "full" or extra == "light"
56
- Requires-Dist: numpy (>=1.26.4,<2.0.0) ; extra == "full" or extra == "light"
56
+ Requires-Dist: numerizer (>=0.2.3) ; extra == "full" or extra == "superlight" or extra == "light"
57
+ Requires-Dist: numpy (>=1.26.4,<2.0.0) ; extra == "full" or extra == "superlight" or extra == "light"
57
58
  Requires-Dist: opencv-python-headless (>=4.10.0.84) ; extra == "full"
58
59
  Requires-Dist: openlocationcode (>=1.0.1) ; extra == "full"
59
- Requires-Dist: openpyxl (>=3.1.5) ; extra == "full" or extra == "light"
60
- Requires-Dist: pandas (>=2.2.2) ; extra == "full" or extra == "light"
60
+ Requires-Dist: openpyxl (>=3.1.5) ; extra == "full" or extra == "superlight" or extra == "light"
61
+ Requires-Dist: pandas (>=2.2.2) ; extra == "full" or extra == "superlight" or extra == "light"
61
62
  Requires-Dist: pdf2image (>=1.17.0) ; extra == "full"
62
63
  Requires-Dist: phonenumbers (>=8.13.51) ; extra == "full"
63
- Requires-Dist: pingouin (>=0.5.4) ; extra == "full"
64
+ Requires-Dist: pingouin (>=0.5.4) ; extra == "full" or extra == "light"
64
65
  Requires-Dist: pixels2svg (>=0.2.2) ; extra == "full"
65
- Requires-Dist: psutil (>=5.9.8) ; extra == "full" or extra == "light"
66
- Requires-Dist: py-cpuinfo (>=9.0.0) ; extra == "light"
66
+ Requires-Dist: psutil (>=5.9.8) ; extra == "full" or extra == "superlight" or extra == "light"
67
+ Requires-Dist: py-cpuinfo (>=9.0.0) ; extra == "superlight" or extra == "light"
67
68
  Requires-Dist: py7zr (>=0.22.0) ; extra == "full"
68
69
  Requires-Dist: pyautogui (>=0.9.54) ; extra == "full"
69
70
  Requires-Dist: pycryptodome (>=3.23.0) ; extra == "full"
@@ -71,20 +72,20 @@ Requires-Dist: pydeck (>=0.9.1) ; extra == "full"
71
72
  Requires-Dist: pyperclip (>=1.9.0) ; extra == "full"
72
73
  Requires-Dist: pypinyin (>=0.54.0) ; extra == "full"
73
74
  Requires-Dist: python-box (>=7.2.0) ; extra == "full" or extra == "light"
74
- Requires-Dist: python-dateutil (>=2.9.0.post0) ; extra == "full" or extra == "light"
75
- Requires-Dist: python-docx (>=1.1.2) ; extra == "full" or extra == "light"
76
- Requires-Dist: pytz (>=2024.1) ; extra == "full" or extra == "light"
75
+ Requires-Dist: python-dateutil (>=2.9.0.post0) ; extra == "full" or extra == "superlight" or extra == "light"
76
+ Requires-Dist: python-docx (>=1.1.2) ; extra == "full" or extra == "superlight" or extra == "light"
77
+ Requires-Dist: pytz (>=2024.1) ; extra == "full" or extra == "superlight" or extra == "light"
77
78
  Requires-Dist: pyvis (>=0.3.2) ; extra == "full"
78
79
  Requires-Dist: rarfile (>=4.2) ; extra == "full"
79
80
  Requires-Dist: realesrgan (>=0.3.0) ; extra == "full"
80
- Requires-Dist: regex (>=2024.5.15) ; extra == "full" or extra == "light"
81
+ Requires-Dist: regex (>=2024.5.15) ; extra == "full" or extra == "superlight" or extra == "light"
81
82
  Requires-Dist: rembg (>=2.0.57) ; extra == "full"
82
83
  Requires-Dist: requests (>=2.32.3) ; extra == "full" or extra == "light"
83
84
  Requires-Dist: schedule (>=1.2.2) ; extra == "full"
84
85
  Requires-Dist: scikit-image (>=0.23.2) ; extra == "full"
85
86
  Requires-Dist: scikit-learn (>=1.5.1) ; extra == "full" or extra == "ml"
86
- Requires-Dist: scipy (>=1.14.0) ; extra == "full" or extra == "light"
87
- Requires-Dist: seaborn (>=0.13.2) ; extra == "full"
87
+ Requires-Dist: scipy (>=1.14.0) ; extra == "full" or extra == "superlight" or extra == "light"
88
+ Requires-Dist: seaborn (>=0.13.2) ; extra == "full" or extra == "superlight" or extra == "light"
88
89
  Requires-Dist: selenium (>=4.23.1) ; extra == "full" or extra == "light"
89
90
  Requires-Dist: skimpy (>=0.0.15) ; extra == "full"
90
91
  Requires-Dist: skorch (>=1.0.0) ; extra == "full" or extra == "ml"
@@ -94,7 +95,7 @@ Requires-Dist: streamlit-folium (>=0.25.0) ; extra == "full"
94
95
  Requires-Dist: striprtf (>=0.0.29) ; extra == "full"
95
96
  Requires-Dist: symspellpy (>=6.9.0) ; extra == "full"
96
97
  Requires-Dist: tensorflow (>=2.19.0) ; extra == "full" or extra == "ml"
97
- Requires-Dist: textblob (>=0.18.0.post0) ; extra == "full" or extra == "light"
98
+ Requires-Dist: textblob (>=0.18.0.post0) ; extra == "full" or extra == "superlight" or extra == "light"
98
99
  Requires-Dist: torch (>=2.4.0) ; extra == "full" or extra == "ml"
99
100
  Requires-Dist: tqdm (>=4.66.4) ; extra == "full" or extra == "light"
100
101
  Requires-Dist: umap (>=0.1.1) ; extra == "full"
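The new superlight extra can be selected at install time in the usual way, for example:

    pip install "py2ls[superlight]"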
@@ -243,7 +243,7 @@ py2ls/fetch_update.py,sha256=9LXj661GpCEFII2wx_99aINYctDiHni6DOruDs_fdt8,4752
243
243
  py2ls/freqanalysis.py,sha256=F4218VSPbgL5tnngh6xNCYuNnfR-F_QjECUUxrPYZss,32594
244
244
  py2ls/ich2ls.py,sha256=zXWdQzebBZjFIfpjgJuqZ6_7AnGehFTPX-BoNnAQwyc,81940
245
245
  py2ls/im2.py,sha256=inSdpU_IIvHG7FoXcdIvpGprPASySwJCR05Lpaa5fdU,3026
246
- py2ls/ips.py,sha256=EwsE1a0ti16G58YM9-S6Z6mJ28Sh4_UDbquzvEl-hwg,741622
246
+ py2ls/ips.py,sha256=T6NbmCHn4LQjMHZfdYUgz0ExXUITpgf3M-1oVTqGqHg,756341
247
247
  py2ls/ml2ls.py,sha256=I-JFPdikgEtfQjhv5gBz-QSeorpTJI_Pda_JwkTioBY,209732
248
248
  py2ls/mol.py,sha256=AZnHzarIk_MjueKdChqn1V6e4tUle3X1NnHSFA6n3Nw,10645
249
249
  py2ls/netfinder.py,sha256=GZinvZrKRV7h8GPPUBczFEcUKoHNLinh7PafnqMqsYg,81131
@@ -255,6 +255,6 @@ py2ls/sleep_events_detectors.py,sha256=bQA3HJqv5qnYKJJEIhCyhlDtkXQfIzqksnD0YRXso
255
255
  py2ls/stats.py,sha256=xBXUHgV92iqNKtNaj7p5YouNJXKPrJcwkYJhc6lw8NI,34107
256
256
  py2ls/translator.py,sha256=77Tp_GjmiiwFbEIJD_q3VYpQ43XL9ZeJo6Mhl44mvh8,34284
257
257
  py2ls/wb_detector.py,sha256=7y6TmBUj9exCZeIgBAJ_9hwuhkDh1x_-yg4dvNY1_GQ,6284
258
- py2ls-0.2.6.2.dist-info/METADATA,sha256=5dnSVzTOBpmJlixUkJDlGf9Wu2b_Tta1spNIcWzILTc,17790
259
- py2ls-0.2.6.2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
260
- py2ls-0.2.6.2.dist-info/RECORD,,
258
+ py2ls-0.2.6.3.dist-info/METADATA,sha256=F0ZC706JhRsKyvns1iLr9h_Kj9eEcnDYO9wwJ7dlcmU,18532
259
+ py2ls-0.2.6.3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
260
+ py2ls-0.2.6.3.dist-info/RECORD,,