py2ls 0.1.10.12__py3-none-any.whl → 0.2.7.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of py2ls might be problematic; see the release details below.

Files changed (72)
  1. py2ls/.DS_Store +0 -0
  2. py2ls/.git/.DS_Store +0 -0
  3. py2ls/.git/index +0 -0
  4. py2ls/.git/logs/refs/remotes/origin/HEAD +1 -0
  5. py2ls/.git/objects/.DS_Store +0 -0
  6. py2ls/.git/refs/.DS_Store +0 -0
  7. py2ls/ImageLoader.py +621 -0
  8. py2ls/__init__.py +7 -5
  9. py2ls/apptainer2ls.py +3940 -0
  10. py2ls/batman.py +164 -42
  11. py2ls/bio.py +2595 -0
  12. py2ls/cell_image_clf.py +1632 -0
  13. py2ls/container2ls.py +4635 -0
  14. py2ls/corr.py +475 -0
  15. py2ls/data/.DS_Store +0 -0
  16. py2ls/data/email/email_html_template.html +88 -0
  17. py2ls/data/hyper_param_autogluon_zeroshot2024.json +2383 -0
  18. py2ls/data/hyper_param_tabrepo_2024.py +1753 -0
  19. py2ls/data/mygenes_fields_241022.txt +355 -0
  20. py2ls/data/re_common_pattern.json +173 -0
  21. py2ls/data/sns_info.json +74 -0
  22. py2ls/data/styles/.DS_Store +0 -0
  23. py2ls/data/styles/example/.DS_Store +0 -0
  24. py2ls/data/styles/stylelib/.DS_Store +0 -0
  25. py2ls/data/styles/stylelib/grid.mplstyle +15 -0
  26. py2ls/data/styles/stylelib/high-contrast.mplstyle +6 -0
  27. py2ls/data/styles/stylelib/high-vis.mplstyle +4 -0
  28. py2ls/data/styles/stylelib/ieee.mplstyle +15 -0
  29. py2ls/data/styles/stylelib/light.mplstyl +6 -0
  30. py2ls/data/styles/stylelib/muted.mplstyle +6 -0
  31. py2ls/data/styles/stylelib/nature-reviews-latex.mplstyle +616 -0
  32. py2ls/data/styles/stylelib/nature-reviews.mplstyle +616 -0
  33. py2ls/data/styles/stylelib/nature.mplstyle +31 -0
  34. py2ls/data/styles/stylelib/no-latex.mplstyle +10 -0
  35. py2ls/data/styles/stylelib/notebook.mplstyle +36 -0
  36. py2ls/data/styles/stylelib/paper.mplstyle +290 -0
  37. py2ls/data/styles/stylelib/paper2.mplstyle +305 -0
  38. py2ls/data/styles/stylelib/retro.mplstyle +4 -0
  39. py2ls/data/styles/stylelib/sans.mplstyle +10 -0
  40. py2ls/data/styles/stylelib/scatter.mplstyle +7 -0
  41. py2ls/data/styles/stylelib/science.mplstyle +48 -0
  42. py2ls/data/styles/stylelib/std-colors.mplstyle +4 -0
  43. py2ls/data/styles/stylelib/vibrant.mplstyle +6 -0
  44. py2ls/data/tiles.csv +146 -0
  45. py2ls/data/usages_pd.json +1417 -0
  46. py2ls/data/usages_sns.json +31 -0
  47. py2ls/docker2ls.py +5446 -0
  48. py2ls/ec2ls.py +61 -0
  49. py2ls/fetch_update.py +145 -0
  50. py2ls/ich2ls.py +1955 -296
  51. py2ls/im2.py +8242 -0
  52. py2ls/image_ml2ls.py +2100 -0
  53. py2ls/ips.py +33909 -3418
  54. py2ls/ml2ls.py +7700 -0
  55. py2ls/mol.py +289 -0
  56. py2ls/mount2ls.py +1307 -0
  57. py2ls/netfinder.py +873 -351
  58. py2ls/nl2ls.py +283 -0
  59. py2ls/ocr.py +1581 -458
  60. py2ls/plot.py +10394 -314
  61. py2ls/rna2ls.py +311 -0
  62. py2ls/ssh2ls.md +456 -0
  63. py2ls/ssh2ls.py +5933 -0
  64. py2ls/ssh2ls_v01.py +2204 -0
  65. py2ls/stats.py +66 -172
  66. py2ls/temp20251124.py +509 -0
  67. py2ls/translator.py +2 -0
  68. py2ls/utils/decorators.py +3564 -0
  69. py2ls/utils_bio.py +3453 -0
  70. {py2ls-0.1.10.12.dist-info → py2ls-0.2.7.10.dist-info}/METADATA +113 -224
  71. {py2ls-0.1.10.12.dist-info → py2ls-0.2.7.10.dist-info}/RECORD +72 -16
  72. {py2ls-0.1.10.12.dist-info → py2ls-0.2.7.10.dist-info}/WHEEL +0 -0
@@ -0,0 +1,3564 @@
1
+ """
2
+ Ultimate Decorator Toolkit - A comprehensive collection of decorators for function enhancement,
3
+ logging, performance monitoring, and fault tolerance, implemented as classes.
4
+
5
+ Features:
6
+ - 50+ practical decorators organized by category
7
+ - Consistent verbose mode for all decorators
8
+ - Usage examples with expected output
9
+ - Thread-safe implementations
10
+ - Support for both sync and async functions
11
+
12
+ """
13
+
14
+ import time as time_module
15
+ import functools
16
+ import logging
17
+ import threading
18
+ import inspect
19
+ import random
20
+ import cProfile
21
+ import pstats
22
+ import io
23
+ import hashlib
24
+ import pickle
25
+ import os
26
+ import sys
27
+ import warnings
28
+ import asyncio
29
+ from collections import defaultdict
30
+ from functools import lru_cache
31
+ from contextlib import suppress as context_suppress
32
+ from typing import Callable, Any, Dict, List, Tuple, Optional, Union
33
+
34
+ ##############################
35
+ # Time2Do
36
+ import re
37
+ from datetime import datetime, time, date, timedelta
38
+ from zoneinfo import ZoneInfo
39
+ ##############################
40
+
41
+ from pathlib import Path
42
+
43
# ----------- Detect optional Numba support -----------
# Numba is an optional accelerator; any import failure simply disables it.
try:
    import numba
    NUMBA_AVAILABLE = True
except Exception:
    numba = None
    NUMBA_AVAILABLE = False

# ----------- Detect optional CUDA support (requires Numba) -----------
# CUDA probing can itself raise on machines without a driver, so it is
# guarded independently of the import above.
CUDA_AVAILABLE = False
if NUMBA_AVAILABLE:
    try:
        CUDA_AVAILABLE = numba.cuda.is_available()
    except Exception:
        CUDA_AVAILABLE = False
59
+
60
class Time2Do:
    """Decorator class for conditional execution based on time parameters.

    The wrapped function only runs when the current moment satisfies the
    configured constraints (time-of-day range, weekdays, date window,
    holidays). When the condition is not met, ``on_false`` is called if
    provided, otherwise ``None`` is returned. Both sync and async callables
    are supported.
    """

    def __init__(
        self,
        when: str = "range",
        start_time: Optional[Union[str, time]] = None,
        end_time: Optional[Union[str, time]] = None,
        weekdays: Optional[Union[List[int], List[str], str, bool]] = None,
        invert: bool = False,
        timezone: Optional[str] = "Europe/Berlin",
        start_date: Optional[Union[str, date]] = None,
        end_date: Optional[Union[str, date]] = None,
        holidays: Optional[Union[List[Union[str, date]], Callable[[date], bool]]] = None,
        inclusive: str = "[]",
        *,
        cache: bool = True,
        on_false: Optional[Callable] = None
    ):
        """
        Ultimate time-based execution decorator with complete feature set

        Args:
            when: Time expression or keyword ("range", "never", "every day")
            start_time: Override start time
            end_time: Override end time
            weekdays: Weekday specification
            invert: Return inverse result
            timezone: Timezone identifier
            start_date: Start date boundary
            end_date: End date boundary
            holidays: List of dates or holiday checker function
            inclusive: Time boundary inclusion ("[]", "[)", "(]", "()")
            cache: Enable result caching
            on_false: Callback when condition not met
        """
        self.when = when
        self.start_time = start_time
        self.end_time = end_time
        self.weekdays = weekdays
        self.invert = invert
        self.timezone = timezone
        self.start_date = start_date
        self.end_date = end_date
        self.holidays = holidays
        self.inclusive = inclusive
        self.cache = cache
        self.on_false = on_false

        # FIX: the original used @lru_cache(maxsize=128) on an *instance
        # method*, which keys the cache on `self`, shares one cache across all
        # instances, and keeps every instance alive for the process lifetime
        # (ruff B019). A bounded per-instance dict has the same observable
        # behavior without the leak.
        self._check_cache: Dict[Tuple[Any, ...], bool] = {}

        # Pre-compiled regex patterns
        self.patterns = {
            "special": re.compile(r"^(midnight|noon)$", re.I),
            "am_pm": re.compile(r"(\d{1,2})(?::(\d{2}))?\s*([ap]m?)\b", re.I),
            "24hr": re.compile(r"(\d{1,2})(?::(\d{2}))?\b"),
            "weekday": re.compile(
                r"\b(mon|tue|wed|thu|fri|sat|sun|weekdays?|weekends?)\b", re.I
            ),
            "range_sep": re.compile(r"\b(?:to|-|and|until)\b", re.I),
            "date": re.compile(r"(\d{4})-(\d{2})-(\d{2})"),
        }

    def __call__(self, func: Callable) -> Callable:
        """Wrap *func* so it only executes when the time condition holds."""
        if inspect.iscoroutinefunction(func):
            @functools.wraps(func)
            async def async_wrapper(*args, **kwargs):
                if self._should_execute():
                    return await func(*args, **kwargs)
                return self._handle_false()
            return async_wrapper
        else:
            @functools.wraps(func)
            def sync_wrapper(*args, **kwargs):
                if self._should_execute():
                    return func(*args, **kwargs)
                return self._handle_false()
            return sync_wrapper

    def _should_execute(self) -> bool:
        """Determine if the function should execute based on time conditions."""
        if self.cache:
            cache_key = self._get_cache_key()
            return self._cached_time_check(cache_key)
        return self._time_check_impl()

    def _cached_time_check(self, cache_key: Tuple[Any, ...]) -> bool:
        """Cached time check; the key includes the current minute, so entries
        naturally rotate once per minute."""
        try:
            return self._check_cache[cache_key]
        except KeyError:
            pass
        # Keep the same bound the old lru_cache used; a full clear is fine
        # because stale keys (previous minutes) are never looked up again.
        if len(self._check_cache) >= 128:
            self._check_cache.clear()
        result = self._check_cache[cache_key] = self._time_check_impl()
        return result

    def _get_cache_key(self) -> Tuple[Any, ...]:
        """Generate a hashable cache key from the current parameters."""
        return (
            self.when,
            self.start_time,
            self.end_time,
            tuple(self.weekdays) if isinstance(self.weekdays, list) else self.weekdays,
            self.invert,
            self.timezone,
            self.start_date,
            self.end_date,
            tuple(self.holidays) if isinstance(self.holidays, list) else self.holidays,
            self.inclusive,
            datetime.now().minute  # Cache per minute
        )

    def _time_check_impl(self) -> bool:
        """Core time checking implementation."""
        now = self._get_current_time()
        current_time = now.time()
        current_date = now.date()

        # Defaults; the 'when' string and explicit arguments override these.
        params = {
            "start_time": time(6, 0),
            "end_time": time(23, 0),
            "weekdays": None,
            "start_date": None,
            "end_date": None,
            "holidays": None,
            "never": False,
            "always": False,
        }

        self._process_when_string(params)
        self._process_time_params(params)
        self._process_date_params(params)
        self._process_weekdays(params)
        self._process_holidays(params)

        # Early exit conditions
        if params["never"]:
            return self.invert
        if params["always"]:
            return not self.invert

        # Check date range
        if not self._check_date_range(current_date, params["start_date"], params["end_date"]):
            return self.invert

        # Check holidays
        if self._is_holiday(current_date, params["holidays"]):
            return self.invert

        # Check weekdays
        if not self._check_weekday(now.weekday(), params["weekdays"]):
            return self.invert

        # Check time range
        in_range = self._check_time_range(
            current_time, params["start_time"], params["end_time"], self.inclusive
        )

        return not in_range if self.invert else in_range

    def _get_current_time(self) -> datetime:
        """Get current time with timezone support (falls back to naive now())."""
        try:
            if self.timezone:
                return datetime.now(ZoneInfo(self.timezone))
        except Exception:
            pass
        return datetime.now()

    def _process_when_string(self, params: dict):
        """Process the natural language 'when' string."""
        when_lower = self.when.lower().strip()

        if when_lower == "never":
            params["never"] = True
            return
        elif when_lower == "every day":
            params["always"] = True
            return

        # Extract weekdays and strip them from the string before time parsing
        weekday_matches = self.patterns["weekday"].finditer(when_lower)
        for match in weekday_matches:
            if not params["weekdays"]:
                params["weekdays"] = []
            params["weekdays"].append(match.group(1))
            when_lower = when_lower.replace(match.group(), "").strip()

        # Parse time expressions
        if "between" in when_lower and "and" in when_lower:
            parts = self.patterns["range_sep"].split(
                when_lower.replace("between", ""), maxsplit=1
            )
            if len(parts) >= 2:
                params["start_time"] = self._parse_time(parts[0])
                params["end_time"] = self._parse_time(parts[1])
        elif any(sep in when_lower for sep in [" to ", "-", " until "]):
            parts = self.patterns["range_sep"].split(when_lower, maxsplit=1)
            if len(parts) >= 2:
                params["start_time"] = self._parse_time(parts[0])
                params["end_time"] = self._parse_time(parts[1])
        elif when_lower.startswith("after "):
            params["start_time"] = self._parse_time(when_lower[6:])
            params["end_time"] = time(23, 59, 59)
        elif when_lower.startswith("before "):
            params["start_time"] = time(0, 0)
            params["end_time"] = self._parse_time(when_lower[7:])

    def _process_time_params(self, params: dict):
        """Explicit start_time/end_time arguments override the 'when' string."""
        if self.start_time is not None:
            params["start_time"] = self._parse_time(self.start_time)
        if self.end_time is not None:
            params["end_time"] = self._parse_time(self.end_time)

    def _parse_time(self, t: Union[str, time]) -> time:
        """Parse a time from a string ('9am', '14:30', 'noon') or time object.

        Raises:
            ValueError: if the string matches none of the supported formats.
        """
        if isinstance(t, time):
            return t

        t_str = str(t).lower().strip()

        # Handle special cases
        if match := self.patterns["special"].match(t_str):
            return time(0, 0) if match.group(1) == "midnight" else time(12, 0)

        # Parse AM/PM format
        if match := self.patterns["am_pm"].search(t_str):
            hour = int(match.group(1))
            minute = int(match.group(2) or 0)
            period = match.group(3).lower()
            if period.startswith("p") and hour != 12:
                hour += 12
            elif period.startswith("a") and hour == 12:
                hour = 0  # 12am is midnight
            return time(hour, minute)

        # Parse 24-hour format
        if match := self.patterns["24hr"].search(t_str):
            hour = int(match.group(1))
            minute = int(match.group(2) or 0)
            return time(hour, minute)

        raise ValueError(f"Invalid time format: '{t}'")

    def _process_date_params(self, params: dict):
        """Process the start_date/end_date boundary arguments."""
        if self.start_date is not None:
            params["start_date"] = self._parse_date(self.start_date)
        if self.end_date is not None:
            params["end_date"] = self._parse_date(self.end_date)

    def _parse_date(self, d: Union[str, date]) -> date:
        """Parse a YYYY-MM-DD string or pass a date object through.

        Raises:
            ValueError: if the string is not in YYYY-MM-DD form.
        """
        if isinstance(d, date):
            return d

        if match := self.patterns["date"].match(d):
            return date(int(match.group(1)), int(match.group(2)), int(match.group(3)))

        raise ValueError(f"Invalid date format: '{d}'. Use YYYY-MM-DD")

    def _process_weekdays(self, params: dict):
        """Merge the explicit `weekdays` argument into params."""
        if self.weekdays is None:
            return

        # weekdays=True means "weekdays only"; False means no restriction list
        if isinstance(self.weekdays, bool):
            params["weekdays"] = ["weekdays"] if self.weekdays else []
            return

        if not params["weekdays"]:
            params["weekdays"] = []

        if isinstance(self.weekdays, str):
            params["weekdays"].extend([w.strip() for w in self.weekdays.split(",")])
        elif isinstance(self.weekdays, list):
            params["weekdays"].extend(self.weekdays)

    def _process_holidays(self, params: dict):
        """Normalize holidays into a list of dates or keep a checker callable."""
        if self.holidays is None:
            return

        params["holidays"] = []

        if callable(self.holidays):
            params["holidays"] = self.holidays
            return

        for h in self.holidays:
            if isinstance(h, str):
                params["holidays"].append(self._parse_date(h))
            else:
                params["holidays"].append(h)

    def _check_date_range(
        self,
        current_date: date,
        start_date: Optional[date],
        end_date: Optional[date]
    ) -> bool:
        """Check if current date is within the (inclusive) range."""
        if start_date and current_date < start_date:
            return False
        if end_date and current_date > end_date:
            return False
        return True

    def _is_holiday(
        self,
        current_date: date,
        holidays: Union[List[date], Callable[[date], bool]]
    ) -> bool:
        """Check if the date is a holiday (list membership or callable)."""
        if not holidays:
            return False
        if callable(holidays):
            return holidays(current_date)
        return current_date in [
            (self._parse_date(h) if isinstance(h, str) else h)
            for h in holidays
        ]

    def _check_weekday(
        self,
        current_weekday: int,
        weekdays_spec: List[Union[str, int]]
    ) -> bool:
        """Check if current weekday (0=Mon..6=Sun) matches the specification."""
        if not weekdays_spec:
            return True

        day_map = {
            "mon": 0, "tue": 1, "wed": 2, "thu": 3,
            "fri": 4, "sat": 5, "sun": 6,
            "weekday": [0, 1, 2, 3, 4],
            "weekdays": [0, 1, 2, 3, 4],
            "weekend": [5, 6],
            "weekends": [5, 6],
        }

        allowed_days = set()
        for spec in weekdays_spec:
            if isinstance(spec, int) and 0 <= spec <= 6:
                allowed_days.add(spec)
            elif isinstance(spec, str):
                spec_lower = spec.lower()
                if spec_lower in day_map:
                    days = day_map[spec_lower]
                    if isinstance(days, list):
                        allowed_days.update(days)
                    else:
                        allowed_days.add(days)

        # Unrecognized specs leave the set empty -> no restriction
        return current_weekday in allowed_days if allowed_days else True

    def _check_time_range(
        self,
        current_time: time,
        start_time: time,
        end_time: time,
        inclusive: str
    ) -> bool:
        """Check if current time is within range; handles midnight-crossing ranges."""
        if start_time <= end_time:
            if inclusive == "[]":
                return start_time <= current_time <= end_time
            elif inclusive == "[)":
                return start_time <= current_time < end_time
            elif inclusive == "(]":
                return start_time < current_time <= end_time
            elif inclusive == "()":
                return start_time < current_time < end_time
        else:  # Crosses midnight
            if inclusive == "[]":
                return current_time >= start_time or current_time <= end_time
            elif inclusive == "[)":
                return current_time >= start_time or current_time < end_time
            elif inclusive == "(]":
                return current_time > start_time or current_time <= end_time
            elif inclusive == "()":
                return current_time > start_time or current_time < end_time

        # Unknown `inclusive` spec: fail closed
        return False

    def _handle_false(self):
        """Invoke the on_false callback (or return None) when blocked."""
        if self.on_false is not None:
            return self.on_false()
        return None

    @staticmethod
    def usage_example() -> str:
        """Provide usage examples"""
        return """
# Example 1: Basic time-based execution
@Time2Do(when="between 9am and 5pm on weekdays")
def business_hours_task():
    print("Executing during business hours")

# Example 2: With date ranges and holidays
holidays = ["2023-12-25", "2024-01-01"]

@Time2Do(
    start_date="2023-01-01",
    end_date="2023-12-31",
    holidays=holidays,
    when="after 2pm"
)
def afternoon_task():
    print("Executing in the afternoon on non-holidays")

# Example 3: Asynchronous function with custom false handler
def skip_handler():
    print("Skipping execution - condition not met")

@Time2Do(
    when="before 8am",
    on_false=skip_handler,
    timezone="America/New_York"
)
async def morning_task():
    print("Good morning!")
"""
479
+
480
+
481
+
482
class DecoratorBase:
    """Common base for every decorator in this module.

    Holds the shared ``verbose`` flag and a small ``_log`` helper so
    subclasses emit their diagnostics uniformly.
    """

    def __init__(self, verbose: bool = True):
        self.verbose = verbose

    def _log(self, message: str):
        """Print *message* only when verbose mode is on."""
        if not self.verbose:
            return
        print(message)

    @staticmethod
    def usage_example() -> str:
        """Return usage example with expected output (empty for the base)."""
        return ""
496
+
497
+ ##############################
498
+ # 1. Timing & Profiling
499
+ ##############################
500
class Timer(DecoratorBase):
    """Measure function execution time with threshold alerting.

    Logs the wall-clock duration of each call (via ``logging`` or ``print``)
    and flags calls that exceed ``threshold`` seconds. Supports both sync
    and async callables.

    Args:
        threshold: Duration in seconds above which an alert is appended.
        use_logging: Route output through the ``logging`` module.
        log_level: Level used when ``use_logging`` is True.
        log_format: Message template with ``{func}`` and ``{duration}``.
        verbose: Print output when not using ``logging``.
    """

    def __init__(self, threshold: float = None, use_logging: bool = False,
                 log_level: int = logging.INFO, log_format: str = None,
                 verbose: bool = True):
        super().__init__(verbose)
        self.threshold = threshold
        self.use_logging = use_logging
        self.log_level = log_level
        self.log_format = log_format or "[TIMER] {func} took {duration}"

    def __call__(self, func: Callable) -> Callable:
        is_coroutine = inspect.iscoroutinefunction(func)
        # Logger is resolved once, at decoration time
        logger = logging.getLogger(func.__module__) if self.use_logging else None

        if is_coroutine:
            @functools.wraps(func)
            async def async_wrapper(*args, **kwargs):
                start = time_module.perf_counter()
                result = await func(*args, **kwargs)
                duration = time_module.perf_counter() - start
                self._log_execution(func.__name__, duration, logger)
                return result
            return async_wrapper
        else:
            @functools.wraps(func)
            def sync_wrapper(*args, **kwargs):
                start = time_module.perf_counter()
                result = func(*args, **kwargs)
                duration = time_module.perf_counter() - start
                self._log_execution(func.__name__, duration, logger)
                return result
            return sync_wrapper

    def _log_execution(self, func_name: str, duration: float, logger: logging.Logger):
        """Format and emit one timing message, appending a threshold alert."""
        readable_duration = self._format_duration(duration)
        msg = self.log_format.format(func=func_name, duration=readable_duration)

        if self.threshold and duration > self.threshold:
            # FIX: original concatenated with no separator, producing e.g.
            # "took 2 min 5 sExceeded threshold ..." (its own usage example
            # shows a space here).
            msg += f" Exceeded threshold {self._format_duration(self.threshold)}"

        if self.use_logging and logger:
            logger.log(self.log_level, msg)
        else:
            self._log(msg)

    @staticmethod
    def _format_duration(seconds: float) -> str:
        """Convert a duration in seconds to a human-readable string.

        FIX: the original did ``int(seconds)`` unconditionally, so every
        sub-second duration (and threshold) rendered as "0 s". Sub-second
        values are now shown with their fractional part (e.g. "0.3 s").
        """
        if 0 < seconds < 1:
            return f"{seconds:.4g} s"
        seconds = int(seconds)
        intervals = (
            ('month', 2592000),  # 30 * 24 * 60 * 60
            ('week', 604800),    # 7 * 24 * 60 * 60
            ('day', 86400),
            ('h', 3600),
            ('min', 60),
            ('s', 1),
        )
        parts = []
        for name, count in intervals:
            value = seconds // count
            if value:
                parts.append(f"{value} {name}{'s' if value > 1 and name not in ['h', 'min', 's'] else ''}")
                seconds %= count
        return ' '.join(parts) if parts else '0 s'

    @staticmethod
    def usage_example() -> str:
        return """
# Timer Example
@Timer(threshold=0.3, verbose=True)
def process_data(data):
    time_module.sleep(125.5)
    return f"Processed {len(data)} items"

result = process_data([1, 2, 3])
# Expected output:
# [TIMER] process_data took 2 min 5 s Exceeded threshold 0.3 s
"""
580
+
581
+
582
class TimeIt(DecoratorBase):
    """Measure execution time with configurable units.

    Reports the elapsed time of each call in ``ns``, ``ms`` or ``s``;
    optionally the result value is included in the message.
    """

    def __init__(self, print_result: bool = True, unit: str = "ms", verbose: bool = True):
        super().__init__(verbose)
        self.print_result = print_result
        self.unit = unit  # one of: ms, s, ns

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            started = time_module.perf_counter_ns()
            output = func(*args, **kwargs)
            elapsed_ns = time_module.perf_counter_ns() - started

            # Nanosecond counter scaled to the requested unit (KeyError on
            # an unsupported unit, as before).
            divisor = {"ns": 1, "ms": 1_000_000, "s": 1_000_000_000}[self.unit]
            elapsed = elapsed_ns / divisor

            unit_str = self.unit
            if self.print_result:
                msg = f"[TIMEIT] {func.__name__} took {elapsed:.4f}{unit_str} -> {output}"
            else:
                msg = f"[TIMEIT] {func.__name__} took {elapsed:.4f}{unit_str}"

            self._log(msg)
            return output
        return wrapper

    @staticmethod
    def usage_example() -> str:
        return """
# TimeIt Example
@TimeIt(unit="ms", verbose=True)
def calculate_factorial(n):
    result = 1
    for i in range(1, n+1):
        result *= i
    return result

fact = calculate_factorial(10)
# Expected output:
# [TIMEIT] calculate_factorial took 0.0056ms -> 3628800
"""
628
+
629
class Profile(DecoratorBase):
    """Profile function execution using cProfile.

    Wraps a function so each call is profiled; the collected statistics are
    sorted by ``sort_by`` and the top ``lines`` entries are logged.

    Args:
        sort_by: pstats sort key (e.g. 'cumulative', 'time').
        lines: Number of stat lines to print.
        verbose: Emit the report via ``_log``.
    """

    def __init__(self, sort_by: str = 'cumulative', lines: int = 20, verbose: bool = True):
        super().__init__(verbose)
        self.sort_by = sort_by
        self.lines = lines

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            profiler = cProfile.Profile()
            profiler.enable()
            try:
                result = func(*args, **kwargs)
            finally:
                # FIX: the original never disabled the profiler when func
                # raised, leaving a global profiler active for the rest of
                # the process.
                profiler.disable()
            s = io.StringIO()
            ps = pstats.Stats(profiler, stream=s).sort_stats(self.sort_by)
            ps.print_stats(self.lines)
            self._log(f"[PROFILE] {func.__name__}:\n{s.getvalue()}")
            return result
        return wrapper

    @staticmethod
    def usage_example() -> str:
        return """
# Profile Example
@Profile(sort_by='time', lines=5, verbose=True)
def complex_computation():
    total = 0
    for i in range(1000):
        for j in range(1000):
            total += i * j
    return total

result = complex_computation()
# Expected output: Profile statistics for the function
"""
665
+
666
class Benchmark(DecoratorBase):
    """Run performance benchmarks with warmup iterations.

    NOTE: the decorated function's own return value is discarded; the
    wrapper returns a statistics dict instead.
    """

    def __init__(self, iterations: int = 1000, warmup: int = 10, verbose: bool = True):
        super().__init__(verbose)
        self.iterations = iterations
        self.warmup = warmup

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Warmup phase: results and timings are thrown away
            for _ in range(self.warmup):
                func(*args, **kwargs)

            # Measured phase
            samples = []
            record = samples.append
            for _ in range(self.iterations):
                t0 = time_module.perf_counter_ns()
                func(*args, **kwargs)
                record(time_module.perf_counter_ns() - t0)

            total_ns = sum(samples)
            stats = {
                "function": func.__name__,
                "iterations": self.iterations,
                "avg_ns": total_ns / len(samples),
                "min_ns": min(samples),
                "max_ns": max(samples),
                "total_ms": total_ns / 1_000_000
            }

            if self.verbose:
                print(f"[BENCHMARK] {func.__name__} performance:")
                print(f" Iterations: {self.iterations}")
                print(f" Average: {stats['avg_ns']/1000:.2f} µs")
                print(f" Min: {stats['min_ns']/1000:.2f} µs")
                print(f" Max: {stats['max_ns']/1000:.2f} µs")
                print(f" Total: {total_ns/1_000_000:.2f} ms")

            return stats
        return wrapper

    @staticmethod
    def usage_example() -> str:
        return """
# Benchmark Example
@Benchmark(iterations=1000, warmup=10, verbose=True)
def vector_dot_product(a, b):
    return sum(x*y for x, y in zip(a, b))

a = list(range(1000))
b = list(range(1000))
stats = vector_dot_product(a, b)
# Expected output: Performance statistics
"""
726
+
727
+ ##############################
728
+ # 2. Error Handling & Retry
729
+ ##############################
730
+
731
class Retry(DecoratorBase):
    """Retry function on failure with exponential backoff.

    Args:
        retries: Total number of attempts (values < 1 are treated as 1).
        delay: Initial sleep between attempts, in seconds.
        backoff: Multiplier applied to the delay after each failure.
        exceptions: Exception types that trigger a retry; others propagate.
        jitter: Relative jitter (+/-) applied to each sleep.
        verbose: Log each retry via ``_log``.
    """

    def __init__(self, retries: int = 3, delay: float = 1, backoff: float = 2,
                 exceptions: Tuple[Exception] = (Exception,), jitter: float = 0,
                 verbose: bool = True):
        super().__init__(verbose)
        self.retries = retries
        self.delay = delay
        self.backoff = backoff
        self.exceptions = exceptions
        self.jitter = jitter

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # FIX: with retries <= 0 the original loop body never ran, so the
            # decorated function silently became a no-op returning None.
            # Always attempt the call at least once.
            attempts = max(1, self.retries)
            current_delay = self.delay
            for attempt in range(attempts):
                try:
                    return func(*args, **kwargs)
                except self.exceptions as e:
                    if attempt == attempts - 1:
                        raise  # out of attempts: propagate the last error
                    sleep_time = current_delay * (1 + random.uniform(-self.jitter, self.jitter))
                    self._log(f"[RETRY] {func.__name__} failed: {e}. Retrying in {sleep_time:.2f}s...")
                    time_module.sleep(sleep_time)
                    current_delay *= self.backoff
        return wrapper

    @staticmethod
    def usage_example() -> str:
        return """
# Retry Example
@Retry(retries=3, delay=0.1, backoff=2, verbose=True)
def fetch_data():
    if random.random() < 0.8:
        raise ConnectionError("API timeout")
    return "Data fetched"

try:
    result = fetch_data()
except Exception as e:
    print(f"Final error: {e}")
# Expected output:
# [RETRY] fetch_data failed: API timeout. Retrying in 0.12s...
# [RETRY] fetch_data failed: API timeout. Retrying in 0.24s...
"""
777
+
778
class RetryWithExponentialBackoff(Retry):
    """Retry with exponential backoff and a hard max delay limit.

    Args:
        max_retries: Total number of attempts (values < 1 are treated as 1).
        initial_delay: Delay before the first retry, in seconds.
        max_delay: Hard upper bound on any single sleep.
        exceptions: Exception types that trigger a retry; others propagate.
        verbose: Log each retry via ``_log``.
    """

    def __init__(self, max_retries: int = 5, initial_delay: float = 1.0,
                 max_delay: float = 60.0, exceptions: Tuple[Exception] = (Exception,),
                 verbose: bool = True):
        super().__init__(
            retries=max_retries,
            delay=initial_delay,
            backoff=2,
            exceptions=exceptions,
            verbose=verbose
        )
        self.max_delay = max_delay

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Guard against retries <= 0 turning the call into a no-op
            # (same defect as the base class).
            attempts = max(1, self.retries)
            current_delay = self.delay
            for attempt in range(attempts):
                try:
                    return func(*args, **kwargs)
                except self.exceptions as e:
                    if attempt == attempts - 1:
                        raise

                    # FIX: the original applied the 0.8-1.2 jitter AFTER the
                    # min(..., max_delay) cap, so the actual sleep could
                    # exceed max_delay by up to 20%. Jitter now happens
                    # before the cap is enforced.
                    next_delay = current_delay * (2 ** attempt) * random.uniform(0.8, 1.2)
                    next_delay = min(next_delay, self.max_delay)

                    self._log(f"[RETRY] {func.__name__} failed (attempt {attempt+1}): {e}. "
                              f"Retrying in {next_delay:.2f}s...")

                    time_module.sleep(next_delay)
        return wrapper

    @staticmethod
    def usage_example() -> str:
        return """
# RetryWithExponentialBackoff Example
@RetryWithExponentialBackoff(
    max_retries=5,
    initial_delay=1,
    max_delay=30,
    verbose=True
)
def payment_processing():
    if random.random() < 0.7:
        raise Exception("Payment gateway error")
    return "Payment successful"

payment_processing()
# Expected output: Shows exponential backoff retries
"""
831
+
832
class AsyncRetry(DecoratorBase):
    """Async version of the Retry decorator.

    Identical semantics to ``Retry`` but awaits the wrapped coroutine and
    sleeps with ``asyncio.sleep`` so the event loop is never blocked.

    Args:
        retries: Total number of attempts (values < 1 are treated as 1).
        delay: Initial sleep between attempts, in seconds.
        backoff: Multiplier applied to the delay after each failure.
        exceptions: Exception types that trigger a retry; others propagate.
        jitter: Relative jitter (+/-) applied to each sleep.
        verbose: Log each retry via ``_log``.
    """

    def __init__(self, retries: int = 3, delay: float = 1, backoff: float = 2,
                 exceptions: Tuple[Exception] = (Exception,), jitter: float = 0,
                 verbose: bool = True):
        super().__init__(verbose)
        self.retries = retries
        self.delay = delay
        self.backoff = backoff
        self.exceptions = exceptions
        self.jitter = jitter

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        async def async_wrapper(*args, **kwargs):
            # FIX: retries <= 0 previously skipped the loop entirely and the
            # coroutine resolved to None without ever awaiting func.
            attempts = max(1, self.retries)
            current_delay = self.delay
            for attempt in range(attempts):
                try:
                    return await func(*args, **kwargs)
                except self.exceptions as e:
                    if attempt == attempts - 1:
                        raise
                    sleep_time = current_delay * (1 + random.uniform(-self.jitter, self.jitter))
                    self._log(f"[ASYNCRETRY] {func.__name__} failed: {e}. Retrying in {sleep_time:.2f}s...")
                    await asyncio.sleep(sleep_time)
                    current_delay *= self.backoff
        return async_wrapper

    @staticmethod
    def usage_example() -> str:
        return """
# AsyncRetry Example
@AsyncRetry(retries=3, delay=0.1, verbose=True)
async def async_api_call():
    if random.random() < 0.7:
        raise ConnectionError("API timeout")
    return "Success"

# In async context:
# await async_api_call()
# Expected output: Shows async retries
"""
874
+
875
class Suppress(DecoratorBase):
    """Swallow the configured exception types raised by the wrapped function.

    A suppressed call returns ``None``; with no types given, every
    ``Exception`` subclass is suppressed.
    """

    def __init__(self, *exceptions, verbose: bool = True):
        super().__init__(verbose)
        self.exceptions = exceptions if exceptions else (Exception,)

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except self.exceptions as err:
                self._log(f"[SUPPRESS] Suppressed {type(err).__name__} in {func.__name__}: {err}")
            return None
        return wrapper

    @staticmethod
    def usage_example() -> str:
        return """
# Suppress Example
@Suppress(ZeroDivisionError, ValueError, verbose=True)
def safe_divide(a, b):
    return a / b

result = safe_divide(10, 0)
# Expected output:
# [SUPPRESS] Suppressed ZeroDivisionError in safe_divide: division by zero
"""
903
+
904
class CircuitBreaker(DecoratorBase):
    """Circuit breaker pattern for fault tolerance.

    Per decorated function (keyed by name) the breaker tracks one of three
    states:

    - ``CLOSED``: calls pass through, failures are counted.
    - ``OPEN``: calls are rejected with ``RuntimeError`` until
      ``recovery_timeout`` seconds have elapsed since the last failure.
    - ``HALF_OPEN``: a single trial call is allowed; success resets the
      breaker, failure re-opens it.
    """

    def __init__(self, failure_threshold: int = 5, recovery_timeout: float = 30.0,
                 exceptions: Tuple[Exception] = (Exception,), verbose: bool = True):
        super().__init__(verbose)
        self.failure_threshold = failure_threshold
        self.recovery_timeout = recovery_timeout
        self.exceptions = exceptions
        # Guards all mutations of func_states across threads.
        self.lock = threading.Lock()
        self.func_states = defaultdict(lambda: {
            "state": "CLOSED",
            "failure_count": 0,
            "last_failure_time": 0
        })

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            with self.lock:
                state_info = self.func_states[func.__name__]
                current_time = time_module.monotonic()

                if state_info["state"] == "OPEN":
                    if current_time - state_info["last_failure_time"] > self.recovery_timeout:
                        state_info["state"] = "HALF_OPEN"
                        self._log(f"[CIRCUIT] {func.__name__} moved to HALF_OPEN state")
                    else:
                        self._log(f"[CIRCUIT] {func.__name__} blocked (OPEN state)")
                        raise RuntimeError("Circuit breaker is OPEN")

            try:
                result = func(*args, **kwargs)

                with self.lock:
                    if state_info["state"] == "HALF_OPEN":
                        self._log(f"[CIRCUIT] {func.__name__} succeeded in HALF_OPEN state, resetting")
                        self._reset(func.__name__)
                return result
            except self.exceptions:
                with self.lock:
                    state_info["failure_count"] += 1
                    state_info["last_failure_time"] = time_module.monotonic()

                    if state_info["failure_count"] >= self.failure_threshold:
                        state_info["state"] = "OPEN"
                        self._log(f"[CIRCUIT] {func.__name__} moved to OPEN state "
                                  f"({state_info['failure_count']} failures)")

                    if state_info["state"] == "HALF_OPEN":
                        state_info["state"] = "OPEN"
                        self._log(f"[CIRCUIT] {func.__name__} failed in HALF_OPEN state, moving to OPEN")
                # BUG FIX: bare ``raise`` instead of ``raise e`` preserves the
                # original traceback.
                raise

        def reset():
            with self.lock:
                self._reset(func.__name__)

        wrapper.reset = reset
        # BUG FIX: the original assigned ``property(...)`` to a function
        # attribute; properties only work on classes, so ``wrapper.state``
        # yielded an unusable property object (whose getter also took no
        # arguments).  Expose the live state as a zero-argument callable.
        wrapper.state = lambda: self.func_states[func.__name__]["state"]
        return wrapper

    def _reset(self, func_name: str):
        """Return a function's breaker to CLOSED with a zeroed failure count.

        Caller must hold ``self.lock``.
        """
        state_info = self.func_states[func_name]
        state_info["state"] = "CLOSED"
        state_info["failure_count"] = 0
        self._log(f"[CIRCUIT] {func_name} circuit reset")

    @staticmethod
    def usage_example() -> str:
        return """
        # CircuitBreaker Example
        @CircuitBreaker(failure_threshold=3, recovery_timeout=10, verbose=True)
        def unstable_service():
            if random.random() > 0.3:
                raise ConnectionError("Service unavailable")
            return "Success"

        for i in range(5):
            try:
                print(unstable_service())
            except Exception as e:
                print(f"Error: {e}")
            time_module.sleep(1)
        # Expected output: Shows circuit state transitions
        """
989
+
990
class Timeout(DecoratorBase):
    """Abort a call that exceeds ``seconds`` using a watchdog thread.

    The wrapped function runs in a daemon thread; if it has not signalled
    completion within the allotted time, ``exception`` is raised in the
    caller.  Note the worker thread cannot be killed and may continue
    running in the background after the timeout fires.
    """

    def __init__(self, seconds: float, exception: Exception = TimeoutError,
                 verbose: bool = True):
        super().__init__(verbose)
        self.seconds = seconds
        self.exception = exception

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            self._log(f"[TIMEOUT] Setting {self.seconds}s timeout for {func.__name__}")

            outcome = {"result": None, "error": None}
            done = threading.Event()

            def runner():
                try:
                    outcome["result"] = func(*args, **kwargs)
                except Exception as exc:
                    outcome["error"] = exc
                finally:
                    # Always signal, even on failure, so the caller wakes up.
                    done.set()

            worker = threading.Thread(target=runner, daemon=True)
            worker.start()
            done.wait(self.seconds)

            if not done.is_set():
                self._log(f"[TIMEOUT] {func.__name__} timed out after {self.seconds}s")
                raise self.exception(f"Function {func.__name__} timed out after {self.seconds} seconds")

            if outcome["error"]:
                raise outcome["error"]

            return outcome["result"]
        return wrapper

    @staticmethod
    def usage_example() -> str:
        return """
        # Timeout Example
        @Timeout(seconds=1.5, verbose=True)
        def long_running_task():
            time_module.sleep(2)
            return "Completed"

        try:
            result = long_running_task()
        except TimeoutError as e:
            print(e)
        # Expected output:
        # [TIMEOUT] Setting 1.5s timeout for long_running_task
        # [TIMEOUT] long_running_task timed out after 1.5s
        # Function long_running_task timed out after 1.5 seconds
        """
1050
+
1051
+ ##############################
1052
+ # 3. Caching & Memoization
1053
+ ##############################
1054
+
1055
class Memoize(DecoratorBase):
    """LRU-based memoization that also reports cache statistics.

    Delegates the caching itself to ``functools.lru_cache`` and, when
    verbose, logs hit/miss/size counters after every call.  The wrapper
    re-exports ``cache_info`` and ``cache_clear``.
    """

    def __init__(self, maxsize: int = 128, verbose: bool = True):
        super().__init__(verbose)
        self.maxsize = maxsize

    def __call__(self, func: Callable) -> Callable:
        cached = lru_cache(maxsize=self.maxsize)(func)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            value = cached(*args, **kwargs)
            if self.verbose:
                stats = cached.cache_info()
                self._log(f"[MEMOIZE] {func.__name__} cache: "
                          f"hits={stats.hits}, misses={stats.misses}, "
                          f"size={stats.currsize}/{self.maxsize}")
            return value

        # Pass through lru_cache's introspection/maintenance API.
        wrapper.cache_info = cached.cache_info
        wrapper.cache_clear = cached.cache_clear
        return wrapper

    @staticmethod
    def usage_example() -> str:
        return """
        # Memoize Example
        @Memoize(maxsize=100, verbose=True)
        def fibonacci(n):
            if n <= 1:
                return n
            return fibonacci(n-1) + fibonacci(n-2)

        print(fibonacci(10))
        # Expected output: Shows cache usage information
        """
1091
+
1092
class MemoizeWithTTL(DecoratorBase):
    """Memoization with time-based expiration.

    Results are cached under an md5 hash of ``(func name, args, kwargs)``.
    A cache hit refreshes the entry's timestamp (sliding expiration).  When
    the cache is full, the entry with the oldest timestamp is evicted.
    """

    def __init__(self, ttl: float = 60, maxsize: int = 128, verbose: bool = True):
        super().__init__(verbose)
        self.ttl = ttl
        self.maxsize = maxsize
        self.cache = {}
        self.timestamps = {}
        # Serializes cache/timestamps access across threads.
        self.lock = threading.Lock()

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            key = self._make_key(func, args, kwargs)
            current_time = time_module.monotonic()

            with self.lock:
                # Check cache and TTL
                if key in self.cache:
                    if current_time - self.timestamps[key] < self.ttl:
                        # Sliding expiration: a hit renews the entry.
                        self.timestamps[key] = current_time
                        self._log(f"[MEMOTTL] Cache hit for {func.__name__}")
                        return self.cache[key]
                    else:
                        # Remove expired
                        del self.cache[key]
                        del self.timestamps[key]

                # Apply maxsize before inserting a new entry.
                if len(self.cache) >= self.maxsize:
                    oldest_key = min(self.timestamps, key=self.timestamps.get)
                    self._log(f"[MEMOTTL] Evicting oldest key for {func.__name__}")
                    del self.cache[oldest_key]
                    del self.timestamps[oldest_key]

            # Compute outside the lock so slow functions don't block others.
            result = func(*args, **kwargs)

            with self.lock:
                self.cache[key] = result
                # BUG FIX: timestamp with a *fresh* monotonic reading.  The
                # original reused ``current_time`` captured before the call,
                # so a slow computation silently shortened the effective TTL.
                self.timestamps[key] = time_module.monotonic()
                self._log(f"[MEMOTTL] Cached result for {func.__name__} (TTL: {self.ttl}s)")

            return result

        def clear_cache():
            with self.lock:
                self.cache.clear()
                self.timestamps.clear()
                self._log(f"[MEMOTTL] Cleared cache for {func.__name__}")

        wrapper.clear_cache = clear_cache
        return wrapper

    def _make_key(self, func, args, kwargs):
        """Hash the call signature into a stable cache key."""
        return hashlib.md5(pickle.dumps((func.__name__, args, kwargs))).hexdigest()

    @staticmethod
    def usage_example() -> str:
        return """
        # MemoizeWithTTL Example
        @MemoizeWithTTL(ttl=5, maxsize=100, verbose=True)
        def get_weather(city):
            print(f"Fetching weather for {city}...")
            return {"city": city, "temp": random.randint(10, 30)}

        # First call - fetches
        weather1 = get_weather("London")
        # Second call within 5s - cached
        weather2 = get_weather("London")
        # After 5s - refetches
        time_module.sleep(6)
        weather3 = get_weather("London")
        # Expected output: Shows cache hits and misses
        """
1167
+
1168
class MemoizeDisk(DecoratorBase):
    """Disk-based memoization with pickle file storage.

    Each result is pickled to ``<cache_dir>/<md5(call)>.pkl``.  Only files
    written during this process's lifetime are tracked for eviction, so a
    pre-existing cache directory is read from but never evicted.

    NOTE(review): results are loaded with ``pickle.load`` — the cache
    directory must be trusted, as unpickling arbitrary files executes code.
    """

    def __init__(self, cache_dir: str = ".cache", max_size: int = 100, verbose: bool = True):
        super().__init__(verbose)
        self.cache_dir = cache_dir
        self.max_size = max_size
        os.makedirs(cache_dir, exist_ok=True)
        # FIFO record of cache files created by this instance.
        self.cache_files = []

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            key = self._make_key(func, args, kwargs)
            cache_file = os.path.join(self.cache_dir, f"{key}.pkl")

            # Check cache
            if os.path.exists(cache_file):
                self._log(f"[MEMODISK] Cache hit for {func.__name__}")
                with open(cache_file, "rb") as f:
                    return pickle.load(f)

            # Compute and save
            result = func(*args, **kwargs)

            with open(cache_file, "wb") as f:
                pickle.dump(result, f)

            self.cache_files.append(cache_file)

            # Apply max size
            if len(self.cache_files) > self.max_size:
                oldest = self.cache_files.pop(0)
                # BUG FIX: tolerate the file having been removed already
                # (e.g. by clear_cache() or an external cleanup); the
                # original os.remove would raise FileNotFoundError here.
                try:
                    os.remove(oldest)
                except FileNotFoundError:
                    pass
                self._log(f"[MEMODISK] Evicted oldest cache file: {os.path.basename(oldest)}")

            return result
        return wrapper

    def _make_key(self, func, args, kwargs):
        """Hash the call signature into a stable filename-safe key."""
        return hashlib.md5(pickle.dumps((func.__name__, args, kwargs))).hexdigest()

    def clear_cache(self):
        """Delete every file in the cache directory and reset tracking."""
        for f in os.listdir(self.cache_dir):
            os.remove(os.path.join(self.cache_dir, f))
        self.cache_files = []
        self._log("[MEMODISK] Disk cache cleared")

    @staticmethod
    def usage_example() -> str:
        return """
        # MemoizeDisk Example
        @MemoizeDisk(cache_dir=".math_cache", verbose=True)
        def expensive_calculation(x, y):
            print("Calculating...")
            time_module.sleep(1)
            return x ** y

        # First call - slow
        result1 = expensive_calculation(2, 10)
        # Second call - fast from disk cache
        result2 = expensive_calculation(2, 10)
        # Expected output:
        # [MEMODISK] Cache hit for expensive_calculation
        """
1232
+
1233
class Idempotent(DecoratorBase):
    """Ensure function idempotency with key-based result caching.

    While an entry is younger than ``ttl`` seconds, repeated calls with the
    same key return the cached result instead of re-executing.  The key is
    either ``key_func(*args, **kwargs)`` or a sha256 of the call signature.
    """

    def __init__(self, key_func: Callable = None, ttl: float = 60, verbose: bool = True):
        super().__init__(verbose)
        self.key_func = key_func
        self.ttl = ttl
        self.results = {}
        self.timestamps = {}
        # Protects results/timestamps across threads.
        self.lock = threading.Lock()

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            key = self._make_key(func, args, kwargs)

            with self.lock:
                now = time_module.monotonic()
                cached = self.results.get(key)
                if key in self.results:
                    if now - self.timestamps[key] < self.ttl:
                        self._log(f"[IDEMPOTENT] Returning cached result for {func.__name__}")
                        return cached
                    # Entry expired: drop it so it can be recomputed.
                    del self.results[key]
                    del self.timestamps[key]

            # Execute outside the lock, then record the fresh result.
            result = func(*args, **kwargs)

            with self.lock:
                self.results[key] = result
                self.timestamps[key] = time_module.monotonic()
                self._log(f"[IDEMPOTENT] Cached result for {func.__name__} (TTL: {self.ttl}s)")

            return result
        return wrapper

    def _make_key(self, func, args, kwargs):
        """Build the idempotency key: custom key_func or signature digest."""
        if self.key_func:
            return self.key_func(*args, **kwargs)
        return hashlib.sha256(pickle.dumps((func.__name__, args, kwargs))).hexdigest()

    def clear_cache(self):
        """Drop every cached result and timestamp."""
        with self.lock:
            self.results.clear()
            self.timestamps.clear()
            self._log("[IDEMPOTENT] Cache cleared")

    @staticmethod
    def usage_example() -> str:
        return """
        # Idempotent Example
        @Idempotent(key_func=lambda user_id: f"user_{user_id}", ttl=300, verbose=True)
        def update_user_profile(user_id, data):
            print(f"Updating profile for user {user_id}")
            return {"status": "success"}

        # First call - executes
        update_user_profile(123, {"name": "John"})
        # Second call - returns cached result
        update_user_profile(123, {"name": "John"})
        # Expected output:
        # [IDEMPOTENT] Returning cached result for update_user_profile
        """
1298
+
1299
+ ##############################
1300
+ # 4. Logging & Debugging
1301
+ ##############################
1302
class Log(DecoratorBase):
    """
    Logging decorator with comprehensive logging capabilities.

    Supports: function calls, results, errors, execution time, variable
    tracking, debug statements, warnings, and custom log messages.
    """

    def __init__(
        self,
        fpath: str,
        level: int = logging.INFO,
        format: str = None,
        verbose: bool = True,
        log_args: bool = True,
        log_return: bool = True,
        log_time: bool = True,
        log_errors: bool = True,
        log_debug: bool = False,
        max_file_size: int = 10 * 1024 * 1024,  # 10MB
        backup_count: int = 5,
        mode: str = "a",
        encoding: str = "utf-8",
    ):
        """
        Args:
            fpath: Log file path
            level: Logging level
            format: Log message format
            verbose: Print to console
            log_args: Log function arguments
            log_return: Log return values
            log_time: Log execution time
            log_errors: Log exceptions
            log_debug: Enable debug logging within function
            max_file_size: Maximum log file size before rotation (bytes)
            backup_count: Number of backup files to keep
            mode: File open mode
            encoding: File encoding
        """
        super().__init__(verbose)
        self.fpath = fpath
        self.level = level
        self.format = format or "%(asctime)s - %(levelname)s - %(name)s - %(message)s"
        self.log_args = log_args
        self.log_return = log_return
        self.log_time = log_time
        self.log_errors = log_errors
        self.log_debug = log_debug
        self.mode = mode
        self.encoding = encoding

        Path(fpath).parent.mkdir(parents=True, exist_ok=True)

        self.logger = self._setup_logger(max_file_size, backup_count)
        self._function_loggers: Dict[str, logging.Logger] = {}
        # BUG FIX: one shared thread-local per decorator instance.  The
        # original instantiated a brand-new threading.local() inside every
        # wrapped call (and yet another in the finally/getter), so
        # get_log_context() always returned None and cleanup never ran.
        self._thread_local = threading.local()

    def _setup_logger(self, max_file_size: int, backup_count: int) -> logging.Logger:
        """Setup logger with file handler and rotation"""
        logger_name = f"Log4Jeff{hash(self.fpath)}"
        logger = logging.getLogger(logger_name)
        logger.setLevel(self.level)

        # Remove existing handlers to avoid duplicates
        for handler in logger.handlers[:]:
            logger.removeHandler(handler)

        # Use RotatingFileHandler for log rotation
        handler = logging.handlers.RotatingFileHandler(
            self.fpath,
            maxBytes=max_file_size,
            backupCount=backup_count,
            mode=self.mode,
            encoding=self.encoding,
        )

        formatter = logging.Formatter(self.format)
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        # Keep records out of the root logger to avoid double emission.
        logger.propagate = False

        return logger

    def _get_function_logger(self, func: Callable) -> logging.Logger:
        """Get or create a dedicated logger for the function"""
        func_id = f"{func.__module__}.{func.__name__}"
        if func_id not in self._function_loggers:
            func_logger = logging.getLogger(func_id)
            func_logger.setLevel(self.level)
            # Share the rotating file handler with the main logger.
            func_logger.handlers = self.logger.handlers
            func_logger.propagate = False
            self._function_loggers[func_id] = func_logger
        return self._function_loggers[func_id]

    def log_call(self, func: Callable, args: tuple, kwargs: dict) -> None:
        """Log function call with arguments"""
        if self.log_args:
            arg_str = self._format_arguments(func, args, kwargs)
            self.logger.info(f"CALL: {func.__name__}({arg_str})")
        else:
            self.logger.info(f"CALL: {func.__name__}()")

    def log_success(self, func: Callable, result: Any, duration: float) -> None:
        """Log successful function execution"""
        messages = []
        if self.log_time:
            messages.append(f"{duration:.6f}s")
        if self.log_return:
            result_str = self._format_value(result, max_length=200)
            messages.append(f"RESULT: {result_str}")

        if messages:
            self.logger.info(f"{func.__name__} - {' | '.join(messages)}")
        else:
            self.logger.info(f"{func.__name__} completed")

    def log_error(self, func: Callable, error: Exception, duration: float) -> None:
        """Log function error"""
        if self.log_errors:
            error_msg = f"ERROR: {func.__name__} -> {type(error).__name__}: {str(error)}"
            if self.log_time:
                error_msg += f" (after {duration:.6f}s)"
            self.logger.error(error_msg, exc_info=True)

    def log_debug_message(self, func: Callable, message: str, *args, **kwargs) -> None:
        """Log debug message from within the function"""
        if self.log_debug:
            func_logger = self._get_function_logger(func)
            func_logger.debug(f"{func.__name__} - {message}", *args, **kwargs)

    def log_info_message(self, func: Callable, message: str, *args, **kwargs) -> None:
        """Log info message from within the function"""
        func_logger = self._get_function_logger(func)
        func_logger.info(f"{func.__name__} - {message}", *args, **kwargs)

    def log_warning_message(
        self, func: Callable, message: str, *args, **kwargs
    ) -> None:
        """Log warning message from within the function"""
        func_logger = self._get_function_logger(func)
        func_logger.warning(f"{func.__name__} - {message}", *args, **kwargs)

    def log_variable(
        self, func: Callable, var_name: str, value: Any, level: str = "DEBUG"
    ) -> None:
        """Log variable value from within the function"""
        # DEBUG-level variable logging is gated on the log_debug switch.
        if level.upper() == "DEBUG" and not self.log_debug:
            return

        value_str = self._format_value(value)
        func_logger = self._get_function_logger(func)
        log_method = getattr(func_logger, level.lower(), func_logger.debug)
        log_method(f"{func.__name__} - {var_name} = {value_str}")

    def _format_arguments(self, func: Callable, args: tuple, kwargs: dict) -> str:
        """Format function arguments for logging"""
        try:
            sig = inspect.signature(func)
            bound_args = sig.bind(*args, **kwargs)
            bound_args.apply_defaults()

            arg_parts = []
            for param_name, param_value in bound_args.arguments.items():
                value_str = self._format_value(param_value)
                arg_parts.append(f"{param_name}={value_str}")

            return ", ".join(arg_parts)
        except (ValueError, TypeError):
            # Fallback if signature binding fails
            arg_parts = [self._format_value(arg) for arg in args]
            arg_parts.extend(f"{k}={self._format_value(v)}" for k, v in kwargs.items())
            return ", ".join(arg_parts)

    def _format_value(self, value: Any, max_length: int = 100) -> str:
        """Format a value for logging with length limits"""
        try:
            if value is None:
                return "None"
            elif isinstance(value, (int, float, bool)):
                return str(value)
            elif isinstance(value, str):
                if len(value) > max_length:
                    return f"'{value[:max_length]}...' ({len(value)} chars)"
                return f"'{value}'"
            elif isinstance(value, (list, tuple, set)):
                if len(value) > 5:  # Limit collection items
                    return f"{type(value).__name__}[{len(value)}]"
                return str(value)
            elif isinstance(value, dict):
                if len(value) > 3:  # Limit dict items
                    return f"dict[{len(value)}]"
                return str(value)
            else:
                type_name = type(value).__name__
                return f"<{type_name} object at {id(value):x}>"
        except Exception:
            return "<unrepresentable object>"

    def __call__(self, func: Callable) -> Callable:
        func_logger = self._get_function_logger(func)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Log function call
            self.log_call(func, args, kwargs)

            # Execute function with timing
            start_time = time_module.perf_counter()

            # Context object for in-function logging
            log_context = {
                'debug': lambda msg, *a, **kw: self.log_debug_message(func, msg, *a, **kw),
                'info': lambda msg, *a, **kw: self.log_info_message(func, msg, *a, **kw),
                'warning': lambda msg, *a, **kw: self.log_warning_message(func, msg, *a, **kw),
                'variable': lambda name, value, level='DEBUG': self.log_variable(func, name, value, level),
                'logger': func_logger
            }

            # BUG FIX: store the context on the decorator's shared
            # thread-local (not a fresh threading.local() per call) so
            # wrapper.get_log_context() can actually retrieve it.
            self._thread_local.current_log_context = log_context
            try:
                result = func(*args, **kwargs)
                duration = time_module.perf_counter() - start_time

                # Log successful execution
                self.log_success(func, result, duration)

                self._log(
                    f"[Log] Comprehensive logging completed for {func.__name__} in {self.fpath}"
                )
                return result

            except Exception as e:
                duration = time_module.perf_counter() - start_time
                self.log_error(func, e, duration)
                raise
            finally:
                # Clean up thread-local storage for this thread.
                if hasattr(self._thread_local, 'current_log_context'):
                    del self._thread_local.current_log_context

        # Add logging methods as attributes to the wrapper function
        # These can be called from outside the function or within using the function name
        wrapper.log_debug = lambda msg, *a, **kw: self.log_debug_message(func, msg, *a, **kw)
        wrapper.log_info = lambda msg, *a, **kw: self.log_info_message(func, msg, *a, **kw)
        wrapper.log_warning = lambda msg, *a, **kw: self.log_warning_message(func, msg, *a, **kw)
        wrapper.log_variable = lambda name, value, level="DEBUG": self.log_variable(func, name, value, level)
        wrapper.get_logger = lambda: func_logger

        # Retrieve the current thread's log context (valid while the
        # wrapped function is executing on this thread; None otherwise).
        wrapper.get_log_context = lambda: getattr(self._thread_local, 'current_log_context', None)

        return wrapper

    @staticmethod
    def usage_example() -> str:
        return """
        # Log Example - Comprehensive logging capabilities

        @Log(
            fpath="app.log",
            level=logging.DEBUG,
            verbose=True,
            log_args=True,
            log_return=True,
            log_time=True,
            log_errors=True,
            log_debug=True  # Enable debug logging within function
        )
        def process_data(data, threshold=10):
            # Method 1: Use the function's own methods (recommended)
            process_data.log_debug("Starting data processing")
            process_data.log_variable("input_data", data)
            process_data.log_variable("threshold", threshold)

            if len(data) > threshold:
                process_data.log_warning("Data size exceeds threshold")

            # Method 2: Access via thread-local context (alternative)
            log_ctx = process_data.get_log_context()
            if log_ctx:
                log_ctx['info']("Processing data...")

            # Process data
            result = [x * 2 for x in data if x > 0]
            process_data.log_variable("result_size", len(result))

            process_data.log_info("Data processing completed successfully")
            return result

        # Usage
        data = [1, 2, 3, 4, 5, -1, -2]
        result = process_data(data, threshold=5)

        # You can also log from outside the function:
        process_data.log_info("This log comes from outside the function execution")

        # Expected log file content:
        # 2023-10-15 12:00:00 - INFO - CALL: process_data(data=[1, 2, 3, 4, 5, -1, -2], threshold=5)
        # 2023-10-15 12:00:00 - DEBUG - process_data - Starting data processing
        # 2023-10-15 12:00:00 - DEBUG - process_data - input_data = [1, 2, 3, 4, 5, -1, -2]
        # 2023-10-15 12:00:00 - DEBUG - process_data - threshold = 5
        # 2023-10-15 12:00:00 - WARNING - process_data - Data size exceeds threshold
        # 2023-10-15 12:00:00 - INFO - process_data - Processing data...
        # 2023-10-15 12:00:00 - DEBUG - process_data - result_size = 5
        # 2023-10-15 12:00:00 - INFO - process_data - Data processing completed successfully
        # 2023-10-15 12:00:00 - INFO - process_data - 0.000123s | RESULT: [2, 4, 6, 8, 10]
        # 2023-10-15 12:00:00 - INFO - process_data - This log comes from outside the function execution
        """
1613
class Debug:
    """Comprehensive debugging: logs arguments, return value, timing, and
    exceptions for both sync and async callables via the module's logger."""

    def __init__(self, log_args: bool = True, log_return: bool = True,
                 log_time: bool = True, log_level: int = logging.DEBUG,
                 verbose: bool = True):
        self.log_args = log_args
        self.log_return = log_return
        self.log_time = log_time
        self.log_level = log_level
        self.verbose = verbose

    def __call__(self, func: Callable) -> Callable:
        logger = logging.getLogger(func.__module__)

        if inspect.iscoroutinefunction(func):
            @functools.wraps(func)
            async def async_wrapper(*args, **kwargs):
                return await self._execute_async(func, args, kwargs, logger)
            return async_wrapper

        @functools.wraps(func)
        def sync_wrapper(*args, **kwargs):
            return self._execute_sync(func, args, kwargs, logger)
        return sync_wrapper

    # -- shared logging helpers (identical messages for sync/async paths) --

    def _log_call(self, func, args, kwargs, logger):
        """Emit the pre-call line with the formatted argument list."""
        if self.log_args:
            arg_str = ", ".join([repr(a) for a in args] +
                                [f"{k}={v!r}" for k, v in kwargs.items()])
            logger.log(self.log_level, f"[DEBUG] Calling {func.__name__}({arg_str})")

    def _log_failure(self, func, exc, elapsed, logger):
        """Emit the exception line including the elapsed time."""
        logger.log(self.log_level,
                   f"[DEBUG] {func.__name__} raised {type(exc).__name__} after "
                   f"{elapsed:.4f}s: {exc}")

    def _log_outcome(self, func, result, elapsed, logger):
        """Emit the return-value and timing lines, as configured."""
        if self.log_return:
            logger.log(self.log_level, f"[DEBUG] {func.__name__} returned: {result!r}")
        if self.log_time:
            logger.log(self.log_level,
                       f"[DEBUG] {func.__name__} executed in {elapsed:.4f}s")

    async def _execute_async(self, func, args, kwargs, logger):
        """Execute and log an async function"""
        self._log_call(func, args, kwargs, logger)

        started = time_module.perf_counter()
        try:
            result = await func(*args, **kwargs)
        except Exception as e:
            self._log_failure(func, e, time_module.perf_counter() - started, logger)
            raise

        self._log_outcome(func, result, time_module.perf_counter() - started, logger)
        return result

    def _execute_sync(self, func, args, kwargs, logger):
        """Execute and log a sync function"""
        self._log_call(func, args, kwargs, logger)

        started = time_module.perf_counter()
        try:
            result = func(*args, **kwargs)
        except Exception as e:
            self._log_failure(func, e, time_module.perf_counter() - started, logger)
            raise

        self._log_outcome(func, result, time_module.perf_counter() - started, logger)
        return result

    @staticmethod
    def usage_example() -> str:
        return """
        # Debug Example
        @Debug(log_args=True, log_return=True, log_time=True, verbose=True)
        def complex_calculation(a, b):
            time_module.sleep(0.2)
            return a * b + a / b

        result = complex_calculation(10, 5)
        # Expected output: Detailed debug logs
        """
1713
+
1714
+ ##############################
1715
+ # 5. Concurrency & Threading
1716
+ ##############################
1717
class Threaded(DecoratorBase):
    """Run the decorated function (sync or async) in a background thread.

    Recommended for: logging, API calls, file operations, scheduled tasks.
    Use with care for: math / data-heavy work (prefer a process pool).
    Not suitable for: functions whose return value is needed immediately.

    Parameters
    ----------
    daemon : bool, default True
        If True the spawned thread is a daemon thread; daemon threads do
        not prevent the main program from exiting.
    verbose : bool, default True
        If True, thread start-up information is emitted via ``self._log``.

    Returns
    -------
    Callable
        A wrapper that launches the original function in a background
        thread and returns the ``threading.Thread`` object.

    Notes
    -----
    - Async functions are executed on a fresh event loop inside the new
      thread.
    - The threaded function's return value is discarded (use a shared
      variable or queue to collect results).
    - Exceptions raised inside the thread do not propagate to the main
      thread; they are logged and their traceback printed.
    """

    def __init__(self, daemon: bool = True, verbose: bool = True):
        super().__init__(verbose)
        self.daemon = daemon

    def __call__(self, func: Callable) -> Callable:
        if asyncio.iscoroutinefunction(func):
            # Async target: needs its own event loop in the worker thread.
            @functools.wraps(func)
            def async_wrapper(*args, **kwargs) -> threading.Thread:
                self._log(f"Starting thread for async function {func.__name__}")
                thread = threading.Thread(
                    target=self._run_async_func,
                    args=(func, args, kwargs),
                    daemon=self.daemon,
                    name=f"Threaded-{func.__name__}"
                )
                thread.start()
                return thread
            return async_wrapper
        else:
            # Sync target.
            @functools.wraps(func)
            def sync_wrapper(*args, **kwargs) -> threading.Thread:
                self._log(f"Starting thread for function {func.__name__}")
                # BUG FIX: route through _safe_run_sync so exceptions raised
                # in the worker thread are logged.  The helper existed but
                # was dead code (target was the raw func), leaving the sync
                # path without the protection the async path already had.
                thread = threading.Thread(
                    target=self._safe_run_sync,
                    args=(func, args, kwargs),
                    daemon=self.daemon,
                    name=f"Threaded-{func.__name__}"
                )
                thread.start()
                return thread
            return sync_wrapper

    def _safe_run_sync(self, func: Callable, args, kwargs) -> None:
        """Safe execution wrapper for sync functions: log, never propagate."""
        try:
            func(*args, **kwargs)
        except Exception as e:
            self._log(f"Threaded function {func.__name__} failed: {e}")
            traceback.print_exc()

    def _run_async_func(self, func: Callable, args, kwargs) -> None:
        """Drive an async function on a private event loop in this thread."""
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            loop.run_until_complete(func(*args, **kwargs))
        except Exception as e:
            self._log(f"Async threaded function {func.__name__} failed: {e}")
            traceback.print_exc()
        finally:
            try:
                loop.close()
            except Exception:
                pass

    @staticmethod
    def usage_example() -> str:
        return """
        # 同步函数示例
        @Threaded(daemon=True, verbose=True)
        def background_task():
            print("Background task started")
            time.sleep(2)
            print("Background task completed")

        # 异步函数示例
        @Threaded(daemon=True, verbose=True)
        async def async_background_task():
            print("Async background task started")
            await asyncio.sleep(2)
            print("Async background task completed")

        # 使用
        thread1 = background_task()
        thread2 = async_background_task()
        print("Main thread continues")
        thread1.join()
        thread2.join()
        """
1841
+
1842
class Synchronized(DecoratorBase):
    """Serialize calls to the decorated function behind a shared lock."""

    def __init__(self, lock: threading.Lock = None, verbose: bool = True):
        super().__init__(verbose)
        # Fall back to a private lock when the caller supplies none.
        self.lock = lock if lock is not None else threading.Lock()

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def guarded(*args, **kwargs):
            self._log(f"[SYNC] Acquiring lock for {func.__name__}")
            with self.lock:
                self._log(f"[SYNC] Lock acquired for {func.__name__}")
                outcome = func(*args, **kwargs)
                self._log(f"[SYNC] Lock released for {func.__name__}")
            return outcome
        return guarded

    @staticmethod
    def usage_example() -> str:
        return """
        # Synchronized Example
        counter = 0
        lock = threading.Lock()

        @Synchronized(lock=lock, verbose=True)
        def increment_counter():
            global counter
            counter += 1

        threads = []
        for _ in range(5):
            t = threading.Thread(target=increment_counter)
            t.start()
            threads.append(t)

        for t in threads:
            t.join()

        print(f"Counter value: {counter}")  # Should be 5
        # Expected output: Shows lock acquisition/release
        """
1883
+
1884
class RateLimit(DecoratorBase):
    """Cap how many times a function may be called within a time window."""

    def __init__(self, calls: int = 5, period: float = 1.0, verbose: bool = True):
        super().__init__(verbose)
        self.calls = calls
        self.period = period
        # Per-function timestamp history, keyed by function name.
        self.call_times = defaultdict(list)
        self.lock = threading.Lock()

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def limited(*args, **kwargs):
            with self.lock:
                now = time_module.monotonic()
                # Keep only timestamps still inside the sliding window.
                recent = [stamp for stamp in self.call_times[func.__name__]
                          if now - stamp < self.period]

                if len(recent) >= self.calls:
                    wait_time = self.period - (now - recent[0])
                    self._log(f"[RATE_LIMIT] Blocked {func.__name__}; try again in {wait_time:.2f}s")
                    raise RuntimeError(
                        f"Too many calls to {func.__name__}; try again in {wait_time:.2f}s"
                    )

                recent.append(now)
                self.call_times[func.__name__] = recent
                self._log(f"[RATE_LIMIT] {func.__name__} called ({len(recent)}/{self.calls} in last {self.period}s)")

            return func(*args, **kwargs)
        return limited

    @staticmethod
    def usage_example() -> str:
        return """
        # RateLimit Example
        @RateLimit(calls=3, period=5, verbose=True)
        def api_request():
            return "Response"

        for i in range(5):
            try:
                print(api_request())
            except Exception as e:
                print(e)
        # Expected output: Shows rate limiting in action
        """
1932
+
1933
class Throttle(DecoratorBase):
    """Enforce a minimum interval between successive executions."""

    def __init__(self, min_interval: float = 1.0, last_result: bool = True,
                 verbose: bool = True):
        super().__init__(verbose)
        self.min_interval = min_interval
        # When throttled: True -> replay cached result, False -> return None.
        self.last_result = last_result
        self.last_time = 0
        self.last_result_value = None
        self.lock = threading.Lock()

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def throttled(*args, **kwargs):
            current_time = time_module.monotonic()

            with self.lock:
                elapsed = current_time - self.last_time

                if elapsed >= self.min_interval:
                    # Enough time has passed: actually run the function.
                    outcome = func(*args, **kwargs)
                    self.last_time = current_time
                    self.last_result_value = outcome
                    self._log(f"[THROTTLE] {func.__name__} executed after {elapsed:.2f}s")
                    return outcome

                # Too soon: replay the cached value or skip entirely.
                if self.last_result:
                    self._log(f"[THROTTLE] {func.__name__} throttled, returning last result")
                    return self.last_result_value
                self._log(f"[THROTTLE] {func.__name__} skipped (too frequent)")
                return None
        return throttled

    @staticmethod
    def usage_example() -> str:
        return """
        # Throttle Example
        @Throttle(min_interval=2.0, last_result=True, verbose=True)
        def refresh_data():
            return time_module.time()

        # First call executes
        print(refresh_data())
        # Subsequent calls within 2 seconds return last result
        time_module.sleep(1)
        print(refresh_data())
        time_module.sleep(1.5)
        print(refresh_data())  # Now executes again
        # Expected output: Shows throttling behavior
        """
1984
+
1985
class LockByResource(DecoratorBase):
    """Per-resource locking so distinct resources can proceed concurrently."""

    def __init__(self, resource_func: Callable, verbose: bool = True):
        super().__init__(verbose)
        # Maps the call's arguments to a resource identifier.
        self.resource_func = resource_func
        self.locks = defaultdict(threading.Lock)
        self.global_lock = threading.Lock()

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def guarded(*args, **kwargs):
            key = self.resource_func(*args, **kwargs)
            self._log(f"[LOCKRES] Acquiring lock for resource '{key}'")

            # Guard the lock table itself so two threads cannot race to
            # create separate locks for the same resource id.
            with self.global_lock:
                per_resource = self.locks[key]

            with per_resource:
                self._log(f"[LOCKRES] Lock acquired for resource '{key}'")
                outcome = func(*args, **kwargs)
                self._log(f"[LOCKRES] Lock released for resource '{key}'")
            return outcome
        return guarded

    @staticmethod
    def usage_example() -> str:
        return """
        # LockByResource Example
        @LockByResource(
            resource_func=lambda user_id: f"user_{user_id}",
            verbose=True
        )
        def update_user_profile(user_id, updates):
            print(f"Updating profile for user {user_id}")
            time_module.sleep(1)

        # Can update different users concurrently
        threading.Thread(target=update_user_profile, args=(1, {})).start()
        threading.Thread(target=update_user_profile, args=(2, {})).start()
        # But same user will be locked
        threading.Thread(target=update_user_profile, args=(1, {})).start()
        # Expected output: Shows resource-based locking
        """
2028
+
2029
+ ##############################
2030
+ # 6. Validation & Safety
2031
+ ##############################
2032
+
2033
class ValidateArgs(DecoratorBase):
    """Run caller-supplied predicates on arguments and the return value."""

    def __init__(self, arg_validators: Dict[str, Callable] = None,
                 result_validator: Callable = None,
                 exception: Exception = ValueError, verbose: bool = True):
        super().__init__(verbose)
        self.arg_validators = arg_validators or {}
        self.result_validator = result_validator
        # Exception type raised on any validation failure.
        self.exception = exception

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def checked(*args, **kwargs):
            # Resolve every argument (including defaults) to its name.
            bound = inspect.signature(func).bind(*args, **kwargs)
            bound.apply_defaults()

            for name, predicate in self.arg_validators.items():
                if name not in bound.arguments:
                    continue
                candidate = bound.arguments[name]
                if not predicate(candidate):
                    self._log(f"[VALIDATE] Invalid argument: {name}={candidate}")
                    raise self.exception(f"Invalid argument: {name}={candidate}")

            outcome = func(*args, **kwargs)

            if self.result_validator and not self.result_validator(outcome):
                self._log(f"[VALIDATE] Invalid result: {outcome}")
                raise self.exception(f"Invalid result: {outcome}")

            self._log(f"[VALIDATE] {func.__name__} arguments and result validated")
            return outcome
        return checked

    @staticmethod
    def usage_example() -> str:
        return """
        # ValidateArgs Example
        @ValidateArgs(
            arg_validators={
                'age': lambda x: x >= 0,
                'name': lambda x: isinstance(x, str) and x.strip() != ""
            },
            result_validator=lambda x: isinstance(x, int),
            verbose=True
        )
        def calculate_birth_year(age, name):
            return 2023 - age

        try:
            year = calculate_birth_year(-5, "Alice")
        except ValueError as e:
            print(f"Error: {e}")
        # Expected output:
        # [VALIDATE] Invalid argument: age=-5
        # Error: Invalid argument: age=-5
        """
2093
+
2094
def Kwargs(
    how: str = "pop",
    strict: bool = False,
    ignore_private: bool = True,
    keep_extra: bool = False,
    **decorator_kwargs,
):
    """Decorator factory that normalizes keyword arguments via ``handle_kwargs``.

    Parameters
    ----------
    how : str, default "pop"
        Strategy forwarded to ``handle_kwargs`` (e.g. 'pop', 'filter').
    strict : bool, default False
        Forwarded to ``handle_kwargs``; controls how unknown keywords are
        treated.
    ignore_private : bool, default True
        Forwarded to ``handle_kwargs``.
    keep_extra : bool, default False
        Forwarded to ``handle_kwargs``.
    **decorator_kwargs
        Extra options forwarded verbatim to ``handle_kwargs``.

    Usage:
        @Kwargs(how='filter')
        def my_function(**kwargs):
            ...
    """

    from ..ips import handle_kwargs

    def decorator(func: Callable):
        # functools.wraps preserves the wrapped function's metadata
        # (__name__, __doc__, signature) for introspection tools.
        # Previously this wrapper was unwrapped, losing that metadata.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Only keyword arguments are normalized; positionals pass through.
            if kwargs:
                processed_kwargs = handle_kwargs(
                    kwargs,
                    func=func,
                    how=how,
                    strict=strict,
                    ignore_private=ignore_private,
                    keep_extra=keep_extra,
                    **decorator_kwargs,
                )
                return func(*args, **processed_kwargs)
            return func(*args, **kwargs)

        return wrapper

    return decorator
2128
class TypeCheck(DecoratorBase):
    """
    Decorator: runtime type checking of function arguments and the return
    value, with a strict mode and a lenient (coercing) mode.

    Behavior
    --------
    `TypeCheck` validates argument and return types around the call:
    - In strict mode, a type mismatch raises TypeError immediately.
    - In lenient mode, a mismatched value is coerced to the expected type
      when possible (e.g. the string "5" becomes the integer 5).

    Parameters
    ----------
    arg_types : Dict[str, type], default None
        Mapping of parameter names to expected types, e.g. {'x': int, 'y': float}.

    return_type : type, default None
        Expected type of the return value; checked only when provided.

    strict : bool, default True
        - True: strict mode — raise on any mismatch.
        - False: lenient mode — attempt coercion; raise only when the
          conversion itself fails.

    verbose : bool, default True
        If True, log messages describing the checks are emitted.

    Returns
    -------
    Callable
        The wrapped function with type checking applied.

    Notes
    -----
    - Lenient-mode coercion may fail, in which case TypeError is still raised.
    - Only parameters explicitly listed in `arg_types` are checked.
    - return_type only takes effect on the function's return value.

    Example
    -------
    @TypeCheck(
        arg_types={'x': int, 'y': float},
        return_type=float,
        strict=False,
        verbose=True
    )
    def multiply(x, y):
        return x * y

    result = multiply("5", 3.2)
    # Sample output:
    # [TYPECHECK] Converted x from <class 'str'> to <class 'int'>
    # [TYPECHECK] multiply type checks passed
    """

    def __init__(self, arg_types: Dict[str, type] = None,
                 return_type: type = None, strict: bool = True,
                 verbose: bool = True):
        super().__init__(verbose)
        self.arg_types = arg_types or {}
        self.return_type = return_type
        self.strict = strict

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Bind arguments (including defaults) so they can be looked up
            # and, in lenient mode, rewritten by parameter name.
            sig = inspect.signature(func)
            bound_args = sig.bind(*args, **kwargs)
            bound_args.apply_defaults()

            for arg_name, expected_type in self.arg_types.items():
                if arg_name in bound_args.arguments:
                    value = bound_args.arguments[arg_name]
                    if not isinstance(value, expected_type):
                        if self.strict:
                            self._log(f"[TYPECHECK] Argument {arg_name} must be {expected_type}, got {type(value)}")
                            raise TypeError(
                                f"Argument {arg_name} must be {expected_type}, got {type(value)}"
                            )
                        else:
                            # Lenient mode: try to coerce the value in place.
                            try:
                                new_value = expected_type(value)
                                self._log(f"[TYPECHECK] Converted {arg_name} from {type(value)} to {expected_type}")
                                bound_args.arguments[arg_name] = new_value
                            except (TypeError, ValueError):
                                self._log(f"[TYPECHECK] Failed to convert {arg_name} to {expected_type}")
                                raise TypeError(
                                    f"Argument {arg_name} must be convertible to {expected_type}, got {type(value)}"
                                )

            # Call with the (possibly coerced) bound arguments.
            result = func(*bound_args.args, **bound_args.kwargs)

            if self.return_type and not isinstance(result, self.return_type):
                self._log(f"[TYPECHECK] Return value must be {self.return_type}, got {type(result)}")
                raise TypeError(
                    f"Return value must be {self.return_type}, got {type(result)}"
                )

            self._log(f"[TYPECHECK] {func.__name__} type checks passed")
            return result
        return wrapper

    @staticmethod
    def usage_example() -> str:
        return """
        # TypeCheck Example
        @TypeCheck(
            arg_types={'x': int, 'y': float},
            return_type=float,
            strict=False,
            verbose=True
        )
        def multiply(x, y):
            return x * y

        result = multiply("5", 3.2)  # Converts string to int
        # Expected output:
        # [TYPECHECK] Converted x from <class 'str'> to <class 'int'>
        # [TYPECHECK] multiply type checks passed
        """
2246
+
2247
class ValidateJSON(DecoratorBase):
    """Check a function's JSON-like result against a jsonschema schema."""

    def __init__(self, schema: dict, verbose: bool = True):
        super().__init__(verbose)
        self.schema = schema
        # Import lazily so jsonschema is only required when this decorator
        # is actually instantiated.
        try:
            from jsonschema import validate, ValidationError
        except ImportError:
            raise ImportError("jsonschema package is required for ValidateJSON")
        self.validate_func = validate
        self.ValidationError = ValidationError

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def checked(*args, **kwargs):
            outcome = func(*args, **kwargs)
            try:
                self.validate_func(instance=outcome, schema=self.schema)
                self._log(f"[VALIDJSON] {func.__name__} result validated against schema")
            except self.ValidationError as e:
                self._log(f"[VALIDJSON] {func.__name__} result validation failed: {e}")
                raise ValueError(f"Result validation failed: {e}") from e
            return outcome
        return checked

    @staticmethod
    def usage_example() -> str:
        return """
        # ValidateJSON Example
        # Requires: pip install jsonschema
        user_schema = {
            "type": "object",
            "properties": {
                "id": {"type": "number"},
                "name": {"type": "string"}
            },
            "required": ["id", "name"]
        }

        @ValidateJSON(schema=user_schema, verbose=True)
        def get_user(user_id):
            return {"id": user_id, "name": "John Doe"}

        user = get_user(123)
        # Expected output:
        # [VALIDJSON] get_user result validated against schema
        """
2294
+
2295
class ValidateResponse(DecoratorBase):
    """Reject a function's result when a custom predicate deems it invalid."""

    def __init__(self, validator_func: Callable, verbose: bool = True):
        super().__init__(verbose)
        self.validator_func = validator_func

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def checked(*args, **kwargs):
            outcome = func(*args, **kwargs)
            if not self.validator_func(outcome):
                self._log(f"[VALIDRESP] Response validation failed for {func.__name__}")
                raise ValueError(f"Invalid response from {func.__name__}")
            self._log(f"[VALIDRESP] Response validated for {func.__name__}")
            return outcome
        return checked

    @staticmethod
    def usage_example() -> str:
        return """
        # ValidateResponse Example
        def validate_user(user):
            return isinstance(user, dict) and 'id' in user and 'name' in user

        @ValidateResponse(validator_func=validate_user, verbose=True)
        def get_user(user_id):
            return {"id": user_id, "name": "Alice"}

        user = get_user(123)
        # Expected output:
        # [VALIDRESP] Response validated for get_user
        """
2327
+
2328
+ ##############################
2329
+ # 7. Utility & Helpers
2330
+ ##############################
2331
+
2332
class Deprecate(DecoratorBase):
    """Emit a DeprecationWarning every time the wrapped function is called."""

    def __init__(self, message: str = "This function is deprecated",
                 version: str = "future", verbose: bool = True):
        super().__init__(verbose)
        self.message = message
        self.version = version

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def warned(*args, **kwargs):
            if self.verbose:
                # stacklevel=2 points the warning at the caller, not here.
                notice = (
                    f"{func.__name__} is deprecated since version "
                    f"{self.version}: {self.message}"
                )
                warnings.warn(notice, category=DeprecationWarning, stacklevel=2)
            return func(*args, **kwargs)
        return warned

    @staticmethod
    def usage_example() -> str:
        return """
        # Deprecate Example
        @Deprecate(message="Use new_function instead", version="2.0", verbose=True)
        def old_function():
            return "Old result"

        old_function()
        # Expected output:
        # DeprecationWarning: old_function is deprecated since version 2.0: Use new_function instead
        """
2364
+
2365
class CountCalls(DecoratorBase):
    """Count how many times the decorated function has been called.

    Thread-safe: an internal lock guards the counter, so concurrent calls
    cannot lose increments. The running total is mirrored onto the wrapper
    as a plain ``calls`` int attribute, refreshed on every call.

    Parameters
    ----------
    verbose : bool, default True
        If True, log the running call count on every invocation.

    Returns
    -------
    Callable
        The wrapped function; it also carries a ``calls`` int attribute.

    Notes
    -----
    - The counter is maintained per decorator instance.
    - Designed to be safe in multi-threaded use.
    - Read ``wrapped.calls`` (an int) to get the current count.

    Example
    -------
    @CountCalls(verbose=True)
    def process_item(item):
        return item * 2

    process_item(5)
    process_item(10)
    # Sample output:
    # [COUNT] process_item called 1 times
    # [COUNT] process_item called 2 times
    """

    def __init__(self, verbose: bool = True):
        super().__init__(verbose)
        self.calls = 0
        self.lock = threading.Lock()

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            with self.lock:
                self.calls += 1
                # Mirror the count onto the wrapper so callers can read
                # wrapper.calls directly as an int.
                wrapper.calls = self.calls
                self._log(f"[COUNT] {func.__name__} called {self.calls} times")
            return func(*args, **kwargs)

        # Fix: the previous implementation did
        #     wrapper.calls = property(lambda self: self.calls)
        # but a property stored on a *function object* is never invoked —
        # reading wrapper.calls returned the property object itself, not
        # the count. Expose a live int attribute instead.
        wrapper.calls = self.calls
        return wrapper

    @staticmethod
    def usage_example() -> str:
        return """
        # CountCalls Example
        @CountCalls(verbose=True)
        def process_item(item):
            return item * 2

        process_item(5)
        process_item(10)
        # Expected output:
        # [COUNT] process_item called 1 times
        # [COUNT] process_item called 2 times
        """
2433
+
2434
class Singleton(DecoratorBase):
    """Class decorator enforcing a single shared instance."""

    def __init__(self, cls, verbose: bool = True):
        super().__init__(verbose)
        self.cls = cls
        self.instance = None
        self.lock = threading.Lock()

    def __call__(self, *args, **kwargs):
        if self.instance is not None:
            if self.verbose:
                self._log(f"[SINGLETON] Returning existing instance of {self.cls.__name__}")
            return self.instance

        with self.lock:
            # Double-checked: another thread may have created the instance
            # between the check above and acquiring the lock.
            if self.instance is None:
                self._log(f"[SINGLETON] Creating new instance of {self.cls.__name__}")
                self.instance = self.cls(*args, **kwargs)
        return self.instance

    @staticmethod
    def usage_example() -> str:
        return """
        # Singleton Example
        @Singleton
        class DatabaseConnection:
            def __init__(self):
                print("Initializing database connection")

        conn1 = DatabaseConnection()
        conn2 = DatabaseConnection()
        # Expected output:
        # [SINGLETON] Creating new instance of DatabaseConnection
        # Initializing database connection
        # [SINGLETON] Returning existing instance of DatabaseConnection
        """
2468
+
2469
class DepInject(DecoratorBase):
    """Inject default dependencies, letting call-site kwargs override them."""

    def __init__(self, dependencies: Dict[str, Any], verbose: bool = True):
        super().__init__(verbose)
        self.dependencies = dependencies

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def injected(*args, **kwargs):
            # A keyword supplied at the call site wins over the configured
            # default; it is popped so it isn't passed twice below.
            resolved_deps = {
                name: kwargs.pop(name) if name in kwargs else default
                for name, default in self.dependencies.items()
            }

            self._log(f"[DEPINJECT] Injecting dependencies for {func.__name__}: "
                      f"{', '.join(f'{k}={v}' for k, v in resolved_deps.items())}")

            return func(*args, **resolved_deps, **kwargs)
        return injected

    @staticmethod
    def usage_example() -> str:
        return """
        # DepInject Example
        class DatabaseService:
            def query(self, sql):
                return f"Results for {sql}"

        @DepInject(dependencies={"db": DatabaseService()}, verbose=True)
        def run_query(query, db):
            return db.query(query)

        result = run_query("SELECT * FROM users")
        # Expected output:
        # [DEPINJECT] Injecting dependencies for run_query: db=<__main__.DatabaseService object>
        """
2507
+
2508
class FeatureFlag(DecoratorBase):
    """Gate a function behind a feature flag, with an optional fallback."""

    def __init__(self, flag_name: str, flag_checker: Callable[[str], bool],
                 fallback_func: Optional[Callable] = None, verbose: bool = True):
        super().__init__(verbose)
        self.flag_name = flag_name
        # Callable that answers "is this flag currently on?".
        self.flag_checker = flag_checker
        self.fallback_func = fallback_func

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def gated(*args, **kwargs):
            if self.flag_checker(self.flag_name):
                self._log(f"[FEATURE] {self.flag_name} enabled, using {func.__name__}")
                return func(*args, **kwargs)
            if self.fallback_func:
                self._log(f"[FEATURE] {self.flag_name} disabled, using fallback")
                return self.fallback_func(*args, **kwargs)
            self._log(f"[FEATURE] {self.flag_name} disabled, no fallback")
            raise RuntimeError(f"Feature '{self.flag_name}' is disabled")
        return gated

    @staticmethod
    def usage_example() -> str:
        return """
        # FeatureFlag Example
        def is_feature_enabled(feature):
            return feature == "new_ui"

        def old_ui():
            return "Old UI"

        @FeatureFlag(
            "new_ui",
            is_feature_enabled,
            fallback_func=old_ui,
            verbose=True
        )
        def new_ui():
            return "New UI"

        print(new_ui())  # Output depends on feature flag
        # Expected output: Shows feature flag status
        """
2555
+
2556
class Repeat(DecoratorBase):
    """Call the wrapped function a fixed number of times, collecting results."""

    def __init__(self, times: int = 3, verbose: bool = True):
        super().__init__(verbose)
        self.times = times

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def repeated(*args, **kwargs):
            outcomes = []
            for iteration in range(1, self.times + 1):
                self._log(f"[REPEAT] {func.__name__} iteration {iteration}/{self.times}")
                outcomes.append(func(*args, **kwargs))
            return outcomes
        return repeated

    @staticmethod
    def usage_example() -> str:
        return """
        # Repeat Example
        @Repeat(times=3, verbose=True)
        def roll_dice():
            return random.randint(1, 6)

        results = roll_dice()
        print(f"Dice rolls: {results}")
        # Expected output:
        # [REPEAT] roll_dice iteration 1/3
        # [REPEAT] roll_dice iteration 2/3
        # [REPEAT] roll_dice iteration 3/3
        # Dice rolls: [4, 2, 5]
        """
2588
+
2589
class RedirectOutput(DecoratorBase):
    """Capture stdout/stderr into StringIO buffers while the function runs."""

    def __init__(self, stdout: io.StringIO = None, stderr: io.StringIO = None,
                 verbose: bool = True):
        super().__init__(verbose)
        # Caller-supplied buffers win; otherwise create fresh ones.
        self.stdout = stdout if stdout is not None else io.StringIO()
        self.stderr = stderr if stderr is not None else io.StringIO()

    def __call__(self, func: Callable) -> Callable:
        @functools.wraps(func)
        def captured(*args, **kwargs):
            self._log(f"[REDIRECT] Redirecting output for {func.__name__}")

            saved_streams = (sys.stdout, sys.stderr)
            sys.stdout, sys.stderr = self.stdout, self.stderr
            try:
                return func(*args, **kwargs)
            finally:
                # Always restore, even when the function raises.
                sys.stdout, sys.stderr = saved_streams
                self._log(f"[REDIRECT] Restored output for {func.__name__}")
        return captured

    def get_stdout(self) -> str:
        """Return everything captured on stdout so far."""
        return self.stdout.getvalue()

    def get_stderr(self) -> str:
        """Return everything captured on stderr so far."""
        return self.stderr.getvalue()

    @staticmethod
    def usage_example() -> str:
        return """
        # RedirectOutput Example
        redirect = RedirectOutput(verbose=True)

        @redirect
        def noisy_function():
            print("This goes to stdout")
            print("Error message", file=sys.stderr)

        noisy_function()

        print("Captured stdout:", redirect.get_stdout())
        print("Captured stderr:", redirect.get_stderr())
        # Expected output:
        # [REDIRECT] Redirecting output for noisy_function
        # [REDIRECT] Restored output for noisy_function
        # Captured stdout: This goes to stdout
        # Captured stderr: Error message
        """
2645
+
2646
+ # def show_progress(
2647
+ # func=None,
2648
+ # *,
2649
+ # total=None,
2650
+ # desc=None,
2651
+ # min_length=5,
2652
+ # max_length=None,
2653
+ # exclude_types=None,
2654
+ # enable=True,
2655
+ # **tqdm_kwargs
2656
+ # ):
2657
+ # """
2658
+ # Advanced version with more control over progress bar behavior.
2659
+
2660
+ # Args:
2661
+ # total: Total iterations for progress bar
2662
+ # desc: Description for progress bar
2663
+ # min_length: Minimum iterable length to show progress bar
2664
+ # max_length: Maximum iterable length to show progress bar
2665
+ # exclude_types: Additional types to exclude from wrapping
2666
+ # enable: Enable/disable progress bars
2667
+ # **tqdm_kwargs: Custom tqdm parameters
2668
+
2669
+ # Usage:
2670
+ # @show_progress(min_length=10, max_length=1000, colour='blue')
2671
+ # def filtered_processing(data):
2672
+ # for item in data: # Only shows bar if 10 <= len(data) <= 1000
2673
+ # pass
2674
+ # """
2675
+ # from functools import wraps
2676
+ # from tqdm.auto import tqdm
2677
+ # import collections.abc
2678
+
2679
+ # def _is_iterable_eligible(obj):
2680
+ # """Check if object should be wrapped with tqdm"""
2681
+ # from collections.abc import Iterable
2682
+ # EXCLUDE_TYPES = (str, bytes, dict)
2683
+
2684
+ # if not isinstance(obj, Iterable):
2685
+ # return False
2686
+ # if isinstance(obj, EXCLUDE_TYPES):
2687
+ # return False
2688
+ # return True
2689
+
2690
+
2691
+ # def _is_already_wrapped(obj):
2692
+ # """Check if object is already wrapped with tqdm"""
2693
+ # return hasattr(obj, '_tqdm_wrapped') or hasattr(obj, 'disable') or hasattr(obj, 'close')
2694
+
2695
+
2696
+ # def _is_eligible_advanced(obj, min_length, max_length, exclude_types):
2697
+ # """Advanced eligibility check for progress bar wrapping"""
2698
+ # from collections.abc import Iterable
2699
+
2700
+ # if not isinstance(obj, Iterable):
2701
+ # return False
2702
+ # if isinstance(obj, exclude_types):
2703
+ # return False
2704
+
2705
+ # # Check length constraints
2706
+ # try:
2707
+ # length = len(obj)
2708
+ # if length < min_length:
2709
+ # return False
2710
+ # if max_length and length > max_length:
2711
+ # return False
2712
+ # except (TypeError, AttributeError):
2713
+ # # Can't determine length, use default behavior
2714
+ # pass
2715
+
2716
+ # return not _is_already_wrapped(obj)
2717
+
2718
+ # def _normalize_tqdm_kwargs(kwargs):
2719
+ # """Normalize tqdm parameters to handle both color/colour spelling"""
2720
+ # # Ensure we always have a dictionary
2721
+ # if kwargs is None:
2722
+ # kwargs = {}
2723
+
2724
+ # normalized = kwargs.copy()
2725
+
2726
+ # # Handle color/colour preference
2727
+ # if 'colour' in normalized and 'color' in normalized:
2728
+ # # If both are provided, prefer 'color' (American English)
2729
+ # del normalized['color']
2730
+ # elif 'color' in normalized:
2731
+ # # Convert British 'colour' to American 'color'
2732
+ # normalized['colour'] = normalized['color']
2733
+ # del normalized['color']
2734
+
2735
+ # return normalized
2736
+
2737
+ # # Main Func
2738
+ # if exclude_types is None:
2739
+ # exclude_types = (str, bytes, dict)
2740
+
2741
+ # def decorator(f):
2742
+ # @wraps(f)
2743
+ # def wrapper(*args, **kwargs):
2744
+ # if not enable:
2745
+ # return f(*args, **kwargs)
2746
+
2747
+ # # Normalize color/colour parameter
2748
+ # normalized_kwargs = _normalize_tqdm_kwargs(tqdm_kwargs or {})
2749
+ # new_args = []
2750
+ # for i, arg in enumerate(args):
2751
+ # if _is_eligible_advanced(arg, min_length, max_length, exclude_types):
2752
+ # arg_desc = desc or f"Processing {i}"
2753
+ # new_args.append(tqdm(arg, total=total, desc=arg_desc, **normalized_kwargs))
2754
+ # else:
2755
+ # new_args.append(arg)
2756
+
2757
+ # new_kwargs = {}
2758
+ # for k, v in kwargs.items():
2759
+ # if _is_eligible_advanced(v, min_length, max_length, exclude_types):
2760
+ # arg_desc = desc or f"Processing {k}"
2761
+ # new_kwargs[k] = tqdm(v, total=total, desc=arg_desc, **normalized_kwargs)
2762
+ # else:
2763
+ # new_kwargs[k] = v
2764
+
2765
+ # return f(*new_args, **new_kwargs)
2766
+ # return wrapper
2767
+
2768
+ # if func:
2769
+ # return decorator(func)
2770
+ # return decorator
2771
+
2772
+ import inspect
2773
+ import ast
2774
+ import sys
2775
+ import types
2776
+ from functools import wraps
2777
+ from collections.abc import Iterable
2778
+ from tqdm.auto import tqdm
2779
+ import textwrap
2780
+ import dis
2781
+ import re
2782
+
2783
class ForLoopTracker:
    """Wrap an iterable with a tqdm progress bar that updates per item.

    Parameters
    ----------
    iterable : Iterable
        The underlying sequence/iterator to walk.
    pbar_params : dict
        tqdm options. 'total' and 'desc' are handled explicitly; the
        American spelling 'color' is translated to tqdm's 'colour'.
    """

    def __init__(self, iterable, pbar_params):
        self.iterable = iterable
        self.pbar_params = pbar_params
        self._iterator = None
        self.pbar = None

    def __iter__(self):
        # Prefer an explicit total; otherwise derive it from len() when
        # the iterable supports it.
        total = self.pbar_params.get('total')
        if total is None and hasattr(self.iterable, '__len__'):
            total = len(self.iterable)

        # Sanitize kwargs for tqdm: drop 'total'/'desc' (passed explicitly
        # below) and translate 'color' -> 'colour'. Fix: previously this
        # sanitized dict was built but never passed to tqdm — the raw
        # params went through instead, so a 'color' kwarg raised TypeError
        # and the translation was dead code.
        safe_params = {}
        for key, value in self.pbar_params.items():
            if key in ('total', 'desc'):
                continue
            if key == 'color':
                safe_params['colour'] = value
            else:
                safe_params[key] = value

        self.pbar = tqdm(
            total=total,
            desc=self.pbar_params.get('desc', 'Processing'),
            **safe_params
        )

        self._iterator = iter(self.iterable)
        return self

    def __next__(self):
        try:
            item = next(self._iterator)
            self.pbar.update(1)
            return item
        except StopIteration:
            # Exhausted: close the bar so the terminal line is finalized.
            self.pbar.close()
            raise
2826
+
2827
class ProgressContextManager:
    """Context manager that yields a tqdm bar and closes it on exit.

    Accepts 'color' as an alias for tqdm's 'colour' keyword.
    """

    def __init__(self, total, desc, **kwargs):
        # total/desc are forwarded to tqdm; extra kwargs are stored raw
        # and normalized only when the bar is created.
        self.total = total
        self.desc = desc
        self.kwargs = kwargs
        self.pbar = None

    def __enter__(self):
        # Translate 'color' -> 'colour' for tqdm compatibility.
        safe_kwargs = {}
        for k, v in self.kwargs.items():
            if k == 'color':
                safe_kwargs['colour'] = v
            else:
                safe_kwargs[k] = v

        # BUG FIX: the original built safe_kwargs and then passed the raw
        # self.kwargs to tqdm, so 'color' leaked through (TypeError).
        self.pbar = tqdm(total=self.total, desc=self.desc, **safe_kwargs)
        return self.pbar

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close the bar even on exceptions; never suppress them.
        if self.pbar:
            self.pbar.close()
2850
+
2851
class LoopInterceptor:
    """Decides which iterables deserve a progress bar and wraps them.

    Eligibility is driven by length bounds (min_length/max_length), an
    exclude-type blacklist, and an `_is_tracked` marker that prevents
    double-wrapping.
    """

    def __init__(self, pbar_params, min_length=2, max_length=None, exclude_types=None):
        self.pbar_params = pbar_params
        self.min_length = min_length
        self.max_length = max_length
        self.exclude_types = exclude_types or (str, bytes, dict)

    def _is_eligible_iterable(self, obj):
        """Return True when *obj* is an iterable worth tracking."""
        if obj is None or not isinstance(obj, Iterable):
            return False
        if isinstance(obj, self.exclude_types):
            return False
        if getattr(obj, '_is_tracked', False):
            return False

        # Length gate: unsized iterables are only accepted when no
        # minimum length is required.
        try:
            size = len(obj)
        except (TypeError, AttributeError):
            return not self.min_length > 0
        if size < self.min_length:
            return False
        if self.max_length and size > self.max_length:
            return False
        return True

    def _create_tracker(self, iterable):
        """Wrap *iterable* in a ForLoopTracker when eligible, else pass through."""
        if not self._is_eligible_iterable(iterable):
            return iterable
        wrapped = ForLoopTracker(iterable, self.pbar_params)
        wrapped._is_tracked = True
        return wrapped
2893
+
2894
+ def _normalize_tqdm_kwargs(kwargs):
2895
+ """Normalize tqdm parameters - use 'colour' consistently for tqdm"""
2896
+ if kwargs is None:
2897
+ kwargs = {}
2898
+
2899
+ normalized = kwargs.copy()
2900
+
2901
+ # Always use 'colour' for tqdm (British spelling)
2902
+ if 'color' in normalized:
2903
+ normalized['colour'] = normalized['color']
2904
+ del normalized['color']
2905
+
2906
+ # Remove any other potentially problematic parameters
2907
+ safe_params = ['desc', 'total', 'leave', 'ncols', 'mininterval', 'maxinterval',
2908
+ 'miniters', 'ascii', 'disable', 'unit', 'unit_scale',
2909
+ 'dynamic_ncols', 'smoothing', 'bar_format', 'initial',
2910
+ 'position', 'postfix', 'unit_divisor', 'write_bytes',
2911
+ 'lock_args', 'nrows', 'colour', 'delay', 'gui']
2912
+
2913
+ return {k: v for k, v in normalized.items() if k in safe_params}
2914
+
2915
+ def _wrap_with_runtime_interception(func, interceptor):
2916
+ """Wrap function with runtime loop interception - MOST RELIABLE APPROACH"""
2917
+ @wraps(func)
2918
+ def wrapper(*args, **kwargs):
2919
+ # Store original builtins and globals
2920
+ original_range = __builtins__['range']
2921
+ original_builtins = __builtins__.copy()
2922
+
2923
+ # Track if we've already set up interception
2924
+ if not hasattr(wrapper, '_interception_active'):
2925
+ wrapper._interception_active = False
2926
+
2927
+ if wrapper._interception_active:
2928
+ # Avoid recursive interception
2929
+ return func(*args, **kwargs)
2930
+
2931
+ wrapper._interception_active = True
2932
+
2933
+ def tracked_range(*range_args):
2934
+ result = original_range(*range_args)
2935
+ return interceptor._create_tracker(result)
2936
+
2937
+ def tracked_enumerate(iterable, start=0):
2938
+ result = enumerate(iterable, start)
2939
+ return interceptor._create_tracker(result)
2940
+
2941
+ def tracked_zip(*iterables):
2942
+ result = zip(*iterables)
2943
+ return interceptor._create_tracker(result)
2944
+
2945
+ # Create a safe execution environment
2946
+ def safe_execute():
2947
+ # Replace builtins temporarily
2948
+ original_globals = func.__globals__.copy()
2949
+
2950
+ # Create a modified globals dict
2951
+ modified_globals = func.__globals__.copy()
2952
+ modified_globals['range'] = tracked_range
2953
+ modified_globals['enumerate'] = tracked_enumerate
2954
+ modified_globals['zip'] = tracked_zip
2955
+
2956
+ # Also intercept any iterable arguments
2957
+ new_args = []
2958
+ for arg in args:
2959
+ if interceptor._is_eligible_iterable(arg):
2960
+ new_args.append(interceptor._create_tracker(arg))
2961
+ else:
2962
+ new_args.append(arg)
2963
+
2964
+ new_kwargs = {}
2965
+ for k, v in kwargs.items():
2966
+ if interceptor._is_eligible_iterable(v):
2967
+ new_kwargs[k] = interceptor._create_tracker(v)
2968
+ else:
2969
+ new_kwargs[k] = v
2970
+
2971
+ # Update function's globals temporarily
2972
+ original_globals_backup = func.__globals__.copy()
2973
+ try:
2974
+ func.__globals__.update(modified_globals)
2975
+ result = func(*new_args, **new_kwargs)
2976
+ return result
2977
+ finally:
2978
+ # Restore original globals
2979
+ func.__globals__.clear()
2980
+ func.__globals__.update(original_globals_backup)
2981
+
2982
+ try:
2983
+ return safe_execute()
2984
+ finally:
2985
+ wrapper._interception_active = False
2986
+
2987
+ wrapper.__interceptor__ = interceptor
2988
+ return wrapper
2989
+
2990
def _wrap_with_frame_interception(func, interceptor):
    """Alternative approach using frame interception"""
    # Installs a sys.settrace hook for the duration of the call; on every
    # traced line inside *func*, eligible iterables found in the frame's
    # locals are replaced with interceptor-created trackers.
    #
    # NOTE(review): writing to frame.f_locals is a no-op for function frames
    # in CPython before 3.13 (PEP 667 changed this in 3.13), so the
    # replacement below likely never takes effect there — confirm on the
    # targeted Python version. settrace also adds per-line overhead.
    @wraps(func)
    def wrapper(*args, **kwargs):
        # Use sys._getframe() to intercept at frame level
        # Remember any pre-existing trace function so it can be restored.
        original_trace = sys.gettrace()

        def trace_calls(frame, event, arg):
            # Only start line-tracing once execution enters *func* itself.
            if event == 'call' and frame.f_code == func.__code__:
                # We're inside our target function
                frame.f_trace = trace_loops
                return trace_loops
            return trace_calls

        def trace_loops(frame, event, arg):
            if event == 'line':
                # Check if we're at a for loop
                # (code/line_no are currently unused — kept for debugging)
                code = frame.f_code
                line_no = frame.f_lineno

                # Simple approach: track any iterable in locals
                for var_name, var_value in frame.f_locals.items():
                    if (interceptor._is_eligible_iterable(var_value) and
                        not hasattr(var_value, '_is_tracked')):
                        frame.f_locals[var_name] = interceptor._create_tracker(var_value)

            return trace_loops

        # Set up tracing
        sys.settrace(trace_calls)
        try:
            result = func(*args, **kwargs)
            return result
        finally:
            # Always restore the previous trace function.
            sys.settrace(original_trace)

    return wrapper
3027
+
3028
+ def _wrap_with_simple_interception(func, interceptor):
3029
+ """Simple but effective interception using argument wrapping"""
3030
+ @wraps(func)
3031
+ def wrapper(*args, **kwargs):
3032
+ # Track the first eligible iterable in arguments
3033
+ new_args = []
3034
+ found_tracker = False
3035
+
3036
+ for arg in args:
3037
+ if not found_tracker and interceptor._is_eligible_iterable(arg):
3038
+ new_args.append(interceptor._create_tracker(arg))
3039
+ found_tracker = True
3040
+ else:
3041
+ new_args.append(arg)
3042
+
3043
+ new_kwargs = {}
3044
+ for k, v in kwargs.items():
3045
+ if not found_tracker and interceptor._is_eligible_iterable(v):
3046
+ new_kwargs[k] = interceptor._create_tracker(v)
3047
+ found_tracker = True
3048
+ else:
3049
+ new_kwargs[k] = v
3050
+
3051
+ # If no iterable found in args/kwargs, use runtime interception
3052
+ if not found_tracker:
3053
+ return _wrap_with_runtime_interception(func, interceptor)(*new_args, **new_kwargs)
3054
+
3055
+ return func(*new_args, **new_kwargs)
3056
+
3057
+ wrapper.__interceptor__ = interceptor
3058
+ return wrapper
3059
+
3060
def show_progress(
    func=None,
    *,
    total=None,
    desc=None,
    min_length=2,
    max_length=None,
    exclude_types=None,
    enable=True,
    auto_detect_top_loop=True,
    interception_method="auto",  # "auto", "runtime", "simple", "frame"
    **tqdm_kwargs
):
    """Decorator that attaches a tqdm progress bar to the decorated
    function's top-level loop.

    Supports bare ``@show_progress`` and parameterized
    ``@show_progress(...)`` usage. Several interception strategies are
    available, with automatic fallback between them.

    Args:
        total: Total iterations for the progress bar.
        desc: Description shown next to the bar.
        min_length: Minimum iterable length required to show a bar.
        max_length: Maximum iterable length allowed to show a bar.
        exclude_types: Extra types never wrapped (default: str, bytes, dict).
        enable: When False the decorator is a no-op.
        auto_detect_top_loop: Track the top-level for loop automatically.
        interception_method: "auto" (recommended), "runtime", "simple",
            or "frame".
        **tqdm_kwargs: Extra tqdm options ('color' accepted for 'colour').
    """

    if exclude_types is None:
        exclude_types = (str, bytes, dict)

    def decorator(target):
        if not enable:
            return target

        # Merge explicit total/desc with the sanitized tqdm options
        # (sanitized options win, matching dict-literal spread order).
        pbar_params = {'total': total, 'desc': desc}
        pbar_params.update(_normalize_tqdm_kwargs(tqdm_kwargs))

        interceptor = LoopInterceptor(pbar_params, min_length, max_length, exclude_types)

        # Select the interception strategy.
        wants_runtime = (
            interception_method == "runtime"
            or (interception_method == "auto" and auto_detect_top_loop)
        )
        if wants_runtime:
            return _wrap_with_runtime_interception(target, interceptor)
        if interception_method == "frame":
            return _wrap_with_frame_interception(target, interceptor)
        if interception_method == "simple":
            return _wrap_with_simple_interception(target, interceptor)

        # Anything else: try runtime first, fall back to simple on failure.
        try:
            return _wrap_with_runtime_interception(target, interceptor)
        except Exception as e:
            print(f"Runtime interception failed: {e}, falling back to simple method")
            return _wrap_with_simple_interception(target, interceptor)

    # Bare @show_progress vs @show_progress(...) usage.
    return decorator if func is None else decorator(func)
3130
+
3131
+ # Separate context manager function
3132
def progress_context(total=None, desc=None, **tqdm_kwargs):
    """Context manager for progress tracking.

    Args:
        total: Expected number of ``update`` calls for the bar.
        desc: Bar description.
        **tqdm_kwargs: Extra tqdm options; 'color' is accepted as an
            alias for tqdm's 'colour'.

    Returns:
        A ProgressContextManager that yields a tqdm bar on entry.
    """
    def _normalize_tqdm_kwargs(kwargs):
        # BUG FIX: the original tested `'colour' in normalized` where it
        # meant 'color', so a lone 'color' leaked through to tqdm
        # (TypeError) and a lone 'colour' raised KeyError.
        if kwargs is None:
            kwargs = {}
        normalized = kwargs.copy()
        if 'colour' in normalized and 'color' in normalized:
            # Both given: keep the explicit 'colour' spelling.
            del normalized['color']
        elif 'color' in normalized:
            normalized['colour'] = normalized.pop('color')
        return normalized

    return ProgressContextManager(total, desc, **_normalize_tqdm_kwargs(tqdm_kwargs))
3146
+
3147
+ # Manual tracking function
3148
def track(iterable, desc=None, **kwargs):
    """Standalone helper: wrap *iterable* in a tqdm bar when eligible.

    Strings, bytes, dicts, None and non-iterables are returned unchanged.
    'color' is accepted as an alias for tqdm's 'colour'. An explicit
    ``total`` in kwargs takes precedence over ``len(iterable)``; the
    ``desc`` parameter takes precedence over a ``desc`` kwarg.
    """
    from collections.abc import Iterable

    def _is_eligible_iterable(obj):
        if obj is None:
            return False
        if not isinstance(obj, Iterable):
            return False
        if isinstance(obj, (str, bytes, dict)):
            return False
        return True

    def _normalize_tqdm_kwargs(kwargs):
        # BUG FIX: the original tested `'colour' in normalized` where it
        # meant 'color', so a lone 'color' leaked through to tqdm
        # (TypeError) and a lone 'colour' raised KeyError.
        if kwargs is None:
            kwargs = {}
        normalized = kwargs.copy()
        if 'colour' in normalized and 'color' in normalized:
            del normalized['color']
        elif 'color' in normalized:
            normalized['colour'] = normalized.pop('color')
        return normalized

    if not _is_eligible_iterable(iterable):
        return iterable

    normalized_kwargs = _normalize_tqdm_kwargs(kwargs)

    # BUG FIX: pop 'total'/'desc' out of the kwargs so they cannot be
    # passed twice to tqdm (duplicate-keyword TypeError in the original).
    total = normalized_kwargs.pop('total', None)
    if total is None and hasattr(iterable, '__len__'):
        total = len(iterable)
    if desc is None:
        desc = normalized_kwargs.pop('desc', None)
    else:
        normalized_kwargs.pop('desc', None)

    return tqdm(iterable, total=total, desc=desc, **normalized_kwargs)
3181
+
3182
# Add track method to show_progress for backward compatibility
# (callers can use show_progress.track(iterable, desc=...) directly).
show_progress.track = track
3184
+
3185
+ # # Test the improved version
3186
+ # if __name__ == "__main__":
3187
+ # import time
3188
+
3189
+ # print("=== Testing Ultimate Progress Bar ===")
3190
+
3191
+ # # Test 1: Your original case - range() created internally
3192
+ # @show_progress(desc="Internal range", color="red")
3193
+ # def process_data(data=None):
3194
+ # if data is None:
3195
+ # data = range(50) # This should be tracked!
3196
+ # results = []
3197
+ # for x in data: # Top-level for loop
3198
+ # time.sleep(0.01)
3199
+ # results.append(x * 2)
3200
+ # return results
3201
+
3202
+ # # Test 2: External iterable
3203
+ # @show_progress(desc="External data", color="green")
3204
+ # def process_external(data):
3205
+ # results = []
3206
+ # for item in data: # This should be tracked!
3207
+ # time.sleep(0.01)
3208
+ # results.append(item ** 2)
3209
+ # return results
3210
+
3211
+ # # Test 3: With enumerate
3212
+ # @show_progress(desc="With enumerate", color="blue")
3213
+ # def process_with_enumerate(items):
3214
+ # results = []
3215
+ # for i, item in enumerate(items): # This should be tracked!
3216
+ # time.sleep(0.01)
3217
+ # results.append((i, item * 3))
3218
+ # return results
3219
+
3220
+ # # Test 4: Complex case with multiple loops
3221
+ # @show_progress(desc="Complex case", color="yellow")
3222
+ # def complex_processing():
3223
+ # # Multiple data sources
3224
+ # data1 = range(30) # Should be tracked (top loop)
3225
+ # data2 = [1, 2, 3, 4, 5]
3226
+
3227
+ # results = []
3228
+ # for x in data1: # This loop tracked
3229
+ # time.sleep(0.01)
3230
+ # temp = 0
3231
+ # for y in data2: # This loop NOT tracked (inner loop)
3232
+ # temp += y
3233
+ # results.append(x + temp)
3234
+ # return results
3235
+
3236
+ # print("1. Testing internal range:")
3237
+ # result1 = process_data()
3238
+ # print(f" Result: {len(result1)} items")
3239
+
3240
+ # print("2. Testing external data:")
3241
+ # result2 = process_external(list(range(25)))
3242
+ # print(f" Result: {len(result2)} items")
3243
+
3244
+ # print("3. Testing enumerate:")
3245
+ # result3 = process_with_enumerate(['a', 'b', 'c', 'd', 'e', 'f'])
3246
+ # print(f" Result: {result3}")
3247
+
3248
+ # print("4. Testing complex case:")
3249
+ # result4 = complex_processing()
3250
+ # print(f" Result: {len(result4)} items")
3251
+
3252
+ # print("All tests completed! ✅")
3253
+
3254
+ # # Example usage and demonstration
3255
+ # if __name__ == "__main__":
3256
+
3257
+ # # Example 1: Auto-detection of top for loop
3258
+ # @show_progress(desc="Processing items")
3259
+ # def process_items(items):
3260
+ # results = []
3261
+ # for item in items: # This loop will be automatically tracked
3262
+ # results.append(item * 2)
3263
+ # return results
3264
+
3265
+ # # Example 2: Manual tracking
3266
+ # def manual_tracking(data):
3267
+ # total = 0
3268
+ # for item in track(data, desc="Manual progress"):
3269
+ # total += item
3270
+ # return total
3271
+
3272
+ # # Example 3: Context manager
3273
+ # def context_example():
3274
+ # with progress_context(total=100, desc="Context example") as pbar:
3275
+ # for i in range(100):
3276
+ # # Simulate work
3277
+ # import time
3278
+ # time.sleep(0.01)
3279
+ # pbar.update(1)
3280
+
3281
+ # # Example 4: Multiple loops (only top one tracked with auto_detect)
3282
+ # @show_progress(desc="Outer loop")
3283
+ # def nested_loops(outer_data, inner_data):
3284
+ # results = []
3285
+ # for outer in outer_data: # This one will be tracked
3286
+ # inner_result = 0
3287
+ # for inner in inner_data: # This one won't be tracked
3288
+ # inner_result += inner
3289
+ # results.append(inner_result)
3290
+ # return results
3291
+
3292
+ # # Test the examples
3293
+ # print("Testing auto-detection:")
3294
+ # result1 = process_items(list(range(10)))
3295
+ # print(f"Result: {result1}")
3296
+
3297
+ # print("\nTesting manual tracking:")
3298
+ # result2 = manual_tracking(list(range(5)))
3299
+ # print(f"Result: {result2}")
3300
+
3301
+ # print("\nTesting context manager:")
3302
+ # context_example()
3303
+
3304
+ # print("\nTesting nested loops:")
3305
+ # result3 = nested_loops(list(range(3)), list(range(2)))
3306
+ # print(f"Result: {result3}")
3307
+
3308
+ # print("\nTesting standalone track:")
3309
+ # for i in track(range(5), desc="Standalone"):
3310
+ # print(f"Processing {i}")
3311
+ # Example usage and testing
3312
+
3313
+
3314
def num_booster(func=None, *, level="auto", signature=None, cuda=False):
    """Auto-accelerating decorator (CPU + GPU CUDA).

    A "just decorate it" accelerator that automatically picks the best
    available compilation strategy, in priority order:

        1. CUDA GPU kernel           (if cuda=True and CUDA is available)
        2. numba.vectorize ufunc     (fastest CPU mode, needs `signature`)
        3. numba.njit(parallel=True) multi-threaded CPU
        4. plain numba.njit          ordinary JIT
        5. fall back to the original Python function (never raises)

    Properties:
        - never crashes: every compilation failure falls through to the
          next strategy with a warning
        - works on CPU-only machines, without Numba, or without CUDA
        - GPU/CPU detection needs no user intervention

    Args:
        level: "auto" (default) picks the best strategy; "aggressive"
            enables parallel + fastmath on the CPU path.
        signature: numba vectorize signature, e.g. "float64(float64)",
            used to build a ufunc; falls back automatically if incompatible.
        cuda: if True and CUDA is available, try a GPU kernel first
            (default False). A plain element-wise Python function is
            compiled into a CUDA kernel and launched per element; if CUDA
            is unavailable it silently switches to CPU acceleration.

    Usage:
        @num_booster
        @num_booster(level="aggressive")
        @num_booster(signature="float64(float64)")
        @num_booster(cuda=True)

    Example:
        @num_booster(signature="float64(float64)")
        def square(a):
            return a * a

        square(np.array([1, 2, 3]))
    """

    def decorator(func):

        # Numba unavailable -> return the original function untouched.
        # NOTE(review): NUMBA_AVAILABLE, CUDA_AVAILABLE, `numba` and
        # `warnings` must be defined elsewhere in this module — verify.
        if not NUMBA_AVAILABLE:
            return func

        accelerated = func  # default: the original function

        # =====================================================
        # 1. GPU CUDA kernel (if requested and CUDA is available)
        # =====================================================
        if cuda and CUDA_AVAILABLE:

            try:
                cuda_kernel = numba.cuda.jit(func)

                # Wrapper that handles host<->device transfer and kernel launch.
                @wraps(func)
                def cuda_wrapper(*args):
                    import numpy as np
                    # Copy every input to a device array.
                    d_args = [numba.cuda.to_device(np.asarray(arg)) for arg in args]
                    # Output size matches the first input array.
                    n = len(d_args[0])
                    # NOTE(review): no dtype given — numba defaults to
                    # float64 here; confirm that matches the kernel output.
                    d_out = numba.cuda.device_array(shape=n)

                    # Grid/block configuration (1-D launch).
                    threads = 128
                    blocks = (n + threads - 1) // threads

                    cuda_kernel[blocks, threads](*d_args, d_out)
                    return d_out.copy_to_host()

                accelerated = cuda_wrapper
                return accelerated

            except Exception as e:
                warnings.warn(f"[num_booster] CUDA 编译失败,尝试 CPU 模式 ({e})")

        # =====================================================
        # 2. vectorize -> auto ufunc (fastest CPU route)
        # =====================================================
        if signature is not None:
            try:
                ufunc = numba.vectorize([signature])(func)
                accelerated = ufunc
                return accelerated
            except Exception as e:
                warnings.warn(f"[num_booster] vectorize 失败,尝试 parallel ({e})")

        # =====================================================
        # 3. parallel=True, multi-threaded CPU optimization
        # =====================================================
        try:
            if level == "aggressive":
                parallel_jit = numba.njit(parallel=True, fastmath=True)
            else:
                parallel_jit = numba.njit(parallel=True)

            accelerated_parallel = parallel_jit(func)
            accelerated = accelerated_parallel
            return accelerated

        except Exception as e:
            warnings.warn(f"[num_booster] parallel jit 失败,尝试普通 njit ({e})")

        # =====================================================
        # 4. plain njit (most stable CPU acceleration)
        # NOTE(review): njit compiles lazily at first call, so failures
        # here are rare at decoration time — errors may surface later.
        # =====================================================
        try:
            accelerated_njit = numba.njit(func)
            accelerated = accelerated_njit
            return accelerated

        except Exception as e:
            warnings.warn(f"[num_booster] 普通 njit 失败,fallback 原函数 ({e})")

        # =====================================================
        # 5. fallback
        # =====================================================
        return func

    # Support both @num_booster and @num_booster(...) usage.
    if func is not None:
        return decorator(func)
    return decorator
3482
+
3483
+
3484
if __name__ == "__main__":
    # Demo: bare decorator usage (no arguments).
    @show_progress
    def process_data(data):
        results = []
        for x in data:
            # Simulate work
            for _ in range(100000):
                pass
            results.append(x * 2)
        return results

    # Custom tqdm parameters
    @show_progress(desc="Custom Progress", ncols=80, colour='red', position=0)
    def custom_processing(items):
        results = []
        for item in items:
            results.append(item ** 2)
        return results

    # Using pre-configured decorator
    # NOTE(review): `fast_progress` is not defined anywhere visible in this
    # module — running this demo raises NameError. Define it or replace
    # with a show_progress(...) preset.
    @fast_progress
    def fast_processing(data):
        for item in data:
            pass
        return len(data)

    # Advanced filtering
    # NOTE(review): `show_progress_advanced` is also undefined here — verify.
    @show_progress_advanced(min_length=5, max_length=50, desc="Filtered")
    def filtered_processing(data):
        for item in data:
            pass
        return len(data)

    print("Testing show_progress decorator with custom tqdm support...")

    print("\n1. Basic usage:")
    result1 = process_data(range(10))
    print(f"Result: {result1}")

    print("\n2. Custom tqdm parameters:")
    result2 = custom_processing(range(15))
    print(f"Result: {result2}")

    print("\n3. Fast progress preset:")
    result3 = fast_processing(range(20))
    print(f"Processed {result3} items")

    print("\n4. Filtered processing:")
    # NOTE(review): the inline comment below says min_length=10 but the
    # decorator above uses min_length=5 — the expectation looks stale.
    result4 = filtered_processing(range(8))  # Should not show progress bar (min_length=10)
    result5 = filtered_processing(range(25))  # Should show progress bar
    print(f"Results: {result4}, {result5}")

    print("\nAll tests completed!")
3537
+
3538
+
3539
+ # # Utility function for manual progress bar creation
3540
+ # def wrap_with_progress(iterable, desc=None, **tqdm_kwargs):
3541
+ # """
3542
+ # Manually wrap an iterable with progress bar.
3543
+
3544
+ # Usage:
3545
+ # data = range(100)
3546
+ # for item in wrap_with_progress(data, desc="Manual"):
3547
+ # process(item)
3548
+ # """
3549
+ # from tqdm.auto import tqdm
3550
+ # return tqdm(iterable, desc=desc, **tqdm_kwargs)
3551
+
3552
+ # if __name__ == "__main__":
3553
+ # Demonstrate verbose mode showing usage examples
3554
+ print("\n=== Timer Usage Example ===")
3555
+ print(Timer.usage_example())
3556
+
3557
+ print("\n=== Retry Usage Example ===")
3558
+ print(Retry.usage_example())
3559
+
3560
+ print("\n=== Memoize Usage Example ===")
3561
+ print(Memoize.usage_example())
3562
+
3563
+ print("\n=== CircuitBreaker Usage Example ===")
3564
+ print(CircuitBreaker.usage_example())