dwipe-2.0.1-py3-none-any.whl → dwipe-3.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,644 @@
+ #!/usr/bin/env python3
+ """
+ Structured Logger with JSON Lines format and weighted-age trimming.
+ ERR entries age 10x slower than other entries, so they persist longer.
+ """
+ import os
+ import sys
+ import json
+ import inspect
+ from pathlib import Path
+ from datetime import datetime
+ from dataclasses import dataclass, asdict, field
+ from typing import Optional, List, Dict, Any
+
+ # ============================================================================
+ # Data Classes for Structured Logging
+ # ============================================================================
+
+ @dataclass
+ class LogEntry:
+     """Structured log entry for JSON Lines format."""
+     timestamp: str
+     level: str  # 'ERR', 'OK', 'MSG', 'DBG', etc.
+     file: str
+     line: int
+     function: str
+     module: str = ""
+     message: str = ""
+     data: Dict[str, Any] = field(default_factory=dict)
+     session_id: str = ""
+     _raw: str = ""  # Original raw message
+
+     def to_dict(self) -> Dict[str, Any]:
+         """Convert to a dictionary for JSON serialization."""
+         result = asdict(self)
+         # Remove private fields
+         result.pop('_raw', None)
+         return result
+
+     @staticmethod
+     def from_dict(data: Dict[str, Any]) -> 'LogEntry':
+         """Safely create a LogEntry from a dict, filtering unknown fields."""
+         # Only extract known fields to avoid TypeError from extra fields
+         known_fields = {
+             'timestamp', 'level', 'file', 'line', 'function',
+             'module', 'message', 'data', 'session_id'
+         }
+         filtered = {k: v for k, v in data.items() if k in known_fields}
+         return LogEntry(**filtered)
+
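+     # Illustrative round trip (editor sketch, not part of the package source):
+     # extra keys from older or newer log formats are dropped instead of
+     # raising TypeError:
+     #   entry = LogEntry.from_dict({'timestamp': '2025-01-01T00:00:00',
+     #                               'level': 'OK', 'file': 'a.py', 'line': 1,
+     #                               'function': 'f', 'unknown_key': 123})
+     #   entry.to_dict()  # contains neither 'unknown_key' nor '_raw'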
+     @property
+     def location(self) -> str:
+         """Short location string for display."""
+         return f"{self.file}:{self.line}"
+
+     @property
+     def display_summary(self) -> str:
+         """
+         Extract a display-friendly summary from the message.
+
+         If the message contains JSON with 'filebase' or 'filepath', use that.
+         Otherwise return the truncated message or the location.
+         """
+         if not self.message:
+             return f"{self.file}:{self.line} {self.function}()"
+
+         # Try to extract filebase from JSON in the message
+         if '{' in self.message:
+             try:
+                 json_start = self.message.index('{')
+                 json_str = self.message[json_start:]
+                 data = json.loads(json_str)
+                 filebase = data.get('filebase', data.get('filepath', None))
+                 if filebase:
+                     return filebase
+             except (json.JSONDecodeError, ValueError, KeyError):
+                 pass
+
+         # Fall back to the truncated message
+         return self.message[:70]
+
+     @staticmethod
+     def format_time_delta(seconds: float, signed: bool = False) -> str:
+         """
+         Format a time delta in compact form (e.g., '18h39m', '5d3h').
+
+         Args:
+             seconds: Time difference in seconds
+             signed: If True, include a '-' prefix for negative values
+
+         Returns:
+             Compact time string (e.g., '2h30m', '5d', '45s')
+         """
+         ago = int(max(0, abs(seconds)))
+         divs = (60, 60, 24, 7, 52, 9999999)
+         units = ('s', 'm', 'h', 'd', 'w', 'y')
+         vals = (ago % 60, int(ago / 60))  # seed with secs, mins
+         uidx = 1  # index of the largest unit found so far
+
+         # Promote to larger units until the leading value fits its divisor
+         for div in divs[1:]:
+             if vals[1] < div:
+                 break
+             vals = (vals[1] % div, int(vals[1] / div))
+             uidx += 1
+
+         rv = '-' if signed and seconds < 0 else ''
+         rv += f'{vals[1]}{units[uidx]}' if vals[1] else ''
+         rv += f'{vals[0]:d}{units[uidx-1]}'
+         return rv
+
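+     # Worked examples (editor sketch, not part of the package source):
+     #   format_time_delta(45)      -> '45s'
+     #   format_time_delta(90)      -> '1m30s'
+     #   format_time_delta(67140)   -> '18h39m'
+     #   format_time_delta(442800)  -> '5d3h'
+     #   format_time_delta(-90, signed=True) -> '-1m30s'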
+     def format_ago(self) -> str:
+         """
+         Format this entry's timestamp as a relative time (e.g., '5m', '2h39m').
+
+         Returns:
+             Compact relative time string, or '???' if the timestamp is invalid
+         """
+         try:
+             ts = datetime.fromisoformat(self.timestamp)
+             now = datetime.now()
+             delta = now - ts
+             return LogEntry.format_time_delta(delta.total_seconds())
+         except (ValueError, AttributeError):
+             return "???"
+
+ # ============================================================================
+ # Main Logger Class
+ # ============================================================================
+
+ class StructuredLogger:
+     """
+     Structured logger using JSON Lines format with a single log file
+     and weighted-age trimming (ERR entries age 10x slower).
+     """
+
+     # Size limits (adjust as needed)
+     MAX_LOG_SIZE = 10 * 1024 * 1024  # 10 MB
+     TRIM_TO_RATIO = 0.67             # Trim to 67% when the max is exceeded
+     ERR_AGE_WEIGHT = 10              # ERR entries age 10x slower
+
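+     # Illustrative arithmetic (editor sketch, not part of the package source):
+     # with these defaults, trimming kicks in at 10 MB and keeps roughly
+     # 6.7 MB of entries. An ERR logged 10 hours ago competes for space with
+     # an effective age of 1 hour, so it outlives ordinary entries logged
+     # around the same time.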
+     def __init__(self, app_name: str = 'rmbloat',
+                  log_dir: Optional[Path] = None,
+                  session_id: str = ""):
+         """
+         Initialize the structured logger.
+
+         Args:
+             app_name: Application name for the log directory
+             log_dir: Optional override for the log directory
+             session_id: Optional session identifier for log correlation
+         """
+         self.app_name = app_name
+         self.session_id = session_id or datetime.now().strftime("%Y%m%d_%H%M%S")
+         self._setup_paths(log_dir)
+
+         # Statistics
+         self.stats = {
+             'entries_written': 0,
+             'last_trim': datetime.now()
+         }
+
+     def _fix_ownership(self, path: Path) -> None:
+         """Fix file/directory ownership to the real user when running with sudo."""
+         real_user = os.environ.get('SUDO_USER')
+         if real_user:
+             try:
+                 import pwd
+                 pw_record = pwd.getpwnam(real_user)
+                 uid, gid = pw_record.pw_uid, pw_record.pw_gid
+                 os.chown(path, uid, gid)
+             except (OSError, KeyError):
+                 pass  # Ignore permission errors and missing users
+
+     def _setup_paths(self, log_dir: Optional[Path]) -> None:
+         """Set up the log directory and file paths."""
+         try:
+             if log_dir:
+                 # Use the provided directory exactly as specified
+                 self.log_dir = Path(log_dir)
+             else:
+                 # Construct the default path from app_name
+                 base_dir = Path.home() / '.config'
+                 self.log_dir = base_dir / self.app_name
+
+             self.log_dir.mkdir(parents=True, exist_ok=True)
+             self._fix_ownership(self.log_dir)
+
+             # Single log file (JSON Lines format)
+             self.log_file = self.log_dir / "events.jsonl"
+
+         except Exception as e:
+             print(f"FATAL: Cannot set up log directory: {e}", file=sys.stderr)
+             # Fall back to the current directory
+             self.log_dir = Path.cwd()
+             self.log_file = self.log_dir / "events.jsonl"
+
+     def _get_caller_info(self, depth: int = 3) -> tuple:
+         """Get caller information from the stack frame."""
+         try:
+             # Walk back `depth` frames: _get_caller_info -> _create_log_entry
+             # -> public method (info/error/...) -> the actual caller
+             frame = inspect.currentframe()
+             for _ in range(depth):
+                 if frame:
+                     frame = frame.f_back
+
+             if frame:
+                 filename = Path(frame.f_code.co_filename)
+                 return (
+                     filename.name,
+                     frame.f_lineno,
+                     frame.f_code.co_name,
+                     filename.parent.name
+                 )
+         except Exception:
+             pass
+         return ("unknown", 0, "unknown", "")
+
+     def _create_log_entry(self, level: str, *args,
+                           data: Optional[Dict] = None,
+                           **kwargs) -> LogEntry:
+         """Create a structured log entry."""
+         file, line, function, module = self._get_caller_info()
+         timestamp = datetime.now().isoformat()
+         message = " ".join(str(arg) for arg in args)
+
+         return LogEntry(
+             timestamp=timestamp,
+             level=level,
+             file=file,
+             line=line,
+             function=function,
+             module=module,
+             message=message,
+             data=data or {},
+             session_id=self.session_id,
+             _raw=message
+         )
+
+     def _append_log(self, entry: LogEntry) -> None:
+         """
+         Append an entry to the log file, trimming first if necessary.
+
+         Args:
+             entry: Log entry to append
+         """
+         # Check whether we need to trim
+         if self.log_file.exists() and self.log_file.stat().st_size >= self.MAX_LOG_SIZE:
+             self._trim_log_file()
+
+         # Write the entry
+         try:
+             with open(self.log_file, 'a', encoding='utf-8') as f:
+                 json_line = json.dumps(entry.to_dict())
+                 f.write(json_line + '\n')
+
+             self.stats['entries_written'] += 1
+
+             # Fix ownership after writing
+             self._fix_ownership(self.log_file)
+
+         except Exception as e:
+             print(f"LOG WRITE ERROR: {e}", file=sys.stderr)
+
+     def _trim_log_file(self) -> None:
+         """
+         Trim the log file by dropping the oldest entries (by weighted age)
+         until its size falls below MAX_LOG_SIZE * TRIM_TO_RATIO.
+         ERR entries use age / ERR_AGE_WEIGHT, so they are kept longer.
+         """
+         if not self.log_file.exists():
+             return
+
+         try:
+             # Read all entries
+             entries = []  # (timestamp, level, line, line_size)
+             with open(self.log_file, 'r', encoding='utf-8') as f:
+                 for line in f:
+                     if line.strip():
+                         try:
+                             data = json.loads(line)
+                             timestamp = datetime.fromisoformat(data['timestamp'])
+                             level = data.get('level', 'OK')
+                             line_size = len(line.encode('utf-8'))
+                             entries.append((timestamp, level, line, line_size))
+                         except (json.JSONDecodeError, KeyError, ValueError):
+                             pass  # Skip malformed entries
+
+             # Calculate the effective age of each entry
+             now = datetime.now()
+             weighted = []  # (effective_age_seconds, line, line_size)
+             for timestamp, level, line, line_size in entries:
+                 age_seconds = (now - timestamp).total_seconds()
+                 # ERRs age slower (kept longer)
+                 effective_age = age_seconds / self.ERR_AGE_WEIGHT if level == 'ERR' else age_seconds
+                 weighted.append((effective_age, line, line_size))
+
+             # Sort by effective age (newest first = lowest age)
+             weighted.sort(key=lambda x: x[0])
+
+             # Target: keep the newest entries until total size <= MAX_LOG_SIZE * TRIM_TO_RATIO
+             target_size = int(self.MAX_LOG_SIZE * self.TRIM_TO_RATIO)
+             kept = []
+             total_size = 0
+
+             # Keep the newest entries (lowest effective age) until we hit the target
+             for effective_age, line, line_size in weighted:
+                 if total_size + line_size <= target_size:
+                     kept.append(line)
+                     total_size += line_size
+                 # else: discard (too old by weighted age)
+
+             # Write back, reversed so the oldest surviving entries come first
+             # (ERR weighting can leave them slightly out of strict timestamp order)
+             kept.reverse()
+             with open(self.log_file, 'w', encoding='utf-8') as f:
+                 f.writelines(kept)
+
+             self.stats['last_trim'] = datetime.now()
+
+             # Fix ownership after trimming
+             self._fix_ownership(self.log_file)
+
+         except Exception as e:
+             print(f"TRIM ERROR: {e}", file=sys.stderr)
+
+     # ========================================================================
+     # Public API
+     # ========================================================================
+
+     def event(self, *args, data: Optional[Dict] = None, **kwargs) -> None:
+         """Log an event (successful operation)."""
+         entry = self._create_log_entry("OK", *args, data=data, **kwargs)
+         self._append_log(entry)
+
+     def error(self, *args, data: Optional[Dict] = None, **kwargs) -> None:
+         """Log an error."""
+         entry = self._create_log_entry("ERR", *args, data=data, **kwargs)
+         self._append_log(entry)
+
+         # Also print to stderr for immediate visibility
+         print(f"ERROR: {args[0] if args else ''}", file=sys.stderr)
+         if data:
+             print(f"  Data: {json.dumps(data, indent=2)[:200]}...", file=sys.stderr)
+
+     def info(self, *args, data: Optional[Dict] = None, **kwargs) -> None:
+         """Log an informational message."""
+         entry = self._create_log_entry("MSG", *args, data=data, **kwargs)
+         self._append_log(entry)
+
+     def debug(self, *args, data: Optional[Dict] = None, **kwargs) -> None:
+         """Log a debug message."""
+         entry = self._create_log_entry("DBG", *args, data=data, **kwargs)
+         self._append_log(entry)
+
+     # ========================================================================
+     # Backward-Compatibility Aliases (for the RotatingLogger API)
+     # ========================================================================
+
+     def lg(self, *args, **kwargs) -> None:
+         """
+         Alias for info() - backward compatibility with RotatingLogger.
+
+         Logs an ordinary message with a 'MSG' tag.
+         Supports both simple messages and lists of strings.
+         """
+         # Handle a list of strings like RotatingLogger did
+         if args and isinstance(args[0], list):
+             list_message = '\n'.join(str(item) for item in args[0])
+             args = (list_message,) + args[1:]
+
+         self.info(*args, **kwargs)
+
+     def err(self, *args, **kwargs) -> None:
+         """
+         Alias for error() - backward compatibility with RotatingLogger.
+
+         Logs an error message with an 'ERR' tag.
+         Supports both simple messages and lists of strings.
+         """
+         # Handle a list of strings like RotatingLogger did
+         if args and isinstance(args[0], list):
+             list_message = '\n'.join(str(item) for item in args[0])
+             args = (list_message,) + args[1:]
+
+         self.error(*args, **kwargs)
+
+     def put(self, message_type: str, *args, **kwargs) -> None:
+         """
+         Custom-level logging - backward compatibility with RotatingLogger.
+
+         Logs a message with an arbitrary MESSAGE_TYPE tag.
+         Supports both simple messages and lists of strings.
+         """
+         # Handle a list of strings like RotatingLogger did
+         if args and isinstance(args[0], list):
+             list_message = '\n'.join(str(item) for item in args[0])
+             args = (list_message,) + args[1:]
+
+         # Create an entry with the custom level
+         # Extract data from kwargs to avoid passing it twice
+         data = kwargs.pop('data', None)
+         entry = self._create_log_entry(str(message_type).upper(), *args, data=data)
+         self._append_log(entry)
+
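+     # Illustrative call, given a StructuredLogger instance `log` (editor
+     # sketch, not part of the package source):
+     #   log.put('WARN', 'disk nearly full', data={'free_mb': 512})
+     # writes a JSON line with level 'WARN' alongside OK/ERR/MSG/DBG entries.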
+     # ========================================================================
+     # Filtering and Search Methods
+     # ========================================================================
+
+     @staticmethod
+     def filter_entries(entries: List[LogEntry], pattern: str,
+                        deep: bool = False) -> tuple[List[LogEntry], set]:
+         """
+         Filter log entries by pattern (case-insensitive).
+
+         Args:
+             entries: List of LogEntry objects to filter
+             pattern: Search pattern (case-insensitive)
+             deep: If True, also search within JSON data in messages
+
+         Returns:
+             (filtered_entries, deep_match_timestamps): Tuple of the filtered
+             list and the set of timestamps that matched only in the deep
+             (JSON) search
+         """
+         if not pattern:
+             return entries, set()
+
+         pattern_lower = pattern.lower()
+         filtered = []
+         deep_matches = set()
+
+         for entry in entries:
+             # Build the visible text (what shows in the collapsed view)
+             timestamp_short = entry.timestamp[:19]
+             level = entry.level
+             summary = entry.display_summary
+
+             # Shallow search: check the visible text
+             visible_text = f"{timestamp_short} {level} {summary}".lower()
+             shallow_match = pattern_lower in visible_text
+
+             # Deep search: check JSON content if requested
+             deep_match = False
+             if deep:
+                 try:
+                     # Search in the structured data field
+                     if entry.data:
+                         json_str = json.dumps(entry.data)
+                         deep_match = pattern_lower in json_str.lower()
+                     # Also search the message if it contains JSON
+                     if not deep_match and '{' in entry.message:
+                         json_start = entry.message.index('{')
+                         json_str = entry.message[json_start:]
+                         deep_match = pattern_lower in json_str.lower()
+                 except (ValueError, IndexError, TypeError):
+                     pass
+
+             # Include the entry if either search matched
+             if shallow_match or deep_match:
+                 filtered.append(entry)
+                 # Mark entries that matched only in the deep search
+                 if deep_match and not shallow_match:
+                     deep_matches.add(entry.timestamp)
+
+         return filtered, deep_matches
+
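+     # Illustrative call (editor sketch, not part of the package source):
+     #   hits, deep_only = StructuredLogger.filter_entries(entries, 'video_2',
+     #                                                     deep=True)
+     # `deep_only` holds the timestamps whose match was found only inside JSON
+     # data, so a UI can flag them differently from visible-text matches.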
+     # ========================================================================
+     # Query Methods - Window-based for efficient incremental reads
+     # ========================================================================
+
+     def get_window_of_entries(self, window_size: int = 1000):
+         """
+         Get a window of the newest log entries, in chronological order.
+         Returns: (entries_dict, window_state)
+             entries_dict: OrderedDict keyed by timestamp
+             window_state: dict with 'file_size' and 'last_position'
+         """
+         from collections import OrderedDict
+
+         entries = OrderedDict()
+         if not self.log_file.exists():
+             return entries, {'file_size': 0, 'last_position': 0}
+
+         file_size = self.log_file.stat().st_size
+
+         try:
+             with open(self.log_file, 'r', encoding='utf-8') as f:
+                 for line in f:
+                     if line.strip():
+                         try:
+                             entry_dict = json.loads(line)
+                             entry = LogEntry.from_dict(entry_dict)
+                             entries[entry.timestamp] = entry
+                         except (json.JSONDecodeError, TypeError, KeyError):
+                             pass
+         except Exception:
+             pass
+
+         # Keep only the newest window_size entries
+         if len(entries) > window_size:
+             # OrderedDict preserves insertion order (chronological),
+             # so the last window_size items are the newest
+             items = list(entries.items())
+             entries = OrderedDict(items[-window_size:])
+
+         window_state = {
+             'file_size': file_size,
+             'last_position': file_size  # The next read starts here
+         }
+
+         return entries, window_state
+
+     def refresh_window(self, window, window_state, window_size: int = 1000):
+         """
+         Refresh the window with new entries from the log file.
+         Returns: updated (entries_dict, window_state)
+         """
+         from collections import OrderedDict
+
+         if not self.log_file.exists():
+             return window, window_state
+
+         current_file_size = self.log_file.stat().st_size
+         last_file_size = window_state.get('file_size', 0)
+         last_position = window_state.get('last_position', 0)
+
+         # The file was trimmed (size dropped): reset and re-read
+         if current_file_size < last_file_size:
+             return self.get_window_of_entries(window_size)
+
+         # No new data
+         if current_file_size == last_position:
+             return window, window_state
+
+         # Read new entries from the last position
+         try:
+             with open(self.log_file, 'r', encoding='utf-8') as f:
+                 f.seek(last_position)
+                 # readline() (rather than iterating over f) keeps f.tell() usable
+                 while True:
+                     line = f.readline()
+                     if not line:
+                         break
+                     if line.strip():
+                         try:
+                             entry_dict = json.loads(line)
+                             entry = LogEntry.from_dict(entry_dict)
+                             window[entry.timestamp] = entry
+                         except (json.JSONDecodeError, TypeError, KeyError):
+                             pass
+
+                 new_position = f.tell()
+         except Exception:
+             new_position = last_position
+
+         # Trim the window if it grew too large (keep the newest)
+         if len(window) > window_size:
+             items = list(window.items())
+             window = OrderedDict(items[-window_size:])
+
+         window_state = {
+             'file_size': current_file_size,
+             'last_position': new_position
+         }
+
+         return window, window_state
+
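+     # Illustrative polling loop (editor sketch, not part of the package
+     # source; `tailing` and the render step are hypothetical):
+     #   window, state = logger.get_window_of_entries(window_size=500)
+     #   while tailing:
+     #       window, state = logger.refresh_window(window, state,
+     #                                             window_size=500)
+     #       ...render window...
+     # Only bytes written after state['last_position'] are read on each
+     # refresh, so polling stays cheap even for large log files.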
+     # ========================================================================
+     # Properties
+     # ========================================================================
+
+     @property
+     def log_paths(self) -> List[str]:
+         """Return a list of log file paths (for backward compatibility with the -L option)."""
+         return [str(self.log_file)]
+
+
+ # ============================================================================
+ # Aliases for Backward Compatibility
+ # ============================================================================
+
+ # Alias for standard use (matches the RotatingLogger pattern: Log = RotatingLogger)
+ Log = StructuredLogger
+
+ # ============================================================================
+ # Example Usage
+ # ============================================================================
+
+ def example_usage():
+     """Example of how to use the structured logger."""
+
+     # Create a logger
+     logger = StructuredLogger(
+         app_name="VideoProcessor",
+         session_id="session_12345"
+     )
+
+     print(f"Logs will be written to: {logger.log_dir}")
+     print(f"Log file: {logger.log_file}")
+
+     # Log some events
+     logger.info("Starting video processing batch")
+
+     # Simulate processing
+     for i in range(5):
+         if i == 2:
+             # Log an error with structured data
+             logger.error(
+                 "Failed to encode video",
+                 data={
+                     "filepath": f"/videos/video_{i}.mp4",
+                     "error_code": 183,
+                     "ffmpeg_output": ["Error opening input", "Invalid data"],
+                     "attempts": 3
+                 }
+             )
+         else:
+             # Log a successful event
+             logger.event(
+                 f"Successfully encoded video_{i}",
+                 data={
+                     "filepath": f"/videos/video_{i}.mp4",
+                     "original_size": 1000000,
+                     "encoded_size": 500000,
+                     "reduction": "50%",
+                     "duration_seconds": 120.5
+                 }
+             )
+
+     logger.info("Batch processing complete")
+
+     # Get recent entries programmatically using a window
+     print("\n" + "="*60)
+     print("Recent Log Entries (window-based access):")
+     window, _ = logger.get_window_of_entries(window_size=10)
+     for entry in window.values():
+         print(f"{entry.timestamp} [{entry.level}] {entry.location}: {entry.message}")
+         if entry.data:
+             print(f"  Data keys: {list(entry.data.keys())}")
+
+ if __name__ == "__main__":
+     example_usage()
dwipe/Tunables.py ADDED
@@ -0,0 +1,62 @@
+ #!/usr/bin/env python3
+ """Ini file handling for dwipe."""
+ import configparser
+ import os
+ from .Utils import Utils
+
+ class Tunables:
+     """Manages dwipe configuration and provides hardware-tuning defaults."""
+     singleton = None
+
+     def __init__(self):
+         assert not Tunables.singleton
+
+         config_dir = Utils.get_config_dir()
+         self.path = config_dir / 'tunables.ini'
+         self.config = configparser.ConfigParser()
+
+         # Load the existing file or create the default
+         if not os.path.exists(self.path):
+             self._create_default()
+         else:
+             self.config.read(self.path)
+
+     @staticmethod
+     def get_singleton():
+         """Get the singleton Tunables object."""
+         if not Tunables.singleton:
+             Tunables.singleton = Tunables()
+         return Tunables.singleton
+
+     def _create_default(self):
+         """Generate a default .ini file with recommended timings for 'grumpy' hardware."""
+         self.config['SATA_TIMING'] = {
+             'post_password_delay': '1.0',
+             'post_unlock_delay': '1.5',
+             'reappearance_timeout': '120',
+             'poll_interval': '5.0'
+         }
+
+         with open(self.path, 'w', encoding='utf-8') as f:
+             f.write("# dwipe Configuration - Tune these for 'grumpy' hardware\n")
+             self.config.write(f)
+
+         # Fix ownership when running with sudo
+         Utils.fix_file_ownership(self.path)
+
+     @property
+     def post_password_delay(self):
+         return self.config.getfloat('SATA_TIMING', 'post_password_delay', fallback=1.0)
+
+     @property
+     def post_unlock_delay(self):
+         return self.config.getfloat('SATA_TIMING', 'post_unlock_delay', fallback=1.5)
+
+     @property
+     def reappearance_timeout(self):
+         return self.config.getfloat('SATA_TIMING', 'reappearance_timeout', fallback=120.0)
+
+     @property
+     def poll_interval(self):
+         return self.config.getfloat('SATA_TIMING', 'poll_interval', fallback=5.0)