ptdu 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ptdu/__init__.py +38 -0
- ptdu/cache.py +374 -0
- ptdu/errors.py +627 -0
- ptdu/fonts.py +237 -0
- ptdu/main.py +118 -0
- ptdu/models.py +130 -0
- ptdu/performance.py +348 -0
- ptdu/scanner.py +490 -0
- ptdu/threads.py +250 -0
- ptdu/treeview.py +426 -0
- ptdu/ui.py +1247 -0
- ptdu/utils.py +80 -0
- ptdu-0.1.0.dist-info/METADATA +341 -0
- ptdu-0.1.0.dist-info/RECORD +17 -0
- ptdu-0.1.0.dist-info/WHEEL +4 -0
- ptdu-0.1.0.dist-info/entry_points.txt +2 -0
- ptdu-0.1.0.dist-info/licenses/LICENSE +21 -0
ptdu/__init__.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
"""PTDU - Python Tkinter Disk Usage analyzer."""
|
|
2
|
+
|
|
3
|
+
__version__ = "0.1.0"
|
|
4
|
+
|
|
5
|
+
from ptdu.cache import ScanCache
|
|
6
|
+
from ptdu.errors import ErrorHandler, PathValidator, get_error_handler
|
|
7
|
+
from ptdu.fonts import FontManager
|
|
8
|
+
from ptdu.models import DirNode
|
|
9
|
+
from ptdu.performance import (
|
|
10
|
+
LargeDirectoryHandler,
|
|
11
|
+
MemoryOptimizer,
|
|
12
|
+
VirtualScroller,
|
|
13
|
+
estimate_directory_size,
|
|
14
|
+
)
|
|
15
|
+
from ptdu.scanner import MemoryMonitor, Scanner, ScanResult, get_system_info
|
|
16
|
+
from ptdu.treeview import DirectoryTreeview
|
|
17
|
+
from ptdu.ui import MainWindow
|
|
18
|
+
from ptdu.utils import SizeCalculator
|
|
19
|
+
|
|
20
|
+
__all__ = [
|
|
21
|
+
"DirNode",
|
|
22
|
+
"DirectoryTreeview",
|
|
23
|
+
"ErrorHandler",
|
|
24
|
+
"FontManager",
|
|
25
|
+
"LargeDirectoryHandler",
|
|
26
|
+
"MainWindow",
|
|
27
|
+
"MemoryMonitor",
|
|
28
|
+
"MemoryOptimizer",
|
|
29
|
+
"PathValidator",
|
|
30
|
+
"ScanCache",
|
|
31
|
+
"ScanResult",
|
|
32
|
+
"Scanner",
|
|
33
|
+
"SizeCalculator",
|
|
34
|
+
"VirtualScroller",
|
|
35
|
+
"estimate_directory_size",
|
|
36
|
+
"get_error_handler",
|
|
37
|
+
"get_system_info",
|
|
38
|
+
]
|
ptdu/cache.py
ADDED
|
@@ -0,0 +1,374 @@
|
|
|
1
|
+
"""SQLite caching for directory scan results."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import os
|
|
6
|
+
import sqlite3
|
|
7
|
+
import threading
|
|
8
|
+
import time
|
|
9
|
+
from dataclasses import dataclass
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import TYPE_CHECKING, Optional
|
|
12
|
+
|
|
13
|
+
if TYPE_CHECKING:
|
|
14
|
+
from ptdu.models import DirNode
|
|
15
|
+
|
|
16
|
+
from ptdu.scanner import ScanResult
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass
class CacheEntry:
    """A cached directory entry (one row of the ``scan_cache`` table)."""

    path: str  # absolute, resolved path of the entry
    name: str  # base name of the entry
    size: int  # size in bytes (0 for the parent placeholder row)
    is_dir: bool  # True when the entry is a directory
    mtime: float  # filesystem mtime recorded when the entry was cached
    scan_time: float  # time.time() of the scan that produced the entry


class ScanCache:
    """SQLite-based cache for directory scan results.

    Concurrency model: one ``threading.Lock`` serializes all database
    access, while connections are kept per-thread (``threading.local``)
    because SQLite connections must not be shared across threads.

    All write paths are deliberately best-effort: the cache is an
    optimization, so ``sqlite3.Error`` is swallowed rather than raised.
    """

    # Escape character used in LIKE patterns so literal '%' / '_' in
    # paths cannot act as wildcards (see _prefix_pattern).
    _LIKE_ESCAPE = "\\"

    def __init__(self, db_path: Optional[Path] = None) -> None:
        """Initialize the scan cache.

        Args:
            db_path: Path to SQLite database (default: ~/.cache/ptdu/cache.db)
        """
        if db_path is None:
            cache_dir = Path.home() / ".cache" / "ptdu"
            cache_dir.mkdir(parents=True, exist_ok=True)
            db_path = cache_dir / "cache.db"

        self._db_path: Path = db_path
        self._lock: threading.Lock = threading.Lock()
        self._local = threading.local()
        self._init_db()

    def _get_connection(self) -> sqlite3.Connection:
        """Return the calling thread's connection, creating it on first use."""
        if getattr(self._local, "connection", None) is None:
            self._local.connection = sqlite3.connect(str(self._db_path))
            # sqlite3.Row allows column access by name in the readers below.
            self._local.connection.row_factory = sqlite3.Row
        return self._local.connection

    def _init_db(self) -> None:
        """Create the schema (table and indexes) if it does not exist yet."""
        with self._lock:
            # A throwaway connection: _init_db runs once from __init__ and
            # must not leave a handle bound to the constructing thread.
            conn = sqlite3.connect(str(self._db_path))
            try:
                conn.execute(
                    """
                    CREATE TABLE IF NOT EXISTS scan_cache (
                        path TEXT PRIMARY KEY,
                        name TEXT NOT NULL,
                        size INTEGER NOT NULL,
                        is_dir INTEGER NOT NULL,
                        mtime REAL NOT NULL,
                        scan_time REAL NOT NULL,
                        parent_path TEXT,
                        FOREIGN KEY (parent_path) REFERENCES scan_cache(path)
                    )
                    """
                )
                conn.execute(
                    """
                    CREATE INDEX IF NOT EXISTS idx_parent_path
                    ON scan_cache(parent_path)
                    """
                )
                conn.execute(
                    """
                    CREATE INDEX IF NOT EXISTS idx_scan_time
                    ON scan_cache(scan_time)
                    """
                )
                conn.commit()
            finally:
                conn.close()

    @classmethod
    def _prefix_pattern(cls, path_str: str) -> str:
        """Return a LIKE pattern matching strict descendants of *path_str*.

        Fixes two defects of the naive ``f"{path}%"`` pattern:
        the separator is appended before the wildcard so '/foo' does not
        match '/foobar', and LIKE metacharacters ('%', '_') plus the escape
        character itself are escaped so they match literally. Must be used
        with ``ESCAPE '\\'`` in the query.
        """
        esc = cls._LIKE_ESCAPE
        prefix = (
            (path_str + os.sep)
            .replace(esc, esc + esc)  # escape the escape char first
            .replace("%", esc + "%")
            .replace("_", esc + "_")
        )
        return prefix + "%"

    @staticmethod
    def _row_to_entry(row: sqlite3.Row) -> CacheEntry:
        """Convert one ``scan_cache`` row into a CacheEntry."""
        return CacheEntry(
            path=row["path"],
            name=row["name"],
            size=row["size"],
            is_dir=bool(row["is_dir"]),
            mtime=row["mtime"],
            scan_time=row["scan_time"],
        )

    @staticmethod
    def _upsert(
        conn: sqlite3.Connection,
        path_str: str,
        name: str,
        size: int,
        is_dir: bool,
        mtime: float,
        scan_time: float,
        parent_path: Optional[str],
    ) -> None:
        """INSERT OR REPLACE one row; the caller is responsible for commit."""
        conn.execute(
            """
            INSERT OR REPLACE INTO scan_cache
            (path, name, size, is_dir, mtime, scan_time, parent_path)
            VALUES (?, ?, ?, ?, ?, ?, ?)
            """,
            (path_str, name, size, 1 if is_dir else 0, mtime, scan_time, parent_path),
        )

    def _get_mtime(self, path: Path) -> float:
        """Get modification time of a path.

        Args:
            path: Path to check

        Returns:
            Modification time as float, or 0 if unavailable
        """
        try:
            return os.path.getmtime(path)
        except OSError:  # PermissionError is an OSError subclass
            return 0.0

    def is_valid(self, path: Path) -> bool:
        """Check if cached data is still valid (mtime unchanged).

        Args:
            path: Directory path to check

        Returns:
            True if cache is valid, False otherwise
        """
        path_str = str(path.resolve())
        current_mtime = self._get_mtime(path)

        with self._lock:
            conn = self._get_connection()
            cursor = conn.execute(
                "SELECT mtime FROM scan_cache WHERE path = ?",
                (path_str,),
            )
            row = cursor.fetchone()

            if row is None:
                return False

            return row["mtime"] == current_mtime

    def get(self, path: Path) -> Optional[list[CacheEntry]]:
        """Get cached entries for a directory.

        Args:
            path: Directory path

        Returns:
            List of cached entries or None if not cached/invalid
        """
        path_str = str(path.resolve())

        if not self.is_valid(path):
            return None

        with self._lock:
            conn = self._get_connection()
            cursor = conn.execute(
                "SELECT path, name, size, is_dir, mtime, scan_time "
                "FROM scan_cache WHERE parent_path = ?",
                (path_str,),
            )
            rows = cursor.fetchall()

            if not rows:
                return None

            return [self._row_to_entry(row) for row in rows]

    def get_recursive(self, path: Path) -> Optional[list[CacheEntry]]:
        """Get all cached entries recursively for a directory.

        The result includes the directory's own row plus every descendant.

        Args:
            path: Directory path

        Returns:
            List of cached entries or None if root not cached/invalid
        """
        path_str = str(path.resolve())

        if not self.is_valid(path):
            return None

        with self._lock:
            conn = self._get_connection()
            # Exact root row OR separator-bounded descendants; the escaped
            # pattern prevents '/foo' from also matching '/foobar'.
            cursor = conn.execute(
                "SELECT path, name, size, is_dir, mtime, scan_time "
                "FROM scan_cache WHERE path = ? OR path LIKE ? ESCAPE '\\'",
                (path_str, self._prefix_pattern(path_str)),
            )
            rows = cursor.fetchall()

            if not rows:
                return None

            return [self._row_to_entry(row) for row in rows]

    def store(
        self,
        parent_path: Path,
        results: list[ScanResult],
    ) -> None:
        """Store scan results in cache.

        Args:
            parent_path: Parent directory path
            results: Scan results to cache
        """
        parent_str = str(parent_path.resolve())
        parent_mtime = self._get_mtime(parent_path)
        scan_time = time.time()

        with self._lock:
            conn = self._get_connection()
            try:
                # Parent placeholder row (size 0, is_dir); its parent link
                # is None at the filesystem root (path.parent == path).
                self._upsert(
                    conn,
                    parent_str,
                    parent_path.name,
                    0,
                    True,
                    parent_mtime,
                    scan_time,
                    str(parent_path.parent.resolve())
                    if parent_path.parent != parent_path
                    else None,
                )

                for result in results:
                    # Child directories inherit the parent's mtime so their
                    # validity tracks the scanned parent listing.
                    entry_mtime = (
                        parent_mtime if result.is_dir else self._get_mtime(result.path)
                    )
                    self._upsert(
                        conn,
                        str(result.path.resolve()),
                        result.name,
                        result.size,
                        result.is_dir,
                        entry_mtime,
                        scan_time,
                        parent_str,
                    )

                conn.commit()
            except sqlite3.Error:
                # Best-effort: if cache storage fails, continue without caching.
                pass

    def store_entry(self, parent_path: Path, result: ScanResult) -> None:
        """Store a single scan result in cache.

        Args:
            parent_path: Parent directory path
            result: Scan result to cache
        """
        parent_str = str(parent_path.resolve())
        parent_mtime = self._get_mtime(parent_path)
        entry_mtime = parent_mtime if result.is_dir else self._get_mtime(result.path)

        with self._lock:
            conn = self._get_connection()
            try:
                self._upsert(
                    conn,
                    str(result.path.resolve()),
                    result.name,
                    result.size,
                    result.is_dir,
                    entry_mtime,
                    time.time(),
                    parent_str,
                )
                conn.commit()
            except sqlite3.Error:
                # Best-effort: caching failures are non-fatal.
                pass

    def invalidate(self, path: Path) -> None:
        """Invalidate cache for a path and its children.

        Args:
            path: Path to invalidate
        """
        path_str = str(path.resolve())

        with self._lock:
            conn = self._get_connection()
            try:
                # Delete the path itself plus separator-bounded descendants
                # only — siblings sharing the prefix (e.g. '/foobar' when
                # invalidating '/foo') are left intact.
                conn.execute(
                    "DELETE FROM scan_cache WHERE path = ? OR path LIKE ? ESCAPE '\\'",
                    (path_str, self._prefix_pattern(path_str)),
                )
                conn.commit()
            except sqlite3.Error:
                pass

    def clear(self) -> None:
        """Clear all cached data."""
        with self._lock:
            conn = self._get_connection()
            try:
                conn.execute("DELETE FROM scan_cache")
                conn.commit()
            except sqlite3.Error:
                pass

    def get_stats(self) -> dict[str, int | float]:
        """Get cache statistics.

        Returns:
            Dictionary with keys ``entry_count`` and ``last_scan``
            (``last_scan`` is 0.0 when the cache is empty).
        """
        with self._lock:
            conn = self._get_connection()
            try:
                cursor = conn.execute("SELECT COUNT(*) as count FROM scan_cache")
                count = cursor.fetchone()["count"]

                cursor = conn.execute(
                    "SELECT MAX(scan_time) as last_scan FROM scan_cache"
                )
                row = cursor.fetchone()
                # MAX() over an empty table yields SQL NULL (Python None);
                # normalize to 0.0 so callers always receive a float.
                last_scan = (
                    row["last_scan"]
                    if row is not None and row["last_scan"] is not None
                    else 0.0
                )

                return {
                    "entry_count": count,
                    "last_scan": last_scan,
                }
            except sqlite3.Error:
                return {
                    "entry_count": 0,
                    "last_scan": 0.0,
                }

    def close(self) -> None:
        """Close the calling thread's database connection.

        NOTE(review): only the current thread's thread-local connection is
        closed; connections opened by other threads are released when those
        threads (and their thread-local storage) are garbage collected.
        """
        with self._lock:
            if getattr(self._local, "connection", None) is not None:
                self._local.connection.close()
                self._local.connection = None
|