lenslet-0.2.1-py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lenslet/__init__.py +7 -0
- lenslet/api.py +157 -0
- lenslet/cli.py +121 -0
- lenslet/frontend/assets/index-B-0lZ7yu.js +44 -0
- lenslet/frontend/assets/index-c56aKxHZ.css +1 -0
- lenslet/frontend/favicon.ico +0 -0
- lenslet/frontend/index.html +14 -0
- lenslet/metadata.py +151 -0
- lenslet/server.py +520 -0
- lenslet/storage/__init__.py +6 -0
- lenslet/storage/base.py +35 -0
- lenslet/storage/dataset.py +591 -0
- lenslet/storage/local.py +69 -0
- lenslet/storage/memory.py +472 -0
- lenslet/storage/parquet.py +483 -0
- lenslet/workspace.py +60 -0
- lenslet-0.2.1.dist-info/METADATA +134 -0
- lenslet-0.2.1.dist-info/RECORD +20 -0
- lenslet-0.2.1.dist-info/WHEEL +4 -0
- lenslet-0.2.1.dist-info/entry_points.txt +2 -0
lenslet/storage/memory.py
@@ -0,0 +1,472 @@
from __future__ import annotations
import os
import struct
import sys
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from dataclasses import dataclass, field
from datetime import datetime, timezone
from io import BytesIO
from PIL import Image
from .local import LocalStorage


@dataclass
class CachedItem:
    """In-memory cached metadata for an image."""
    path: str
    name: str
    mime: str
    width: int  # 0 = not yet loaded
    height: int  # 0 = not yet loaded
    size: int
    mtime: float


@dataclass
class CachedIndex:
    """In-memory cached folder index."""
    path: str
    generated_at: str
    items: list[CachedItem] = field(default_factory=list)
    dirs: list[str] = field(default_factory=list)


class MemoryStorage:
    """
    In-memory storage that wraps LocalStorage for reading,
    but keeps all indexes, thumbnails, and metadata in RAM.
    Does NOT write anything to the source directory.
    """

    IMAGE_EXTS = (".jpg", ".jpeg", ".png", ".webp")
    LOCAL_INDEX_WORKERS = 16

    def __init__(self, root: str, thumb_size: int = 256, thumb_quality: int = 70):
        self.local = LocalStorage(root)
        self.root = root
        self.thumb_size = thumb_size
        self.thumb_quality = thumb_quality

        # In-memory caches
        self._indexes: dict[str, CachedIndex] = {}
        self._thumbnails: dict[str, bytes] = {}  # path -> thumbnail bytes
        self._metadata: dict[str, dict] = {}  # path -> sidecar-like metadata
        self._dimensions: dict[str, tuple[int, int]] = {}  # path -> (w, h)

    def _normalize_path(self, path: str) -> str:
        """Normalize path for consistent cache keys."""
        return path.strip("/") if path else ""

    def _abs_path(self, path: str) -> str:
        """Fast, safe absolute path resolution via LocalStorage."""
        return self.local.resolve_path(path)

    def _is_supported_image(self, name: str) -> bool:
        return name.lower().endswith(self.IMAGE_EXTS)

    def list_dir(self, path: str) -> tuple[list[str], list[str]]:
        """List directory, filtering out metadata files."""
        files, dirs = self.local.list_dir(path)
        # Filter out any existing metadata files from the listing
        files = [
            f for f in files
            if not f.endswith(".json")
            and not f.endswith(".thumbnail")
            and not f.startswith("_")
        ]
        dirs = [d for d in dirs if not d.startswith("_")]
        return files, dirs

    def read_bytes(self, path: str) -> bytes:
        """Read file from disk (images only)."""
        return self.local.read_bytes(path)

    def write_bytes(self, path: str, data: bytes) -> None:
        """No-op - we don't write to source directory."""
        pass

    def exists(self, path: str) -> bool:
        """Check if file exists on disk."""
        return self.local.exists(path)

    def size(self, path: str) -> int:
        """Get file size."""
        return self.local.size(path)

    def join(self, *parts: str) -> str:
        return self.local.join(*parts)

    def etag(self, path: str) -> str | None:
        return self.local.etag(path)

    # --- In-memory index/thumbnail operations ---

    def get_index(self, path: str) -> CachedIndex | None:
        """Get cached index for a folder, building if needed."""
        norm = self._normalize_path(path)
        if norm in self._indexes:
            return self._indexes[norm]
        return self._build_index(path)

    def validate_image_path(self, path: str) -> None:
        """Ensure path is a supported image and exists on disk."""
        if not path:
            raise ValueError("empty path")
        if not self._is_supported_image(path):
            raise ValueError("unsupported file type")
        # Resolve to catch traversal attempts even if file is missing
        self._abs_path(path)
        if not self.exists(path):
            raise FileNotFoundError(path)

    def _build_item(self, path: str, name: str, idx: int) -> tuple[int, CachedItem | None, tuple[int, int] | None]:
        """Build a CachedItem for a single image file."""
        try:
            full = self.join(path, name)
            abs_path = self._abs_path(full)
            stat = os.stat(abs_path)
            size = stat.st_size
            mtime = stat.st_mtime
            mime = self._guess_mime(name)

            w, h = self._dimensions.get(full, (0, 0))
            dims = None
            if w == 0 or h == 0:
                try:
                    dims = self._read_dimensions_fast(abs_path)
                    if dims:
                        w, h = dims
                except Exception:
                    dims = None

            item = CachedItem(
                path=full,
                name=name,
                mime=mime,
                width=w,
                height=h,
                size=size,
                mtime=mtime,
            )
            return idx, item, dims
        except Exception:
            return idx, None, None

    def _progress(self, done: int, total: int, label: str) -> None:
        if total <= 0:
            return
        bar_len = 24
        filled = int(bar_len * done / total)
        bar = "#" * filled + "-" * (bar_len - filled)
        pct = (done / total) * 100
        label_part = f" ({label})" if label else ""
        msg = f"[lenslet] Indexing{label_part}: [{bar}] {done}/{total} ({pct:5.1f}%)"
        end = "\n" if done >= total else "\r"
        print(msg, end=end, file=sys.stderr, flush=True)

    def _effective_workers(self, total: int) -> int:
        if total <= 0:
            return 0
        cpu = os.cpu_count() or 1
        return max(1, min(self.LOCAL_INDEX_WORKERS, cpu, total))

    def _build_index(self, path: str) -> CachedIndex:
        """Build and cache folder index. Fast - no image reading."""
        norm = self._normalize_path(path)
        files, dirs = self.list_dir(path)

        image_files = [f for f in files if self._is_supported_image(f)]
        items: list[CachedItem | None] = [None] * len(image_files)

        total = len(image_files)
        if total:
            self._progress(0, total, "local")

        done = 0
        workers = self._effective_workers(total)
        last_print = 0.0
        if workers:
            with ThreadPoolExecutor(max_workers=workers) as executor:
                futures = [
                    executor.submit(self._build_item, path, name, i)
                    for i, name in enumerate(image_files)
                ]
                for future in as_completed(futures):
                    idx, item, dims = future.result()
                    if item is not None:
                        items[idx] = item
                        if dims:
                            self._dimensions[item.path] = dims
                    done += 1
                    now = time.monotonic()
                    if now - last_print > 0.1 or done == total:
                        self._progress(done, total, "local")
                        last_print = now
        else:
            done = total

        index = CachedIndex(
            path=path,
            generated_at=datetime.now(timezone.utc).isoformat(),
            items=[it for it in items if it is not None],
            dirs=dirs,
        )
        self._indexes[norm] = index
        return index

    def get_dimensions(self, path: str) -> tuple[int, int]:
        """Get image dimensions, loading lazily if needed."""
        if path in self._dimensions:
            return self._dimensions[path]

        # Try fast header-only read first
        try:
            abs_path = self._abs_path(path)
            dims = self._read_dimensions_fast(abs_path)
            if dims:
                self._dimensions[path] = dims
                return dims
        except Exception:
            pass

        # Fallback to PIL (loads more data but works for all formats)
        try:
            raw = self.read_bytes(path)
            with Image.open(BytesIO(raw)) as im:
                w, h = im.size
            self._dimensions[path] = (w, h)
            return w, h
        except Exception:
            return 0, 0

    def _read_dimensions_fast(self, filepath: str) -> tuple[int, int] | None:
        """Read image dimensions from header only (fast)."""
        ext = filepath.lower().split(".")[-1]

        try:
            with open(filepath, "rb") as f:
                if ext in ("jpg", "jpeg"):
                    return self._jpeg_dimensions(f)
                elif ext == "png":
                    return self._png_dimensions(f)
                elif ext == "webp":
                    return self._webp_dimensions(f)
        except Exception:
            pass
        return None

    def _jpeg_dimensions(self, f) -> tuple[int, int] | None:
        """Read JPEG dimensions from SOF marker."""
        f.seek(0)
        if f.read(2) != b'\xff\xd8':
            return None
        while True:
            marker = f.read(2)
            if len(marker) < 2:
                return None
            if marker[0] != 0xff:
                return None
            if marker[1] == 0xd9:  # EOI
                return None
            if 0xc0 <= marker[1] <= 0xcf and marker[1] not in (0xc4, 0xc8, 0xcc):
                # SOF marker
                length = struct.unpack(">H", f.read(2))[0]
                f.read(1)  # precision
                h, w = struct.unpack(">HH", f.read(4))
                return w, h
            else:
                length = struct.unpack(">H", f.read(2))[0]
                f.seek(length - 2, 1)

    def _png_dimensions(self, f) -> tuple[int, int] | None:
        """Read PNG dimensions from IHDR chunk."""
        f.seek(0)
        sig = f.read(8)
        if sig != b'\x89PNG\r\n\x1a\n':
            return None
        f.read(4)  # chunk length
        chunk_type = f.read(4)
        if chunk_type != b'IHDR':
            return None
        w, h = struct.unpack(">II", f.read(8))
        return w, h

    def _webp_dimensions(self, f) -> tuple[int, int] | None:
        """Read WebP dimensions from header."""
        f.seek(0)
        riff = f.read(4)
        if riff != b'RIFF':
            return None
        f.read(4)  # file size
        webp = f.read(4)
        if webp != b'WEBP':
            return None
        chunk = f.read(4)
        if chunk == b'VP8 ':
            f.read(4)  # chunk size
            f.read(3)  # frame tag
            if f.read(3) != b'\x9d\x01\x2a':
                return None
            data = f.read(4)
            w = (data[0] | (data[1] << 8)) & 0x3fff
            h = (data[2] | (data[3] << 8)) & 0x3fff
            return w, h
        elif chunk == b'VP8L':
            f.read(4)  # chunk size
            sig = f.read(1)
            if sig != b'\x2f':
                return None
            data = struct.unpack("<I", f.read(4))[0]
            w = (data & 0x3fff) + 1
            h = ((data >> 14) & 0x3fff) + 1
            return w, h
        elif chunk == b'VP8X':
            f.read(4)  # chunk size
            f.read(4)  # flags
            data = f.read(6)
            w = (data[0] | (data[1] << 8) | (data[2] << 16)) + 1
            h = (data[3] | (data[4] << 8) | (data[5] << 16)) + 1
            return w, h
        return None

    def _all_items(self) -> list[CachedItem]:
        """Return cached items; build root index if nothing is cached yet."""
        if self._indexes:
            return [it for idx in self._indexes.values() for it in idx.items]
        try:
            return list(self.get_index("/").items)
        except Exception:
            return []

    def search(self, query: str = "", path: str = "/", limit: int = 100) -> list[CachedItem]:
        """Simple in-memory search over cached indexes."""
        q = (query or "").lower()
        norm = self._normalize_path(path)
        scope_prefix = f"{norm}/" if norm else ""

        results: list[CachedItem] = []
        for item in self._all_items():
            logical_path = item.path.lstrip("/")
            if norm and not (logical_path == norm or logical_path.startswith(scope_prefix)):
                continue
            meta = self.get_metadata(item.path)
            haystack = " ".join([
                item.name,
                " ".join(meta.get("tags", [])),
                meta.get("notes", ""),
            ]).lower()
            if q in haystack:
                results.append(item)
                if len(results) >= limit:
                    break
        return results

    def get_thumbnail(self, path: str) -> bytes | None:
        """Get thumbnail, generating if needed."""
        if path in self._thumbnails:
            return self._thumbnails[path]

        try:
            raw = self.read_bytes(path)
            thumb, dims = self._make_thumbnail(raw)
            self._thumbnails[path] = thumb
            # Cache dimensions from thumbnail generation
            if dims:
                self._dimensions[path] = dims
            return thumb
        except Exception:
            return None

    def _make_thumbnail(self, img_bytes: bytes) -> tuple[bytes, tuple[int, int] | None]:
        """Generate a WebP thumbnail. Returns (thumb_bytes, (w, h))."""
        with Image.open(BytesIO(img_bytes)) as im:
            w, h = im.size
            short = min(w, h)
            if short > self.thumb_size:
                scale = self.thumb_size / short
                new_w = max(1, int(w * scale))
                new_h = max(1, int(h * scale))
                im = im.convert("RGB").resize((new_w, new_h), Image.LANCZOS)
            else:
                im = im.convert("RGB")
            out = BytesIO()
            im.save(out, format="WEBP", quality=self.thumb_quality, method=6)
            return out.getvalue(), (w, h)

    def get_metadata(self, path: str) -> dict:
        """Get metadata for an image (in-memory only)."""
        if path in self._metadata:
            return self._metadata[path]
        # Build minimal metadata - dimensions loaded lazily
        w, h = self._dimensions.get(path, (0, 0))
        meta = {
            "width": w,
            "height": h,
            "tags": [],
            "notes": "",
            "star": None,
        }
        self._metadata[path] = meta
        return meta

    def set_metadata(self, path: str, meta: dict) -> None:
        """Update in-memory metadata (session-only, lost on restart)."""
        self._metadata[path] = meta

    def invalidate_cache(self, path: str | None = None) -> None:
        """Clear cached data. If path is None, clear everything."""
        if path is None:
            self._indexes.clear()
            self._thumbnails.clear()
            self._metadata.clear()
            self._dimensions.clear()
        else:
            norm = self._normalize_path(path)
            self._indexes.pop(norm, None)
            self._thumbnails.pop(path, None)
            self._metadata.pop(path, None)
            self._dimensions.pop(path, None)

    def invalidate_subtree(self, path: str) -> None:
        """Drop all cached entries for a folder and its descendants."""
        norm = self._normalize_path(path)

        # Normalize item path to "/foo" form for prefix matching
        def _canonical_item(p: str) -> str:
            p = "/" + p.lstrip("/") if p else "/"
            if p != "/":
                p = p.rstrip("/")
            return p

        canonical = _canonical_item(path)

        # Invalidate folder indexes at or below the target
        if not norm:  # root => clear everything fast
            self._indexes.clear()
        else:
            prefix = f"{norm}/"
            for key in list(self._indexes.keys()):
                if key == norm or key.startswith(prefix):
                    self._indexes.pop(key, None)

        # Invalidate per-item caches (thumbs, metadata, dimensions)
        def _matches(item_path: str) -> bool:
            candidate = _canonical_item(item_path)
            if canonical == "/":
                return True
            return candidate == canonical or candidate.startswith(canonical + "/")

        for cache in (self._thumbnails, self._metadata, self._dimensions):
            for key in list(cache.keys()):
                if _matches(key):
                    cache.pop(key, None)

    @staticmethod
    def _guess_mime(name: str) -> str:
        n = name.lower()
        if n.endswith(".webp"):
            return "image/webp"
        if n.endswith(".png"):
            return "image/png"
        return "image/jpeg"
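
For orientation, here is a minimal usage sketch of the MemoryStorage API added above. It is an illustration only: the "/path/to/photos" folder is hypothetical, and the import path is assumed to mirror the file layout in the listing (lenslet/storage/memory.py); the actual package may also re-export the class elsewhere.

# Minimal usage sketch (assumptions: "/path/to/photos" is a hypothetical image
# folder; the import path follows lenslet/storage/memory.py as listed above).
from lenslet.storage.memory import MemoryStorage

storage = MemoryStorage("/path/to/photos", thumb_size=256, thumb_quality=70)

# Build and cache the root folder index; only file stats and image headers are read.
index = storage.get_index("/")
print(f"{len(index.items)} images, {len(index.dirs)} subfolders")

if index.items:
    first = index.items[0]
    # Thumbnails (WebP bytes) and dimensions are produced lazily and kept in RAM.
    thumb = storage.get_thumbnail(first.path)
    print(first.name, storage.get_dimensions(first.path), len(thumb or b""))

    # Metadata edits are session-only; nothing is written back to the source tree.
    meta = storage.get_metadata(first.path)
    meta["tags"].append("favorite")
    storage.set_metadata(first.path, meta)
    print([it.name for it in storage.search("favorite")])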