lenslet-0.2.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lenslet/__init__.py +7 -0
- lenslet/api.py +157 -0
- lenslet/cli.py +121 -0
- lenslet/frontend/assets/index-B-0lZ7yu.js +44 -0
- lenslet/frontend/assets/index-c56aKxHZ.css +1 -0
- lenslet/frontend/favicon.ico +0 -0
- lenslet/frontend/index.html +14 -0
- lenslet/metadata.py +151 -0
- lenslet/server.py +520 -0
- lenslet/storage/__init__.py +6 -0
- lenslet/storage/base.py +35 -0
- lenslet/storage/dataset.py +591 -0
- lenslet/storage/local.py +69 -0
- lenslet/storage/memory.py +472 -0
- lenslet/storage/parquet.py +483 -0
- lenslet/workspace.py +60 -0
- lenslet-0.2.1.dist-info/METADATA +134 -0
- lenslet-0.2.1.dist-info/RECORD +20 -0
- lenslet-0.2.1.dist-info/WHEEL +4 -0
- lenslet-0.2.1.dist-info/entry_points.txt +2 -0

lenslet/storage/parquet.py
ADDED

@@ -0,0 +1,483 @@
+from __future__ import annotations
+import os
+import struct
+from dataclasses import dataclass, field
+from datetime import datetime, timezone
+from io import BytesIO
+from typing import Any
+
+from PIL import Image
+
+from .local import LocalStorage
+
+
+@dataclass
+class CachedItem:
+    """In-memory cached metadata for an image loaded from Parquet."""
+    path: str
+    name: str
+    mime: str
+    width: int
+    height: int
+    size: int
+    mtime: float
+    metrics: dict[str, float] = field(default_factory=dict)
+
+
+@dataclass
+class CachedIndex:
+    """In-memory cached folder index."""
+    path: str
+    generated_at: str
+    items: list[CachedItem] = field(default_factory=list)
+    dirs: list[str] = field(default_factory=list)
+
+
+def _load_parquet(path: str) -> dict[str, list[Any]]:
+    try:
+        import pyarrow.parquet as pq
+    except ImportError as exc:  # pragma: no cover - dependency is optional until parquet is used
+        raise ImportError(
+            "pyarrow is required for Parquet datasets. Install with: pip install pyarrow"
+        ) from exc
+
+    table = pq.read_table(path)
+    return table.to_pydict()
+
+
+def _normalize_id(value: Any) -> str | None:
+    if value is None:
+        return None
+    return str(value)
+
+
+def _coerce_float(value: Any) -> float | None:
+    if value is None:
+        return None
+    if isinstance(value, (int, float)):
+        return float(value)
+    try:
+        return float(value)
+    except (TypeError, ValueError):
+        return None
+
+
+def _coerce_int(value: Any) -> int | None:
+    if value is None:
+        return None
+    if isinstance(value, int):
+        return value
+    if isinstance(value, float):
+        return int(value)
+    try:
+        return int(value)
+    except (TypeError, ValueError):
+        return None
+
+
+def _coerce_timestamp(value: Any) -> float | None:
+    if value is None:
+        return None
+    if isinstance(value, (int, float)):
+        return float(value)
+    if hasattr(value, "timestamp"):
+        try:
+            return float(value.timestamp())
+        except Exception:
+            return None
+    return _coerce_float(value)
+
+
+class ParquetStorage:
+    """
+    In-memory storage backed by Parquet files for item metadata/metrics.
+    Reads image bytes from a local dataset root.
+    """
+
+    IMAGE_EXTS = (".jpg", ".jpeg", ".png", ".webp")
+
+    def __init__(self, root: str, thumb_size: int = 256, thumb_quality: int = 70):
+        self.local = LocalStorage(root)
+        self.root = root
+        self.thumb_size = thumb_size
+        self.thumb_quality = thumb_quality
+
+        self._indexes: dict[str, CachedIndex] = {}
+        self._items: dict[str, CachedItem] = {}
+        self._thumbnails: dict[str, bytes] = {}
+        self._metadata: dict[str, dict] = {}
+        self._dimensions: dict[str, tuple[int, int]] = {}
+
+        self._build_indexes()
+
+    def _normalize_path(self, path: str) -> str:
+        return path.strip("/") if path else ""
+
+    def _normalize_item_path(self, path: str) -> str:
+        p = (path or "").replace("\\", "/").lstrip("/")
+        if p.startswith("./"):
+            p = p[2:]
+        return p.strip("/")
+
+    def _is_supported_image(self, name: str) -> bool:
+        return name.lower().endswith(self.IMAGE_EXTS)
+
+    def _guess_mime(self, name: str) -> str:
+        n = name.lower()
+        if n.endswith(".webp"):
+            return "image/webp"
+        if n.endswith(".png"):
+            return "image/png"
+        return "image/jpeg"
+
+    def _build_metrics_map(self, data: dict[str, list[Any]] | None) -> dict[str, dict[str, float]]:
+        if not data:
+            return {}
+        if "image_id" not in data:
+            return {}
+
+        ids = data.get("image_id") or []
+        metric_keys = [k for k in data.keys() if k != "image_id"]
+        metrics: dict[str, dict[str, float]] = {}
+
+        for idx, raw_id in enumerate(ids):
+            norm_id = _normalize_id(raw_id)
+            if norm_id is None:
+                continue
+            row: dict[str, float] = {}
+            for key in metric_keys:
+                col = data.get(key) or []
+                if idx >= len(col):
+                    continue
+                val = _coerce_float(col[idx])
+                if val is None:
+                    continue
+                row[key] = val
+            if row:
+                metrics[norm_id] = row
+        return metrics
+
+    def _build_indexes(self) -> None:
+        items_path = os.path.join(self.root, "items.parquet")
+        data = _load_parquet(items_path)
+
+        if "path" not in data:
+            raise ValueError("items.parquet must include a 'path' column")
+        if "image_id" not in data:
+            print("[lenslet] Warning: items.parquet missing 'image_id'; metrics join may be incomplete")
+
+        ids = data.get("image_id") or [None] * len(data["path"])
+        paths = data.get("path") or []
+        sizes = data.get("size")
+        mtimes = data.get("mtime")
+        widths = data.get("width")
+        heights = data.get("height")
+
+        metrics_path = os.path.join(self.root, "metrics.parquet")
+        metrics_data = None
+        if os.path.exists(metrics_path):
+            try:
+                metrics_data = _load_parquet(metrics_path)
+            except Exception as exc:
+                print(f"[lenslet] Warning: Failed to read metrics.parquet: {exc}")
+                metrics_data = None
+
+        metrics_map = self._build_metrics_map(metrics_data)
+
+        generated_at = datetime.now(timezone.utc).isoformat()
+        dir_children: dict[str, set[str]] = {}
+
+        for i, raw_path in enumerate(paths):
+            if raw_path is None:
+                continue
+            norm_path = self._normalize_item_path(str(raw_path))
+            if not norm_path:
+                continue
+            name = os.path.basename(norm_path)
+            if not self._is_supported_image(name):
+                continue
+
+            item_id = _normalize_id(ids[i]) if i < len(ids) else None
+            metrics = metrics_map.get(item_id, {})
+
+            size = _coerce_int(sizes[i]) if sizes and i < len(sizes) else None
+            mtime = _coerce_timestamp(mtimes[i]) if mtimes and i < len(mtimes) else None
+            width = _coerce_int(widths[i]) if widths and i < len(widths) else None
+            height = _coerce_int(heights[i]) if heights and i < len(heights) else None
+
+            if size is None or mtime is None:
+                try:
+                    abs_path = self.local.resolve_path(norm_path)
+                    stat = os.stat(abs_path)
+                    if size is None:
+                        size = stat.st_size
+                    if mtime is None:
+                        mtime = stat.st_mtime
+                except Exception:
+                    size = size or 0
+                    mtime = mtime or 0.0
+
+            w = width or 0
+            h = height or 0
+            if w == 0 or h == 0:
+                try:
+                    abs_path = self.local.resolve_path(norm_path)
+                    dims = self._read_dimensions_fast(abs_path)
+                    if dims:
+                        w, h = dims
+                        self._dimensions[norm_path] = dims
+                except Exception:
+                    pass
+
+            item = CachedItem(
+                path=norm_path,
+                name=name,
+                mime=self._guess_mime(name),
+                width=w,
+                height=h,
+                size=size or 0,
+                mtime=mtime or 0.0,
+                metrics=metrics,
+            )
+
+            self._items[norm_path] = item
+
+            folder = os.path.dirname(norm_path).replace("\\", "/")
+            folder_norm = self._normalize_path(folder)
+            self._indexes.setdefault(folder_norm, CachedIndex(
+                path="/" + folder_norm if folder_norm else "/",
+                generated_at=generated_at,
+                items=[],
+                dirs=[],
+            )).items.append(item)
+
+            parts = folder_norm.split("/") if folder_norm else []
+            for depth in range(len(parts)):
+                parent = "/".join(parts[:depth])
+                child = parts[depth]
+                dir_children.setdefault(parent, set()).add(child)
+
+        # Build index entries for directories
+        self._indexes.setdefault("", CachedIndex(path="/", generated_at=generated_at, items=[], dirs=[]))
+        for parent, children in dir_children.items():
+            index = self._indexes.setdefault(parent, CachedIndex(
+                path="/" + parent if parent else "/",
+                generated_at=generated_at,
+                items=[],
+                dirs=[],
+            ))
+            index.dirs = sorted(children)
+
+    def get_index(self, path: str) -> CachedIndex:
+        norm = self._normalize_path(path)
+        if norm in self._indexes:
+            return self._indexes[norm]
+        raise FileNotFoundError(path)
+
+    def validate_image_path(self, path: str) -> None:
+        if not path:
+            raise ValueError("empty path")
+        norm = self._normalize_item_path(path)
+        if norm not in self._items:
+            raise FileNotFoundError(path)
+
+    def read_bytes(self, path: str) -> bytes:
+        norm = self._normalize_item_path(path)
+        return self.local.read_bytes(norm)
+
+    def exists(self, path: str) -> bool:
+        norm = self._normalize_item_path(path)
+        return norm in self._items
+
+    def size(self, path: str) -> int:
+        norm = self._normalize_item_path(path)
+        item = self._items.get(norm)
+        return item.size if item else 0
+
+    def join(self, *parts: str) -> str:
+        return self.local.join(*parts)
+
+    def etag(self, path: str) -> str | None:
+        norm = self._normalize_item_path(path)
+        item = self._items.get(norm)
+        if not item:
+            return None
+        return f"{int(item.mtime)}-{item.size}"
+
+    def get_thumbnail(self, path: str) -> bytes | None:
+        norm = self._normalize_item_path(path)
+        if norm in self._thumbnails:
+            return self._thumbnails[norm]
+
+        try:
+            raw = self.read_bytes(norm)
+            thumb, dims = self._make_thumbnail(raw)
+            self._thumbnails[norm] = thumb
+            if dims:
+                self._dimensions[norm] = dims
+            return thumb
+        except Exception:
+            return None
+
+    def get_dimensions(self, path: str) -> tuple[int, int]:
+        norm = self._normalize_item_path(path)
+        if norm in self._dimensions:
+            return self._dimensions[norm]
+        try:
+            abs_path = self.local.resolve_path(norm)
+            dims = self._read_dimensions_fast(abs_path)
+            if dims:
+                self._dimensions[norm] = dims
+                if norm in self._items:
+                    self._items[norm].width, self._items[norm].height = dims
+                return dims
+        except Exception:
+            pass
+        try:
+            raw = self.read_bytes(norm)
+            with Image.open(BytesIO(raw)) as im:
+                w, h = im.size
+            self._dimensions[norm] = (w, h)
+            if norm in self._items:
+                self._items[norm].width = w
+                self._items[norm].height = h
+            return w, h
+        except Exception:
+            return 0, 0
+
+    def _make_thumbnail(self, img_bytes: bytes) -> tuple[bytes, tuple[int, int] | None]:
+        with Image.open(BytesIO(img_bytes)) as im:
+            w, h = im.size
+            short = min(w, h)
+            if short > self.thumb_size:
+                scale = self.thumb_size / short
+                new_w = max(1, int(w * scale))
+                new_h = max(1, int(h * scale))
+                im = im.convert("RGB").resize((new_w, new_h), Image.LANCZOS)
+            else:
+                im = im.convert("RGB")
+            out = BytesIO()
+            im.save(out, format="WEBP", quality=self.thumb_quality, method=6)
+        return out.getvalue(), (w, h)
+
+    def _read_dimensions_fast(self, filepath: str) -> tuple[int, int] | None:
+        ext = filepath.lower().split(".")[-1]
+        try:
+            with open(filepath, "rb") as f:
+                if ext in ("jpg", "jpeg"):
+                    return self._jpeg_dimensions(f)
+                if ext == "png":
+                    return self._png_dimensions(f)
+                if ext == "webp":
+                    return self._webp_dimensions(f)
+        except Exception:
+            pass
+        return None
+
+    def _jpeg_dimensions(self, f) -> tuple[int, int] | None:
+        f.seek(0)
+        if f.read(2) != b"\xff\xd8":
+            return None
+        while True:
+            marker = f.read(2)
+            if len(marker) < 2 or marker[0] != 0xFF:
+                return None
+            if marker[1] == 0xD9:
+                return None
+            if 0xC0 <= marker[1] <= 0xCF and marker[1] not in (0xC4, 0xC8, 0xCC):
+                f.read(2)
+                f.read(1)
+                h, w = struct.unpack(">HH", f.read(4))
+                return w, h
+            length = struct.unpack(">H", f.read(2))[0]
+            f.seek(length - 2, 1)
+
+    def _png_dimensions(self, f) -> tuple[int, int] | None:
+        f.seek(0)
+        if f.read(8) != b"\x89PNG\r\n\x1a\n":
+            return None
+        f.read(4)
+        if f.read(4) != b"IHDR":
+            return None
+        w, h = struct.unpack(">II", f.read(8))
+        return w, h
+
+    def _webp_dimensions(self, f) -> tuple[int, int] | None:
+        f.seek(0)
+        if f.read(4) != b"RIFF":
+            return None
+        f.read(4)
+        if f.read(4) != b"WEBP":
+            return None
+        chunk = f.read(4)
+        if chunk == b"VP8 ":
+            f.read(4)
+            f.read(3)
+            if f.read(3) != b"\x9d\x01\x2a":
+                return None
+            data = f.read(4)
+            w = (data[0] | (data[1] << 8)) & 0x3FFF
+            h = (data[2] | (data[3] << 8)) & 0x3FFF
+            return w, h
+        if chunk == b"VP8L":
+            f.read(4)
+            if f.read(1) != b"\x2f":
+                return None
+            data = struct.unpack("<I", f.read(4))[0]
+            w = (data & 0x3FFF) + 1
+            h = ((data >> 14) & 0x3FFF) + 1
+            return w, h
+        if chunk == b"VP8X":
+            f.read(4)
+            f.read(4)
+            data = f.read(6)
+            w = (data[0] | (data[1] << 8) | (data[2] << 16)) + 1
+            h = (data[3] | (data[4] << 8) | (data[5] << 16)) + 1
+            return w, h
+        return None
+
+    def get_metadata(self, path: str) -> dict:
+        norm = self._normalize_item_path(path)
+        if norm in self._metadata:
+            return self._metadata[norm]
+        w, h = self._dimensions.get(norm, (0, 0))
+        meta = {
+            "width": w,
+            "height": h,
+            "tags": [],
+            "notes": "",
+            "star": None,
+        }
+        self._metadata[norm] = meta
+        return meta
+
+    def set_metadata(self, path: str, meta: dict) -> None:
+        norm = self._normalize_item_path(path)
+        self._metadata[norm] = meta
+
+    def _all_items(self) -> list[CachedItem]:
+        if self._indexes:
+            return [it for idx in self._indexes.values() for it in idx.items]
+        return list(self._items.values())
+
+    def search(self, query: str = "", path: str = "/", limit: int = 100) -> list[CachedItem]:
+        q = (query or "").lower()
+        norm = self._normalize_path(path)
+        scope_prefix = f"{norm}/" if norm else ""
+
+        results: list[CachedItem] = []
+        for item in self._all_items():
+            logical_path = item.path.lstrip("/")
+            if norm and not (logical_path == norm or logical_path.startswith(scope_prefix)):
+                continue
+            meta = self.get_metadata(item.path)
+            haystack = " ".join([
+                item.name,
+                " ".join(meta.get("tags", [])),
+                meta.get("notes", ""),
+            ]).lower()
+            if q in haystack:
+                results.append(item)
+            if len(results) >= limit:
+                break
+        return results
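
For orientation, here is a minimal sketch (not part of the package) of the dataset layout that `ParquetStorage` above expects: an `items.parquet` with a required `path` column plus optional `image_id`, `size`, `mtime`, `width`, and `height` columns, and an optional `metrics.parquet` joined on `image_id`. The root directory, file names, and the `aesthetic` metric below are made-up examples.

```python
# Illustrative sketch only; the column names come from _build_indexes() above,
# everything else (paths, metric name) is hypothetical.
import pyarrow as pa
import pyarrow.parquet as pq

from lenslet.storage.parquet import ParquetStorage

root = "/tmp/lenslet-demo"  # assumed to exist and contain cats/a.jpg and dogs/b.png

items = pa.table({
    "image_id": ["img-001", "img-002"],
    "path": ["cats/a.jpg", "dogs/b.png"],  # required; relative to the dataset root
    "size": [123_456, 98_765],             # optional; os.stat() is the fallback
    "width": [1920, 1024],                 # optional; header sniffing is the fallback
    "height": [1080, 768],
})
metrics = pa.table({
    "image_id": ["img-001", "img-002"],
    "aesthetic": [0.91, 0.42],             # any extra numeric column becomes a per-item metric
})
pq.write_table(items, f"{root}/items.parquet")
pq.write_table(metrics, f"{root}/metrics.parquet")

storage = ParquetStorage(root, thumb_size=256, thumb_quality=70)
print(storage.get_index("").dirs)          # ['cats', 'dogs']
print(storage.search("a.jpg")[0].metrics)  # {'aesthetic': 0.91}
```
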
lenslet/workspace.py
ADDED

@@ -0,0 +1,60 @@
+from __future__ import annotations
+import json
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any
+
+
+@dataclass
+class Workspace:
+    root: Path | None
+    can_write: bool
+
+    @classmethod
+    def for_dataset(cls, dataset_root: str | None, can_write: bool) -> "Workspace":
+        if not dataset_root:
+            return cls(root=None, can_write=False)
+        return cls(root=Path(dataset_root) / ".lenslet", can_write=can_write)
+
+    def ensure(self) -> None:
+        if not self.can_write or self.root is None:
+            return
+        self.root.mkdir(parents=True, exist_ok=True)
+
+    @property
+    def views_path(self) -> Path | None:
+        if self.root is None:
+            return None
+        return self.root / "views.json"
+
+    def load_views(self) -> dict[str, Any]:
+        default = {"version": 1, "views": []}
+        path = self.views_path
+        if path is None or not path.exists():
+            return default
+        try:
+            raw = path.read_text(encoding="utf-8")
+            data = json.loads(raw)
+        except Exception as exc:
+            print(f"[lenslet] Warning: failed to read views.json: {exc}")
+            return default
+        if not isinstance(data, dict):
+            return default
+        views = data.get("views")
+        if not isinstance(views, list):
+            views = []
+        version = data.get("version", 1)
+        if not isinstance(version, int):
+            version = 1
+        return {"version": version, "views": views}
+
+    def write_views(self, payload: dict[str, Any]) -> None:
+        if not self.can_write or self.root is None:
+            raise PermissionError("workspace is read-only")
+        self.ensure()
+        path = self.views_path
+        if path is None:
+            raise PermissionError("workspace is unavailable")
+        temp = path.with_suffix(".tmp")
+        temp.write_text(json.dumps(payload, indent=2, sort_keys=True), encoding="utf-8")
+        temp.replace(path)
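
And a small usage sketch for the workspace above (again illustrative, not from the package): saved views live in `.lenslet/views.json` under the dataset root and are written atomically through a temporary file. The dataset path and view entry below are hypothetical.

```python
from lenslet.workspace import Workspace

# Hypothetical dataset root; .lenslet/ is created inside it on the first write.
ws = Workspace.for_dataset("/data/photos", can_write=True)

views = ws.load_views()                       # {"version": 1, "views": []} when nothing is saved yet
views["views"].append({"name": "favorites"})  # entry shape is not validated at this layer
ws.write_views(views)                         # writes views.json via a .tmp file, then replace()

read_only = Workspace.for_dataset("/data/photos", can_write=False)
# read_only.write_views(views) would raise PermissionError("workspace is read-only")
```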

lenslet-0.2.1.dist-info/METADATA
ADDED

@@ -0,0 +1,134 @@
+Metadata-Version: 2.4
+Name: lenslet
+Version: 0.2.1
+Summary: Lightweight image gallery server - no database, no files left behind
+Project-URL: Homepage, https://github.com/trojblue/lenslet
+Project-URL: Repository, https://github.com/trojblue/lenslet
+Author-email: yada <trojblue@gmail.com>
+License: MIT
+Keywords: gallery,image,photos,server,viewer
+Classifier: Development Status :: 3 - Alpha
+Classifier: Environment :: Console
+Classifier: Environment :: Web Environment
+Classifier: Framework :: FastAPI
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: End Users/Desktop
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Multimedia :: Graphics :: Viewers
+Requires-Python: >=3.10
+Requires-Dist: fastapi>=0.115
+Requires-Dist: pillow>=10.4
+Requires-Dist: pyarrow>=14.0
+Requires-Dist: uvicorn[standard]>=0.30
+Provides-Extra: dev
+Requires-Dist: httpx>=0.27; extra == 'dev'
+Requires-Dist: pytest>=8.0; extra == 'dev'
+Provides-Extra: s3
+Requires-Dist: boto3>=1.34; extra == 's3'
+Description-Content-Type: text/markdown
+
+# Lenslet
+
+A lightweight image gallery server that runs entirely in-memory. Point it at a directory of images and browse them in your browser. No database, no metadata files left behind.
+
+## Introduction
+
+<img width="1511" height="851" alt="image" src="https://github.com/user-attachments/assets/8dc0e7f9-c1bd-4be3-bd06-443fb603e3a5" />
+
+Lenslet is a self-contained image gallery server designed for simplicity and speed. It indexes directories on-the-fly, generates thumbnails in memory, and serves everything through a clean web interface. Perfect for quickly browsing local image collections without modifying the source directory.
+
+## Features
+
+- **Clean operation**: No files written to your image directories
+- **In-memory indexing**: Fast directory scanning and caching
+- **On-demand thumbnails**: Generated and cached in RAM
+- **Full web UI**: Browse, search, and view images in your browser
+- **Metadata support**: Add tags, notes, and ratings (session-only)
+- **Single command**: Just point to a directory and go
+
+## Installation
+
+```bash
+pip install lenslet
+```
+
+## Usage
+
+### Command Line Interface
+
+```bash
+lenslet /path/to/images
+```
+
+Then open http://127.0.0.1:7070 in your browser.
+
+**Options:**
+
+```bash
+lenslet <directory> [options]
+
+Options:
+  -p, --port PORT           Port to listen on (default: 7070)
+  -H, --host HOST           Host to bind to (default: 127.0.0.1)
+  --thumb-size SIZE         Thumbnail short edge in pixels (default: 256)
+  --thumb-quality QUALITY   Thumbnail WebP quality 1-100 (default: 70)
+  --reload                  Enable auto-reload for development
+  --verbose                 Show detailed server logs
+  -v, --version             Show version and exit
+```
+
+**Examples:**
+
+```bash
+# Serve images from your Pictures folder
+lenslet ~/Pictures
+
+# Use a custom port
+lenslet ~/Photos --port 8080
+
+# Make accessible on local network
+lenslet ~/Images --host 0.0.0.0 --port 7070
+```
+
+### Programmatic API (Python/Jupyter)
+
+Launch lenslet directly from Python code or notebooks:
+
+```python
+import lenslet
+
+datasets = {
+    "my_images": ["/path/to/img1.jpg", "/path/to/img2.jpg"],
+    "more_images": [
+        "s3://bucket/img3.jpg",          # S3 URIs
+        "https://example.com/img4.jpg",  # HTTP/HTTPS URLs
+    ],
+}
+
+# Launch in non-blocking mode (returns immediately)
+lenslet.launch(datasets, blocking=False, port=7070)
+```
+
+**Key Features:**
+- 🚀 **Jupyter-friendly**: Non-blocking mode for notebooks
+- ☁️ **S3 support**: Automatically handles S3 URIs via presigned URLs
+- 📁 **Multiple datasets**: Organize images into named collections
+- 🔗 **Mixed sources**: Combine local files and S3 images
+
+See [Programmatic API Documentation](docs/PROGRAMMATIC_API.md) for details and examples.
+
+## Notes
+
+- All indexes, thumbnails, and metadata are kept in memory
+- Metadata changes (tags, ratings, notes) are lost when the server stops
+- Supports JPEG, PNG, and WebP formats
+- Hidden files and folders (starting with `.`) are ignored
+
+## License
+
+MIT License

lenslet-0.2.1.dist-info/RECORD
ADDED

@@ -0,0 +1,20 @@
+lenslet/__init__.py,sha256=o8-Ql0JYfD0Ln7kN8j7Lzj8-IkFFwPUY-qSoX5SNN6E,136
+lenslet/api.py,sha256=1HT-SptgbrafkLYbhgf6TSy5tq4WD1XU363o6Hp60Q4,5380
+lenslet/cli.py,sha256=lUOFGNzVqrS3lgzD6m-T2oD58NA1F0JB7IFe9N2z0I8,3596
+lenslet/metadata.py,sha256=4rbBApKLMUHcg-wJOGNlptw7tC_vzSZ6v5QNMTg_IO4,5066
+lenslet/server.py,sha256=C2j0voeYqxEVNS6ZuNlLM_UoV1OY9GSi57pkBf1farM,16736
+lenslet/workspace.py,sha256=xW7u8YGfx5BQ6dscEXGSeSfPPLMxbU96XCrrsjQlJ9g,1984
+lenslet/frontend/favicon.ico,sha256=_48dyP9tt9IN3q-swaAtV0qSV-FJWWB3QX-TrxUSMEk,4286
+lenslet/frontend/index.html,sha256=70eLH130msP4Ml0sw-VwLsEFaDjik3-U1KP139QbFYo,456
+lenslet/frontend/assets/index-B-0lZ7yu.js,sha256=HB_pkQIO9SznPdSuVGw9cyQejZRF9IHNvmwTrLLulyE,298760
+lenslet/frontend/assets/index-c56aKxHZ.css,sha256=70Z8kL8gwG3LeOhVCJ5_irWJllJNfsAx0WeBPp9vHa8,31923
+lenslet/storage/__init__.py,sha256=ZJZ9GFJaDNNZEy4ZDEaOLi1Nw-Ne_z9up0RTpTu7hP0,149
+lenslet/storage/base.py,sha256=ZNJMxiIzwFCSGxmEkl1Lfk2KEdnYSu6B-x3PI6p4G-0,849
+lenslet/storage/dataset.py,sha256=sX1SYkY4lQAq0dGnaBpMaP-SQtJ_roX5BBNRL_-2PK0,21001
+lenslet/storage/local.py,sha256=C0cM2bOzA5A15JyFWYebsDzmp441cdqDa3tEEjzmv-s,2302
+lenslet/storage/memory.py,sha256=s36P9ULzuAWSnJoNvdKaeuAI11Ffc8Eq4JePq8sWBQg,16572
+lenslet/storage/parquet.py,sha256=vEBs7KDTPIOY9K2EAJIenqCv1wTPTfzUmGKmaRXBHTQ,16102
+lenslet-0.2.1.dist-info/METADATA,sha256=i6XLzLrmu2nLXNV3kTQPQ_8PtygeABewp6mHZ9OWqoQ,4338
+lenslet-0.2.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+lenslet-0.2.1.dist-info/entry_points.txt,sha256=8H8vFqKynPsQXS1fkUXti5J-P1igcRrp6uaNaE4eeyc,45
+lenslet-0.2.1.dist-info/RECORD,,