televault 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
televault/compress.py ADDED
@@ -0,0 +1,138 @@
1
+ """Compression utilities for TeleVault - zstd for speed and ratio."""
2
+
3
+ import io
4
+ from pathlib import Path
5
+ from typing import BinaryIO
6
+
7
+ import zstandard as zstd
8
+
9
+ # Compression level: 3 is a good balance (default)
10
+ # Level 1-3: fast, decent compression
11
+ # Level 10-15: slower, better compression
12
+ # Level 19-22: very slow, best compression
13
+ DEFAULT_LEVEL = 3
14
+
15
+ # File extensions that are already compressed (skip compression)
16
+ INCOMPRESSIBLE_EXTENSIONS = {
17
+ # Images
18
+ ".jpg", ".jpeg", ".png", ".gif", ".webp", ".heic", ".heif", ".avif",
19
+ # Video
20
+ ".mp4", ".mkv", ".avi", ".mov", ".webm", ".m4v", ".wmv", ".flv",
21
+ # Audio
22
+ ".mp3", ".aac", ".ogg", ".opus", ".flac", ".m4a", ".wma",
23
+ # Archives
24
+ ".zip", ".gz", ".bz2", ".xz", ".7z", ".rar", ".zst", ".lz4", ".lzma",
25
+ # Documents (already compressed)
26
+ ".pdf", ".docx", ".xlsx", ".pptx", ".odt",
27
+ # Other
28
+ ".woff", ".woff2", ".br",
29
+ }
30
+
31
+
32
+ def should_compress(filename: str) -> bool:
33
+ """Check if file should be compressed based on extension."""
34
+ suffix = Path(filename).suffix.lower()
35
+ return suffix not in INCOMPRESSIBLE_EXTENSIONS
36
+
37
+
38
def compress_data(data: bytes, level: int = DEFAULT_LEVEL) -> bytes:
    """Compress *data* into a single zstd frame at the given level."""
    return zstd.ZstdCompressor(level=level).compress(data)
42
+
43
+
44
def decompress_data(data: bytes, max_output_size: int = 0) -> bytes:
    """Decompress a zstd frame.

    With max_output_size=0 the size is taken from the frame header; for
    streamed frames written without a content size the caller must supply
    an explicit cap via *max_output_size*.
    """
    decompressor = zstd.ZstdDecompressor()
    return decompressor.decompress(data, max_output_size=max_output_size)
50
+
51
+
52
def compress_file(input_path: str | Path, output_path: str | Path, level: int = DEFAULT_LEVEL) -> float:
    """Stream-compress *input_path* into *output_path* with zstd.

    Returns the compression ratio (compressed_size / original_size);
    1.0 for an empty input file.
    """
    compressor = zstd.ZstdCompressor(level=level)
    src = Path(input_path)
    dst = Path(output_path)

    with open(src, "rb") as fin, open(dst, "wb") as fout:
        compressor.copy_stream(fin, fout)

    original_size = src.stat().st_size
    if original_size == 0:
        return 1.0
    return dst.stat().st_size / original_size
67
+
68
+
69
def decompress_file(input_path: str | Path, output_path: str | Path) -> None:
    """Stream-decompress a zstd file at *input_path* into *output_path*."""
    with open(input_path, "rb") as fin, open(output_path, "wb") as fout:
        zstd.ZstdDecompressor().copy_stream(fin, fout)
75
+
76
+
77
class StreamingCompressor:
    """Incremental zstd compressor that tracks byte counts for ratio reporting."""

    def __init__(self, level: int = DEFAULT_LEVEL):
        self.cctx = zstd.ZstdCompressor(level=level)
        self.compressor = self.cctx.compressobj()
        # Running totals of raw input and compressed output bytes.
        self.total_in = 0
        self.total_out = 0

    def compress(self, data: bytes) -> bytes:
        """Feed one chunk and return whatever compressed bytes are ready."""
        self.total_in += len(data)
        out = self.compressor.compress(data)
        self.total_out += len(out)
        return out

    def flush(self) -> bytes:
        """Finalize the frame and return its trailing bytes."""
        tail = self.compressor.flush()
        self.total_out += len(tail)
        return tail

    @property
    def ratio(self) -> float:
        """Compressed/raw byte ratio so far; 1.0 before any input."""
        return self.total_out / self.total_in if self.total_in else 1.0
105
+
106
+
107
class StreamingDecompressor:
    """Incremental zstd decompressor for pipeline integration."""

    def __init__(self):
        self.dctx = zstd.ZstdDecompressor()
        self.decompressor = self.dctx.decompressobj()

    def decompress(self, data: bytes) -> bytes:
        """Decompress one chunk of a zstd stream and return the raw bytes."""
        return self.decompressor.decompress(data)
117
+
118
+
119
def estimate_compressed_size(original_size: int, filename: str) -> int:
    """Estimate the stored size in bytes using typical per-type ratios.

    Incompressible formats are assumed to be stored verbatim.
    """
    if not should_compress(filename):
        return original_size

    suffix = Path(filename).suffix.lower()

    text_like = {".txt", ".log", ".csv", ".json", ".xml", ".html", ".md"}
    code_like = {".sql", ".py", ".js", ".ts", ".go", ".rs", ".c", ".cpp", ".h"}
    container_like = {".tar", ".iso", ".img"}

    if suffix in text_like:
        ratio = 0.2   # plain text compresses very well
    elif suffix in code_like:
        ratio = 0.25  # source code compresses well
    elif suffix in container_like:
        ratio = 0.6   # container contents vary widely
    else:
        ratio = 0.5   # conservative default estimate
    return int(original_size * ratio)
televault/config.py ADDED
@@ -0,0 +1,81 @@
1
+ """Configuration management for TeleVault."""
2
+
3
+ import json
4
+ from pathlib import Path
5
+ from dataclasses import dataclass, field, asdict
6
+ from typing import Optional
7
+ import os
8
+
9
+
10
def get_config_dir() -> Path:
    """Return the TeleVault config directory, creating it if missing."""
    if os.name == "nt":  # Windows: roaming app data
        root = os.environ.get("APPDATA", "~")
    else:  # Unix: honor XDG, fall back to ~/.config
        root = os.environ.get("XDG_CONFIG_HOME", "~/.config")

    path = Path(root).expanduser() / "televault"
    path.mkdir(parents=True, exist_ok=True)
    return path
20
+
21
+
22
def get_data_dir() -> Path:
    """Return the TeleVault data directory (temp files, cache), creating it if missing."""
    if os.name == "nt":  # Windows: local (non-roaming) app data
        root = os.environ.get("LOCALAPPDATA", "~")
    else:  # Unix: honor XDG, fall back to ~/.local/share
        root = os.environ.get("XDG_DATA_HOME", "~/.local/share")

    path = Path(root).expanduser() / "televault"
    path.mkdir(parents=True, exist_ok=True)
    return path
32
+
33
+
34
@dataclass
class Config:
    """TeleVault configuration, persisted as JSON in the config directory."""

    # Telegram settings
    channel_id: Optional[int] = None

    # Chunking: size of each uploaded piece
    chunk_size: int = 100 * 1024 * 1024  # 100MB

    # Processing options
    compression: bool = True
    encryption: bool = True

    # Concurrency limits
    parallel_uploads: int = 3
    parallel_downloads: int = 5

    # Retry settings
    max_retries: int = 3
    retry_delay: float = 1.0  # seconds

    @classmethod
    def from_dict(cls, data: dict) -> "Config":
        """Build a Config from a dict, silently dropping unknown keys.

        A config.json written by a newer (or older) version of TeleVault may
        contain fields this version does not declare; passing those straight
        to the constructor would raise TypeError and make the app unusable.
        """
        known = cls.__dataclass_fields__  # mapping of declared field names
        return cls(**{k: v for k, v in data.items() if k in known})

    def save(self) -> None:
        """Write this config as JSON to config.json in the config directory."""
        config_path = get_config_dir() / "config.json"
        with open(config_path, "w") as f:
            json.dump(asdict(self), f, indent=2)

    @classmethod
    def load(cls) -> "Config":
        """Load config from file; return defaults when no file exists."""
        config_path = get_config_dir() / "config.json"

        if not config_path.exists():
            return cls()

        with open(config_path) as f:
            data = json.load(f)

        # Tolerate schema drift between app versions.
        return cls.from_dict(data)

    @classmethod
    def load_or_create(cls) -> "Config":
        """Load config, writing the defaults to disk on first run."""
        config = cls.load()
        if not (get_config_dir() / "config.json").exists():
            config.save()
        return config
televault/core.py ADDED
@@ -0,0 +1,479 @@
1
+ """Core TeleVault operations - upload, download, list."""
2
+
3
+ import asyncio
4
+ import os
5
+ import tempfile
6
+ from pathlib import Path
7
+ from typing import Optional, Callable
8
+ from dataclasses import dataclass
9
+ import hashlib
10
+
11
+ from .config import Config, get_data_dir
12
+ from .models import FileMetadata, ChunkInfo, VaultIndex
13
+ from .telegram import TelegramVault, TelegramConfig
14
+ from .chunker import iter_chunks, hash_file, hash_data, ChunkWriter, DEFAULT_CHUNK_SIZE
15
+ from .crypto import encrypt_chunk, decrypt_chunk
16
+ from .compress import compress_data, decompress_data, should_compress
17
+
18
+
19
def generate_file_id(name: str, size: int) -> str:
    """Return a short (12 hex chars) unique file ID.

    A random salt makes IDs unique even for identical name/size pairs.
    """
    salt = os.urandom(8).hex()
    digest = hashlib.sha256(f"{name}:{size}:{salt}".encode()).hexdigest()
    return digest[:12]
23
+
24
+
25
+ @dataclass
26
+ class UploadProgress:
27
+ """Progress information for upload."""
28
+ file_name: str
29
+ total_size: int
30
+ uploaded_size: int
31
+ total_chunks: int
32
+ uploaded_chunks: int
33
+ current_chunk: int
34
+
35
+ @property
36
+ def percent(self) -> float:
37
+ if self.total_chunks == 0:
38
+ return 100.0
39
+ return (self.uploaded_chunks / self.total_chunks) * 100
40
+
41
+
42
+ @dataclass
43
+ class DownloadProgress:
44
+ """Progress information for download."""
45
+ file_name: str
46
+ total_size: int
47
+ downloaded_size: int
48
+ total_chunks: int
49
+ downloaded_chunks: int
50
+ current_chunk: int
51
+
52
+ @property
53
+ def percent(self) -> float:
54
+ if self.total_chunks == 0:
55
+ return 100.0
56
+ return (self.downloaded_chunks / self.total_chunks) * 100
57
+
58
+
59
+ ProgressCallback = Callable[[UploadProgress | DownloadProgress], None]
60
+
61
+
62
class TeleVault:
    """
    Main TeleVault interface.

    Handles file upload, download, listing with compression and encryption.

    Fix in this revision: upload() previously contained a second, verbatim
    copy of the whole chunk-upload section after its `return metadata` —
    unreachable dead code that has been removed.
    """

    def __init__(
        self,
        config: Optional[Config] = None,
        telegram_config: Optional[TelegramConfig] = None,
        password: Optional[str] = None,
    ):
        self.config = config or Config.load_or_create()
        self.telegram = TelegramVault(telegram_config)
        # Default password for encryption/decryption; per-call passwords override it.
        self.password = password
        self._connected = False

    async def connect(self, skip_channel: bool = False) -> None:
        """Connect to Telegram and, if authorized, bind the configured channel."""
        await self.telegram.connect()

        if not skip_channel and self.config.channel_id:
            # Only set channel if we're already authenticated.
            # NOTE(review): reaches into TelegramVault._client — a public
            # is_authorized() accessor on TelegramVault would be cleaner.
            if await self.telegram._client.is_user_authorized():
                await self.telegram.set_channel(self.config.channel_id)

        self._connected = True

    async def disconnect(self) -> None:
        """Disconnect from Telegram."""
        await self.telegram.disconnect()
        self._connected = False

    async def login(self, phone: Optional[str] = None) -> str:
        """Run the interactive login flow; returns whatever TelegramVault.login returns."""
        return await self.telegram.login(phone)

    async def setup_channel(self, channel_id: Optional[int] = None) -> int:
        """Bind an existing storage channel, or create a new one, and persist it."""
        if channel_id:
            await self.telegram.set_channel(channel_id)
            self.config.channel_id = channel_id
        else:
            channel_id = await self.telegram.create_channel()
            self.config.channel_id = channel_id

        self.config.save()
        return channel_id

    async def upload(
        self,
        file_path: str | Path,
        password: Optional[str] = None,
        progress_callback: Optional[ProgressCallback] = None,
        preserve_path: bool = False,
    ) -> FileMetadata:
        """
        Upload a file to TeleVault with parallel chunk uploads.

        Args:
            file_path: Path to file to upload
            password: Encryption password (uses instance password if not provided)
            progress_callback: Optional progress callback
            preserve_path: If True, include full path in filename (for directory uploads)

        Returns:
            FileMetadata of uploaded file

        Raises:
            RuntimeError: if connect() has not been called
            FileNotFoundError: if file_path does not exist
        """
        if not self._connected:
            raise RuntimeError("Not connected. Call connect() first.")

        file_path = Path(file_path)
        if not file_path.exists():
            raise FileNotFoundError(f"File not found: {file_path}")

        password = password or self.password

        # Get file info
        file_name = file_path.name
        if preserve_path:
            # Use the full path as the stored name, with "/" replaced by "_"
            # so it is safe as a flat filename.
            file_name = str(file_path)
            file_name = file_name.replace("/", "_")

        file_size = file_path.stat().st_size
        file_hash = hash_file(file_path)
        file_id = generate_file_id(file_name, file_size)

        # Count chunks (ceiling division); an empty file still counts as one chunk
        # so progress math never divides by zero.
        chunk_size = self.config.chunk_size
        total_chunks = (file_size + chunk_size - 1) // chunk_size
        if total_chunks == 0:
            total_chunks = 1

        # Create initial metadata (chunk list is filled in after uploads finish).
        metadata = FileMetadata(
            id=file_id,
            name=file_name,
            size=file_size,
            hash=file_hash,
            encrypted=self.config.encryption and password is not None,
            compressed=self.config.compression and should_compress(file_name),
        )

        # Upload metadata message first so chunks can reply to it.
        metadata_msg_id = await self.telegram.upload_metadata(metadata)
        metadata.message_id = metadata_msg_id

        # Shared state for the parallel chunk uploads.
        chunk_results: dict[int, ChunkInfo] = {}
        uploaded_count = 0
        lock = asyncio.Lock()

        async def upload_single_chunk(chunk):
            # Compress/encrypt one chunk, upload it, and record its ChunkInfo.
            nonlocal uploaded_count

            data = chunk.data

            # Compress if enabled
            if metadata.compressed:
                data = compress_data(data)

            # Encrypt if enabled
            if metadata.encrypted and password:
                data = encrypt_chunk(data, password)

            # Upload chunk as a reply to the metadata message
            chunk_msg_id = await self.telegram.upload_chunk(
                data=data,
                filename=f"{file_id}_{chunk.index:04d}.chunk",
                reply_to=metadata_msg_id,
            )

            # Hash/size are of the stored (processed) bytes, so downloads can
            # verify integrity before decrypting.
            chunk_info = ChunkInfo(
                index=chunk.index,
                message_id=chunk_msg_id,
                size=len(data),
                hash=hash_data(data),
            )

            async with lock:
                chunk_results[chunk.index] = chunk_info
                uploaded_count += 1

                if progress_callback:
                    progress_callback(UploadProgress(
                        file_name=file_name,
                        total_size=file_size,
                        # Approximate: assumes chunks are roughly equal-sized.
                        uploaded_size=int(file_size * uploaded_count / total_chunks),
                        total_chunks=total_chunks,
                        uploaded_chunks=uploaded_count,
                        current_chunk=chunk.index,
                    ))

        # Upload chunks in parallel with bounded concurrency.
        semaphore = asyncio.Semaphore(self.config.parallel_uploads)

        async def upload_with_limit(chunk):
            async with semaphore:
                await upload_single_chunk(chunk)

        # NOTE(review): this materializes every chunk (i.e. the whole file)
        # in memory before uploading — confirm acceptable for large files.
        chunks = list(iter_chunks(file_path, chunk_size))

        if chunks:
            await asyncio.gather(*[upload_with_limit(c) for c in chunks])

        # Record chunks in index order.
        metadata.chunks = [chunk_results[i] for i in sorted(chunk_results.keys())]

        # Update metadata with chunk info
        await self.telegram.update_metadata(metadata_msg_id, metadata)

        # Register the file in the vault index.
        index = await self.telegram.get_index()
        index.add_file(file_id, metadata_msg_id)
        await self.telegram.save_index(index)

        return metadata

    async def download(
        self,
        file_id_or_name: str,
        output_path: Optional[str | Path] = None,
        password: Optional[str] = None,
        progress_callback: Optional[ProgressCallback] = None,
    ) -> Path:
        """
        Download a file from TeleVault.

        Args:
            file_id_or_name: File ID or name to download
            output_path: Output path (uses original filename in current dir if not provided)
            password: Decryption password
            progress_callback: Optional progress callback

        Returns:
            Path to downloaded file

        Raises:
            RuntimeError: if connect() has not been called
            FileNotFoundError: if no file matches
            ValueError: on ambiguous name, missing password, or corruption
        """
        if not self._connected:
            raise RuntimeError("Not connected. Call connect() first.")

        password = password or self.password

        # Resolve the file: exact ID first, then (sub)name match.
        index = await self.telegram.get_index()

        if file_id_or_name in index.files:
            metadata_msg_id = index.files[file_id_or_name]
        else:
            files = await self.telegram.list_files()
            matches = [f for f in files if f.name == file_id_or_name or file_id_or_name in f.name]

            if not matches:
                raise FileNotFoundError(f"File not found: {file_id_or_name}")
            if len(matches) > 1:
                raise ValueError(f"Multiple files match '{file_id_or_name}': {[f.name for f in matches]}")

            metadata_msg_id = matches[0].message_id

        metadata = await self.telegram.get_metadata(metadata_msg_id)

        # Determine output path
        if output_path:
            output_path = Path(output_path)
        else:
            output_path = Path.cwd() / metadata.name

        writer = ChunkWriter(output_path, metadata.size, self.config.chunk_size)

        # Local import kept for parity with module layout; hoisted out of the
        # loop so it runs once, not per chunk.
        from .chunker import Chunk

        downloaded_size = 0

        # Download chunks in index order.
        for chunk_info in sorted(metadata.chunks, key=lambda c: c.index):
            data = await self.telegram.download_chunk(chunk_info.message_id)

            # Verify stored-bytes hash before any decryption/decompression.
            if hash_data(data) != chunk_info.hash:
                raise ValueError(f"Chunk {chunk_info.index} hash mismatch - data corrupted")

            # Decrypt if needed
            if metadata.encrypted:
                if not password:
                    raise ValueError("File is encrypted but no password provided")
                data = decrypt_chunk(data, password)

            # Decompress if needed
            if metadata.compressed:
                data = decompress_data(data)

            writer.write_chunk(Chunk(
                index=chunk_info.index,
                data=data,
                hash="",  # Already verified above
                size=len(data),
            ))

            downloaded_size += len(data)

            if progress_callback:
                progress_callback(DownloadProgress(
                    file_name=metadata.name,
                    total_size=metadata.size,
                    downloaded_size=downloaded_size,
                    total_chunks=len(metadata.chunks),
                    downloaded_chunks=chunk_info.index + 1,
                    current_chunk=chunk_info.index,
                ))

        # Verify the whole-file hash; remove the output on mismatch.
        if hash_file(output_path) != metadata.hash:
            output_path.unlink()
            raise ValueError("Downloaded file hash mismatch - file corrupted")

        return output_path

    async def list_files(self) -> list[FileMetadata]:
        """List all files in the vault."""
        if not self._connected:
            raise RuntimeError("Not connected. Call connect() first.")

        return await self.telegram.list_files()

    async def search(self, query: str) -> list[FileMetadata]:
        """Search files by name."""
        if not self._connected:
            raise RuntimeError("Not connected. Call connect() first.")

        return await self.telegram.search_files(query)

    async def delete(self, file_id_or_name: str) -> bool:
        """Delete a file by ID or exact name; returns False if not found.

        Raises ValueError when more than one file has the given name.
        """
        if not self._connected:
            raise RuntimeError("Not connected. Call connect() first.")

        index = await self.telegram.get_index()

        # Try as file ID first
        if file_id_or_name in index.files:
            return await self.telegram.delete_file(file_id_or_name)

        # Fall back to exact-name search
        files = await self.telegram.list_files()
        matches = [f for f in files if f.name == file_id_or_name]

        if not matches:
            return False
        if len(matches) > 1:
            raise ValueError(f"Multiple files match '{file_id_or_name}'")

        return await self.telegram.delete_file(matches[0].id)

    async def get_status(self) -> dict:
        """Return vault summary: channel, file count, sizes, compression ratio."""
        if not self._connected:
            raise RuntimeError("Not connected. Call connect() first.")

        files = await self.list_files()
        total_size = sum(f.size for f in files)
        stored_size = sum(f.total_stored_size for f in files)

        return {
            "channel_id": self.config.channel_id,
            "file_count": len(files),
            "total_size": total_size,
            "stored_size": stored_size,
            "compression_ratio": stored_size / total_size if total_size > 0 else 1.0,
        }