televault 0.1.0__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- televault/__init__.py +1 -1
- televault/chunker.py +29 -27
- televault/cli.py +237 -90
- televault/compress.py +59 -23
- televault/config.py +16 -17
- televault/core.py +140 -203
- televault/crypto.py +26 -33
- televault/models.py +29 -30
- televault/telegram.py +136 -107
- televault/tui.py +632 -0
- televault-2.0.0.dist-info/METADATA +310 -0
- televault-2.0.0.dist-info/RECORD +14 -0
- {televault-0.1.0.dist-info → televault-2.0.0.dist-info}/entry_points.txt +1 -0
- televault-0.1.0.dist-info/METADATA +0 -242
- televault-0.1.0.dist-info/RECORD +0 -13
- {televault-0.1.0.dist-info → televault-2.0.0.dist-info}/WHEEL +0 -0
televault/crypto.py
CHANGED
@@ -2,12 +2,11 @@
 
 import os
 import struct
-from typing import BinaryIO, Iterator
 from dataclasses import dataclass
 
+from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives.ciphers.aead import AESGCM
 from cryptography.hazmat.primitives.kdf.scrypt import Scrypt
-from cryptography.hazmat.backends import default_backend
 
 # Constants
 SALT_SIZE = 16
@@ -23,34 +22,28 @@ BLOCK_SIZE = 64 * 1024  # 64KB blocks for streaming encryption
 @dataclass
 class EncryptionHeader:
     """Header prepended to encrypted data."""
-
+
     salt: bytes
     nonce: bytes
-
+
     def to_bytes(self) -> bytes:
         return self.salt + self.nonce
-
+
     @classmethod
     def from_bytes(cls, data: bytes) -> "EncryptionHeader":
         if len(data) < HEADER_SIZE:
             raise ValueError(f"Header too short: {len(data)} < {HEADER_SIZE}")
-        return cls(
-            salt=data[:SALT_SIZE],
-            nonce=data[SALT_SIZE:HEADER_SIZE]
-        )
-
+        return cls(salt=data[:SALT_SIZE], nonce=data[SALT_SIZE:HEADER_SIZE])
+
     @classmethod
     def generate(cls) -> "EncryptionHeader":
-        return cls(
-            salt=os.urandom(SALT_SIZE),
-            nonce=os.urandom(NONCE_SIZE)
-        )
+        return cls(salt=os.urandom(SALT_SIZE), nonce=os.urandom(NONCE_SIZE))
 
 
 def derive_key(password: str, salt: bytes) -> bytes:
     """
     Derive encryption key from password using Scrypt.
-
+
     Using Scrypt instead of Argon2id for broader compatibility.
     Parameters tuned for ~100ms on modern hardware.
     """
@@ -58,9 +51,9 @@ def derive_key(password: str, salt: bytes) -> bytes:
         salt=salt,
         length=KEY_SIZE,
         n=2**17,  # CPU/memory cost
-        r=8,
-        p=1,
-        backend=default_backend()
+        r=8,  # Block size
+        p=1,  # Parallelization
+        backend=default_backend(),
     )
     return kdf.derive(password.encode("utf-8"))
 
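A note on the newly commented parameters: by the standard scrypt memory formula (128 · n · r bytes — general scrypt arithmetic, not anything televault-specific), these settings imply roughly 128 MiB per derivation, consistent with the "~100ms on modern hardware" claim in the docstring. A quick sketch of the arithmetic:

# Rough cost check for the parameters above, using the standard scrypt
# memory formula (128 * n * r bytes); not code from this package.
n, r, p = 2**17, 8, 1
memory_bytes = 128 * n * r       # 134_217_728 bytes
print(memory_bytes / 2**20)      # 128.0 MiB per key derivation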
@@ -68,13 +61,13 @@ def derive_key(password: str, salt: bytes) -> bytes:
 def encrypt_chunk(data: bytes, password: str) -> bytes:
     """
     Encrypt a chunk of data.
-
+
     Returns: header (28 bytes) + ciphertext + tag (16 bytes)
     """
     header = EncryptionHeader.generate()
     key = derive_key(password, header.salt)
     cipher = AESGCM(key)
-
+
     ciphertext = cipher.encrypt(header.nonce, data, None)
     return header.to_bytes() + ciphertext
 
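The docstring's layout can be sanity-checked with plain arithmetic: the header is SALT_SIZE (16) + NONCE_SIZE (12) = 28 bytes, and AES-GCM appends a 16-byte authentication tag, so every encrypted chunk is 44 bytes larger than its plaintext. A minimal sketch, with the constants restated rather than imported:

# Fixed per-chunk overhead implied by the docstring above (sketch only).
SALT_SIZE, NONCE_SIZE, TAG_SIZE = 16, 12, 16
HEADER_SIZE = SALT_SIZE + NONCE_SIZE   # 28 bytes, as documented
OVERHEAD = HEADER_SIZE + TAG_SIZE      # 44 bytes added to each chunk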
@@ -82,13 +75,13 @@ def encrypt_chunk(data: bytes, password: str) -> bytes:
 def decrypt_chunk(encrypted_data: bytes, password: str) -> bytes:
     """
     Decrypt a chunk of data.
-
+
     Expects: header (28 bytes) + ciphertext + tag (16 bytes)
     """
     header = EncryptionHeader.from_bytes(encrypted_data)
     key = derive_key(password, header.salt)
     cipher = AESGCM(key)
-
+
     ciphertext = encrypted_data[HEADER_SIZE:]
     return cipher.decrypt(header.nonce, ciphertext, None)
 
@@ -96,22 +89,22 @@ def decrypt_chunk(encrypted_data: bytes, password: str) -> bytes:
 class StreamingEncryptor:
     """
     Streaming encryptor for large files.
-
+
     Note: For simplicity, this encrypts the entire file with one key/nonce.
     For very large files, consider chunking with per-chunk nonces.
     """
-
+
     def __init__(self, password: str):
         self.password = password
         self.header = EncryptionHeader.generate()
         self.key = derive_key(password, self.header.salt)
         self.cipher = AESGCM(self.key)
         self._counter = 0
-
+
     def get_header(self) -> bytes:
         """Get the header to prepend to encrypted output."""
         return self.header.to_bytes()
-
+
     def _get_nonce(self) -> bytes:
         """Generate unique nonce for each block using counter mode."""
         # Use base nonce + counter to ensure uniqueness
@@ -122,7 +115,7 @@ class StreamingEncryptor:
         for i in range(8):
             nonce[i] ^= counter_bytes[i]
         return bytes(nonce)
-
+
     def encrypt_block(self, data: bytes, is_last: bool = False) -> bytes:
         """Encrypt a block of data."""
         nonce = self._get_nonce()
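The XOR-counter scheme above derives a distinct 12-byte nonce per block from a single base nonce. A standalone reconstruction of the idea follows; note the hunk does not show how counter_bytes is packed, so struct.pack("<Q", ...) is an assumption, chosen only because the module imports struct:

import struct

def derive_block_nonce(base_nonce: bytes, counter: int) -> bytes:
    # Sketch of the per-block nonce derivation visible in this hunk.
    # The counter packing is assumed ("<Q" = 8 bytes little-endian);
    # the diff does not show the actual byte order used.
    counter_bytes = struct.pack("<Q", counter)
    nonce = bytearray(base_nonce)   # 12-byte base nonce from the header
    for i in range(8):              # XOR the counter into the first 8 bytes
        nonce[i] ^= counter_bytes[i]
    return bytes(nonce)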
@@ -133,13 +126,13 @@ class StreamingEncryptor:
 
 class StreamingDecryptor:
     """Streaming decryptor for large files."""
-
+
     def __init__(self, password: str, header: EncryptionHeader):
         self.password = password
         self.header = header
         self.key = derive_key(password, header.salt)
         self.cipher = AESGCM(self.key)
-
+
     def decrypt_block(self, encrypted_block: bytes) -> bytes:
         """Decrypt a block of data."""
         # Extract nonce from block
@@ -152,9 +145,9 @@ def encrypt_file_simple(input_path: str, output_path: str, password: str) -> None:
     """Simple file encryption - loads entire file into memory."""
     with open(input_path, "rb") as f:
         data = f.read()
-
+
     encrypted = encrypt_chunk(data, password)
-
+
     with open(output_path, "wb") as f:
         f.write(encrypted)
 
@@ -163,8 +156,8 @@ def decrypt_file_simple(input_path: str, output_path: str, password: str) -> None:
     """Simple file decryption - loads entire file into memory."""
     with open(input_path, "rb") as f:
         encrypted = f.read()
-
+
     decrypted = decrypt_chunk(encrypted, password)
-
+
     with open(output_path, "wb") as f:
         f.write(decrypted)
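The simple helpers trade memory for simplicity but compose into a clean round trip. A hypothetical usage sketch against the API as it appears in this diff (paths and password are illustrative only):

from televault.crypto import decrypt_file_simple, encrypt_file_simple

encrypt_file_simple("notes.txt", "notes.txt.enc", "correct horse")
decrypt_file_simple("notes.txt.enc", "notes.out", "correct horse")
assert open("notes.txt", "rb").read() == open("notes.out", "rb").read()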
televault/models.py
CHANGED
@@ -1,23 +1,22 @@
 """Data models for TeleVault - stored as JSON on Telegram."""
 
-from dataclasses import dataclass, field, asdict
-from datetime import datetime
-from typing import Optional
 import json
+from dataclasses import asdict, dataclass, field
+from datetime import datetime
 
 
 @dataclass
 class ChunkInfo:
     """Information about a single chunk stored on Telegram."""
-
+
     index: int  # Chunk order (0-based)
     message_id: int  # Telegram message ID
     size: int  # Chunk size in bytes
     hash: str  # BLAKE3 hash for verification
-
+
     def to_dict(self) -> dict:
         return asdict(self)
-
+
     @classmethod
     def from_dict(cls, data: dict) -> "ChunkInfo":
         return cls(**data)
@@ -29,49 +28,49 @@ class FileMetadata:
     Metadata for a file stored on Telegram.
     This is stored as a JSON text message, with chunks replying to it.
     """
-
+
     id: str  # Unique file ID (short hash)
     name: str  # Original filename
     size: int  # Original file size in bytes
     hash: str  # BLAKE3 hash of original file
     chunks: list[ChunkInfo] = field(default_factory=list)
-
+
     # Optional fields
     encrypted: bool = True
     compressed: bool = False
-    compression_ratio: Optional[float] = None
-    mime_type: Optional[str] = None
-
+    compression_ratio: float | None = None
+    mime_type: str | None = None
+
     # Timestamps
     created_at: float = field(default_factory=lambda: datetime.now().timestamp())
-    modified_at: Optional[float] = None
-
+    modified_at: float | None = None
+
     # Telegram reference
-    message_id: Optional[int] = None  # Message ID of this metadata
-
+    message_id: int | None = None  # Message ID of this metadata
+
     def to_json(self) -> str:
         """Serialize to JSON for storage on Telegram."""
         data = asdict(self)
         # Convert ChunkInfo objects
         data["chunks"] = [c.to_dict() if isinstance(c, ChunkInfo) else c for c in data["chunks"]]
         return json.dumps(data, separators=(",", ":"))  # Compact JSON
-
+
     @classmethod
     def from_json(cls, text: str) -> "FileMetadata":
         """Deserialize from JSON stored on Telegram."""
         data = json.loads(text)
         data["chunks"] = [ChunkInfo.from_dict(c) for c in data.get("chunks", [])]
         return cls(**data)
-
+
     @property
     def chunk_count(self) -> int:
         return len(self.chunks)
-
+
     @property
     def total_stored_size(self) -> int:
         """Total size of all chunks (after compression/encryption)."""
         return sum(c.size for c in self.chunks)
-
+
     def is_complete(self) -> bool:
         """Check if all chunks are present."""
         if not self.chunks:
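The move from Optional[...] to the X | None syntax does not change serialization: to_json/from_json still round-trip the same compact JSON. A hypothetical round trip using the models as shown in this diff (all field values are made up):

from televault.models import ChunkInfo, FileMetadata

meta = FileMetadata(
    id="ab12cd34",
    name="backup.tar",
    size=4096,
    hash="deadbeef",   # placeholder for a BLAKE3 hex digest
    chunks=[ChunkInfo(index=0, message_id=101, size=4140, hash="deadbeef")],
)
restored = FileMetadata.from_json(meta.to_json())
assert restored.name == "backup.tar" and restored.chunk_count == 1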
@@ -87,14 +86,14 @@ class VaultIndex:
     Master index of all files in the vault.
     Stored as pinned message in the channel.
     """
-
+
     version: int = 1
     files: dict[str, int] = field(default_factory=dict)  # file_id -> metadata_message_id
     updated_at: float = field(default_factory=lambda: datetime.now().timestamp())
-
+
     def to_json(self) -> str:
         return json.dumps(asdict(self), separators=(",", ":"))
-
+
     @classmethod
     def from_json(cls, text: str) -> "VaultIndex":
         data = json.loads(text)
@@ -104,12 +103,12 @@ class VaultIndex:
             files=data.get("files", {}),
             updated_at=data.get("updated_at", datetime.now().timestamp()),
         )
-
+
     def add_file(self, file_id: str, message_id: int) -> None:
         self.files[file_id] = message_id
         self.updated_at = datetime.now().timestamp()
-
-    def remove_file(self, file_id: str) -> Optional[int]:
+
+    def remove_file(self, file_id: str) -> int | None:
         msg_id = self.files.pop(file_id, None)
         if msg_id:
             self.updated_at = datetime.now().timestamp()
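remove_file's new int | None annotation makes the contract explicit: the caller gets back the metadata message ID to delete from the channel, or None if the file was never indexed (assuming the method returns msg_id; the hunk cuts off before the return). A hypothetical usage sketch:

from televault.models import VaultIndex

index = VaultIndex()
index.add_file("ab12cd34", message_id=42)    # file_id -> metadata message ID
assert index.remove_file("ab12cd34") == 42   # ID of the message to delete
assert index.remove_file("missing") is None  # absent files report None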
@@ -122,26 +121,26 @@ class TransferProgress:
     Progress tracking for resumable transfers.
     Stored as a temporary message, deleted on completion.
     """
-
+
     operation: str  # "upload" or "download"
     file_id: str
     file_name: str
     total_chunks: int
     completed_chunks: list[int] = field(default_factory=list)  # Completed chunk indices
     started_at: float = field(default_factory=lambda: datetime.now().timestamp())
-
+
     def to_json(self) -> str:
         return json.dumps(asdict(self), separators=(",", ":"))
-
+
     @classmethod
     def from_json(cls, text: str) -> "TransferProgress":
         return cls(**json.loads(text))
-
+
     @property
     def pending_chunks(self) -> list[int]:
         completed = set(self.completed_chunks)
         return [i for i in range(self.total_chunks) if i not in completed]
-
+
     @property
     def progress_percent(self) -> float:
         if self.total_chunks == 0:
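pending_chunks is what makes transfers resumable: any index not recorded in completed_chunks is scheduled again. A small sketch with made-up values:

from televault.models import TransferProgress

progress = TransferProgress(
    operation="upload",
    file_id="ab12cd34",
    file_name="backup.tar",
    total_chunks=5,
    completed_chunks=[0, 1, 3],
)
assert progress.pending_chunks == [2, 4]  # only these are retried on resume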
|