@nightowne/tas-cli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +174 -0
- package/package.json +57 -0
- package/src/cli.js +1010 -0
- package/src/crypto/encryption.js +116 -0
- package/src/db/index.js +356 -0
- package/src/fuse/mount.js +516 -0
- package/src/index.js +219 -0
- package/src/sync/sync.js +297 -0
- package/src/telegram/client.js +131 -0
- package/src/utils/branding.js +94 -0
- package/src/utils/chunker.js +155 -0
- package/src/utils/compression.js +84 -0
- package/systemd/README.md +32 -0
- package/systemd/tas-sync.service +21 -0
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
/**
 * Encryption module - AES-256-GCM with PBKDF2 key derivation
 *
 * Wire format produced by encrypt() / consumed by decrypt():
 *   [ salt (32 bytes) | iv (12 bytes) | ciphertext | authTag (16 bytes) ]
 */

import crypto from 'crypto';

const ALGORITHM = 'aes-256-gcm';
const KEY_LENGTH = 32; // 256 bits
const IV_LENGTH = 12; // 96 bits for GCM
const TAG_LENGTH = 16; // 128 bits auth tag
const SALT_LENGTH = 32;
const PBKDF2_ITERATIONS = 100000;

// Smallest valid payload: salt + IV + auth tag (empty ciphertext).
const MIN_PAYLOAD_LENGTH = SALT_LENGTH + IV_LENGTH + TAG_LENGTH;

export class Encryptor {
  /**
   * @param {string} password - Passphrase used to derive the AES key.
   */
  constructor(password) {
    this.password = password;
  }

  /**
   * Get a hash of the password for verification (not the actual key!)
   *
   * @returns {string} hex-encoded SHA-256 of password + suffix.
   */
  getPasswordHash() {
    // NOTE(review): the 'was-verify' suffix looks like a typo for
    // 'tas-verify', but it is part of the stored verification hash and
    // MUST stay as-is or existing password checks would break.
    return crypto.createHash('sha256')
      .update(this.password + 'was-verify')
      .digest('hex');
  }

  /**
   * Derive encryption key from password using PBKDF2
   *
   * @param {Buffer} salt - Random per-message salt.
   * @returns {Buffer} 32-byte AES-256 key.
   */
  deriveKey(salt) {
    return crypto.pbkdf2Sync(
      this.password,
      salt,
      PBKDF2_ITERATIONS,
      KEY_LENGTH,
      'sha512'
    );
  }

  /**
   * Encrypt data
   * Returns: Buffer containing [salt (32) | iv (12) | ciphertext | authTag (16)]
   *
   * @param {Buffer|string} data - Plaintext to encrypt.
   * @returns {Buffer} self-contained encrypted payload.
   */
  encrypt(data) {
    // Generate random salt and IV; both are fresh per message so the same
    // plaintext never encrypts to the same payload twice.
    const salt = crypto.randomBytes(SALT_LENGTH);
    const iv = crypto.randomBytes(IV_LENGTH);

    // Derive key from password
    const key = this.deriveKey(salt);

    // Create cipher
    const cipher = crypto.createCipheriv(ALGORITHM, key, iv);

    // Encrypt
    const encrypted = Buffer.concat([
      cipher.update(data),
      cipher.final()
    ]);

    // Get auth tag (must be read after final())
    const authTag = cipher.getAuthTag();

    // Combine: salt + iv + ciphertext + authTag
    return Buffer.concat([salt, iv, encrypted, authTag]);
  }

  /**
   * Decrypt data
   * Input: Buffer containing [salt (32) | iv (12) | ciphertext | authTag (16)]
   *
   * @param {Buffer} encryptedData - Payload produced by encrypt().
   * @returns {Buffer} decrypted plaintext.
   * @throws {Error} if the payload is too short to contain the header/tag,
   *   or if authentication fails (wrong password or tampered data).
   */
  decrypt(encryptedData) {
    // Reject payloads that cannot possibly contain salt + IV + tag.
    // Without this guard the subarray() calls below silently produce
    // overlapping or empty slices and the failure surfaces later as a
    // confusing low-level OpenSSL error.
    if (encryptedData.length < MIN_PAYLOAD_LENGTH) {
      throw new Error(
        `Encrypted payload too short: ${encryptedData.length} bytes ` +
        `(minimum ${MIN_PAYLOAD_LENGTH})`
      );
    }

    // Extract components
    const salt = encryptedData.subarray(0, SALT_LENGTH);
    const iv = encryptedData.subarray(SALT_LENGTH, SALT_LENGTH + IV_LENGTH);
    const authTag = encryptedData.subarray(-TAG_LENGTH);
    const ciphertext = encryptedData.subarray(SALT_LENGTH + IV_LENGTH, -TAG_LENGTH);

    // Derive key from password
    const key = this.deriveKey(salt);

    // Create decipher
    const decipher = crypto.createDecipheriv(ALGORITHM, key, iv);
    decipher.setAuthTag(authTag);

    // Decrypt (final() throws on auth-tag mismatch)
    return Buffer.concat([
      decipher.update(ciphertext),
      decipher.final()
    ]);
  }
}
|
|
94
|
+
|
|
95
|
+
/**
 * Generate SHA-256 hash of data
 *
 * @param {Buffer|string} data - Bytes to hash.
 * @returns {string} hex-encoded SHA-256 digest.
 */
export function hashData(data) {
  const digest = crypto.createHash('sha256');
  digest.update(data);
  return digest.digest('hex');
}
|
|
101
|
+
|
|
102
|
+
/**
 * Generate SHA-256 hash of a file (streaming)
 *
 * Reads the file in chunks so arbitrarily large files can be hashed
 * without loading them fully into memory.
 *
 * @param {string} filePath - Path of the file to hash.
 * @returns {Promise<string>} hex-encoded SHA-256 digest.
 */
export async function hashFile(filePath) {
  const { createReadStream } = await import('fs');

  const hasher = crypto.createHash('sha256');

  return new Promise((resolve, reject) => {
    createReadStream(filePath)
      .on('data', (piece) => hasher.update(piece))
      .on('end', () => resolve(hasher.digest('hex')))
      .on('error', reject);
  });
}
|
package/src/db/index.js
ADDED
|
@@ -0,0 +1,356 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* SQLite database for file index
|
|
3
|
+
* Stores metadata about uploaded files
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import Database from 'better-sqlite3';
|
|
7
|
+
import path from 'path';
|
|
8
|
+
|
|
9
|
+
export class FileIndex {
  /**
   * @param {string} dbPath - Path to the SQLite database file.
   */
  constructor(dbPath) {
    this.dbPath = dbPath;
    this.db = null;
  }

  /**
   * Initialize the database and create tables.
   * Must be called before any other method; every other method assumes
   * `this.db` is an open connection.
   */
  init() {
    this.db = new Database(this.dbPath);

    // Enable WAL mode for better concurrent access
    this.db.pragma('journal_mode = WAL');

    // SQLite does NOT enforce foreign keys by default (per connection).
    // Without this pragma every ON DELETE CASCADE below is silently
    // ignored and delete() would leave orphaned chunk/tag/sync_state rows.
    this.db.pragma('foreign_keys = ON');

    // Create files table
    this.db.exec(`
      CREATE TABLE IF NOT EXISTS files (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        filename TEXT NOT NULL,
        hash TEXT UNIQUE NOT NULL,
        original_size INTEGER NOT NULL,
        stored_size INTEGER NOT NULL,
        chunks INTEGER NOT NULL DEFAULT 1,
        compressed INTEGER NOT NULL DEFAULT 0,
        created_at TEXT NOT NULL DEFAULT (datetime('now')),
        updated_at TEXT NOT NULL DEFAULT (datetime('now'))
      );

      CREATE INDEX IF NOT EXISTS idx_files_filename ON files(filename);
      CREATE INDEX IF NOT EXISTS idx_files_hash ON files(hash);
    `);

    // Create chunks table (for multi-part files)
    this.db.exec(`
      CREATE TABLE IF NOT EXISTS chunks (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        file_id INTEGER NOT NULL,
        chunk_index INTEGER NOT NULL,
        message_id TEXT NOT NULL,
        file_telegram_id TEXT,
        size INTEGER NOT NULL,
        created_at TEXT NOT NULL DEFAULT (datetime('now')),
        FOREIGN KEY (file_id) REFERENCES files(id) ON DELETE CASCADE,
        UNIQUE(file_id, chunk_index)
      );
    `);

    // Create tags table for file organization
    this.db.exec(`
      CREATE TABLE IF NOT EXISTS tags (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        file_id INTEGER NOT NULL,
        tag TEXT NOT NULL,
        created_at TEXT NOT NULL DEFAULT (datetime('now')),
        FOREIGN KEY (file_id) REFERENCES files(id) ON DELETE CASCADE,
        UNIQUE(file_id, tag)
      );

      CREATE INDEX IF NOT EXISTS idx_tags_tag ON tags(tag);
    `);

    // Create sync_folders table
    this.db.exec(`
      CREATE TABLE IF NOT EXISTS sync_folders (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        local_path TEXT UNIQUE NOT NULL,
        enabled INTEGER NOT NULL DEFAULT 1,
        created_at TEXT NOT NULL DEFAULT (datetime('now'))
      );
    `);

    // Create sync_state table for tracking file changes
    this.db.exec(`
      CREATE TABLE IF NOT EXISTS sync_state (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        folder_id INTEGER NOT NULL,
        relative_path TEXT NOT NULL,
        file_hash TEXT,
        mtime INTEGER,
        synced_at TEXT,
        FOREIGN KEY (folder_id) REFERENCES sync_folders(id) ON DELETE CASCADE,
        UNIQUE(folder_id, relative_path)
      );
    `);
  }

  /**
   * Add a new file record
   *
   * @param {{filename: string, hash: string, originalSize: number,
   *          storedSize: number, chunks: number, compressed: boolean}} fileData
   * @returns {number|bigint} rowid of the inserted file.
   */
  addFile(fileData) {
    const stmt = this.db.prepare(`
      INSERT INTO files (filename, hash, original_size, stored_size, chunks, compressed)
      VALUES (?, ?, ?, ?, ?, ?)
    `);

    const result = stmt.run(
      fileData.filename,
      fileData.hash,
      fileData.originalSize,
      fileData.storedSize,
      fileData.chunks,
      fileData.compressed ? 1 : 0
    );

    return result.lastInsertRowid;
  }

  /**
   * Add chunk metadata
   *
   * @param {number} fileId - Owning row in `files`.
   * @param {number} chunkIndex - Zero-based position within the file.
   * @param {string} messageId - Remote (Telegram) message identifier.
   * @param {number} size - Chunk size in bytes.
   */
  addChunk(fileId, chunkIndex, messageId, size) {
    const stmt = this.db.prepare(`
      INSERT INTO chunks (file_id, chunk_index, message_id, size)
      VALUES (?, ?, ?, ?)
    `);

    stmt.run(fileId, chunkIndex, messageId, size);
  }

  /**
   * Find file by hash (exact match, or prefix match so callers can pass
   * a shortened hash).
   *
   * @param {string} hash - Full or prefix hash.
   * @returns {object|undefined} first matching file row.
   */
  findByHash(hash) {
    const stmt = this.db.prepare(`
      SELECT * FROM files WHERE hash = ? OR hash LIKE ?
    `);

    return stmt.get(hash, hash + '%');
  }

  /**
   * Find file by filename (exact match, or substring match).
   *
   * @param {string} filename - Exact name or fragment of it.
   * @returns {object|undefined} first matching file row.
   */
  findByName(filename) {
    const stmt = this.db.prepare(`
      SELECT * FROM files WHERE filename = ? OR filename LIKE ?
    `);

    return stmt.get(filename, '%' + filename + '%');
  }

  /**
   * Get chunks for a file, ordered by chunk_index.
   *
   * @param {number} fileId
   * @returns {object[]} chunk rows in upload order.
   */
  getChunks(fileId) {
    const stmt = this.db.prepare(`
      SELECT * FROM chunks WHERE file_id = ? ORDER BY chunk_index
    `);

    return stmt.all(fileId);
  }

  /**
   * List all files, newest first.
   *
   * @returns {object[]} all file rows.
   */
  listAll() {
    const stmt = this.db.prepare(`
      SELECT * FROM files ORDER BY created_at DESC
    `);

    return stmt.all();
  }

  /**
   * Delete a file record.
   *
   * @param {number} fileId
   */
  delete(fileId) {
    // Chunks/tags are deleted automatically via CASCADE (requires the
    // foreign_keys pragma enabled in init()).
    const stmt = this.db.prepare('DELETE FROM files WHERE id = ?');
    stmt.run(fileId);
  }

  /**
   * Check if file exists by hash (exact match only, unlike findByHash).
   *
   * @param {string} hash
   * @returns {boolean}
   */
  exists(hash) {
    const stmt = this.db.prepare('SELECT 1 FROM files WHERE hash = ?');
    return stmt.get(hash) !== undefined;
  }

  /**
   * Get total stats: file count plus summed original/stored sizes
   * (zero, not NULL, when the table is empty).
   *
   * @returns {{file_count: number, total_original: number, total_stored: number}}
   */
  getStats() {
    const stmt = this.db.prepare(`
      SELECT
        COUNT(*) as file_count,
        COALESCE(SUM(original_size), 0) as total_original,
        COALESCE(SUM(stored_size), 0) as total_stored
      FROM files
    `);

    return stmt.get();
  }

  // ============== TAG METHODS ==============

  /**
   * Add a tag to a file. Tags are normalized (lowercased, trimmed);
   * duplicates are ignored.
   *
   * @param {number} fileId
   * @param {string} tag
   */
  addTag(fileId, tag) {
    const stmt = this.db.prepare(`
      INSERT OR IGNORE INTO tags (file_id, tag) VALUES (?, ?)
    `);
    stmt.run(fileId, tag.toLowerCase().trim());
  }

  /**
   * Remove a tag from a file (tag is normalized the same way as addTag).
   *
   * @param {number} fileId
   * @param {string} tag
   */
  removeTag(fileId, tag) {
    const stmt = this.db.prepare(`
      DELETE FROM tags WHERE file_id = ? AND tag = ?
    `);
    stmt.run(fileId, tag.toLowerCase().trim());
  }

  /**
   * Get all tags for a file.
   *
   * @param {number} fileId
   * @returns {string[]} tag names, sorted.
   */
  getFileTags(fileId) {
    const stmt = this.db.prepare(`
      SELECT tag FROM tags WHERE file_id = ? ORDER BY tag
    `);
    return stmt.all(fileId).map(row => row.tag);
  }

  /**
   * Find all files with a specific tag, newest first.
   *
   * @param {string} tag
   * @returns {object[]} matching file rows.
   */
  findByTag(tag) {
    const stmt = this.db.prepare(`
      SELECT f.* FROM files f
      INNER JOIN tags t ON f.id = t.file_id
      WHERE t.tag = ?
      ORDER BY f.created_at DESC
    `);
    return stmt.all(tag.toLowerCase().trim());
  }

  /**
   * Get all unique tags with usage counts.
   *
   * @returns {{tag: string, count: number}[]}
   */
  getAllTags() {
    const stmt = this.db.prepare(`
      SELECT tag, COUNT(*) as count FROM tags GROUP BY tag ORDER BY tag
    `);
    return stmt.all();
  }

  // ============== SYNC METHODS ==============

  /**
   * Add a folder to sync. Idempotent: adding an existing folder returns
   * its existing id.
   *
   * @param {string} localPath
   * @returns {number|bigint|undefined} folder id.
   */
  addSyncFolder(localPath) {
    const stmt = this.db.prepare(`
      INSERT OR IGNORE INTO sync_folders (local_path) VALUES (?)
    `);
    const result = stmt.run(localPath);
    // When the INSERT is ignored (folder already registered),
    // lastInsertRowid still reports the connection's PREVIOUS successful
    // insert — a stale, wrong id. Use `changes` to detect the conflict
    // and look the real id up instead.
    if (result.changes === 0) {
      return this.getSyncFolderByPath(localPath)?.id;
    }
    return result.lastInsertRowid;
  }

  /**
   * Remove a sync folder (its sync_state rows cascade away).
   *
   * @param {string} localPath
   */
  removeSyncFolder(localPath) {
    const stmt = this.db.prepare(`
      DELETE FROM sync_folders WHERE local_path = ?
    `);
    stmt.run(localPath);
  }

  /**
   * Get sync folder by path.
   *
   * @param {string} localPath
   * @returns {object|undefined} folder row.
   */
  getSyncFolderByPath(localPath) {
    const stmt = this.db.prepare(`
      SELECT * FROM sync_folders WHERE local_path = ?
    `);
    return stmt.get(localPath);
  }

  /**
   * Get all sync folders, oldest first.
   *
   * @returns {object[]}
   */
  getSyncFolders() {
    const stmt = this.db.prepare(`
      SELECT * FROM sync_folders ORDER BY created_at
    `);
    return stmt.all();
  }

  /**
   * Update sync state for a file (upsert keyed on folder + relative path).
   *
   * @param {number} folderId
   * @param {string} relativePath
   * @param {string} fileHash
   * @param {number} mtime - Modification time (units chosen by caller).
   */
  updateSyncState(folderId, relativePath, fileHash, mtime) {
    const stmt = this.db.prepare(`
      INSERT INTO sync_state (folder_id, relative_path, file_hash, mtime, synced_at)
      VALUES (?, ?, ?, ?, datetime('now'))
      ON CONFLICT(folder_id, relative_path) DO UPDATE SET
        file_hash = excluded.file_hash,
        mtime = excluded.mtime,
        synced_at = datetime('now')
    `);
    stmt.run(folderId, relativePath, fileHash, mtime);
  }

  /**
   * Get sync state for a file.
   *
   * @param {number} folderId
   * @param {string} relativePath
   * @returns {object|undefined}
   */
  getSyncState(folderId, relativePath) {
    const stmt = this.db.prepare(`
      SELECT * FROM sync_state WHERE folder_id = ? AND relative_path = ?
    `);
    return stmt.get(folderId, relativePath);
  }

  /**
   * Get all sync states for a folder.
   *
   * @param {number} folderId
   * @returns {object[]}
   */
  getFolderSyncStates(folderId) {
    const stmt = this.db.prepare(`
      SELECT * FROM sync_state WHERE folder_id = ?
    `);
    return stmt.all(folderId);
  }

  /**
   * Remove sync state for a file.
   *
   * @param {number} folderId
   * @param {string} relativePath
   */
  removeSyncState(folderId, relativePath) {
    const stmt = this.db.prepare(`
      DELETE FROM sync_state WHERE folder_id = ? AND relative_path = ?
    `);
    stmt.run(folderId, relativePath);
  }

  /**
   * Close database connection (safe to call if never initialized).
   */
  close() {
    if (this.db) {
      this.db.close();
    }
  }
}
|