@nightowne/tas-cli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +174 -0
- package/package.json +57 -0
- package/src/cli.js +1010 -0
- package/src/crypto/encryption.js +116 -0
- package/src/db/index.js +356 -0
- package/src/fuse/mount.js +516 -0
- package/src/index.js +219 -0
- package/src/sync/sync.js +297 -0
- package/src/telegram/client.js +131 -0
- package/src/utils/branding.js +94 -0
- package/src/utils/chunker.js +155 -0
- package/src/utils/compression.js +84 -0
- package/systemd/README.md +32 -0
- package/systemd/tas-sync.service +21 -0
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* ASCII Art and Branding for TAS CLI
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import chalk from 'chalk';
|
|
6
|
+
|
|
7
|
+
// ASCII-art wordmark rendered (cyan) by printBanner().
export const LOGO = `
████████╗ █████╗ ███████╗
╚══██╔══╝██╔══██╗██╔════╝
██║ ███████║███████╗
██║ ██╔══██║╚════██║
██║ ██║ ██║███████║
╚═╝ ╚═╝ ╚═╝╚══════╝
`;

// Product tagline printed beneath the logo.
export const TAGLINE = 'Telegram as Storage';
// Version string shown in the banner.
// NOTE(review): appears to duplicate package.json's version — confirm they stay in sync.
export const VERSION = '1.0.0';
|
|
18
|
+
|
|
19
|
+
/**
 * Print the TAS banner: cyan logo, then dimmed tagline/version and feature line.
 */
export function printBanner() {
  const bannerLines = [
    chalk.cyan(LOGO),
    chalk.dim(` ${TAGLINE} v${VERSION}`),
    chalk.dim(' Free • Encrypted • Unlimited\n'),
  ];
  for (const line of bannerLines) {
    console.log(line);
  }
}
|
|
27
|
+
|
|
28
|
+
/**
 * Print a success message prefixed with a green check icon.
 * @param {string} msg - message to display.
 */
export function success(msg) {
  const tick = chalk.green('✓');
  console.log(tick, msg);
}
|
|
34
|
+
|
|
35
|
+
/**
 * Print an error message prefixed with a red cross icon.
 * @param {string} msg - message to display.
 */
export function error(msg) {
  const cross = chalk.red('✗');
  console.log(cross, msg);
}
|
|
41
|
+
|
|
42
|
+
/**
 * Print an informational message prefixed with a blue info icon.
 * @param {string} msg - message to display.
 */
export function info(msg) {
  const badge = chalk.blue('ℹ');
  console.log(badge, msg);
}
|
|
48
|
+
|
|
49
|
+
/**
 * Print a warning message prefixed with a yellow warning icon.
 * @param {string} msg - message to display.
 */
export function warn(msg) {
  const badge = chalk.yellow('⚠');
  console.log(badge, msg);
}
|
|
55
|
+
|
|
56
|
+
/**
 * Format a byte count as a human-readable size using base-1024 units.
 * @param {number} bytes - non-negative byte count.
 * @returns {string} e.g. "1.5 KB", "0 B".
 */
export function formatSize(bytes) {
  if (bytes === 0) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
  // Clamp the unit index so values >= 1 PB don't run past the unit table
  // (the unclamped version returned "… undefined" for such inputs).
  const i = Math.min(Math.floor(Math.log(bytes) / Math.log(k)), sizes.length - 1);
  // parseFloat drops trailing zeros from toFixed(2) ("1.00" -> "1").
  return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
|
|
66
|
+
|
|
67
|
+
/**
 * Print a table-like listing of stored files.
 * @param {Array<object>} files - rows with filename, original_size,
 *   created_at, chunks, and (optionally) tags.
 * @param {boolean} [showTags=false] - also print each file's tags.
 */
export function printFileTable(files, showTags = false) {
  if (files.length === 0) {
    console.log(chalk.yellow('\n📭 No files stored yet.\n'));
    return;
  }

  const divider = chalk.dim('─'.repeat(60));
  console.log(chalk.cyan(`\n📦 Stored Files (${files.length})\n`));
  console.log(divider);

  for (const entry of files) {
    const sizeColumn = formatSize(entry.original_size).padEnd(10);
    const when = new Date(entry.created_at).toLocaleDateString();

    console.log(` ${chalk.blue('●')} ${chalk.white(entry.filename)}`);
    console.log(chalk.dim(` ${sizeColumn} • ${when} • ${entry.chunks} chunk(s)`));

    if (showTags && entry.tags && entry.tags.length > 0) {
      console.log(chalk.magenta(` 🏷️ ${entry.tags.join(', ')}`));
    }
  }

  console.log(divider);
  console.log();
}
|
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
/**
 * File chunking utilities for large files.
 * Telegram's document limit is ~2GB, so the default chunk size is 1.9GB
 * to leave headroom. (The previous comment said "WhatsApp"; this package
 * is Telegram-as-Storage.)
 */

// 1.9 * 2^30 is not an integer (2040109465.6); floor it so chunk
// boundaries are exact byte offsets instead of relying on implicit
// truncation inside Buffer.subarray.
const MAX_CHUNK_SIZE = Math.floor(1.9 * 1024 * 1024 * 1024); // ~1.9 GB

export class Chunker {
  /**
   * @param {number} [chunkSize=MAX_CHUNK_SIZE] - maximum bytes per chunk.
   *   Defaults to the Telegram-safe size; parameterized mainly for testing.
   */
  constructor(chunkSize = MAX_CHUNK_SIZE) {
    this.chunkSize = chunkSize;
  }

  /**
   * Split data into chunks if needed.
   * @param {Buffer} data - payload to split.
   * @returns {Array<{index: number, total: number, data: Buffer}>}
   *   Chunk views; each `data` shares memory with the input (no copy).
   */
  chunk(data) {
    const totalSize = data.length;

    // Small enough: return as a single chunk.
    if (totalSize <= this.chunkSize) {
      return [{ index: 0, total: 1, data }];
    }

    const numChunks = Math.ceil(totalSize / this.chunkSize);
    const chunks = [];
    for (let i = 0; i < numChunks; i++) {
      const start = i * this.chunkSize;
      const end = Math.min(start + this.chunkSize, totalSize);
      chunks.push({
        index: i,
        total: numChunks,
        data: data.subarray(start, end),
      });
    }
    return chunks;
  }

  /**
   * Reassemble chunks into the original data. Chunks may arrive in any
   * order; they are sorted on a copy (the caller's array is not mutated).
   * @param {Array<{index: number, total: number, data: Buffer}>} chunks
   * @returns {Buffer} concatenated payload.
   * @throws {Error} if no chunks are given or any chunk is missing.
   */
  reassemble(chunks) {
    // Previously an empty array crashed with an opaque TypeError on
    // chunks[0].total; fail with the same "Missing chunks" wording instead.
    if (!Array.isArray(chunks) || chunks.length === 0) {
      throw new Error('Missing chunks: have 0');
    }

    const ordered = [...chunks].sort((a, b) => a.index - b.index);

    const total = ordered[0].total;
    if (ordered.length !== total) {
      throw new Error(`Missing chunks: have ${ordered.length}, need ${total}`);
    }

    return Buffer.concat(ordered.map((c) => c.data));
  }
}
|
|
61
|
+
|
|
62
|
+
/**
 * Create a WAS file header (64 bytes, little-endian).
 *
 * Layout:
 *   0  magic "WAS1" (4 bytes)
 *   4  format version, uint16 (always 1 here)
 *   6  flags, uint16 (bit 0: payload gzip-compressed)
 *   8  original (uncompressed) size, uint64
 *   16 chunk index, uint16
 *   18 total chunks, uint16
 *   20 filename byte length, uint16
 *   22 filename, UTF-8, max 42 bytes
 *
 * @param {string} filename - stored name; truncated to at most 42 UTF-8 bytes.
 * @param {number} originalSize - uncompressed payload size in bytes.
 * @param {number} chunkIndex - zero-based index of this chunk.
 * @param {number} totalChunks - total number of chunks for the file.
 * @param {number} flags - bit flags (bit 0: compressed).
 * @returns {Buffer} the 64-byte header.
 */
export function createHeader(filename, originalSize, chunkIndex, totalChunks, flags) {
  const header = Buffer.alloc(64);
  let offset = 0;

  // Magic bytes "WAS1"
  header.write('WAS1', offset);
  offset += 4;

  // Version (1)
  header.writeUInt16LE(1, offset);
  offset += 2;

  // Flags (bit 0: compressed)
  header.writeUInt16LE(flags, offset);
  offset += 2;

  // Original size (8 bytes for large files)
  header.writeBigUInt64LE(BigInt(originalSize), offset);
  offset += 8;

  // Chunk index
  header.writeUInt16LE(chunkIndex, offset);
  offset += 2;

  // Total chunks
  header.writeUInt16LE(totalChunks, offset);
  offset += 2;

  // Filename, truncated to at most 42 bytes. Back up over UTF-8
  // continuation bytes (10xxxxxx) so a multi-byte character is never cut
  // in half — a blind byte cut produced invalid UTF-8 that parseHeader
  // would decode with replacement characters.
  let filenameBytes = Buffer.from(filename, 'utf-8');
  if (filenameBytes.length > 42) {
    let end = 42;
    while (end > 0 && (filenameBytes[end] & 0xc0) === 0x80) {
      end--;
    }
    filenameBytes = filenameBytes.subarray(0, end);
  }
  header.writeUInt16LE(filenameBytes.length, offset);
  offset += 2;

  filenameBytes.copy(header, offset);

  return header;
}
|
|
103
|
+
|
|
104
|
+
/**
 * Parse a WAS file header (see createHeader for the 64-byte layout).
 * @param {Buffer} buffer - buffer that begins with a WAS header.
 * @returns {{version: number, flags: number, compressed: boolean,
 *   originalSize: number, chunkIndex: number, totalChunks: number,
 *   filename: string}}
 * @throws {Error} when the magic bytes are not "WAS1".
 */
export function parseHeader(buffer) {
  // Reject anything that doesn't start with the magic bytes.
  if (buffer.subarray(0, 4).toString() !== 'WAS1') {
    throw new Error('Invalid WAS file: bad magic bytes');
  }

  // Fixed offsets (all fields little-endian): 4 version, 6 flags,
  // 8 original size (u64), 16 chunk index, 18 total chunks,
  // 20 filename length, 22 filename bytes.
  const version = buffer.readUInt16LE(4);
  const flags = buffer.readUInt16LE(6);
  const originalSize = Number(buffer.readBigUInt64LE(8));
  const chunkIndex = buffer.readUInt16LE(16);
  const totalChunks = buffer.readUInt16LE(18);
  const filenameLength = buffer.readUInt16LE(20);
  const filename = buffer.subarray(22, 22 + filenameLength).toString('utf-8');

  return {
    version,
    flags,
    compressed: (flags & 1) === 1,
    originalSize,
    chunkIndex,
    totalChunks,
    filename,
  };
}
|
|
154
|
+
|
|
155
|
+
// Fixed byte size of the header produced by createHeader().
export const HEADER_SIZE = 64;
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Compression utilities - gzip with smart bypass for already-compressed formats
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import zlib from 'zlib';
|
|
6
|
+
import { promisify } from 'util';
|
|
7
|
+
import path from 'path';
|
|
8
|
+
|
|
9
|
+
const gzip = promisify(zlib.gzip);
const gunzip = promisify(zlib.gunzip);

// File extensions that are already compressed — gzipping these wastes CPU
// for near-zero gain, so compression is bypassed for them.
// Note: path.extname() only ever returns the final extension, so a
// '.tar.gz' entry could never match (the lookup sees '.gz', which is
// listed); the dead entry has been removed.
const SKIP_COMPRESSION = new Set([
  // Images
  '.jpg', '.jpeg', '.png', '.gif', '.webp', '.avif', '.heic', '.heif',
  // Video
  '.mp4', '.mkv', '.avi', '.mov', '.webm', '.m4v',
  // Audio
  '.mp3', '.aac', '.m4a', '.ogg', '.opus', '.flac', '.wma',
  // Archives ('.gz' also covers '.tar.gz')
  '.zip', '.rar', '.7z', '.gz', '.bz2', '.xz', '.tgz',
  // Documents (already compressed internally)
  '.pdf', '.docx', '.xlsx', '.pptx', '.epub',
  // Other
  '.dmg', '.iso', '.apk', '.ipa'
]);

export class Compressor {
  /**
   * Whether a filename's extension marks an already-compressed format.
   * @param {string} filename
   * @returns {boolean} true if compression should be skipped.
   */
  shouldSkip(filename) {
    const ext = path.extname(filename).toLowerCase();
    return SKIP_COMPRESSION.has(ext);
  }

  /**
   * Compress data with gzip unless the format is already compressed or
   * gzip would not actually shrink it.
   * @param {Buffer} data - payload to compress.
   * @param {string} [filename=''] - used only for the extension check.
   * @returns {Promise<{data: Buffer, compressed: boolean}>}
   */
  async compress(data, filename = '') {
    // Skip formats that are already compressed.
    if (this.shouldSkip(filename)) {
      return { data, compressed: false };
    }

    try {
      const compressed = await gzip(data, { level: 6 });
      // Only use compression if it actually reduces size.
      if (compressed.length < data.length) {
        return { data: compressed, compressed: true };
      }
    } catch {
      // Deliberate best-effort: a gzip failure falls through to storing
      // the data uncompressed rather than aborting the operation.
    }

    return { data, compressed: false };
  }

  /**
   * Reverse of compress(): gunzip when the stored flag says the payload
   * was compressed, otherwise return the data untouched.
   * @param {Buffer} data
   * @param {boolean} wasCompressed - the flag recorded at compress time.
   * @returns {Promise<Buffer>}
   */
  async decompress(data, wasCompressed) {
    if (!wasCompressed) {
      return data;
    }
    return gunzip(data);
  }
}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
# TAS Systemd Service Setup
|
|
2
|
+
|
|
3
|
+
## Install the sync service to start automatically on boot:
|
|
4
|
+
|
|
5
|
+
```bash
|
|
6
|
+
# 1. Copy service file
|
|
7
|
+
sudo cp systemd/tas-sync.service /etc/systemd/user/
|
|
8
|
+
|
|
9
|
+
# 2. Edit the service file - replace USER with your username
|
|
10
|
+
sudo nano /etc/systemd/user/tas-sync.service
|
|
11
|
+
|
|
12
|
+
# 3. Create password file (for headless operation)
|
|
13
|
+
echo "your-password" > ~/.tas-password   # note: typing the password here records it in your shell history; prefer creating the file with an editor
|
|
14
|
+
chmod 600 ~/.tas-password
|
|
15
|
+
|
|
16
|
+
# 4. Enable and start the service
|
|
17
|
+
systemctl --user daemon-reload
|
|
18
|
+
systemctl --user enable tas-sync
|
|
19
|
+
systemctl --user start tas-sync
|
|
20
|
+
|
|
21
|
+
# 5. Check status
|
|
22
|
+
systemctl --user status tas-sync
|
|
23
|
+
|
|
24
|
+
# 6. View logs
|
|
25
|
+
journalctl --user -u tas-sync -f
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
## To stop the service:
|
|
29
|
+
```bash
|
|
30
|
+
systemctl --user stop tas-sync
|
|
31
|
+
systemctl --user disable tas-sync
|
|
32
|
+
```
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
[Unit]
|
|
2
|
+
Description=TAS - Telegram as Storage Sync Service
|
|
3
|
+
After=network-online.target
|
|
4
|
+
Wants=network-online.target
|
|
5
|
+
|
|
6
|
+
[Service]
|
|
7
|
+
Type=simple
|
|
8
|
+
ExecStart=/usr/bin/node /home/USER/tas/src/cli.js sync start --password-file /home/USER/.tas-password
|
|
9
|
+
Restart=on-failure
|
|
10
|
+
RestartSec=10
|
|
11
|
+
StandardOutput=journal
|
|
12
|
+
StandardError=journal
|
|
13
|
+
|
|
14
|
+
# Security hardening
|
|
15
|
+
NoNewPrivileges=true
|
|
16
|
+
ProtectSystem=strict
|
|
17
|
+
ProtectHome=read-only
|
|
18
|
+
ReadWritePaths=/home/USER/tas/data /home/USER/TelegramCloud
|
|
19
|
+
|
|
20
|
+
[Install]
|
|
21
|
+
WantedBy=default.target
|