dc-cloud 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,164 @@
1
+ import { Compress, getDirSize, Restore } from "../utils/7zip.js";
2
+ import { normalize } from "../utils/lib.js";
3
+ import { CloudBot } from "./cloudBot.js";
4
+ import { EmbedBuilder } from "discord.js";
5
+ import path from "path";
6
+ import fs from "fs";
7
+ import chalk from "chalk";
8
export class DCloudClass {
    bot = null; // CloudBot instance wrapping the Discord connection
    thread = null; // forum thread the current upload posts into
    inputPath = null; // normalized path of the file/directory being uploaded
    fileMetaData = null; // { originalName, totalSize, createdAt, parts[] } for the active upload
    constructor(config) {
        this.bot = CloudBot(config);
    }
    /** Logs the underlying bot in. Must complete before upload()/downloader(). */
    async init() {
        await this.bot?.Init();
    }
    /**
     * Creates the forum thread that receives the uploaded parts and resets
     * this.fileMetaData for the new upload.
     *
     * @param {string} fileName - base name of the file being uploaded.
     * @returns {Promise<object|null>} the created thread, or null if the bot is offline.
     * @throws {Error} if this.inputPath is unset/missing or thread creation fails.
     */
    async initializeNewChannel(fileName) {
        if (!this.bot?.IsBotOnline()) {
            console.error(chalk.red("[DCloud] [Error]: Bot is not online."));
            return null;
        }
        // Discord restricts channel/thread names; map disallowed characters to "-".
        const channelName = fileName
            .split(".")[0]
            .toLowerCase()
            .replace(/[^a-z0-9-_]/gi, "-");
        if (!this.inputPath || !fs.existsSync(this.inputPath)) {
            throw new Error(`[DCloud] [Error]: Input path invalid: ${this.inputPath}`);
        }
        const stats = fs.statSync(this.inputPath);
        const totalByteSize = stats.isDirectory()
            ? getDirSize(this.inputPath)
            : stats.size;
        const sizeInMB = (totalByteSize / (1024 * 1024)).toFixed(2);
        const embed = new EmbedBuilder()
            .setTitle("📦 Upload Info")
            .addFields({ name: "📄 Name", value: fileName, inline: false }, { name: "📏 Size", value: `${sizeInMB} MB`, inline: true })
            .setColor(0x5865f2);
        this.thread = await this.bot.initializeNewThread(channelName, embed);
        if (!this.thread) {
            throw new Error("[DCloud] [Error]: Failed to initialize Discord thread.");
        }
        this.fileMetaData = {
            originalName: fileName,
            totalSize: `${sizeInMB} MB`,
            createdAt: new Date().toLocaleString(),
            parts: [],
        };
        return this.thread;
    }
    /**
     * Compresses the given path into split 7z parts, uploads each part to the
     * forum thread as it is produced, then uploads a metadata.json describing
     * all parts and posts its URL to the thread.
     *
     * @param {string} _inputPath - file or directory to upload.
     * @returns {Promise<{originalName:string,totalSize:string,createdAt:string,metadata:*}>}
     * @throws {Error} on invalid input, thread failure, or failed metadata upload.
     */
    async upload(_inputPath) {
        try {
            this.inputPath = normalize(_inputPath);
            if (!this.inputPath) {
                // Was `return Promise.reject(undefined)`, which rejected with a
                // bare `undefined` and gave callers no message or stack trace.
                throw new Error(`[DCloud] [Error]: Invalid input path: ${_inputPath}`);
            }
            console.log(chalk.cyan("[DCloud] [Process]: Preparing Discord thread..."));
            const threadReady = await this.initializeNewChannel(path.basename(this.inputPath));
            if (!threadReady) {
                throw new Error("[DCloud] [Error]: Discord thread could not be established.");
            }
            const _outputPath = path.join(path.dirname(this.inputPath), "TempOutput");
            if (fs.existsSync(_outputPath)) {
                fs.rmSync(_outputPath, { recursive: true, force: true });
            }
            fs.mkdirSync(_outputPath, { recursive: true });
            console.log(chalk.yellow("[DCloud] [Process]: Starting compression..."));
            await Compress(this.inputPath, _outputPath, async (filePath) => {
                const fileName = path.basename(filePath);
                // Wait until the part's size is stable (7z may still be writing it).
                // Bounded so a part that never materializes cannot hang the upload forever.
                let lastSize = -1;
                let currentSize = 0;
                let attempts = 0;
                const MAX_ATTEMPTS = 600; // 600 * 500ms = 5 minutes
                while ((lastSize !== currentSize || currentSize === 0) && attempts < MAX_ATTEMPTS) {
                    lastSize = currentSize;
                    await new Promise((resolve) => setTimeout(resolve, 500));
                    if (fs.existsSync(filePath)) {
                        currentSize = fs.statSync(filePath).size;
                    }
                    attempts += 1;
                }
                console.log(chalk.blue(`[DCloud] [Queue]: Part ready for upload: ${fileName} (${(currentSize / 1024).toFixed(2)} KB)`));
                const attachmentUrl = await this.bot?.sendToForumChannel(filePath, fileName);
                if (attachmentUrl && this.fileMetaData) {
                    this.fileMetaData.parts.push({
                        name: fileName,
                        url: attachmentUrl,
                    });
                }
            });
            // Natural sort so "x.7z.010" follows "x.7z.009", not "x.7z.001".
            this.fileMetaData?.parts.sort((a, b) => a.name.localeCompare(b.name, undefined, { numeric: true }));
            console.log(chalk.green("[DCloud] [Success]: Compression and Upload sequence complete."));
            const metaDataPath = path.join(_outputPath, "metadata.json");
            fs.writeFileSync(metaDataPath, JSON.stringify(this.fileMetaData, null, 2));
            const metadataUrl = await this.bot?.sendToForumChannel(metaDataPath, "metadata.json");
            // Validate BEFORE posting: the original sent `content: undefined` to the
            // thread first, which fails in discord.js and masks the real error.
            if (!metadataUrl)
                throw new Error("[DCloud] [Error]: Metadata upload failed.");
            await this.thread?.send({
                content: metadataUrl.toString(),
            });
            fs.rmSync(_outputPath, { recursive: true, force: true });
            return {
                originalName: this.fileMetaData?.originalName || "",
                totalSize: this.fileMetaData?.totalSize || "",
                createdAt: this.fileMetaData?.createdAt || "",
                metadata: metadataUrl,
            };
        }
        catch (error) {
            console.error(chalk.red("[DCloud] [Error]: Process Failed:"), error?.message || error);
            this.fileMetaData = null;
            throw error;
        }
    }
    /**
     * Downloads every archive part listed in the metadata.json at
     * `attachmentUrl`, restores the original content via 7z, then removes the
     * temporary parts and metadata so only the extracted files remain.
     *
     * @param {string} attachmentUrl - URL of a previously uploaded metadata.json.
     * @param {string} output - directory to restore into.
     * @throws {Error} when the metadata or any part fails to download.
     */
    async downloader(attachmentUrl, output) {
        const normalizedOutput = normalize(output);
        if (!normalizedOutput)
            return;
        const metadataRes = await fetch(attachmentUrl);
        if (!metadataRes.ok)
            throw new Error(`[DCloud] [Error]: Metadata fetch failed: ${metadataRes.status}`);
        const metadataBuffer = Buffer.from(await metadataRes.arrayBuffer());
        const fileData = JSON.parse(metadataBuffer.toString());
        // Restore into a folder named after the original file (extension stripped).
        const folderPath = path.join(normalizedOutput, fileData.originalName.split(".")[0]);
        if (!fs.existsSync(folderPath))
            fs.mkdirSync(folderPath, { recursive: true });
        const metadataPath = path.join(folderPath, "metadata.json");
        fs.writeFileSync(metadataPath, metadataBuffer);
        const downloadParts = fileData.parts;
        if (downloadParts.length === 0)
            return;
        console.log(chalk.yellow(`[Downloader] [Info]: Found ${downloadParts.length} parts. Starting download...`));
        let partIndex = 1;
        for (const part of downloadParts) {
            console.log(chalk.blue(`[Downloader] [Transfer]: Fetching part ${partIndex}: ${part.name}...`));
            const filePartsRes = await fetch(part.url);
            if (!filePartsRes.ok)
                throw new Error(`[Downloader] [Error]: Failed to download part: ${filePartsRes.status}`);
            const partBuffer = Buffer.from(await filePartsRes.arrayBuffer());
            const _savePath = path.join(folderPath, part.name);
            fs.writeFileSync(_savePath, partBuffer);
            console.log(chalk.green(`[Downloader] [Success]: Saved: ${part.name}`));
            partIndex++;
        }
        console.log(chalk.cyan("[DCloud] [Restore]: All parts downloaded. Starting Restore..."));
        await Restore(folderPath);
        console.log(chalk.yellow("[DCloud] [Cleanup]: Cleaning up temporary archive parts..."));
        // Cleanup is best-effort: a failure here must not fail the whole restore.
        try {
            for (const part of downloadParts) {
                const partPath = path.join(folderPath, part.name);
                if (fs.existsSync(partPath)) {
                    fs.unlinkSync(partPath);
                }
            }
            if (fs.existsSync(metadataPath)) {
                fs.unlinkSync(metadataPath);
            }
            console.log(chalk.green("[DCloud] [Success]: Cleanup complete. Only the extracted files remain."));
        }
        catch (err) {
            console.error(chalk.red("[DCloud] [Error]: Cleanup failed:"), err.message);
        }
    }
}
162
/**
 * Factory helper: constructs a DCloudClass from the given bot configuration
 * ({ TOKEN, FORUM_CHANNEL_ID, SERVER_ID }).
 *
 * @param {object} config - credentials and channel identifiers for the bot.
 * @returns {DCloudClass} a fresh, uninitialized client (call .init() next).
 */
export const DCloud = (config) => new DCloudClass(config);
package/dist/index.js ADDED
@@ -0,0 +1,10 @@
1
import { DCloud } from "./classes/dCloud.js";

// SECURITY: the published package embedded a live bot token, forum channel id
// and guild id in source. That token is compromised and must be rotated.
// Configuration now comes from the environment instead of hard-coded literals.
const TOKEN = process.env.DCLOUD_TOKEN;
const CHANNEL_ID = process.env.DCLOUD_FORUM_CHANNEL_ID;
const GUILD_ID = process.env.DCLOUD_SERVER_ID;
if (!TOKEN || !CHANNEL_ID || !GUILD_ID) {
    throw new Error("Missing DCLOUD_TOKEN / DCLOUD_FORUM_CHANNEL_ID / DCLOUD_SERVER_ID environment variables.");
}
const dCloud = DCloud({ FORUM_CHANNEL_ID: CHANNEL_ID, TOKEN, SERVER_ID: GUILD_ID });
dCloud.init();
// Give the bot time to come online before uploading (keeps the original 10s delay).
setTimeout(() => {
    dCloud.upload(process.env.DCLOUD_UPLOAD_PATH ?? "C:\\Users\\neelo\\OneDrive\\Desktop\\DCloud\\text.txt");
}, 10000);
// dCloud.downloader("https://cdn.discordapp.com/attachments/1465344532462702639/1465344646329929923/metadata.json?ex=6978c40c&is=6977728c&hm=3b51d006d31594712dbc8c76c5a5b24d83c01c4b6d9bf8339ed175f9c5114e4a&", "..")
@@ -0,0 +1 @@
1
// Empty export: marks this file as an ES module (it contains no runtime code).
export {};
@@ -0,0 +1,169 @@
1
+ import { path7za } from "7zip-bin";
2
+ import { existsSync } from "node:fs";
3
+ import { spawn } from "node:child_process";
4
+ import { exists, normalize } from "./lib.js";
5
+ import path from "node:path";
6
+ import fs from "fs";
7
+ import chalk from "chalk";
8
export const SEVEN_ZIP = path7za;
/**
 * Executes a 7z command process.
 *
 * @param {string[]} args - argv passed directly to 7za (no shell interpolation).
 * @param {() => void} onDone - invoked when the process exits with code 0.
 * @param {(codeOrErr?: number|Error) => void} [onFailed] - invoked with the
 *        non-zero exit code, or the spawn Error, so callers can report why.
 */
export function run7z(args, onDone, onFailed) {
    if (!existsSync(SEVEN_ZIP)) {
        console.error(chalk.red("[7z] [Error]: 7-Zip binary not found."));
        onFailed?.();
        return;
    }
    const proc = spawn(SEVEN_ZIP, args, {
        stdio: "pipe",
        shell: false,
    });
    proc.on("close", (code) => {
        process.stdout.write("\n");
        if (code === 0) {
            // Optional-call for consistency: the original crashed here when a
            // callback was omitted, while the not-found branch used `?.`.
            onDone?.();
        }
        else {
            // Forward the exit code so callers can build a useful error message.
            onFailed?.(code);
        }
    });
    proc.on("error", (err) => {
        console.error(chalk.red("[7z] [Error]: Execution failed:"), err);
        onFailed?.(err);
    });
}
36
/**
 * Watches `dir` for newly created 7z split parts belonging to `baseName`
 * and invokes `callback` exactly once per distinct part file name.
 *
 * @param {string} dir - directory to watch.
 * @param {string} baseName - archive base name the part files must start with.
 * @param {(fileName: string) => void} callback - fired once per new part.
 * @returns {fs.FSWatcher} the watcher; caller is responsible for closing it.
 */
export function fileWatcher(dir, baseName, callback) {
    const reported = new Set();
    return fs.watch(dir, (_eventType, fileName) => {
        if (!fileName)
            return;
        const isSplitPart = fileName.startsWith(baseName) && fileName.includes(".7z.");
        if (isSplitPart && !reported.has(fileName)) {
            reported.add(fileName);
            callback(fileName);
        }
    });
}
52
/**
 * Compresses `input` into split 7z volumes inside `outPut`, invoking
 * `onNewFileCreated(fullPath)` for every part as soon as it appears on disk,
 * and resolving once 7z exits and every upload task has settled.
 *
 * @param {string} input - file or directory to compress.
 * @param {string} outPut - directory receiving the `.7z.NNN` parts (wiped first).
 * @param {(fullPath: string) => Promise<{name:string}|null|undefined>} onNewFileCreated -
 *        per-part upload task; nullish results are dropped from the manifest.
 * @returns {Promise<object>|null} archive metadata, or null when a path is invalid.
 */
export async function Compress(input, outPut, onNewFileCreated) {
    const inputPath = normalize(input);
    const outPutPath = normalize(outPut);
    // NOTE(review): the message mentions only the input path, but this guard
    // also fires when the output path fails to normalize.
    if (!exists(inputPath || "") || !inputPath || !outPutPath) {
        console.error(chalk.red("[7z] [Error]: Input path does not exist"));
        return null;
    }
    const baseName = path.basename(inputPath);
    const archiveBase = path.join(outPutPath, baseName);
    // Start from an empty output directory so stale parts never leak into the run.
    if (exists(outPutPath)) {
        fs.rmSync(outPutPath, { recursive: true, force: true });
    }
    fs.mkdirSync(outPutPath, { recursive: true });
    // Each part triggers its upload immediately; promises are collected so we
    // can await them all after 7z finishes.
    const uploadPromises = [];
    const watcher = fileWatcher(outPutPath, baseName, (fileName) => {
        const fullPath = path.join(outPutPath, fileName);
        const uploadTask = onNewFileCreated(fullPath);
        uploadPromises.push(uploadTask);
    });
    const stats = fs.statSync(inputPath);
    const totalByteSize = stats.isDirectory()
        ? getDirSize(inputPath)
        : stats.size;
    const sizeInMB = (totalByteSize / (1024 * 1024)).toFixed(2);
    // "a" = add; lzma2 at max compression, solid mode, split volumes (-v8m),
    // progress to stdout (-bsp1), verbose file logging (-bb3).
    const args = [
        "a",
        "-t7z",
        "-mx=9",
        "-m0=lzma2",
        "-md=512m",
        "-ms=on",
        "-bsp1",
        "-bb3",
        "-v8m",
        `${archiveBase}.7z`,
        inputPath,
    ];
    return new Promise((resolve, reject) => {
        run7z(args, async () => {
            watcher.close();
            console.log(chalk.yellow(`[7z] [Process]: Waiting for ${uploadPromises.length} uploads to finalize...`));
            // `!= null` drops BOTH null and undefined. The original filtered only
            // `!== null`, so upload callbacks that resolve to undefined survived
            // and the sort comparator below crashed on `undefined.name`, leaving
            // this promise forever pending.
            const uploadedParts = (await Promise.all(uploadPromises)).filter((p) => p != null);
            // Natural sort so part .010 follows .009 rather than .001.
            uploadedParts.sort((a, b) => a.name.localeCompare(b.name, undefined, { numeric: true }));
            const result = {
                originalName: baseName,
                createdAt: new Date().toISOString(),
                totalSize: `${sizeInMB} MB`,
                parts: uploadedParts,
            };
            resolve(result);
        }, (err) => {
            watcher.close();
            console.error(chalk.red("[7z] [Error]: 7zip Failed To Compile"), err);
            reject(new Error(err || "7zip failed"));
        });
    });
}
112
/**
 * Restores a split archive back to its original form using the part list in
 * `<folder>/metadata.json`.
 *
 * Rewritten as a plain async function: the original used an async Promise
 * executor, so a throw from readFileSync/JSON.parse escaped as an unhandled
 * rejection and the returned promise never settled.
 *
 * @param {string} folder - directory holding metadata.json and the .7z parts.
 * @param {string} [destination] - extraction target; defaults to `folder`.
 * @param {() => void} [onDone] - invoked after a successful extraction.
 * @param {() => void} [onFailed] - invoked when 7z extraction fails.
 * @throws {Error} on invalid paths, missing/empty metadata, or 7z failure.
 */
export async function Restore(folder, destination, onDone, onFailed) {
    const folderPath = normalize(folder);
    if (!folderPath)
        throw new Error("[7z] [Error]: Invalid folder path");
    const indexPath = path.join(folderPath, "metadata.json");
    if (!exists(indexPath)) {
        console.error(chalk.red(`[7z] [Error]: metadata.json not found in ${folderPath}`));
        throw new Error("metadata.json missing");
    }
    const metadata = JSON.parse(fs.readFileSync(indexPath, "utf-8"));
    if (!metadata.parts || !metadata.parts.length) {
        throw new Error("[7z] [Error]: No parts found in metadata");
    }
    const fileNameArray = metadata.parts.map((part) => part.name);
    // 7z only needs the first volume; it locates the remaining parts itself.
    const firstPart = path.join(folderPath, fileNameArray[0]);
    if (!exists(firstPart)) {
        console.error(chalk.red(`[7z] [Error]: First part missing: ${firstPart}`));
        throw new Error("First archive part missing");
    }
    const outDir = destination ? normalize(destination) : folderPath;
    // x = extract with paths, -aoa = overwrite all, -y = assume yes.
    const args = ["x", firstPart, `-o${outDir}`, "-aoa", "-y"];
    await new Promise((resolve, reject) => {
        run7z(args, () => {
            console.log(chalk.green("[7z] [Restore]: Extraction completed successfully."));
            onDone?.();
            resolve();
        }, () => {
            console.error(chalk.red("[7z] [Restore]: Extraction failed."));
            onFailed?.();
            reject(new Error("7-Zip extraction failed"));
        });
    });
}
152
/**
 * Recursively sums the sizes (in bytes) of all regular files under `dirPath`.
 *
 * @param {string} dirPath - directory to measure.
 * @returns {number} total size in bytes.
 */
export function getDirSize(dirPath) {
    return fs.readdirSync(dirPath).reduce((total, entry) => {
        const entryPath = path.join(dirPath, entry);
        const stats = fs.statSync(entryPath);
        // Recurse into subdirectories; otherwise count the file's byte size.
        return total + (stats.isDirectory() ? getDirSize(entryPath) : stats.size);
    }, 0);
}
@@ -0,0 +1,12 @@
1
+ import path from "path";
2
+ import fs from "fs";
3
/**
 * Normalizes a user-supplied path: strips surrounding/embedded quote
 * characters and resolves it to an absolute path.
 *
 * @param {string} filePath - raw path string (may be quoted).
 * @returns {string|null} absolute path, or null for non-strings / empty input.
 */
export function normalize(filePath) {
    // BUG FIX: the original guard was `!filePath && typeof filePath !== "string"`,
    // which let non-string truthy values (objects, numbers) through to
    // `.replace` (TypeError) and resolved "" to the cwd instead of rejecting it.
    if (!filePath || typeof filePath !== "string")
        return null;
    return path.resolve(filePath.replace(/["']/g, ""));
}
8
/**
 * Returns whether a file or directory exists at `filePath`.
 *
 * @param {string} filePath - path to check.
 * @returns {boolean} true when the path exists on disk.
 */
export function exists(filePath) {
    return fs.existsSync(filePath);
}
package/package.json ADDED
@@ -0,0 +1,58 @@
1
+ {
2
+ "name": "dc-cloud",
3
+ "version": "0.0.1",
4
+ "description": "Discord-based cloud storage solution for file compression, segmentation, and backup using Discord as a backend",
5
+ "type": "module",
6
+ "module": "dist/index.js",
7
+ "main": "dist/index.js",
8
+
9
+ "author": {
10
+ "name": "Neel Frostrain",
11
+ "url": "https://github.com/NeelFrostrain"
12
+ },
13
+
14
+ "repository": {
15
+ "type": "git",
16
+ "url": "https://github.com/NeelFrostrain/DCloud.git"
17
+ },
18
+
19
+ "keywords": [
20
+ "discord",
21
+ "cloud-storage",
22
+ "compression",
23
+ "7zip",
24
+ "file-backup",
25
+ "bot"
26
+ ],
27
+
28
+ "engines": {
29
+ "node": ">=18.0.0",
30
+ "npm": ">=9.0.0"
31
+ },
32
+
33
+ "scripts": {
34
+ "start": "npm run build && node dist/index.js",
35
+ "dev": "nodemon --exec ts-node src/index.ts",
36
+ "build": "tsc",
37
+ "lint": "eslint src/**/*.ts",
38
+ "type-check": "tsc --noEmit"
39
+ },
40
+
41
+ "dependencies": {
42
+ "7zip-bin": "^5.2.0",
43
+ "chalk": "^5.6.2",
44
+ "discord.js": "^14.25.1"
45
+ },
46
+
47
+ "devDependencies": {
48
+ "eslint": "^9.39.2",
49
+ "globals": "^17.1.0",
50
+ "jiti": "^2.6.1",
51
+ "nodemon": "^3.1.11",
52
+ "typescript-eslint": "^8.53.1"
53
+ },
54
+
55
+ "peerDependencies": {
56
+ "typescript": "^5"
57
+ }
58
+ }