@leogps/file-uploader 2.0.1 → 2.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -35,6 +35,23 @@ object-assign
  * MIT Licensed
  */

+ /*!
+ * compressible
+ * Copyright(c) 2013 Jonathan Ong
+ * Copyright(c) 2014 Jeremiah Senkpiel
+ * Copyright(c) 2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+ /*!
+ * compression
+ * Copyright(c) 2010 Sencha Inc.
+ * Copyright(c) 2011 TJ Holowaychuk
+ * Copyright(c) 2014 Jonathan Ong
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
  /*!
  * content-disposition
  * Copyright(c) 2014-2017 Douglas Christopher Wilson
@@ -183,6 +200,12 @@ object-assign
  * MIT Licensed
  */

+ /*!
+ * on-headers
+ * Copyright(c) 2014 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
  /*!
  * parseurl
  * Copyright(c) 2014 Jonathan Ong
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@leogps/file-uploader",
- "version": "2.0.1",
+ "version": "2.0.2",
  "description": "Facilitates file uploader server.",
  "main": "src/index.ts",
  "repository": {
@@ -52,7 +52,9 @@
  "license": "MIT",
  "dependencies": {
  "@fortawesome/fontawesome-free": "^5.15.4",
+ "@leogps/file-uploader": "^2.0.1",
  "bulma": "^1.0.4",
+ "compression": "^1.8.1",
  "express": "^4.18.2",
  "formidable": "^3.5.4",
  "jquery": "^3.7.1",
@@ -62,7 +64,6 @@
  "moment": "^2.29.4",
  "mv": "^2.1.1",
  "pretty-bytes": "^6.1.1",
- "rimraf": "^5.0.5",
  "serve-favicon": "^2.5.0",
  "socket.io": "^4.7.2",
  "socket.io-client": "^4.7.2",
@@ -71,6 +72,7 @@
  "yargs": "^17.7.2"
  },
  "devDependencies": {
+ "@types/compression": "^1.8.1",
  "@types/express": "^4.17.20",
  "@types/formidable": "^3.4.6",
  "@types/jquery": "^3.5.24",
@@ -100,6 +102,7 @@
  "npm-run-all": "^4.1.5",
  "postcss-loader": "^7.0.1",
  "prettier": "^3.7.4",
+ "rimraf": "^6.1.2",
  "sass": "^1.94.2",
  "sass-loader": "^13.3.3",
  "style-loader": "^3.3.1",
package/src/globals.ts CHANGED
@@ -2,9 +2,13 @@ import { Progress } from './model/progress';
  import { ProgressWriter } from './service/progress_writer';
  import _ from "lodash";

- export const MAX_CHUNK_SIZE = 2 * 1024 * 1024;
- export const MAX_PARALLEL_CHUNK_UPLOADS = 10;
- export let uploadsDir: string;
+ let uploadChunkSize = 512 * 1024;
+ let maxParallelChunkUploads = 10;
+ let uploadsDir: string;
+ let enableCompression = true;
+ let serverPort = 8082;
+ let maxFileSize = 100 * 1024 * 1024 * 1024; // 100Gb
+
  export const progresses: Progress[] = [];
  export const uploadsProgressMap: Map<string, Progress> = new Map();
  let progressWriter: ProgressWriter;
@@ -13,6 +17,21 @@ export const throttleWaitTimeInMillis = 250;
  export const setUploadsDir = (dir: string) => { uploadsDir = dir; };
  export const getUploadsDir = () => uploadsDir;

+ export const setUploadChunkSize = (size: number) => { uploadChunkSize = size; };
+ export const getUploadChunkSize = () => uploadChunkSize;
+
+ export const setMaxParallelChunkUploads = (count: number) => { maxParallelChunkUploads = count; };
+ export const getMaxParallelChunkUploads = () => maxParallelChunkUploads;
+
+ export const setEnableCompression = (enable: boolean) => { enableCompression = enable; };
+ export const getEnableCompression = () => enableCompression;
+
+ export const setServerPort = (port: number) => { serverPort = port; };
+ export const getServerPort = () => serverPort;
+
+ export const setMaxFileSize = (size: number) => { maxFileSize = size; };
+ export const getMaxFileSize = () => maxFileSize;
+
  export const setProgressWriter = (writer: ProgressWriter) => {
  progressWriter = writer;
  };
package/src/index.ts CHANGED
@@ -1,23 +1,28 @@
-
-
  import express, { Express, Request, Response } from 'express';
+ import compression from 'compression';
  import {createServer, Server} from "http"
  import {router as uploadInitRouter} from "./routes/uploadInit";
  import {router as uploadChunkRouter} from "./routes/uploadChunk";
  import {router as uploadCompleteRouter} from "./routes/uploadComplete";
  import {router as uploadStatusRouter} from "./routes/uploadStatus";
+ import {router as uploadRouter} from "./routes/upload";
  import {ProgressWriter} from "./service/progress_writer";
  import * as socketio from "socket.io";
  import yargs from "yargs";
  import {hideBin} from "yargs/helpers";
  import * as os from 'os';
- import {getProgressWriter, progresses, setProgressWriter, setUploadsDir} from "./globals";
+ import {
+ getEnableCompression, getMaxFileSize,
+ getMaxParallelChunkUploads, getServerPort, getUploadChunkSize,
+ progresses, setEnableCompression, setMaxFileSize,
+ setMaxParallelChunkUploads, setProgressWriter, setServerPort, setUploadChunkSize, setUploadsDir, throttledBroadcaster
+ } from "./globals";
+ import prettyBytes from "pretty-bytes";

  const homedir = os.homedir();
- let port = 8082;
- let uploadsDir = homedir + "/Downloads/uploads/"
+ let uploadsDir = homedir + "/uploads/"
  const argv: any = yargs(hideBin(process.argv))
- .option('upload_location', {
+ .option('upload-location', {
  alias: 'l',
  type: 'string',
  description: 'upload location',
@@ -26,25 +31,65 @@ const argv: any = yargs(hideBin(process.argv))
  .option('port', {
  alias: 'p',
  type: 'number',
+ default: getServerPort(),
  description: 'server port'
  })
+ .option('chunk-size', {
+ alias: 's',
+ type: 'number',
+ description: 'chunk size in bytes',
+ default: 512 * 1024
+ })
+ .option('parallel-uploads', {
+ alias: 'n',
+ type: 'number',
+ description: 'number of simultaneous parallel chunk uploads (per file)',
+ default: 10
+ })
+ .option('enable-compression', {
+ alias: 'c',
+ type: 'boolean',
+ description: 'enable gzip compression (server to client responses)',
+ default: true
+ })
+ .option('max-file-size', {
+ alias: 'm',
+ type: 'number',
+ description: 'maximum file size in bytes',
+ default: getMaxFileSize()
+ })
  .help()
  .argv

- if (argv.port) {
- port = argv.port
- }
- if (argv.upload_location) {
- uploadsDir = argv.upload_location.endsWith('/') ? argv.upload_location: argv.upload_location + '/'
+ const uploadLocationArg = argv["upload-location"]
+ if (uploadLocationArg) {
+ uploadsDir = uploadLocationArg.endsWith('/') ? uploadLocationArg: uploadLocationArg + '/'
  }
  setUploadsDir(uploadsDir)
+ setUploadChunkSize(argv["chunk-size"])
+ setMaxParallelChunkUploads(argv["parallel-uploads"])
+ setEnableCompression(argv["enable-compression"])
+ setServerPort(argv.port)
+ setMaxFileSize(argv["max-file-size"])
+ const port = getServerPort()

- console.log("Upload location: " + uploadsDir)
- console.log("Server port: " + port)
+ console.log(`Upload location: ${uploadsDir}`)
+ console.log(`Max Parallel uploads per file: ${getMaxParallelChunkUploads()}`)
+ console.log(`Parallel upload chunk size: ${prettyBytes(getUploadChunkSize())}`)
+ console.log(`Compression: ${getEnableCompression() ? "Enabled" : "Disabled"}`)
+ console.log(`Server port: ${port}`)

  const app: Express = express();
  const httpServer: Server = createServer(app)
  const io: socketio.Server = new socketio.Server(httpServer);
+ if (getEnableCompression()) {
+ app.use(
+ compression({
+ threshold: 10 * 1024,
+ level: 4
+ })
+ )
+ }

  setProgressWriter(new ProgressWriter(io));

@@ -54,6 +99,7 @@ app.use('/upload/init', uploadInitRouter);
  app.use('/upload/chunk', uploadChunkRouter);
  app.use('/upload/complete', uploadCompleteRouter);
  app.use('/upload/status', uploadStatusRouter);
+ app.use('/upload', uploadRouter);

  app.get('/', (_, res) => {
  res.sendFile(__dirname + '/client/index.html');
@@ -69,7 +115,7 @@ app.get('/progresses', (_: Request, res: Response) => {
  io.on('connection', (socket: socketio.Socket) => {
  console.log('a user connected');
  // socket.emit('progresses', progresses);
- getProgressWriter().writeProgress(progresses);
+ throttledBroadcaster();
  socket.on('disconnect', () => {
  console.log('user disconnected');
  });
@@ -2,6 +2,7 @@ import prettyBytes from 'pretty-bytes'

  const TRANSFER_SAMPLE_FREQ = 1000; // 1second
  const MIN_SAMPLE_FREQ = TRANSFER_SAMPLE_FREQ / 4;
+ export const RATE_WINDOW_MS = 6000;

  export enum UploadStatus {
  INITIATED = "INITIATED",
@@ -1,17 +1,38 @@
  import {Progress} from './progress';

  export class ProgressUtils {
- public static calculateTransferRate(progress: Progress): number {
- const transferSamples = progress.transferSamples;
- if (!transferSamples || transferSamples.length < 2) {
- return 0;
- }
- const first = transferSamples[0];
- const last = transferSamples[transferSamples.length - 1];
+ public static calculateTransferRate(progress: Progress): number {
+ const samples = progress.transferSamples;
+ if (!samples || samples.length < 2) {
+ return 0;
+ }
+
+ let totalBytes = 0;
+ let totalTimeMs = 0;
+
+ // Case vs Reason for skipping
+ // bytes === 0 idle / waiting / verification
+ // bytes < 0 possibly corrupted or reset counter
+ // timeMs === 0 divide-by-zero risk
+ // timeMs < 0 invalid timestamp
+ // To prevent idle gaps from dragging the rate down artificially.
+ for (let i = 1; i < samples.length; i++) {
+ const prev = samples[i - 1];
+ const curr = samples[i];

- const dataSize = last.bytesReceived - first.bytesReceived;
- const timeIntervalSeconds = (last.timestamp - first.timestamp) / 1000;
+ const bytes = curr.bytesReceived - prev.bytesReceived;
+ const timeMs = curr.timestamp - prev.timestamp;

- return dataSize / timeIntervalSeconds;
+ if (bytes > 0 && timeMs > 0) {
+ totalBytes += bytes;
+ totalTimeMs += timeMs;
+ }
+ }
+
+ if (totalTimeMs === 0) {
+ return 0;
  }
- }
+
+ return totalBytes / (totalTimeMs / 1000);
+ }
+ }
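Note on the calculateTransferRate change above: 2.0.1 divided the first-to-last byte delta by the full elapsed time, so idle gaps (waiting, verification) dragged the reported speed down; 2.0.2 sums only intervals that actually moved bytes forward. A minimal standalone TypeScript sketch of that behaviour follows; the bytesReceived/timestamp sample shape is taken from the diff, and the sample values are invented purely for illustration.

interface TransferSample {
    bytesReceived: number; // cumulative bytes received so far
    timestamp: number;     // epoch milliseconds
}

// Mirrors the gap-skipping logic added in 2.0.2: only intervals with positive
// byte and time deltas contribute to the average rate.
function transferRate(samples: TransferSample[]): number {
    if (samples.length < 2) return 0;
    let totalBytes = 0;
    let totalTimeMs = 0;
    for (let i = 1; i < samples.length; i++) {
        const bytes = samples[i].bytesReceived - samples[i - 1].bytesReceived;
        const timeMs = samples[i].timestamp - samples[i - 1].timestamp;
        if (bytes > 0 && timeMs > 0) {
            totalBytes += bytes;
            totalTimeMs += timeMs;
        }
    }
    return totalTimeMs === 0 ? 0 : totalBytes / (totalTimeMs / 1000);
}

// Invented samples: 1 MiB in the first second, a 5-second idle gap, 1 MiB in the next second.
const samples: TransferSample[] = [
    { bytesReceived: 0,       timestamp: 0 },
    { bytesReceived: 1048576, timestamp: 1000 },
    { bytesReceived: 1048576, timestamp: 6000 }, // idle interval, skipped
    { bytesReceived: 2097152, timestamp: 7000 },
];

console.log(transferRate(samples)); // 1048576 B/s; the old first-to-last method reports ~299593 B/s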
@@ -0,0 +1,116 @@
+ import { Router } from 'express';
+ import formidable, { File } from "formidable";
+ import { v4 as uuidv4 } from 'uuid';
+ import {
+ getMaxFileSize,
+ getUploadsDir,
+ progresses,
+ throttledBroadcaster,
+ throttleWaitTimeInMillis,
+ uploadsProgressMap
+ } from "../globals";
+ import {FileTransferProgress, Progress, UploadStatus} from "../model/progress";
+ import _ from "lodash";
+ import prettyBytes from "pretty-bytes";
+ import mv from "mv";
+
+ export const router = Router();
+
+ router.post('/', (req: any, res: any) => {
+ const maxFileSize = getMaxFileSize();
+ const uploadsDir = getUploadsDir();
+ // parse a file upload
+ const form = formidable({
+ multiples: true,
+ maxFileSize,
+ uploadDir: uploadsDir
+ });
+ const timestamp: number = new Date().getTime();
+ const uuid = uuidv4();
+ const progress: Progress = new FileTransferProgress(uuid, timestamp);
+ uploadsProgressMap.set(uuid, progress);
+ progresses.push(progress);
+
+ const progressProcessorThrottled = _.throttle((bytesReceived, bytesExpected) => {
+ console.log("Progress: (" + bytesReceived + "/" + bytesExpected + ")");
+ if (uploadsProgressMap.has(uuid)) {
+ const existingProgress = uploadsProgressMap.get(uuid);
+ if (existingProgress) {
+ existingProgress.bytesReceived = bytesReceived;
+ existingProgress.bytesExpected = bytesExpected;
+ existingProgress.bytesReceivedPretty = prettyBytes(bytesReceived);
+ existingProgress.bytesExpectedPretty = prettyBytes(bytesExpected);
+ existingProgress.markSample();
+ }
+ } else {
+ // This can't be.
+ console.warn("Progress not found in the map for uuid: " + uuid);
+ return;
+ }
+ }, throttleWaitTimeInMillis, {
+ leading: true
+ });
+
+ form.on('progress', (bytesReceived, bytesExpected) => {
+ progressProcessorThrottled(bytesReceived, bytesExpected);
+ throttledBroadcaster();
+ });
+
+ form.on('fileBegin', (formName: string, file: File) => {
+ console.log('File Begin: ' + JSON.stringify(file));
+ if (!file.originalFilename) {
+ return;
+ }
+
+ const existingProgress = uploadsProgressMap.get(uuid);
+ console.log('File Begin progress: ' + JSON.stringify(existingProgress));
+ if (existingProgress) {
+ existingProgress.fileName = file.originalFilename;
+ }
+ console.log('File Begin progress after: ' + JSON.stringify(existingProgress));
+ });
+
+ form.on('file', (formName: string, file: File) => {
+ console.log('File received: ' + JSON.stringify(file));
+ if (file.originalFilename) {
+ console.log("file name: " + file.originalFilename);
+ } else {
+ return
+ }
+ const completed = new Date().getTime();
+ const oldPath = file.filepath;
+ const newPath = uploadsDir + file.originalFilename;
+ mv(oldPath, newPath, {mkdirp: true}, (err) => {
+ // done. it first created all the necessary directories, and then
+ // tried fs.rename, then falls back to using ncp to copy the dir
+ // to dest and then rimraf to remove the source dir
+ if (err) {
+ console.error(err);
+ return;
+ }
+ console.log("File moved to: " + newPath);
+ });
+
+ if (uploadsProgressMap.has(uuid)) {
+ const existingProgress = uploadsProgressMap.get(uuid);
+ if (existingProgress) {
+ existingProgress.savedLocation = newPath;
+ existingProgress.completed = completed
+ existingProgress.lastState = UploadStatus.COMPLETE;
+ }
+ }
+ throttledBroadcaster();
+ });
+
+ form.parse(req, (err, fields, files) => {
+ res.writeHead(200, { 'content-type': 'application/json' });
+ console.log(files);
+ const success = {
+ "msg": 'File uploaded and moved!'
+ };
+ res.write(JSON.stringify(success));
+ res.end();
+ });
+
+ return;
+ });
@@ -1,5 +1,5 @@
  import {Request, Response, Router} from 'express';
- import {getProgressWriter, progresses, uploadsProgressMap} from '../globals';
+ import {throttledBroadcaster, uploadsProgressMap} from '../globals';
  import {FileTransferProgress, UploadStatus} from "../model/progress";

  export const router = Router();
@@ -30,7 +30,7 @@ router.post('/', (req: Request, res: Response) => {
  console.log(`Marking upload failed for file ${progress.fileName} ${progress.uuid}`);
  progress.lastState = UploadStatus.FAILED;
  }
- getProgressWriter().writeProgress(progresses);
+ throttledBroadcaster();
  return res.status(400).json({
  msg: 'File incomplete',
  uploadedChunks: Array.from(progress.uploadedChunks),
@@ -40,7 +40,7 @@

  progress.completed = Date.now();
  progress.lastState = UploadStatus.COMPLETE;
- getProgressWriter().writeProgress(progresses);
+ throttledBroadcaster();

  res.json({
  msg: 'File upload complete',
@@ -1,6 +1,11 @@
  import { Router, Request, Response } from 'express';
  import { v4 as uuidv4 } from 'uuid';
- import {uploadsProgressMap, progresses, getUploadsDir, MAX_PARALLEL_CHUNK_UPLOADS, MAX_CHUNK_SIZE} from '../globals';
+ import {
+ uploadsProgressMap,
+ progresses,
+ getUploadsDir,
+ getUploadChunkSize, getMaxParallelChunkUploads
+ } from '../globals';
  import { FileTransferProgress } from '../model/progress';
  import path from "path";
  import fs from "fs";
@@ -48,7 +53,7 @@ router.post('/', (req: Request, res: Response) => {
  progress = new FileTransferProgress(fileId, Date.now());
  progress.fileName = fileName;
  progress.bytesExpected = fileSize;
- progress.chunkSize = 5 * 1024 * 1024;
+ progress.chunkSize = getUploadChunkSize();
  progress.totalChunks = Math.ceil(fileSize / progress.chunkSize);
  progress.bytesReceived = fs.statSync(finalPath).size; // resume
  progress.resetUploadedChunks();
@@ -63,7 +68,7 @@
  progress = new FileTransferProgress(fileId, Date.now());
  progress.fileName = fileName;
  progress.bytesExpected = fileSize;
- progress.chunkSize = MAX_CHUNK_SIZE;
+ progress.chunkSize = getUploadChunkSize();
  progress.totalChunks = Math.ceil(fileSize / progress.chunkSize);
  progress.resetUploadedChunks();
  uploadsProgressMap.set(fileId, progress);
@@ -77,7 +82,7 @@
  fileId,
  chunkSize: progress.chunkSize,
  totalChunks: progress.totalChunks,
- maxParallel: MAX_PARALLEL_CHUNK_UPLOADS,
+ maxParallel: getMaxParallelChunkUploads(),
  bytesReceived: progress.bytesReceived || 0 // client can skip uploaded chunks
  });
  });
@@ -1,4 +1,4 @@
- import {FileTransferProgress, Progress, TransferSample} from "../model/progress";
+ import {FileTransferProgress, Progress, TransferSample, RATE_WINDOW_MS} from "../model/progress";
  import { Server } from "socket.io";

  export class ProgressWriter {
@@ -8,24 +8,51 @@ export class ProgressWriter {
  this.io = io;
  }

+ private getWindowedSamples(
+ samples?: TransferSample[]
+ ): TransferSample[] {
+ if (!samples || samples.length < 2) {
+ return [];
+ }
+
+ const last = samples[samples.length - 1];
+ const windowStart = last.timestamp - RATE_WINDOW_MS;
+
+ const windowed: TransferSample[] = [];
+
+ for (let i = samples.length - 1; i >= 0; i--) {
+ const s = samples[i];
+ windowed.push(s);
+
+ if (s.timestamp <= windowStart) {
+ break;
+ }
+ }
+
+ return windowed.reverse(); // chronological order
+ }
+
  public writeProgress(progresses: Progress[]) {
  // Clone progresses to safely emit without sending full transferSamples
  const progressesEmittable: FileTransferProgress[] = progresses.map(p => p as FileTransferProgress).map(p => {
  const cloned = this.cloneObjectExceptField(p, "transferSamples") as any;

- // Only include first and last sample for minimal data
- cloned.transferSamples = [] as TransferSample[];
- if (p.transferSamples && p.transferSamples.length >= 2) {
- cloned.transferSamples.push(p.transferSamples[0]);
- cloned.transferSamples.push(p.transferSamples[p.transferSamples.length - 1]);
- }
+ // Send only sliding-window samples
+ cloned.transferSamples = this.getWindowedSamples(p.transferSamples);

- // Ensure uploadedChunks, verifyingChunks, and uploadingChunks are serialized as arrays
- cloned.uploadedChunks = p.uploadedChunks instanceof Set ? Array.from(p.uploadedChunks)
- : Array.isArray(p.uploadedChunks) ? p.uploadedChunks : [];
+ cloned.uploadedChunks =
+ p.uploadedChunks instanceof Set
+ ? Array.from(p.uploadedChunks)
+ : Array.isArray(p.uploadedChunks)
+ ? p.uploadedChunks
+ : [];

- cloned.uploadingChunks = p.uploadingChunks instanceof Set ? Array.from(p.uploadingChunks)
- : Array.isArray(p.uploadingChunks) ? p.uploadingChunks : [];
+ cloned.uploadingChunks =
+ p.uploadingChunks instanceof Set
+ ? Array.from(p.uploadingChunks)
+ : Array.isArray(p.uploadingChunks)
+ ? p.uploadingChunks
+ : [];

  return cloned as FileTransferProgress;
  });
@@ -54,7 +54,7 @@ class PageEventRegistrar {
  private registerFileInputEventHandler() {
  const $fileDiv = jQuery("#file-div");
  const $fileNameDiv = $fileDiv.find("#file-name");
- const $fileInput = jQuery("form#uploadForm input[name='multipleFiles']");
+ const $fileInput = jQuery("form#uploadForm input[name='file']");
  $fileInput.on("change", () => {
  this.onFilesChange($fileNameDiv, $fileInput);
  });
@@ -83,7 +83,7 @@ class PageEventRegistrar {

  // wrap async logic in an IIFE
  (async () => {
- const formElement: any = $('input[name="multipleFiles"]')[0];
+ const formElement: any = $('input[name="file"]')[0];
  const files: FileList = formElement.files;

  if (!files || files.length === 0) {
@@ -95,6 +95,8 @@ class PageEventRegistrar {
  return;
  }

+ const disableChunked = (jQuery("#disableChunkedUpload").prop("checked") === true);
+
  // Block form before uploading
  $uploadForm.block({
  message: '<h1 class="upload-block-modal p-2 m-0">Uploading...</h1>'
@@ -103,7 +105,11 @@
  try {
  // Upload all files sequentially
  for (const file of Array.from(files)) {
- await this.uploadFile(file);
+ if (disableChunked) {
+ await this.uploadFileNonChunked(file);
+ } else {
+ await this.uploadFile(file);
+ }
  }
  } finally {
  // Unblock and reset form after all files finish
@@ -111,7 +117,7 @@

  const $fileDiv = jQuery("#file-div");
  const $fileNameDiv = $fileDiv.find("#file-name");
- const $fileInput = jQuery("form#uploadForm input[name='multipleFiles']");
+ const $fileInput = jQuery("form#uploadForm input[name='file']");
  this.onFilesChange($fileNameDiv, $fileInput);

  $uploadForm.unblock();
@@ -128,6 +134,36 @@
  });
  }

+ private async uploadFileNonChunked(file: File): Promise<void> {
+ const formData = new FormData();
+ // Server-side uses formidable({ multiples: true }) so using the same field name is fine
+ formData.append("file", file, file.name);
+
+ const resp = await fetch("/upload", {
+ method: "POST",
+ body: formData
+ });
+
+ let data: any;
+ const contentType = resp.headers.get("content-type") || "";
+ if (contentType.includes("application/json")) {
+ data = await resp.json();
+ } else {
+ data = await resp.text();
+ }
+
+ if (!resp.ok) {
+ throw new Error(typeof data === "string" ? data : (data?.msg || "Upload failed"));
+ }
+
+ Toastify({
+ text: `Upload complete: ${file.name}`,
+ duration: -1,
+ close: true,
+ style: { background: "linear-gradient(to right, #00b09b, #96c93d)" }
+ }).showToast();
+ }
+
  private async uploadFile(file: File): Promise<void> {
  try {
  // Initialize upload
@@ -161,12 +161,13 @@ export class ProgressHandler {
  <i class="fas fa-minus-circle m-0 p-0" aria-hidden="true" title="collapse"></i>
  </span>
  </a>
- <span class="ml-0 pl-0">
+ <span class="upload-file-name ml-0 pl-0">
  ${progress.fileName}
  </span>
  </div>`);
  $panel.append($panelHeading);
  }
+ $panelHeading.find(".upload-file-name").text(progress.fileName || "");

  // Main progress bar (bytes)
  let $progressElem = $panel.find(`progress#${progressId}`);
@@ -197,6 +198,10 @@ export class ProgressHandler {
  // Clear previous rows
  $table.empty();

+ let progressPercent = 0;
+ if (progress.bytesReceived !== undefined && progress.bytesExpected !== undefined) {
+ progressPercent = (progress.bytesReceived / progress.bytesExpected) * 100;
+ }
  // Define table rows
  const rows: [string, string][] = [
  // ["File Name", progress.fileName || "-"],
@@ -208,6 +213,7 @@
  <b>|</b> Uploaded: ${uploaded}/${totalChunks}`],
  ["Speed", `${prettyBytes(ProgressUtils.calculateTransferRate(progress))}/s`],
  ["Status", `${progress.lastState || "-"}`],
+ ["Progress", `${progressPercent}%`],
  ];

  if (progress.completed) {