hfs 0.57.0 → 0.57.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -2
- package/src/const.js +1 -1
- package/src/middlewares.js +4 -1
- package/src/serveGuiAndSharedFiles.js +1 -1
- package/src/upload.js +5 -5
- package/src/vfs.js +3 -3
- package/src/walkDir.js +29 -27
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "hfs",
|
|
3
|
-
"version": "0.57.0",
|
|
3
|
+
"version": "0.57.1",
|
|
4
4
|
"description": "HTTP File Server",
|
|
5
5
|
"keywords": ["file server", "http server"],
|
|
6
6
|
"homepage": "https://rejetto.com/hfs",
|
|
@@ -76,7 +76,7 @@
|
|
|
76
76
|
"buffer-crc32": "^1.0.0",
|
|
77
77
|
"fast-glob": "^3.3.3",
|
|
78
78
|
"find-process": "^1.4.7",
|
|
79
|
-
"formidable": "^3.5.
|
|
79
|
+
"formidable": "^3.5.4",
|
|
80
80
|
"fs-x-attributes": "^1.0.2",
|
|
81
81
|
"fswin": "^3.24.829",
|
|
82
82
|
"iconv-lite": "^0.6.3",
|
package/src/const.js
CHANGED
|
@@ -64,7 +64,7 @@ exports.DEV = process.env.DEV ? 'DEV' : '';
|
|
|
64
64
|
exports.ORIGINAL_CWD = process.cwd();
|
|
65
65
|
exports.HFS_STARTED = new Date();
|
|
66
66
|
const PKG_PATH = (0, path_1.join)(__dirname, '..', 'package.json');
|
|
67
|
-
exports.BUILD_TIMESTAMP = "2025-04-
|
|
67
|
+
exports.BUILD_TIMESTAMP = "2025-04-30T12:19:36.833Z";
|
|
68
68
|
const pkg = JSON.parse(fs.readFileSync(PKG_PATH, 'utf8'));
|
|
69
69
|
exports.VERSION = pkg.version;
|
|
70
70
|
exports.RUNNING_BETA = exports.VERSION.includes('-');
|
package/src/middlewares.js
CHANGED
|
@@ -137,7 +137,10 @@ function failAllowNet(ctx, a) {
|
|
|
137
137
|
const mask = cached !== null && cached !== void 0 ? cached : (0, perm_1.getFromAccount)(a || '', a => a.allow_net);
|
|
138
138
|
if (!cached && mask && ((_b = ctx.session) === null || _b === void 0 ? void 0 : _b.username))
|
|
139
139
|
ctx.session.allowNet = mask; // must be deleted on logout by setLoggedIn
|
|
140
|
-
|
|
140
|
+
const ret = mask && !(0, misc_1.netMatches)(ctx.ip, mask, true);
|
|
141
|
+
if (ret)
|
|
142
|
+
console.debug("login failed: allow_net");
|
|
143
|
+
return ret;
|
|
141
144
|
}
|
|
142
145
|
const paramsDecoder = async (ctx, next) => {
|
|
143
146
|
ctx.state.params = ctx.method === 'POST' && ctx.originalUrl.startsWith(const_1.API_URI)
|
|
@@ -177,7 +177,7 @@ async function sendFolderList(node, ctx) {
|
|
|
177
177
|
|| URL.protocol + '//' + URL.host + ctx.state.revProxyPath;
|
|
178
178
|
prepend = base + (0, misc_1.pathEncode)(decodeURI(ctx.path)); // redo the encoding our way, keeping unicode chars unchanged
|
|
179
179
|
}
|
|
180
|
-
const walker = (0, vfs_1.walkNode)(node, { ctx, depth: depth === '*' ? Infinity : Number(depth) });
|
|
180
|
+
const walker = (0, vfs_1.walkNode)(node, { ctx, depth: depth === '*' ? Infinity : Number(depth), parallelizeRecursion: false });
|
|
181
181
|
ctx.body = (0, misc_1.asyncGeneratorToReadable)((0, misc_1.filterMapGenerator)(walker, async (el) => {
|
|
182
182
|
const isFolder = await (0, vfs_1.nodeIsDirectory)(el);
|
|
183
183
|
return !folders && isFolder ? undefined
|
package/src/upload.js
CHANGED
|
@@ -223,6 +223,7 @@ function uploadWriter(base, baseUri, path, ctx) {
|
|
|
223
223
|
void (0, comments_1.setCommentFor)(dest, String(ctx.query.comment));
|
|
224
224
|
obj.uri = (0, misc_1.enforceFinal)('/', baseUri) + (0, misc_1.pathEncode)((0, path_1.basename)(dest));
|
|
225
225
|
events_1.default.emit('uploadFinished', obj);
|
|
226
|
+
console.debug("upload finished", dest);
|
|
226
227
|
if (resEvent)
|
|
227
228
|
for (const cb of resEvent)
|
|
228
229
|
if (lodash_1.default.isFunction(cb))
|
|
@@ -304,13 +305,12 @@ function uploadWriter(base, baseUri, path, ctx) {
|
|
|
304
305
|
function releaseFile() {
|
|
305
306
|
uploadingFiles.delete(fullPath);
|
|
306
307
|
}
|
|
307
|
-
function fail(status, msg) {
|
|
308
|
-
console.debug('upload failed', status
|
|
308
|
+
function fail(status = ctx.status, msg) {
|
|
309
|
+
console.debug('upload failed', status, msg || '');
|
|
309
310
|
releaseFile();
|
|
310
|
-
|
|
311
|
-
ctx.status = status;
|
|
311
|
+
ctx.status = status;
|
|
312
312
|
if (msg)
|
|
313
313
|
ctx.body = msg;
|
|
314
|
-
(0, frontEndApis_1.notifyClient)(ctx, const_1.UPLOAD_REQUEST_STATUS, { [path]:
|
|
314
|
+
(0, frontEndApis_1.notifyClient)(ctx, const_1.UPLOAD_REQUEST_STATUS, { [path]: status }); // allow browsers to detect failure while still sending body
|
|
315
315
|
}
|
|
316
316
|
}
|
package/src/vfs.js
CHANGED
|
@@ -250,7 +250,7 @@ function statusCodeForMissingPerm(node, perm, ctx, assign = true) {
|
|
|
250
250
|
}
|
|
251
251
|
}
|
|
252
252
|
// it's the responsibility of the caller to verify you have list permission on parent, as callers have different needs.
|
|
253
|
-
async function* walkNode(parent, { ctx, depth = Infinity, prefixPath = '', requiredPerm, onlyFolders = false, onlyFiles = false, } = {}) {
|
|
253
|
+
async function* walkNode(parent, { ctx, depth = Infinity, prefixPath = '', requiredPerm, onlyFolders = false, onlyFiles = false, parallelizeRecursion = true, } = {}) {
|
|
254
254
|
let started = false;
|
|
255
255
|
const stream = new node_stream_1.Readable({
|
|
256
256
|
objectMode: true,
|
|
@@ -297,7 +297,7 @@ async function* walkNode(parent, { ctx, depth = Infinity, prefixPath = '', requi
|
|
|
297
297
|
try {
|
|
298
298
|
let lastDir = prefixPath.slice(0, -1) || '.';
|
|
299
299
|
parentsCache.set(lastDir, parent);
|
|
300
|
-
await (0, walkDir_1.walkDir)(source, { depth, ctx, hidden: showHiddenFiles.get() }, async (entry) => {
|
|
300
|
+
await (0, walkDir_1.walkDir)(source, { depth, ctx, hidden: showHiddenFiles.get(), parallelizeRecursion }, async (entry) => {
|
|
301
301
|
var _a, _b;
|
|
302
302
|
if (ctx === null || ctx === void 0 ? void 0 : ctx.isAborted()) {
|
|
303
303
|
stream.push(null);
|
|
@@ -339,7 +339,7 @@ async function* walkNode(parent, { ctx, depth = Infinity, prefixPath = '', requi
|
|
|
339
339
|
}
|
|
340
340
|
finally {
|
|
341
341
|
for (const [item, name] of visitLater)
|
|
342
|
-
for await (const x of walkNode(item, {
|
|
342
|
+
for await (const x of walkNode(item, { depth: depth - 1, prefixPath: name + '/', ctx, requiredPerm, onlyFolders, parallelizeRecursion }))
|
|
343
343
|
stream.push(x);
|
|
344
344
|
stream.push(null);
|
|
345
345
|
}
|
package/src/walkDir.js
CHANGED
|
@@ -17,7 +17,7 @@ const fswin_1 = __importDefault(require("fswin"));
|
|
|
17
17
|
const util_files_1 = require("./util-files");
|
|
18
18
|
const dirQ = (0, makeQ_1.makeQ)(3);
|
|
19
19
|
// cb returns void = just go on, null = stop, false = go on but don't recur (in case of depth)
|
|
20
|
-
function walkDir(path, { depth = 0, hidden = true, ctx }, cb) {
|
|
20
|
+
function walkDir(path, { depth = 0, hidden = true, parallelizeRecursion = false, ctx }, cb) {
|
|
21
21
|
let stopped = false;
|
|
22
22
|
const closingQ = [];
|
|
23
23
|
return new Promise(async (resolve, reject) => {
|
|
@@ -60,25 +60,31 @@ function walkDir(path, { depth = 0, hidden = true, ctx }, cb) {
|
|
|
60
60
|
stats: { size: f.SIZE, birthtime: f.CREATION_TIME, mtime: f.LAST_WRITE_TIME }
|
|
61
61
|
}));
|
|
62
62
|
}, true));
|
|
63
|
-
pluginReceiver === null || pluginReceiver === void 0 ? void 0 : pluginReceiver(!stopped);
|
|
64
|
-
return;
|
|
65
|
-
}
|
|
66
|
-
for await (let entry of (pluginIterator || await (0, promises_1.opendir)(base))) {
|
|
67
|
-
if (stopped)
|
|
68
|
-
break;
|
|
69
|
-
if (!hidden && entry.name[0] === '.' && !const_1.IS_WINDOWS)
|
|
70
|
-
continue;
|
|
71
|
-
const stats = ((_b = entry.isSymbolicLink) === null || _b === void 0 ? void 0 : _b.call(entry)) && await (0, promises_1.stat)((0, path_1.join)(base, entry.name)).catch(() => null);
|
|
72
|
-
if (stats === null)
|
|
73
|
-
continue;
|
|
74
|
-
if (stats)
|
|
75
|
-
entry = new DirentFromStats(entry.name, stats);
|
|
76
|
-
const expanded = entry;
|
|
77
|
-
if (stats)
|
|
78
|
-
expanded.stats = stats;
|
|
79
|
-
await work(expanded);
|
|
80
63
|
}
|
|
64
|
+
else
|
|
65
|
+
for await (let entry of (pluginIterator || await (0, promises_1.opendir)(base))) {
|
|
66
|
+
if (stopped)
|
|
67
|
+
break;
|
|
68
|
+
if (!hidden && entry.name[0] === '.' && !const_1.IS_WINDOWS)
|
|
69
|
+
continue;
|
|
70
|
+
const stats = ((_b = entry.isSymbolicLink) === null || _b === void 0 ? void 0 : _b.call(entry)) && await (0, promises_1.stat)((0, path_1.join)(base, entry.name)).catch(() => null);
|
|
71
|
+
if (stats === null)
|
|
72
|
+
continue;
|
|
73
|
+
if (stats)
|
|
74
|
+
entry = new DirentFromStats(entry.name, stats);
|
|
75
|
+
const expanded = entry;
|
|
76
|
+
if (stats)
|
|
77
|
+
expanded.stats = stats;
|
|
78
|
+
await work(expanded);
|
|
79
|
+
}
|
|
81
80
|
pluginReceiver === null || pluginReceiver === void 0 ? void 0 : pluginReceiver(!stopped);
|
|
81
|
+
const branchDone = Promise.allSettled(subDirsDone);
|
|
82
|
+
if (last) // using streams, we don't know when the entries are received, so we need to notify on last item
|
|
83
|
+
last.closingBranch = branchDone.then(() => relativePath);
|
|
84
|
+
else
|
|
85
|
+
closingQ.push(relativePath); // ok, we'll ask next one to carry this info
|
|
86
|
+
// don't return the promise directly, as this job ends here, but communicate to caller the promise for the whole branch
|
|
87
|
+
return { branchDone, n };
|
|
82
88
|
async function work(entry) {
|
|
83
89
|
entry.path = (relativePath && relativePath + '/') + entry.name;
|
|
84
90
|
pluginReceiver === null || pluginReceiver === void 0 ? void 0 : pluginReceiver(entry);
|
|
@@ -94,6 +100,7 @@ function walkDir(path, { depth = 0, hidden = true, ctx }, cb) {
|
|
|
94
100
|
if (!depth || !entry.isDirectory())
|
|
95
101
|
return;
|
|
96
102
|
const branchDone = (0, cross_1.pendingPromise)(); // per-job
|
|
103
|
+
subDirsDone.push(branchDone);
|
|
97
104
|
const job = () => readDir(entry.path, depth - 1) // recur
|
|
98
105
|
.then(x => x, () => { }) // mute errors
|
|
99
106
|
.then(res => {
|
|
@@ -101,16 +108,11 @@ function walkDir(path, { depth = 0, hidden = true, ctx }, cb) {
|
|
|
101
108
|
closingQ.push(entry.path); // no children to tell i'm done
|
|
102
109
|
Promise.resolve(res === null || res === void 0 ? void 0 : res.branchDone).then(() => branchDone.resolve());
|
|
103
110
|
});
|
|
104
|
-
|
|
105
|
-
|
|
111
|
+
if (parallelizeRecursion)
|
|
112
|
+
dirQ.add(job);
|
|
113
|
+
else
|
|
114
|
+
await job();
|
|
106
115
|
}
|
|
107
|
-
const branchDone = Promise.allSettled(subDirsDone).then(() => { });
|
|
108
|
-
if (last) // using streams, we don't know when the entries are received, so we need to notify on last item
|
|
109
|
-
last.closingBranch = branchDone.then(() => relativePath);
|
|
110
|
-
else
|
|
111
|
-
closingQ.push(relativePath); // ok, we'll ask next one to carry this info
|
|
112
|
-
// don't return the promise directly, as this job ends here, but communicate to caller the promise for the whole branch
|
|
113
|
-
return { branchDone, n };
|
|
114
116
|
}
|
|
115
117
|
}
|
|
116
118
|
const kStats = Symbol('stats');
|