hfs 0.1.6 → 0.26.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.txt +674 -0
- package/README.md +102 -8
- package/admin/assets/index.dcc78777.css +1 -0
- package/admin/assets/index.f056db34.js +282 -0
- package/admin/assets/sha512.3c0e384c.js +8 -0
- package/admin/index.html +17 -0
- package/admin/logo.svg +36 -0
- package/frontend/assets/index.55c710c2.js +85 -0
- package/frontend/assets/index.ee805a6c.css +1 -0
- package/frontend/assets/sha512.634b743e.js +8 -0
- package/frontend/fontello.css +77 -0
- package/frontend/fontello.woff2 +0 -0
- package/frontend/index.html +18 -0
- package/package.json +93 -28
- package/plugins/antibrute/plugin.js +38 -0
- package/plugins/download-counter/plugin.js +47 -0
- package/plugins/download-counter/public/hits.js +5 -0
- package/plugins/updater-disabled/plugin.js +44 -0
- package/plugins/vhosting/plugin.js +42 -0
- package/src/QuickZipStream.js +285 -0
- package/src/ThrottledStream.js +93 -0
- package/src/adminApis.js +169 -0
- package/src/api.accounts.js +59 -0
- package/src/api.auth.js +130 -0
- package/src/api.file_list.js +103 -0
- package/src/api.helpers.js +32 -0
- package/src/api.monitor.js +102 -0
- package/src/api.plugins.js +125 -0
- package/src/api.vfs.js +164 -0
- package/src/apiMiddleware.js +136 -0
- package/src/block.js +33 -0
- package/src/commands.js +105 -0
- package/src/config.js +172 -0
- package/src/connections.js +57 -0
- package/src/const.js +83 -0
- package/src/crypt.js +21 -0
- package/src/debounceAsync.js +48 -0
- package/src/events.js +9 -0
- package/src/frontEndApis.js +38 -0
- package/src/github.js +102 -0
- package/src/index.js +53 -0
- package/src/listen.js +226 -0
- package/src/log.js +137 -0
- package/src/middlewares.js +154 -0
- package/src/misc.js +160 -0
- package/src/pbkdf2.js +74 -0
- package/src/perm.js +176 -0
- package/src/plugins.js +338 -0
- package/src/serveFile.js +104 -0
- package/src/serveGuiFiles.js +113 -0
- package/src/sse.js +29 -0
- package/src/throttler.js +91 -0
- package/src/update.js +69 -0
- package/src/util-files.js +141 -0
- package/src/util-generators.js +30 -0
- package/src/util-http.js +30 -0
- package/src/vfs.js +227 -0
- package/src/watchLoad.js +73 -0
- package/src/zip.js +69 -0
- package/.npmignore +0 -19
- package/admin-server.js +0 -212
- package/cli.js +0 -33
- package/file-server.js +0 -100
- package/lib/common.js +0 -10
- package/lib/extending.js +0 -158
- package/lib/mime.js +0 -19
- package/lib/misc.js +0 -75
- package/lib/serving.js +0 -81
- package/lib/vfs.js +0 -403
- package/main.js +0 -24
- package/note.txt +0 -104
- package/speedtest.js +0 -21
- package/static/backend.css +0 -14
- package/static/backend.html +0 -32
- package/static/backend.js +0 -694
- package/static/extending.js +0 -187
- package/static/frontend.css +0 -29
- package/static/frontend.html +0 -23
- package/static/frontend.js +0 -230
- package/static/icons/files/archive.png +0 -0
- package/static/icons/files/audio.png +0 -0
- package/static/icons/files/file.png +0 -0
- package/static/icons/files/folder.png +0 -0
- package/static/icons/files/image.png +0 -0
- package/static/icons/files/link.png +0 -0
- package/static/icons/files/video.png +0 -0
- package/static/jquery.js +0 -4
- package/static/jquery.rule-1.0.2.js +0 -273
- package/static/misc.js +0 -194
- package/static/tpl.js +0 -17
- package/todo.txt +0 -25
package/src/update.js
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.update = exports.getUpdate = void 0;
|
|
4
|
+
const github_1 = require("./github");
|
|
5
|
+
const const_1 = require("./const");
|
|
6
|
+
const path_1 = require("path");
|
|
7
|
+
const child_process_1 = require("child_process");
|
|
8
|
+
const misc_1 = require("./misc");
|
|
9
|
+
const fs_1 = require("fs");
|
|
10
|
+
const plugins_1 = require("./plugins");
|
|
11
|
+
const promises_1 = require("fs/promises");
|
|
12
|
+
const HFS_REPO = 'rejetto/hfs';
|
|
13
|
+
// Fetch the latest release of the HFS repo from GitHub and return it.
// Throws (a string, handled by the API error layer) when the running
// version already matches the latest release name.
async function getUpdate() {
    const [latest] = await (0, github_1.getRepoInfo)(HFS_REPO + '/releases?per_page=1');
    if (latest.name === const_1.VERSION)
        throw "you already have the latest version: " + const_1.VERSION;
    return latest;
}
exports.getUpdate = getUpdate;
|
|
20
|
+
// Self-update: download the platform-specific zip of the latest release,
// unpack it next to the current binary under a temporary name, and register
// an exit handler that swaps old/new executables and relaunches.
// Only works for the packaged binary, not when running under node.
async function update() {
    if (process.argv0.endsWith('node'))
        throw "only binary versions are supported for now";
    const update = await getUpdate();
    // map the current platform to the token used in the release asset names
    const assetSearch = { win32: 'windows', darwin: 'mac', linux: 'linux' }[process.platform];
    if (!assetSearch)
        throw "this feature doesn't support your platform: " + process.platform;
    const asset = update.assets.find((x) => x.name.endsWith('.zip') && x.name.includes(assetSearch));
    if (!asset)
        throw "asset not found";
    const url = asset.browser_download_url;
    console.log("downloading", url);
    const bin = process.argv[0];
    const binPath = (0, path_1.dirname)(bin);
    const binFile = (0, path_1.basename)(bin);
    const newBinFile = 'new-' + binFile;
    plugins_1.pluginsWatcher.pause(); // unzipping would otherwise trigger plugin reloads
    try {
        // extract the archive; the main executable is written under a temporary name
        await (0, misc_1.unzip)(await (0, misc_1.httpsStream)(url), path => (0, path_1.join)(binPath, path === binFile ? newBinFile : path));
        const newBin = (0, path_1.join)(binPath, newBinFile);
        if (!const_1.IS_WINDOWS) {
            // preserve the executable bit of the current binary on the new one
            const { mode } = await (0, promises_1.stat)(bin);
            await (0, promises_1.chmod)(newBin, mode).catch(console.error);
        }
        (0, misc_1.onProcessExit)(() => {
            // at exit: keep the current binary as a backup, then spawn the new one,
            // which will rename itself back (see the --updating handling at module level)
            const oldBinFile = 'old-' + binFile;
            console.log("old version is kept as", oldBinFile);
            const oldBin = (0, path_1.join)(binPath, oldBinFile);
            try {
                (0, fs_1.unlinkSync)(oldBin);
            }
            catch (_a) { }
            (0, fs_1.renameSync)(bin, oldBin);
            console.log("launching new version in background", newBinFile);
            (0, child_process_1.spawn)(newBin, ['--updating', binFile], { detached: true, shell: true })
                .on('error', console.error);
        });
        console.log('quitting');
        process.exit();
    }
    catch (_a) {
        // NOTE(review): failures (bad download, unzip error) are swallowed here and
        // the caller gets a silent no-op — confirm this is intended
        plugins_1.pluginsWatcher.unpause();
    }
}
exports.update = update;
|
|
65
|
+
// We were relaunched by the updater under a temporary name ('new-...'):
// rename our own executable back to the original file name passed via --updating.
if (const_1.argv.updating) { // we were launched with a temporary name, restore original name to avoid breaking references
    const bin = process.argv[0];
    (0, fs_1.renameSync)(bin, (0, path_1.join)((0, path_1.dirname)(bin), const_1.argv.updating));
    console.log("renamed binary file to", const_1.argv.updating);
}
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.unzip = exports.run = exports.dirStream = exports.isWindowsDrive = exports.dirTraversal = exports.watchDir = exports.readFileBusy = exports.isFile = exports.isDirectory = void 0;
|
|
7
|
+
const promises_1 = __importDefault(require("fs/promises"));
|
|
8
|
+
const misc_1 = require("./misc");
|
|
9
|
+
const fs_1 = require("fs");
|
|
10
|
+
const path_1 = require("path");
|
|
11
|
+
const fast_glob_1 = __importDefault(require("fast-glob"));
|
|
12
|
+
const const_1 = require("./const");
|
|
13
|
+
const child_process_1 = require("child_process");
|
|
14
|
+
const stream_1 = require("stream");
|
|
15
|
+
// @ts-ignore
|
|
16
|
+
const unzip_stream_1 = __importDefault(require("unzip-stream"));
|
|
17
|
+
// True when `path` exists and is a directory; any stat failure yields false.
async function isDirectory(path) {
    return promises_1.default.stat(path)
        .then(stats => stats.isDirectory(), () => false);
}
|
|
25
|
+
exports.isDirectory = isDirectory;
|
|
26
|
+
// True when `path` exists and is a regular file; any stat failure yields false.
async function isFile(path) {
    return promises_1.default.stat(path)
        .then(stats => stats.isFile(), () => false);
}
|
|
34
|
+
exports.isFile = isFile;
|
|
35
|
+
// Read a utf8 text file, transparently retrying every 100ms while the file
// is locked by another process (EBUSY). Any other error is rethrown.
async function readFileBusy(path) {
    return promises_1.default.readFile(path, 'utf8').catch(e => {
        if ((e === null || e === void 0 ? void 0 : e.code) !== 'EBUSY')
            throw e;
        console.debug('busy');
        return (0, misc_1.wait)(100).then(() => readFileBusy(path)); // retry until no longer busy
    });
}
exports.readFileBusy = readFileBusy;
|
|
44
|
+
// Watch a directory for changes. If the dir itself can't be watched (e.g. it
// doesn't exist yet), fall back to watching its parent, filtering events by
// name, and upgrade to a direct watch as soon as that becomes possible.
// Returns a controller with working/stop/pause/unpause.
function watchDir(dir, cb) {
    let watcher;
    let paused = false;
    try {
        watcher = (0, fs_1.watch)(dir, controlledCb);
    }
    catch (_a) {
        // failing watching the content of the dir, we try to monitor its parent, but filtering events only for our target dir
        const base = (0, path_1.basename)(dir);
        try {
            watcher = (0, fs_1.watch)((0, path_1.dirname)(dir), (event, name) => {
                if (name !== base)
                    return;
                try {
                    watcher.close(); // if we succeed, we give up the parent watching
                    watcher = (0, fs_1.watch)(dir, controlledCb); // attempt at passing to a more specific watching
                }
                catch (_a) { }
                controlledCb(); // forward this event regardless of whether the upgrade worked
            });
        }
        catch (e) {
            // even the parent can't be watched; working() will report false
            console.debug(String(e));
        }
    }
    return {
        working() { return Boolean(watcher); },
        stop() { watcher === null || watcher === void 0 ? void 0 : watcher.close(); },
        pause() { paused = true; },
        unpause() { paused = false; },
    };
    // forward events to cb unless paused
    function controlledCb() {
        if (!paused)
            cb();
    }
}
exports.watchDir = watchDir;
|
|
81
|
+
// Detect a '..' path segment (directory-traversal attempt), with either
// slash flavor as separator. Falsy input is returned as-is.
function dirTraversal(s) {
    if (!s)
        return s;
    return /(^|[/\\])\.\.($|[/\\])/.test(s);
}
|
|
84
|
+
exports.dirTraversal = dirTraversal;
|
|
85
|
+
// True for a bare Windows drive specifier like "C:"; falsy input returned as-is.
function isWindowsDrive(s) {
    if (!s)
        return s;
    return /^[a-zA-Z]:$/.test(s);
}
|
|
88
|
+
exports.isWindowsDrive = isWindowsDrive;
|
|
89
|
+
// Async generator yielding the names of the direct entries of a folder
// (files and folders, including dotfiles). On Windows, hidden items — as
// reported by `dir /ah` — are skipped. Throws ENOTDIR for non-directories.
async function* dirStream(path) {
    const stats = await promises_1.default.stat(path);
    if (!stats.isDirectory())
        throw Error('ENOTDIR');
    const dirStream = fast_glob_1.default.stream('*', {
        cwd: path,
        dot: true,
        onlyFiles: false,
        suppressErrors: true,
    });
    const skip = await getItemsToSkip(path);
    for await (let path of dirStream) {
        if (path instanceof Buffer)
            path = path.toString('utf8');
        if (skip === null || skip === void 0 ? void 0 : skip.includes(path))
            continue;
        yield path;
    }
    // Windows only: list hidden items so the loop above can exclude them;
    // returns undefined elsewhere.
    async function getItemsToSkip(path) {
        if (!const_1.IS_WINDOWS)
            return;
        const out = await run('dir', ['/ah', '/b', path.replace(/\//g, '\\')])
            .catch(() => ''); // error in case of no matching file
        return out.split('\r\n').slice(0, -1); // drop the trailing empty element
    }
}
exports.dirStream = dirStream;
|
|
116
|
+
// Run a command through the Windows shell (cmd /c) and resolve with its
// captured stdout; rejects with the execFile error on failure.
function run(cmd, args = []) {
    return new Promise((resolve, reject) => {
        const done = (err, stdout) => err ? reject(err) : resolve(stdout);
        (0, child_process_1.execFile)('cmd', ['/c', cmd, ...args], done);
    });
}
|
|
124
|
+
exports.run = run;
|
|
125
|
+
// Extract a zip from a readable stream. For every entry, cb(entryPath)
// returns the destination path on disk, or a falsy value to skip the entry.
// File writes are serialized via the `pending` chain; resolves once the
// archive ends and the last write finished.
// NOTE(review): errors on the source/parse streams are not handled — on a
// corrupt archive the returned promise may never settle; confirm callers cope.
async function unzip(stream, cb) {
    let pending = Promise.resolve();
    return new Promise(resolve => stream.pipe(unzip_stream_1.default.Parse())
        .on('end', () => pending.then(resolve))
        .on('entry', async (entry) => {
        const { path, type } = entry;
        const dest = cb(path);
        if (!dest || type !== 'File')
            return entry.autodrain(); // skipped entries must still be consumed
        await pending; // don't overlap writings
        console.debug('unzip', dest);
        (0, fs_1.mkdirSync)((0, path_1.dirname)(dest), { recursive: true }); // easy way be sure to have the folder ready before proceeding
        const thisFile = entry.pipe((0, fs_1.createWriteStream)(dest));
        pending = (0, stream_1.once)(thisFile, 'finish');
    }));
}
exports.unzip = unzip;
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.asyncGeneratorToReadable = exports.asyncGeneratorToArray = exports.filterMapGenerator = void 0;
|
|
4
|
+
// callback can return undefined to skip element
|
|
5
|
+
const stream_1 = require("stream");
|
|
6
|
+
// Pass each element of an async generator through filterMap (possibly async);
// elements for which it returns undefined are dropped.
async function* filterMapGenerator(generator, filterMap) {
    for await (const item of generator) {
        const mapped = await filterMap(item);
        if (mapped === undefined)
            continue;
        yield mapped;
    }
}
|
|
13
|
+
exports.filterMapGenerator = filterMapGenerator;
|
|
14
|
+
// Drain an async generator into a plain array.
async function asyncGeneratorToArray(generator) {
    const collected = [];
    for await (const item of generator)
        collected.push(item);
    return collected;
}
|
|
20
|
+
exports.asyncGeneratorToArray = asyncGeneratorToArray;
|
|
21
|
+
// Adapt an async generator to an object-mode Readable stream.
// Each read() pulls one value; generator completion pushes null (EOF).
// Fix: rejections from the iterator were previously unhandled, which left
// the stream hanging forever — now they destroy the stream with the error.
function asyncGeneratorToReadable(generator) {
    const iterator = generator[Symbol.asyncIterator]();
    return new stream_1.Readable({
        objectMode: true,
        read() {
            iterator.next().then(it => this.push(it.done ? null : it.value), err => this.destroy(err)); // propagate generator errors
        }
    });
}
|
|
30
|
+
exports.asyncGeneratorToReadable = asyncGeneratorToReadable;
|
package/src/util-http.js
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.httpsStream = exports.httpsString = void 0;
|
|
7
|
+
const node_https_1 = __importDefault(require("node:https"));
|
|
8
|
+
// GET the url and buffer the whole response body into a string.
// Resolves with the response object augmented with { ok, body }.
// Fix: the inner Promise previously had no rejection path, so a socket error
// while reading the body left the promise pending forever.
function httpsString(url, options = {}) {
    return httpsStream(url, options).then(res => new Promise((resolve, reject) => {
        let buf = '';
        res.on('error', reject); // don't hang if the connection dies mid-body
        res.on('data', chunk => buf += chunk.toString());
        res.on('end', () => resolve(Object.assign(res, {
            ok: (res.statusCode || 400) < 400,
            body: buf
        })));
    }));
}
|
|
18
|
+
exports.httpsString = httpsString;
|
|
19
|
+
// Request the url and resolve with the readable response stream.
// Follows 302 redirects (recursively); rejects with the response object on
// HTTP errors and with the request error on network failures.
// Fix: the original did `throw res` inside the response callback, which
// cannot reject this promise — it became an uncaught exception instead.
function httpsStream(url, options = {}) {
    return new Promise((resolve, reject) => {
        node_https_1.default.request(url, options, res => {
            if (!res.statusCode || res.statusCode >= 400)
                return reject(res); // reject, don't throw, inside the event callback
            if (res.statusCode === 302 && res.headers.location)
                return resolve(httpsStream(res.headers.location, options));
            resolve(res);
        }).on('error', reject).end();
    });
}
|
|
30
|
+
exports.httpsStream = httpsStream;
|
package/src/vfs.js
ADDED
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// This file is part of HFS - Copyright 2021-2022, Massimo Melina <a@rejetto.com> - License https://www.gnu.org/licenses/gpl-3.0.txt
|
|
3
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
4
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
5
|
+
};
|
|
6
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
7
|
+
exports.cantReadStatusCode = exports.walkNode = exports.hasPermission = exports.nodeIsDirectory = exports.getNodeName = exports.saveVfs = exports.vfs = exports.urlToNode = exports.MIME_AUTO = exports.defaultPerms = void 0;
|
|
8
|
+
const promises_1 = __importDefault(require("fs/promises"));
|
|
9
|
+
const path_1 = require("path");
|
|
10
|
+
const micromatch_1 = require("micromatch");
|
|
11
|
+
const misc_1 = require("./misc");
|
|
12
|
+
const lodash_1 = __importDefault(require("lodash"));
|
|
13
|
+
const config_1 = require("./config");
|
|
14
|
+
const const_1 = require("./const");
|
|
15
|
+
const events_1 = __importDefault(require("./events"));
|
|
16
|
+
const perm_1 = require("./perm");
|
|
17
|
+
const misc_2 = require("./misc");
|
|
18
|
+
const WHO_ANYONE = true;
|
|
19
|
+
const WHO_NO_ONE = false;
|
|
20
|
+
const WHO_ANY_ACCOUNT = '*';
|
|
21
|
+
exports.defaultPerms = {
|
|
22
|
+
can_see: WHO_ANYONE,
|
|
23
|
+
can_read: WHO_ANYONE,
|
|
24
|
+
};
|
|
25
|
+
exports.MIME_AUTO = 'auto';
|
|
26
|
+
// Copy inheritable settings (the keys of defaultPerms, plus mime rules)
// from a parent vfs node onto a child. Returns the mutated child.
function inheritFromParent(parent, child) {
    for (const k of (0, misc_1.typedKeys)(exports.defaultPerms)) {
        const v = parent[k];
        if (v !== undefined)
            child[k] = v;
    }
    if (typeof parent.mime === 'object' && typeof child.mime === 'object')
        Object.assign(child.mime, parent.mime); // merge rule maps, parent wins on conflicts
    else
        child.mime = parent.mime; // NOTE(review): overwrites the child's own mime even when parent.mime is undefined — confirm intended
    return child;
}
|
|
38
|
+
// Resolve a URL path to a vfs node, one segment at a time: first against the
// virtual tree's children, then against the parent's disk source.
// Returns undefined when not found; sets ctx.status = 418 on traversal attempts.
async function urlToNode(url, ctx, parent = exports.vfs) {
    var _a;
    let i = url.indexOf('/', 1);
    const name = decodeURIComponent(url.slice(url[0] === '/' ? 1 : 0, i < 0 ? undefined : i));
    if (!name)
        return parent;
    const rest = i < 0 ? '' : url.slice(i + 1, url.endsWith('/') ? -1 : undefined);
    if ((0, misc_1.dirTraversal)(name) || /[\/]/.test(name)) {
        if (ctx)
            ctx.status = 418;
        return;
    }
    const parents = parent.parents || []; // don't waste time cloning the array, as we won't keep intermediate nodes
    // build a temporary node for this segment, inheriting settings from the parent
    const ret = {
        isTemp: true,
        url: (0, misc_1.enforceFinal)('/', parent.url || '') + name,
        parents,
    };
    parents.push(parent);
    inheritFromParent(parent, ret);
    inheritMasks(ret, parent, name);
    applyMasks(ret, parent, name);
    // does the tree node have a child that goes by this name?
    const sameName = !const_1.IS_WINDOWS ? (x) => x === name // easy
        : (0, misc_2.with_)(name.toLowerCase(), lc => (x) => x.toLowerCase() === lc); // case-insensitive on Windows
    const child = (_a = parent.children) === null || _a === void 0 ? void 0 : _a.find(x => sameName(getNodeName(x)));
    if (child) // yes
        return urlToNode(rest, ctx, Object.assign(ret, child, { original: child }));
    // not in the tree, we can see consider continuing on the disk
    if (!parent.source)
        return; // but then we need the current node to be linked to the disk, otherwise, we give up
    let onDisk = name;
    if (parent.rename) { // reverse the mapping
        for (const [from, to] of Object.entries(parent.rename))
            if (name === to) {
                onDisk = from;
                break; // found, search no more
            }
        ret.rename = renameUnderPath(parent.rename, name);
    }
    ret.source = (0, misc_1.enforceFinal)('/', parent.source) + onDisk;
    if (parent.default)
        inheritFromParent({ mime: { '*': exports.MIME_AUTO } }, ret);
    if (rest)
        return urlToNode(rest, ctx, ret);
    if (ret.source)
        try {
            await promises_1.default.stat(ret.source);
        } // check existence
        catch (_b) {
            return;
        }
    return ret;
}
exports.urlToNode = urlToNode;
|
|
93
|
+
// The vfs root, kept in sync with the 'vfs' key of the config file.
exports.vfs = {};
(0, config_1.defineConfig)('vfs', {}).sub(data => exports.vfs = data);
// Persist the current vfs tree back to the config (deep-cloned to detach it
// from live references).
function saveVfs() {
    return (0, config_1.setConfig)({ vfs: lodash_1.default.cloneDeep(exports.vfs) }, true);
}
exports.saveVfs = saveVfs;
|
|
99
|
+
// Display name of a vfs node: its explicit name, otherwise derived from its
// disk source ("C:" for a bare drive, else the basename, else the source
// itself). Empty string only for the unnamed root.
function getNodeName(node) {
    if (node.name)
        return node.name;
    const { source } = node;
    if (!source)
        return ''; // should happen only for root
    if (/^[a-zA-Z]:\\?$/.test(source))
        return source.slice(0, 2); // bare Windows drive
    return (0, path_1.basename)(source) || source;
}
|
|
106
|
+
exports.getNodeName = getNodeName;
|
|
107
|
+
// A node with no disk source is a purely virtual folder; otherwise ask the disk.
async function nodeIsDirectory(node) {
    if (!node.source)
        return true;
    return Boolean(await (0, misc_1.isDirectory)(node.source));
}
|
|
110
|
+
exports.nodeIsDirectory = nodeIsDirectory;
|
|
111
|
+
// Check whether the ctx user holds `perm` on the node, falling back to the
// default permission when the node doesn't specify one.
function hasPermission(node, perm, ctx) {
    var _a;
    const who = (_a = node[perm]) !== null && _a !== void 0 ? _a : exports.defaultPerms[perm];
    if (!matchWho(who, ctx))
        return false;
    // for can_see you must also can_read
    return perm !== 'can_see' || hasPermission(node, 'can_read', ctx);
}
|
|
116
|
+
exports.hasPermission = hasPermission;
|
|
117
|
+
// Recursively yield the descendants of a vfs node, up to `depth` levels deep,
// merging the virtual children with the content of the node's disk source.
// Only items the ctx user can_see are yielded; prefixPath carries the
// relative path for items below the first level.
async function* walkNode(parent, ctx, depth = 0, prefixPath = '') {
    const { children, source } = parent;
    if (children)
        for (let idx = 0; idx < children.length; idx++) {
            const child = children[idx];
            yield* workItem({
                ...child,
                name: prefixPath ? (prefixPath + getNodeName(child)) : child.name
            });
        }
    if (!source)
        return; // virtual-only folder: nothing more to list
    try {
        for await (const path of (0, misc_1.dirStream)(source)) {
            if (ctx.req.aborted) // client is gone, stop walking
                return;
            let { rename } = parent;
            const renamed = rename === null || rename === void 0 ? void 0 : rename[path];
            yield* workItem({
                name: (prefixPath || renamed) && prefixPath + (renamed || path),
                source: (0, path_1.join)(source, path),
                rename: renameUnderPath(rename, path),
            });
        }
    }
    catch (e) {
        console.debug('glob', source, e); // ENOTDIR, or lacking permissions
    }
    // apply inheritance, masks and permissions to one item, yield it,
    // then recurse into it when it is a directory and depth allows
    async function* workItem(item) {
        // we basename for depth>0 where we already have the rest of the path in the parent's url, and would be duplicated
        const name = (0, path_1.basename)(getNodeName(item));
        const url = (0, misc_1.enforceFinal)('/', parent.url || '') + name;
        const temp = inheritFromParent(parent, {
            ...item,
            isTemp: true,
            url,
            parents: [...parent.parents || [], parent],
        });
        applyMasks(temp, parent, name);
        if (!hasPermission(temp, 'can_see', ctx))
            return;
        yield temp;
        try {
            if (!depth || !await nodeIsDirectory(temp))
                return;
            inheritMasks(temp, parent, name);
            yield* walkNode(temp, ctx, depth - 1, getNodeName(temp) + '/');
        }
        catch (_a) { } // stat failed in nodeIsDirectory, ignore
    }
}
exports.walkNode = walkNode;
|
|
169
|
+
// Merge into `item` every mask of the parent whose pattern matches `name`:
// either a '**/'-prefixed pattern (applies at any depth) or a slash-free one.
function applyMasks(item, parent, name) {
    const { masks } = parent;
    if (!masks)
        return;
    for (const pattern in masks) {
        const matches = pattern.startsWith('**/')
            ? (0, micromatch_1.isMatch)(name, pattern.slice(3))
            : !pattern.includes('/') && (0, micromatch_1.isMatch)(name, pattern);
        if (matches)
            Object.assign(item, masks[pattern]);
    }
}
|
|
178
|
+
// Compute the masks `item` inherits from its parent: '**/' patterns carry
// over (minus the prefix), and patterns scoped under 'name/' are re-rooted.
// item.masks is only set when at least one mask survives.
function inheritMasks(item, parent, name) {
    const { masks } = parent;
    if (!masks)
        return;
    const prefix = name + '/';
    const inherited = {};
    for (const [k, v] of Object.entries(masks)) {
        if (k.startsWith('**/'))
            inherited[k.slice(3)] = v;
        else if (k.startsWith(prefix))
            inherited[k.slice(prefix.length)] = v;
    }
    if (Object.keys(inherited).length)
        item.masks = inherited;
}
|
|
191
|
+
// Narrow a rename map to the entries that apply inside the given sub-folder,
// stripping the 'path/' prefix from their keys.
// Returns undefined when nothing survives; falsy input is passed through.
function renameUnderPath(rename, path) {
    if (!rename)
        return rename;
    const prefix = path + '/';
    const narrowed = {};
    for (const [k, v] of Object.entries(rename)) {
        const sub = k.startsWith(prefix) ? k.slice(prefix.length) : '';
        if (sub)
            narrowed[sub] = v;
    }
    return Object.keys(narrowed).length === 0 ? undefined : narrowed;
}
|
|
199
|
+
// Evaluate a `who` permission value against the request context:
// true = anyone, '*' = any logged-in account, array = list of usernames
// (matched against the user and their groups); anything else denies.
function matchWho(who, ctx) {
    if (who === WHO_ANYONE)
        return true;
    if (who === WHO_ANY_ACCOUNT)
        return Boolean(ctx.state.account);
    if (!Array.isArray(who))
        return false;
    // cache the expanded username list on the ctx so repeated checks are cheap
    const usernames = (0, misc_1.getOrSet)(ctx.state, 'usernames', () => (0, perm_1.getCurrentUsernameExpanded)(ctx));
    return usernames.some((u) => who.includes(u));
}
|
|
205
|
+
// Status for a denied read: an explicit deny is final (403), otherwise the
// user may just need to authenticate (401).
function cantReadStatusCode(node) {
    if (node.can_read === false)
        return const_1.FORBIDDEN;
    return const_1.UNAUTHORIZED;
}
|
|
208
|
+
exports.cantReadStatusCode = cantReadStatusCode;
|
|
209
|
+
// When an account is renamed, rewrite every reference to the old name in the
// vfs permission lists (including masks, recursively) and persist the result.
events_1.default.on('accountRenamed', (from, to) => {
    recur(exports.vfs);
    saveVfs();
    // walk the whole tree, fixing permissions on every node
    function recur(n) {
        var _a;
        replace(n.can_see);
        replace(n.can_read);
        if (n.masks)
            Object.values(n.masks).forEach(recur);
        (_a = n.children) === null || _a === void 0 ? void 0 : _a.forEach(recur);
    }
    // in-place substitution inside a who-list; non-arrays are left alone
    function replace(a) {
        if (!Array.isArray(a))
            return;
        for (let i = 0; i < a.length; i++)
            if (a[i] === from)
                a[i] = to;
    }
});
|
package/src/watchLoad.js
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// This file is part of HFS - Copyright 2021-2022, Massimo Melina <a@rejetto.com> - License https://www.gnu.org/licenses/gpl-3.0.txt
|
|
3
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
4
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
5
|
+
};
|
|
6
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
7
|
+
exports.watchLoad = void 0;
|
|
8
|
+
const fs_1 = require("fs");
|
|
9
|
+
const promises_1 = __importDefault(require("fs/promises"));
|
|
10
|
+
const yaml_1 = __importDefault(require("yaml"));
|
|
11
|
+
const misc_1 = require("./misc");
|
|
12
|
+
// Load a file through `parser` and reload it whenever it changes on disk
// (reloads are debounced; .yaml files are parsed before being handed over).
// Returns { unwatch, save }: save() writes the file while suppressing the
// resulting watch event. failedOnFirstAttempt fires if the initial load fails.
function watchLoad(path, parser, { failedOnFirstAttempt } = {}) {
    let doing = false;
    let watcher;
    const debounced = (0, misc_1.debounceAsync)(load, 500, { leading: true }); // coalesce bursts of change events
    let retry;
    let saving;
    let lastStats;
    init().then(ok => ok || (failedOnFirstAttempt === null || failedOnFirstAttempt === void 0 ? void 0 : failedOnFirstAttempt()));
    return {
        unwatch() {
            watcher === null || watcher === void 0 ? void 0 : watcher.close();
            clearTimeout(retry);
            watcher = undefined;
        },
        save(...args) {
            return Promise.resolve(saving).then(() => // wait in case another is ongoing
            saving = promises_1.default.writeFile(...args).finally(() => // save but also keep track of the current operation
            saving = undefined)); // clear
        }
    };
    // start watching; on failure, retries every 3 seconds
    async function init() {
        try {
            debounced().then(); // first load, fire-and-forget
            watcher = (0, fs_1.watch)(path, () => {
                if (!saving) // ignore events caused by our own save()
                    debounced().then();
            });
            return true; // used actually just by the first invocation
        }
        catch (e) {
            retry = setTimeout(init, 3000); // manual watching until watch is successful
        }
    }
    // read + parse the file, skipping reentrant calls and unchanged mtimes
    async function load() {
        if (doing)
            return;
        doing = true;
        let data;
        try {
            try { // I've seen watch() firing 'change' without any change, so we'll check if any change is detectable before going on
                const stats = await promises_1.default.stat(path);
                if (stats.mtimeMs === (lastStats === null || lastStats === void 0 ? void 0 : lastStats.mtimeMs))
                    return;
                lastStats = stats;
                data = await (0, misc_1.readFileBusy)(path);
                console.debug('loaded', path);
            }
            catch (e) {
                if (e.code === 'EPERM')
                    console.error("missing permissions on file", path); // warn user, who could be clueless about this problem
                return; // ignore read errors
            }
            if (path.endsWith('.yaml'))
                data = yaml_1.default.parse(data);
            await parser(data);
        }
        finally {
            doing = false;
        }
    }
}
exports.watchLoad = watchLoad;
|
package/src/zip.js
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// This file is part of HFS - Copyright 2021-2022, Massimo Melina <a@rejetto.com> - License https://www.gnu.org/licenses/gpl-3.0.txt
|
|
3
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
4
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
5
|
+
};
|
|
6
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
7
|
+
exports.zipStreamFromFolder = void 0;
|
|
8
|
+
const vfs_1 = require("./vfs");
|
|
9
|
+
const misc_1 = require("./misc");
|
|
10
|
+
const QuickZipStream_1 = require("./QuickZipStream");
|
|
11
|
+
const fs_1 = require("fs");
|
|
12
|
+
const promises_1 = __importDefault(require("fs/promises"));
|
|
13
|
+
const config_1 = require("./config");
|
|
14
|
+
const path_1 = require("path");
|
|
15
|
+
const serveFile_1 = require("./serveFile");
|
|
16
|
+
// Serve a vfs folder — or a '*'-separated list of its entries (?list=) — as a
// zip download on the koa ctx. Supports HTTP range requests thanks to
// QuickZipStream's precalculated size; ?search= filters entries by pattern.
async function zipStreamFromFolder(node, ctx) {
    ctx.status = 200;
    ctx.mime = 'zip';
    const name = (0, vfs_1.getNodeName)(node);
    ctx.attachment((name || 'archive') + '.zip');
    const filter = (0, misc_1.pattern2filter)(String(ctx.query.search || ''));
    const { list } = ctx.query;
    const walker = !list ? (0, vfs_1.walkNode)(node, ctx, Infinity)
        : (async function* () {
            for await (const el of String(list).split('*')) { // we are using * as separator because it cannot be used in a file name and doesn't need url encoding
                const subNode = await (0, vfs_1.urlToNode)(el, ctx, node);
                if (!subNode || !(0, vfs_1.hasPermission)(subNode, 'can_read', ctx))
                    continue; // silently skip entries the user can't read
                if (await (0, vfs_1.nodeIsDirectory)(subNode)) { // a directory needs to walked
                    yield* (0, vfs_1.walkNode)(subNode, ctx, Infinity, el + '/');
                    continue;
                }
                let folder = (0, path_1.dirname)(el);
                folder = folder === '.' ? '' : folder + '/';
                yield { ...subNode, name: folder + (0, vfs_1.getNodeName)(subNode) }; // reflect relative path in archive, otherwise way may have name-clashes
            }
        })();
    // turn each vfs node into the file descriptor shape QuickZipStream expects
    const mappedWalker = (0, misc_1.filterMapGenerator)(walker, async (el) => {
        const { source } = el;
        const name = (0, vfs_1.getNodeName)(el);
        if (!source || ctx.req.aborted || !filter(name))
            return;
        try {
            const st = await promises_1.default.stat(source);
            if (!st || !st.isFile())
                return; // only regular files go into the archive
            return {
                path: name,
                size: st.size,
                ts: st.mtime || st.ctime,
                mode: st.mode,
                sourcePath: source,
                getData: () => (0, fs_1.createReadStream)(source, { start: 0, end: Math.max(0, st.size - 1) })
            };
        }
        catch (_a) { } // stat failed: skip the entry
    });
    const zip = new QuickZipStream_1.QuickZipStream(mappedWalker);
    const time = 1000 * zipSeconds.get(); // budget for the exact-size calculation
    ctx.response.length = await zip.calculateSize(time);
    const range = (0, serveFile_1.getRange)(ctx, ctx.response.length);
    if (range)
        zip.applyRange(range.start, range.end);
    ctx.body = zip;
    ctx.req.on('close', () => zip.destroy()); // free resources if the client disconnects
    ctx.state.archive = 'zip';
}
exports.zipStreamFromFolder = zipStreamFromFolder;
// configurable budget (seconds) for calculateSize above
const zipSeconds = (0, config_1.defineConfig)('zip_calculate_size_for_seconds', 1);
|