hackmud-script-manager 0.11.0-2c7f410 → 0.12.0-c276bb2
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as published.
- package/bin/hsm.js +228 -203
- package/index.d.ts +1 -1
- package/index.js +10 -718
- package/package.json +17 -7
- package/shared.js +902 -0
- package/lib.js +0 -74
package/index.d.ts CHANGED
package/index.js CHANGED
@@ -1,718 +1,10 @@
-
-
-
-
-
-
-
-
-
-
- const acorn_1 = require("acorn");
- const fs_1 = __importDefault(require("fs"));
- const lib_1 = require("./lib");
- const { readFile: readFile, readdir: readDirectory, stat: getFileStatus, writeFile: writeFile } = fs_1.default.promises;
- exports.supportedExtensions = [".js", ".ts"];
- // TODO `clean()` function that delete all scripts in hackmud directory #70
- // TODO optional argument (defaults to false) for `clean()` that makes it only remove scripts without a source file #70
- /**
-  * Push a specific or all scripts to a specific or all users.
-  * In source directory, scripts in folders will override scripts with same name for user with folder name.
-  *
-  * e.g. foo/bar.js overrides other bar.js script just for user foo.
-  *
-  * @param srcDir path to folder containing source files
-  * @param hackmudDir path to hackmud directory
-  * @param users users to push to (pushes to all if empty)
-  * @param scripts scripts to push from (pushes from all if empty)
-  * @param onPush function that's called when a script has been pushed
-  */
- function push(srcDir, hackmudDir, users, scripts, onPush) {
- return new Promise(async (resolve) => {
- const infoAll = [];
- const files = await readDirectory(srcDir, { withFileTypes: true });
- const skips = new Map();
- const promises = [];
- for (const dir of files) {
- const user = dir.name;
- if (dir.isDirectory() && (!users.length || users.includes(user))) {
- promises.push(readDirectory((0, path_1.resolve)(srcDir, user), { withFileTypes: true }).then(files => {
- for (const file of files) {
- const extension = (0, path_1.extname)(file.name);
- const name = (0, path_1.basename)(file.name, extension);
- if (exports.supportedExtensions.includes(extension) && file.isFile() && (!scripts.length || scripts.includes(name))) {
- let skip = skips.get(name);
- if (skip)
- skip.push(user);
- else
- skips.set(name, [user]);
- readFile((0, path_1.resolve)(srcDir, user, file.name), { encoding: "utf-8" }).then(async (code) => {
- let error = null;
- const { srcLength, script: minCode } = await processScript(code).catch(reason => {
- error = reason;
- return {
- srcLength: 0,
- script: ""
- };
- });
- const info = {
- file: `${user}/${file.name}`,
- users: [user],
- minLength: 0,
- error,
- srcLength
- };
- infoAll.push(info);
- if (!error) {
- if (minCode) {
- info.minLength = (0, lib_1.hackmudLength)(minCode);
- await (0, lib_1.writeFilePersist)((0, path_1.resolve)(hackmudDir, user, "scripts", `${name}.js`), minCode);
- }
- else
- info.error = new Error("processed script was empty");
- }
- onPush === null || onPush === void 0 ? void 0 : onPush(info);
- });
- }
- }
- }));
- }
- }
- if (!users.length) {
- users = (await readDirectory(hackmudDir, { withFileTypes: true }))
- .filter(a => a.isFile() && (0, path_1.extname)(a.name) == ".key")
- .map(a => (0, path_1.basename)(a.name, ".key"));
- }
- Promise.all(promises).then(() => {
- const promises = [];
- for (const file of files) {
- if (file.isFile()) {
- const extension = (0, path_1.extname)(file.name);
- if (exports.supportedExtensions.includes(extension)) {
- const name = (0, path_1.basename)(file.name, extension);
- if (!scripts.length || scripts.includes(name)) {
- promises.push(readFile((0, path_1.resolve)(srcDir, file.name), { encoding: "utf-8" }).then(async (code) => {
- let error = null;
- const { script: minCode, srcLength } = await processScript(code).catch(reason => {
- error = reason;
- return {
- script: "",
- srcLength: 0
- };
- });
- const info = {
- file: file.name,
- users: [],
- minLength: 0,
- error,
- srcLength
- };
- infoAll.push(info);
- if (!error) {
- if (minCode) {
- info.minLength = (0, lib_1.hackmudLength)(minCode);
- const skip = skips.get(name) || [];
- const promises = [];
- for (const user of users) {
- if (!skip.includes(user)) {
- info.users.push(user);
- promises.push((0, lib_1.writeFilePersist)((0, path_1.resolve)(hackmudDir, user, "scripts", `${name}.js`), minCode));
- }
- }
- }
- else
- info.error = new Error("processed script was empty");
- }
- if (onPush)
- Promise.all(promises).then(() => onPush(info));
- }));
- }
- }
- }
- }
- Promise.all(promises).then(() => resolve(infoAll));
- });
- });
- }
- exports.push = push;
- /**
-  * Watches target file or folder for updates and builds and pushes updated file.
-  *
-  * @param srcDir path to folder containing source files
-  * @param hackmudDir path to hackmud directory
-  * @param users users to push to (pushes to all if empty)
-  * @param scripts scripts to push from (pushes from all if empty)
-  * @param onPush function that's called after each script has been built and written
-  */
- function watch(srcDir, hackmudDir, users, scripts, onPush, { genTypes } = {}) {
- const watcher = (0, chokidar_1.watch)("", { depth: 1, cwd: srcDir, awaitWriteFinish: { stabilityThreshold: 100 } }).on("change", async (path) => {
- const extension = (0, path_1.extname)(path);
- if (exports.supportedExtensions.includes(extension)) {
- const name = (0, path_1.basename)(path, extension);
- const fileName = (0, path_1.basename)(path);
- if (path == fileName) {
- if (!scripts.length || scripts.includes(name)) {
- const sourceCode = await readFile((0, path_1.resolve)(srcDir, path), { encoding: "utf-8" });
- const skips = new Map();
- const promisesSkips = [];
- for (const dir of await readDirectory(srcDir, { withFileTypes: true })) {
- if (!dir.isDirectory())
- continue;
- promisesSkips.push(readDirectory((0, path_1.resolve)(srcDir, dir.name), { withFileTypes: true }).then(files => {
- for (const file of files) {
- if (!file.isFile())
- continue;
- const fileExtension = (0, path_1.extname)(file.name);
- if (!exports.supportedExtensions.includes(fileExtension))
- continue;
- const name = (0, path_1.basename)(file.name, fileExtension);
- const skip = skips.get(name);
- if (skip)
- skip.push(dir.name);
- else
- skips.set(name, [dir.name]);
- }
- }));
- }
- await Promise.all(promisesSkips);
- let error = null;
- const { script, srcLength } = await processScript(sourceCode).catch(reason => {
- error = reason;
- return {
- script: "",
- srcLength: 0
- };
- });
- const info = {
- file: path,
- users: [],
- minLength: 0,
- error,
- srcLength
- };
- const promises = [];
- if (!error) {
- if (script) {
- const skip = skips.get(name) || [];
- info.minLength = (0, lib_1.hackmudLength)(script);
- if (!users.length) {
- users = (await readDirectory(hackmudDir, { withFileTypes: true }))
- .filter(a => a.isFile() && (0, path_1.extname)(a.name) == ".key")
- .map(a => (0, path_1.basename)(a.name, ".key"));
- }
- for (const user of users) {
- if (skip.includes(user))
- continue;
- info.users.push(user);
- promises.push((0, lib_1.writeFilePersist)((0, path_1.resolve)(hackmudDir, user, "scripts", `${name}.js`), script));
- }
- }
- else
- info.error = new Error("processed script was empty");
- }
- if (onPush) {
- await Promise.all(promises);
- onPush(info);
- }
- }
- }
- else {
- const user = (0, path_1.basename)((0, path_1.resolve)(path, ".."));
- if ((!users.length || users.includes(user)) && (!scripts.length || scripts.includes(name))) {
- const sourceCode = await readFile((0, path_1.resolve)(srcDir, path), { encoding: "utf-8" });
- let error = null;
- const { script, srcLength } = await processScript(sourceCode).catch(reason => {
- error = reason;
- return {
- script: "",
- srcLength: 0
- };
- });
- const info = {
- file: path,
- users: [user],
- minLength: 0,
- error,
- srcLength
- };
- if (!error) {
- if (script) {
- info.minLength = (0, lib_1.hackmudLength)(script);
- await (0, lib_1.writeFilePersist)((0, path_1.resolve)(hackmudDir, user, "scripts", `${name}.js`), script);
- }
- else
- info.error = new Error("processed script was empty");
- }
- onPush === null || onPush === void 0 ? void 0 : onPush(info);
- }
- }
- }
- });
- if (genTypes) {
- generateTypings(srcDir, (0, path_1.resolve)(srcDir, genTypes), hackmudDir);
- watcher.on("add", () => generateTypings(srcDir, (0, path_1.resolve)(srcDir, genTypes), hackmudDir));
- watcher.on("unlink", () => generateTypings(srcDir, (0, path_1.resolve)(srcDir, genTypes), hackmudDir));
- }
- }
- exports.watch = watch;
- /**
-  * Copies script from hackmud to local source folder.
-  *
-  * @param sourceFolderPath path to folder containing source files
-  * @param hackmudPath path to hackmud directory
-  * @param script script to pull in `user.name` format
-  */
- async function pull(sourceFolderPath, hackmudPath, script) {
- const [user, name] = script.split(".");
- await (0, lib_1.copyFilePersist)((0, path_1.resolve)(hackmudPath, user, "scripts", `${name}.js`), (0, path_1.resolve)(sourceFolderPath, user, `${name}.js`));
- }
- exports.pull = pull;
- async function syncMacros(hackmudPath) {
- const files = await readDirectory(hackmudPath, { withFileTypes: true });
- const macros = new Map();
- const users = [];
- for (const file of files) {
- if (!file.isFile())
- continue;
- switch ((0, path_1.extname)(file.name)) {
- case ".macros":
- {
- const lines = (await readFile((0, path_1.resolve)(hackmudPath, file.name), { encoding: "utf-8" })).split("\n");
- const date = (await getFileStatus((0, path_1.resolve)(hackmudPath, file.name))).mtime;
- for (let i = 0; i < lines.length / 2 - 1; i++) {
- const macroName = lines[i * 2];
- const curMacro = macros.get(macroName);
- if (!curMacro || date > curMacro.date)
- macros.set(macroName, { date, macro: lines[i * 2 + 1] });
- }
- }
- break;
- case ".key":
- {
- users.push((0, path_1.basename)(file.name, ".key"));
- }
- break;
- }
- }
- let macroFile = "";
- let macrosSynced = 0;
- for (const [name, { macro }] of [...macros].sort(([a], [b]) => (a > b) - (a < b))) {
- if (macro[0] != macro[0].toLowerCase())
- continue;
- macroFile += `${name}\n${macro}\n`;
- macrosSynced++;
- }
- for (const user of users)
- writeFile((0, path_1.resolve)(hackmudPath, user + ".macros"), macroFile);
- return { macrosSynced, usersSynced: users.length };
- }
- exports.syncMacros = syncMacros;
- async function test(srcPath) {
- const promises = [];
- const errors = [];
- for (const dirent of await readDirectory(srcPath, { withFileTypes: true })) {
- if (dirent.isDirectory()) {
- promises.push(readDirectory((0, path_1.resolve)(srcPath, dirent.name), { withFileTypes: true }).then(files => {
- const promises = [];
- for (const file of files) {
- if (!file.isFile() || !exports.supportedExtensions.includes((0, path_1.extname)(file.name)))
- continue;
- promises.push(readFile((0, path_1.resolve)(srcPath, dirent.name, file.name), { encoding: "utf-8" })
- .then(processScript)
- .then(({ warnings }) => errors.push(...warnings.map(({ message, line }) => ({
- file: `${dirent.name}/${file.name}`,
- message, line
- })))));
- }
- return Promise.all(promises);
- }));
- }
- else if (dirent.isFile() && exports.supportedExtensions.includes((0, path_1.extname)(dirent.name))) {
- promises.push(readFile((0, path_1.resolve)(srcPath, dirent.name), { encoding: "utf-8" })
- .then(processScript)
- .then(({ warnings }) => errors.push(...warnings.map(({ message, line }) => ({
- file: dirent.name,
- message, line
- })))));
- }
- }
- await Promise.all(promises);
- return errors;
- }
- exports.test = test;
- async function generateTypings(srcDir, target, hackmudPath) {
- const users = new Set();
- if (hackmudPath) {
- for (const dirent of await readDirectory(hackmudPath, { withFileTypes: true })) {
- if (dirent.isFile() && (0, path_1.extname)(dirent.name) == ".key")
- users.add((0, path_1.basename)(dirent.name, ".key"));
- }
- }
- const wildScripts = [];
- const wildAnyScripts = [];
- const allScripts = {};
- const allAnyScripts = {};
- for (const dirent of await readDirectory(srcDir, { withFileTypes: true })) {
- if (dirent.isFile()) {
- if ((0, path_1.extname)(dirent.name) == ".ts")
- wildScripts.push((0, path_1.basename)(dirent.name, ".ts"));
- else if ((0, path_1.extname)(dirent.name) == ".js")
- wildAnyScripts.push((0, path_1.basename)(dirent.name, ".js"));
- }
- else if (dirent.isDirectory()) {
- const scripts = allScripts[dirent.name] = [];
- const anyScripts = allAnyScripts[dirent.name] = [];
- users.add(dirent.name);
- for (const file of await readDirectory((0, path_1.resolve)(srcDir, dirent.name), { withFileTypes: true })) {
- if (file.isFile()) {
- if ((0, path_1.extname)(file.name) == ".ts")
- scripts.push((0, path_1.basename)(file.name, ".ts"));
- else if ((0, path_1.extname)(file.name) == ".js")
- anyScripts.push((0, path_1.basename)(file.name, ".js"));
- }
- }
- }
- }
- let o = "";
- for (const script of wildScripts)
- o += `import { script as $${script}$ } from "./src/${script}"\n`;
- o += "\n";
- for (const user in allScripts) {
- const scripts = allScripts[user];
- for (const script of scripts)
- o += `import { script as $${user}$${script}$ } from "./src/${user}/${script}"\n`;
- }
- // TODO detect security level and generate apropriate code
- // TODO accurate function signatures
- // currently I lose the generic-ness of my functions when I wrap them
- // just regexing isn't enough and it looks like I'm going to need to parse the files in TypeScript to extract the signature
- o += `
- type ArrayRemoveFirst<A> = A extends [ infer FirstItem, ...infer Rest ] ? Rest : never
-
- type Subscript<T extends (...args: any) => any> =
- (...args: ArrayRemoveFirst<Parameters<T>>) => ReturnType<T> | ScriptFailure
-
- type WildFullsec = Record<string, () => ScriptFailure> & {
- `;
- for (const script of wildScripts)
- o += `\t${script}: Subscript<typeof $${script}$>\n`;
- for (const script of wildAnyScripts)
- o += `\t${script}: (...args: any) => any\n`;
- o += "}\n\ndeclare global {\n\tinterface PlayerFullsec {";
- let lastWasMultiLine = true;
- for (const user of users) {
- const scripts = allScripts[user];
- const anyScripts = allAnyScripts[user];
- if ((scripts && scripts.length) || (anyScripts && anyScripts.length)) {
- lastWasMultiLine = true;
- o += `\n\t\t${user}: WildFullsec & {\n`;
- for (const script of scripts)
- o += `\t\t\t${script}: Subscript<typeof $${user}$${script}$>\n`;
- for (const script of anyScripts)
- o += `\t\t\t${script}: (...args: any) => any\n`;
- o += "\t\t}";
- }
- else {
- if (lastWasMultiLine) {
- o += "\n";
- lastWasMultiLine = false;
- }
- o += `\t\t${user}: WildFullsec`;
- }
- o += "\n";
- }
- o += "\t}\n}\n";
- await writeFile(target, o);
- }
- exports.generateTypings = generateTypings;
- /**
-  * Minifies a given script
-  *
-  * @param script JavaScript or TypeScript code
-  */
- async function processScript(script) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _j;
- let preScriptComments;
- let autocomplete;
- [, preScriptComments, script, autocomplete] = script.match(/((?:^\s*\/\/.*\n)*)\s*((?:.+?\/\/\s*(.+?)\s*$)?[^]*)/m);
- if (!script)
- throw new Error("script was empty");
- if (script.match(/(?:SC|DB)\$/))
- throw new Error("SC$ and DB$ are protected and cannot appear in a script");
- let seclevel;
- for (const line of preScriptComments.split("\n")) {
- let [, autocompleteMatch, seclevelMatch] = (line.match(/^\s*\/\/\s*(?:@autocomplete\s*([^\s].*?)|@seclevel\s*([^\s].*?))\s*$/) || []);
- if (autocompleteMatch)
- autocomplete = autocompleteMatch;
- else if (seclevelMatch) {
- if (seclevelMatch.match(/^(?:fullsec|f|4|fs|full)$/i))
- seclevel = 4;
- else if (seclevelMatch.match(/^(?:highsec|h|3|hs|high)$/i))
- seclevel = 3;
- else if (seclevelMatch.match(/^(?:midsec|m|2|ms|mid)$/i))
- seclevel = 2;
- else if (seclevelMatch.match(/^(?:lowsec|l|1|ls|low)$/i))
- seclevel = 1;
- else if (seclevelMatch.match(/^(?:nullsec|n|0|ns|null)$/i))
- seclevel = 0;
- }
- }
- let detectedSeclevel;
- if (script.match(/[#$][n0]s\.[a-z_][a-z_0-9]{0,24}\.[a-z_][a-z_0-9]{0,24}\(/))
- detectedSeclevel = 0;
- else if (script.match(/[#$][l1]s\.[a-z_][a-z_0-9]{0,24}\.[a-z_][a-z_0-9]{0,24}\(/))
- detectedSeclevel = 1;
- else if (script.match(/[#$][m2]s\.[a-z_][a-z_0-9]{0,24}\.[a-z_][a-z_0-9]{0,24}\(/))
- detectedSeclevel = 2;
- else if (script.match(/[#$][h3]s\.[a-z_][a-z_0-9]{0,24}\.[a-z_][a-z_0-9]{0,24}\(/))
- detectedSeclevel = 3;
- else if (script.match(/[#$][f4]s\.[a-z_][a-z_0-9]{0,24}\.[a-z_][a-z_0-9]{0,24}\(/))
- detectedSeclevel = 4;
- const seclevelNames = ["NULLSEC", "LOWSEC", "MIDSEC", "HIGHSEC", "FULLSEC"];
- if (seclevel == undefined)
- seclevel = (_a = seclevel !== null && seclevel !== void 0 ? seclevel : detectedSeclevel) !== null && _a !== void 0 ? _a : 0;
- else if (detectedSeclevel != undefined && seclevel > detectedSeclevel)
- throw new Error(`detected seclevel of ${seclevelNames[detectedSeclevel]} is lower than the provided seclevel of ${seclevelNames[seclevel]}`);
- const semicolons = (_c = (_b = script.match(/;/g)) === null || _b === void 0 ? void 0 : _b.length) !== null && _c !== void 0 ? _c : 0;
- script = script
- .replace(/#[fhmln43210]s\.scripts\.quine\(\)/g, JSON.stringify(script))
- .replace(/[#$][fhmln43210]?s\.([a-z_][a-z_0-9]{0,24})\.([a-z_][a-z_0-9]{0,24})\(/g, "SC$$$1$$$2(")
- .replace(/^function\s*\(/, "function script(")
- .replace(/#D\(/g, "$D(")
- .replace(/#FMCL/g, "$FMCL")
- .replace(/#G/g, "$G")
- .replace(/[#$]db\./g, "DB$");
- // typescript compilation, this runs on regular javascript too to convert
- // any post es2015 syntax into es2015 syntax
- const { outputText, diagnostics = [] } = typescript_1.default.transpileModule(script, {
- compilerOptions: { target: typescript_1.default.ScriptTarget.ES2015 },
- reportDiagnostics: true
- });
- const warnings = diagnostics.map(({ messageText, start }) => ({
- message: typeof messageText == "string" ? messageText : messageText.messageText,
- line: (0, lib_1.positionToLineNumber)(start, script)
- }));
- script = outputText.replace(/^export /, "");
- // the typescript inserts semicolons where they weren't already so we take
- // all semicolons out of the count and add the number of semicolons in the
- // source to make things fair
- let srcLength = (0, lib_1.hackmudLength)(script.replace(/^function\s*\w+\(/, "function("))
- - ((_e = (_d = script.match(/;/g)) === null || _d === void 0 ? void 0 : _d.length) !== null && _e !== void 0 ? _e : 0)
- + semicolons
- + ((_g = (_f = script.match(/SC\$[a-zA-Z_][a-zA-Z0-9_]*\$[a-zA-Z_][a-zA-Z0-9_]*\(/g)) === null || _f === void 0 ? void 0 : _f.length) !== null && _g !== void 0 ? _g : 0)
- + ((_j = (_h = script.match(/DB\$/g)) === null || _h === void 0 ? void 0 : _h.length) !== null && _j !== void 0 ? _j : 0);
- // remove dead code (so we don't waste chracters quine cheating strings
- // that aren't even used)
- script = (await (0, terser_1.minify)(script, {
- ecma: 2015,
- parse: { bare_returns: true }
- })).code || "";
- let blockStatementIndex;
- if (script.startsWith("function "))
- blockStatementIndex = getFunctionBodyStart(script);
- else {
- script = `function script(context, args) {\n${script}\n}`;
- blockStatementIndex = 31;
- srcLength += 24;
- }
- const scriptBeforeJSONValueReplacement = (await (0, terser_1.minify)(script, {
- ecma: 2015,
- compress: {
- passes: Infinity,
- unsafe: true,
- unsafe_arrows: true,
- unsafe_comps: true,
- unsafe_symbols: true,
- unsafe_methods: true,
- unsafe_proto: true,
- unsafe_regexp: true,
- unsafe_undefined: true
- },
- format: { semicolons: false }
- })).code || "";
- const jsonValues = [];
- let undefinedIsReferenced = false;
- // we iterate through the tokens backwards so that substring replacements
- // don't affect future replacements since a part of the string could be
- // replaced with a string of a different length which messes up indexes
- const tokens = [...(0, acorn_1.tokenizer)(script, { ecmaVersion: 2015 })].reverse().values();
- for (const token of tokens) {
- // we can't replace any tokens before the block statement or we'll break stuff
- if (token.start < blockStatementIndex)
- break;
- switch (token.type) {
- case acorn_1.tokTypes.backQuote:
- {
- const templateToken = tokens.next().value;
- if (tokens.next().value.type == acorn_1.tokTypes.backQuote)
- throw new Error("tagged templates not supported yet");
- // no point in concatenating an empty string
- if (templateToken.value == "") {
- script = (0, lib_1.stringSplice)(script, "))", templateToken.start - 1, token.end);
- break;
- }
- let jsonValueIndex = jsonValues.indexOf(templateToken.value);
- if (jsonValueIndex == -1)
- jsonValueIndex += jsonValues.push(templateToken.value);
- script = (0, lib_1.stringSplice)(script, `)+_JSON_VALUE_${jsonValueIndex}_)`, templateToken.start - 1, token.end);
- }
- break;
- case acorn_1.tokTypes.template:
- {
- if (tokens.next().value.type == acorn_1.tokTypes.backQuote) {
- if (tokens.next().value.type == acorn_1.tokTypes.name)
- throw new Error("tagged templates not supported yet");
- // there *is* a point in concatenating an empty string at the
- // start because foo + bar is not the same thing as "" + foo + bar
- let jsonValueIndex = jsonValues.indexOf(token.value);
- if (jsonValueIndex == -1)
- jsonValueIndex += jsonValues.push(token.value);
- script = (0, lib_1.stringSplice)(script, `(_JSON_VALUE_${jsonValueIndex}_+(`, token.start - 1, token.end + 2);
- break;
- }
- // no point in concatenating an empty string
- if (token.value == "") {
- script = (0, lib_1.stringSplice)(script, ")+(", token.start - 1, token.end + 2);
- break;
- }
- let jsonValueIndex = jsonValues.indexOf(token.value);
- if (jsonValueIndex == -1)
- jsonValueIndex += jsonValues.push(token.value);
- script = (0, lib_1.stringSplice)(script, `)+_JSON_VALUE_${jsonValueIndex}_+(`, token.start - 1, token.end + 2);
- }
- break;
- case acorn_1.tokTypes.name:
- {
- if (token.value.length < 3)
- break;
- const tokenBefore = tokens.next().value;
- if (tokenBefore.type == acorn_1.tokTypes.dot) {
- let jsonValueIndex = jsonValues.indexOf(token.value);
- if (jsonValueIndex == -1)
- jsonValueIndex += jsonValues.push(token.value);
- script = (0, lib_1.stringSplice)(script, `[_JSON_VALUE_${jsonValueIndex}_]`, tokenBefore.start, token.end);
- break;
- }
- if (token.value == "undefined") {
- script = (0, lib_1.stringSplice)(script, " _UNDEFINED_ ", token.start, token.end);
- undefinedIsReferenced = true;
- }
- }
- break;
- case acorn_1.tokTypes._null:
- {
- let jsonValueIndex = jsonValues.indexOf(null);
- if (jsonValueIndex == -1)
- jsonValueIndex += jsonValues.push(null);
- script = (0, lib_1.stringSplice)(script, ` _JSON_VALUE_${jsonValueIndex}_ `, token.start, token.end);
- }
- break;
- case acorn_1.tokTypes._true:
- case acorn_1.tokTypes._false:
- case acorn_1.tokTypes.num:
- {
- if (token.value == 0) {
- const tokenBefore = tokens.next().value;
- if (tokenBefore.type == acorn_1.tokTypes._void) {
- script = (0, lib_1.stringSplice)(script, " _UNDEFINED_ ", tokenBefore.start, token.end);
- undefinedIsReferenced = true;
- }
- // may as well break here since we're gonna break anyway
- break;
- }
- if (token.value < 10)
- break;
- let jsonValueIndex = jsonValues.indexOf(token.value);
- if (jsonValueIndex == -1)
- jsonValueIndex += jsonValues.push(token.value);
- script = (0, lib_1.stringSplice)(script, ` _JSON_VALUE_${jsonValueIndex}_ `, token.start, token.end);
- }
- break;
- case acorn_1.tokTypes.string:
- {
- if (token.value.includes("\u0000"))
- break;
- let jsonValueIndex = jsonValues.indexOf(token.value);
- if (jsonValueIndex == -1)
- jsonValueIndex += jsonValues.push(token.value);
- script = (0, lib_1.stringSplice)(script, ` _JSON_VALUE_${jsonValueIndex}_ `, token.start, token.end);
- }
- break;
- case acorn_1.tokTypes._const:
- {
- script = (0, lib_1.stringSplice)(script, "let", token.start, token.end);
- }
- break;
- case acorn_1.tokTypes._this:
- throw new Error('"this" keyword is not supported in hackmud');
- }
- }
- if (jsonValues.length) {
- if (jsonValues.length == 1)
- script = (0, lib_1.stringSplice)(script, `\nlet _JSON_VALUE_0_ = JSON.parse(SC$scripts$quine().split\`\t\`[_SPLIT_INDEX_])${undefinedIsReferenced ? ", _UNDEFINED_" : ""}\n`, blockStatementIndex + 1);
- else
- script = (0, lib_1.stringSplice)(script, `\nlet [ ${jsonValues.map((_, i) => `_JSON_VALUE_${i}_`).join(", ")} ] = JSON.parse(SC$scripts$quine().split\`\t\`[_SPLIT_INDEX_])${undefinedIsReferenced ? ", _UNDEFINED_" : ""}\n`, blockStatementIndex + 1);
- }
- else
- script = script.replace(/_UNDEFINED_/g, "void 0");
- script = (await (0, terser_1.minify)(script, {
- ecma: 2015,
- compress: {
- passes: Infinity,
- unsafe: true,
- unsafe_arrows: true,
- unsafe_comps: true,
- unsafe_symbols: true,
- unsafe_methods: true,
- unsafe_proto: true,
- unsafe_regexp: true,
- unsafe_undefined: true
- },
- format: { semicolons: false }
- })).code || "";
- // this step affects the chracter count and can't be done after the count comparison
- if (jsonValues.length) {
- const json = JSON.stringify(jsonValues.length == 1 ? jsonValues[0] : jsonValues);
- script = (0, lib_1.stringSplice)(script, `${autocomplete ? `//${autocomplete}\n` : ""}\n//\t${json}\t\n`, getFunctionBodyStart(script) + 1);
- for (const [i, part] of script.split("\t").entries()) {
- if (part == json) {
- script = script.replace("_SPLIT_INDEX_", (await (0, terser_1.minify)(`$(${i})`, { ecma: 2015 })).code.match(/\$\((.+)\)/)[1]);
- break;
- }
- }
- }
- if ((0, lib_1.hackmudLength)(scriptBeforeJSONValueReplacement) <= (0, lib_1.hackmudLength)(script)) {
- script = scriptBeforeJSONValueReplacement;
- if (autocomplete)
- script = (0, lib_1.stringSplice)(script, `//${autocomplete}\n`, getFunctionBodyStart(script) + 1);
- }
- script = script
- .replace(/^function\s*\w+\(/, "function(")
- .replace(/SC\$([a-zA-Z_][a-zA-Z0-9_]*)\$([a-zA-Z_][a-zA-Z0-9_]*)\(/g, `#${"nlmhf"[seclevel]}s.$1.$2(`)
- .replace(/\$D\(/g, "#D(")
- .replace(/\$FMCL/g, "#FMCL")
- .replace(/\$G/g, "#G")
- .replace(/DB\$/g, "#db.");
- return {
- srcLength,
- script,
- warnings
- };
- }
- exports.processScript = processScript;
- function getFunctionBodyStart(code) {
- const tokens = (0, acorn_1.tokenizer)(code, { ecmaVersion: 2015 });
- tokens.getToken(); // function
- tokens.getToken(); // name
- tokens.getToken(); // (
- let nests = 1;
- while (nests) {
- const token = tokens.getToken();
- if (token.type == acorn_1.tokTypes.parenL)
- nests++;
- else if (token.type == acorn_1.tokTypes.parenR)
- nests--;
- }
- return tokens.getToken().start; // {
- }
- exports.getFunctionBodyStart = getFunctionBodyStart;
+ import 'acorn';
+ import 'chokidar';
+ import 'escodegen';
+ import 'esprima';
+ import 'esquery';
+ import 'fs';
+ import 'path';
+ import 'terser';
+ import 'typescript';
+ export { g as generateTypings, e as getFunctionBodyStart, p as processScript, b as pull, d as push, s as supportedExtensions, a as syncMacros, t as test, c as watch } from './shared.js';
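The new index.js is a thin ES-module re-export layer: the same named functions remain importable from the package root, while their implementation now lives in shared.js. A minimal usage sketch, assuming the re-exported processScript keeps the signature shown in the removed 0.11.0 code (an async function returning { script, srcLength, warnings }):

import { processScript } from "hackmud-script-manager"

// Minify a hackmud script; the return shape is assumed unchanged from the
// 0.11.0 implementation shown above.
const { script, srcLength, warnings } = await processScript("function(context, args) { return args }")
console.log(script, srcLength, warnings.length)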