hackmud-script-manager 0.13.0-c461329 → 0.13.0-f373e9c
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/assert-1b7dada8.js +1 -0
- package/bin/hsm.d.ts +2 -0
- package/bin/hsm.js +2 -0
- package/generateTypings.d.ts +2 -0
- package/generateTypings.js +1 -0
- package/index.d.ts +15 -0
- package/index.js +1 -0
- package/package.json +35 -11
- package/processScript/index.d.ts +33 -0
- package/processScript/index.js +1 -0
- package/processScript/minify.d.ts +14 -0
- package/processScript/minify.js +1 -0
- package/processScript/postprocess.d.ts +2 -0
- package/processScript/postprocess.js +1 -0
- package/processScript/preprocess.d.ts +13 -0
- package/processScript/preprocess.js +1 -0
- package/processScript/shared.d.ts +3 -0
- package/processScript/shared.js +1 -0
- package/processScript/transform.d.ts +22 -0
- package/processScript/transform.js +1 -0
- package/pull.d.ts +9 -0
- package/pull.js +1 -0
- package/push.d.ts +28 -0
- package/push.js +1 -0
- package/spliceString-2c6f214f.js +1 -0
- package/syncMacros.d.ts +5 -0
- package/syncMacros.js +1 -0
- package/test.d.ts +6 -0
- package/test.js +1 -0
- package/watch.d.ts +14 -0
- package/watch.js +1 -0
- package/.gitattributes +0 -1
- package/.github/workflows/codeql-analysis.yml +0 -39
- package/.github/workflows/publish.yml +0 -42
- package/.vscode/settings.json +0 -6
- package/babel.config.json +0 -6
- package/rollup.config.js +0 -110
- package/scripts/build-package-json.js +0 -36
- package/scripts/jsconfig.json +0 -5
- package/scripts/version-dev.js +0 -25
- package/src/bin/hsm.ts +0 -505
- package/src/constants.json +0 -3
- package/src/generateTypings.ts +0 -116
- package/src/index.ts +0 -19
- package/src/modules.d.ts +0 -5
- package/src/processScript/index.ts +0 -198
- package/src/processScript/minify.ts +0 -529
- package/src/processScript/postprocess.ts +0 -38
- package/src/processScript/preprocess.ts +0 -146
- package/src/processScript/transform.ts +0 -760
- package/src/pull.ts +0 -16
- package/src/push.ts +0 -314
- package/src/syncMacros.ts +0 -52
- package/src/test.ts +0 -59
- package/src/tsconfig.json +0 -20
- package/src/watch.ts +0 -156
- package/tsconfig.json +0 -12
package/src/pull.ts
DELETED
@@ -1,16 +0,0 @@
-import { copyFilePersistent } from "@samual/lib"
-import { resolve as resolvePath } from "path"
-
-/**
- * Copies script from hackmud to local source folder.
- *
- * @param sourceFolderPath path to folder containing source files
- * @param hackmudPath path to hackmud directory
- * @param script script to pull in `user.name` format
- */
-export async function pull(sourceFolderPath: string, hackmudPath: string, script: string) {
-	const [ user, name ] = script.split(".")
-	await copyFilePersistent(resolvePath(hackmudPath, user, "scripts", `${name}.js`), resolvePath(sourceFolderPath, user, `${name}.js`))
-}
-
-export default pull
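
For reference, a minimal usage sketch of the pull API removed above, assuming the package root re-exports it as the old src/index.ts suggests; the directories and script name are placeholders:

import { pull } from "hackmud-script-manager"

// copy ./hackmud/myuser/scripts/teleport.js to ./src/myuser/teleport.js
await pull("./src", "./hackmud", "myuser.teleport")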
package/src/push.ts
DELETED
@@ -1,314 +0,0 @@
-import { countHackmudCharacters, DynamicMap, forEachParallel, writeFilePersistent } from "@samual/lib"
-import fs from "fs"
-import { basename as getBaseName, extname as getFileExtension, resolve as resolvePath } from "path"
-import type { Info } from "."
-import { supportedExtensions } from "./constants.json"
-import processScript from "./processScript"
-
-const { readFile, readdir: readDirectory } = fs.promises
-
-interface PushOptions {
-	/**
-	 * array of scripts in the format `foo.bar`
-	 *
-	 * also accepts wild card e.g. `*.bar` or `foo.*`
-	 *
-	 * pushes everything by default
-	 */
-	scripts: string | string[]
-
-	/** callback when a script is pushed */
-	onPush: (info: Info) => void
-
-	/** whether to do the minify step (defaults to `true`) */
-	minify: boolean
-
-	/** whether to mangle function and class names (defaults to `false`) */
-	mangleNames: boolean
-}
-
-/**
- * Push scripts from a source directory to the hackmud directory.
- *
- * Files directly in the source folder are pushed to all users
- * @param sourceDirectory directory containing source code
- * @param hackmudDirectory directory created by hackmud containing user data including scripts
- * @param options {@link PushOptions details}
- * @returns array of info on pushed scripts
- */
-export async function push(
-	sourceDirectory: string,
-	hackmudDirectory: string,
-	{
-		scripts = "*.*",
-		onPush = (info: Info) => {},
-		minify = true,
-		mangleNames = false
-	}: Partial<PushOptions> = {}
-) {
-	if (typeof scripts == "string")
-		scripts = [ scripts ]
-
-	const scriptNamesByUser = new DynamicMap((user: string) => new Set<string>())
-	const wildScriptUsers = new Set<string>()
-	const wildUserScripts = new Set<string>()
-
-	let pushEverything = false
-
-	for (const fullScriptName of scripts) {
-		const [ user, scriptName ] = fullScriptName.split(".")
-
-		if (!user || user == "*") {
-			if (!scriptName || scriptName == "*")
-				pushEverything = true
-			else
-				wildUserScripts.add(scriptName)
-		} else if (!scriptName || scriptName == "*")
-			wildScriptUsers.add(user)
-		else
-			scriptNamesByUser.get(user).add(scriptName)
-	}
-
-	const usersByGlobalScriptsToPush = new DynamicMap((user: string) => new Set<string>())
-	const allInfo: Info[] = []
-	const scriptNamesAlreadyPushedByUser = new DynamicMap((user: string) => new Set<string>())
-
-	let sourceDirectoryDirents
-
-	// *.bar
-	if (wildUserScripts.size || pushEverything) {
-		const hackmudDirectoryDirents = await readDirectory(resolvePath(hackmudDirectory), { withFileTypes: true })
-
-		const allUsers = new Set([
-			...(sourceDirectoryDirents = await readDirectory(resolvePath(sourceDirectory), { withFileTypes: true }))
-				.filter(dirent => dirent.isDirectory())
-				.map(dirent => dirent.name),
-			...hackmudDirectoryDirents
-				.filter(dirent => dirent.isDirectory())
-				.map(dirent => dirent.name),
-			...hackmudDirectoryDirents
-				.filter(dirent => dirent.isFile() && getFileExtension(dirent.name) == ".key")
-				.map(dirent => dirent.name.slice(0, -4)),
-			...scriptNamesByUser.keys(),
-			...wildScriptUsers
-		])
-
-		if (pushEverything) {
-			for (const user of allUsers)
-				wildScriptUsers.add(user)
-		} else {
-			for (const user of allUsers) {
-				const scriptNames = scriptNamesByUser.get(user)
-
-				for (const scriptName of wildUserScripts)
-					scriptNames.add(scriptName)
-			}
-		}
-	}
-
-	// foo.*
-	await forEachParallel(wildScriptUsers, async user => {
-		await readDirectory(resolvePath(sourceDirectory, user), { withFileTypes: true }).then(async dirents => {
-			await forEachParallel(dirents, async dirent => {
-				const extension = getFileExtension(dirent.name)
-
-				if (dirent.isFile() && supportedExtensions.includes(extension)) {
-					const scriptName = getBaseName(dirent.name, extension)
-					const filePath = resolvePath(sourceDirectory, user, dirent.name)
-
-					const { srcLength, script: minifiedCode } = await processScript(
-						await readFile(filePath, { encoding: "utf-8" }),
-						{
-							minify,
-							scriptUser: user,
-							scriptName,
-							filePath,
-							mangleNames
-						}
-					)
-
-					const info: Info = {
-						file: `${user}/${dirent.name}`,
-						users: [ user ],
-						minLength: countHackmudCharacters(minifiedCode),
-						error: null,
-						srcLength
-					}
-
-					scriptNamesAlreadyPushedByUser.get(user).add(scriptName)
-					allInfo.push(info)
-
-					await writeFilePersistent(resolvePath(hackmudDirectory, user, `scripts/${scriptName}.js`), minifiedCode)
-
-					onPush(info)
-				}
-			})
-		}, (error: NodeJS.ErrnoException) => {
-			if (error.code != "ENOENT")
-				throw error
-		})
-	})
-
-	// foo.bar
-	await forEachParallel(scriptNamesByUser, async ([ user, scripts ]) => {
-		if (wildScriptUsers.has(user))
-			return
-
-		await forEachParallel(scripts, async scriptName => {
-			let code
-			let fileName
-
-			let filePath!: string
-
-			for (const extension of supportedExtensions) {
-				try {
-					fileName = `${scriptName}${extension}`
-					code = await readFile(filePath = resolvePath(sourceDirectory, user, fileName), { encoding: "utf-8" })
-					break
-				} catch {}
-			}
-
-			if (code) {
-				const { srcLength, script: minifiedCode } = await processScript(
-					code,
-					{
-						minify,
-						scriptUser: user,
-						scriptName,
-						filePath,
-						mangleNames
-					}
-				)
-
-				const info: Info = {
-					file: `${user}/${fileName}`,
-					users: [ user ],
-					minLength: countHackmudCharacters(minifiedCode),
-					error: null,
-					srcLength
-				}
-
-				allInfo.push(info)
-
-				await writeFilePersistent(resolvePath(hackmudDirectory, user, "scripts", `${scriptName}.js`), minifiedCode)
-
-				onPush(info)
-			} else
-				usersByGlobalScriptsToPush.get(scriptName).add(user)
-		})
-	})
-
-	// foo.* (global)
-	if (wildScriptUsers.size) {
-		await forEachParallel(sourceDirectoryDirents || await readDirectory(resolvePath(sourceDirectory), { withFileTypes: true }), async dirent => {
-			const extension = getFileExtension(dirent.name)
-
-			if (!dirent.isFile() || !supportedExtensions.includes(extension))
-				return
-
-			const scriptName = getBaseName(dirent.name, extension)
-			const usersToPushTo = [ ...wildScriptUsers, ...usersByGlobalScriptsToPush.get(scriptName) ].filter(user => !scriptNamesAlreadyPushedByUser.get(user).has(scriptName))
-
-			if (!usersToPushTo.length)
-				return
-
-			const uniqueID = Math.floor(Math.random() * (2 ** 52)).toString(36).padStart(11, "0")
-			const filePath = resolvePath(sourceDirectory, dirent.name)
-
-			const { srcLength, script: minifiedCode } = await processScript(
-				await readFile(filePath, { encoding: "utf-8" }),
-				{
-					minify,
-					scriptUser: true,
-					scriptName,
-					uniqueID,
-					filePath,
-					mangleNames
-				}
-			)
-
-			const info: Info = {
-				file: dirent.name,
-				users: usersToPushTo,
-				minLength: countHackmudCharacters(minifiedCode),
-				error: null,
-				srcLength
-			}
-
-			await forEachParallel(usersToPushTo, user =>
-				writeFilePersistent(
-					resolvePath(
-						hackmudDirectory,
-						user,
-						`scripts/${scriptName}.js`
-					),
-					minifiedCode
-						.replace(new RegExp(`$${uniqueID}$SCRIPT_USER`, "g"), user)
-						.replace(new RegExp(`$${uniqueID}$FULL_SCRIPT_NAME`, "g"), `${user}.${scriptName}`)
-				)
-			)
-
-			allInfo.push(info)
-			onPush(info)
-		})
-	} else {
-		// foo.bar (global)
-		await forEachParallel(usersByGlobalScriptsToPush, async ([ scriptName, users ]) => {
-			let code
-			let fileName!: string
-			let filePath!: string
-
-			for (const extension of supportedExtensions) {
-				try {
-					fileName = `${scriptName}${extension}`
-					code = await readFile(filePath = resolvePath(sourceDirectory, fileName), { encoding: "utf-8" })
-					break
-				} catch {}
-			}
-
-			if (code) {
-				const uniqueID = Math.floor(Math.random() * (2 ** 52)).toString(36).padStart(11, "0")
-
-				const { srcLength, script: minifiedCode } = await processScript(
-					code,
-					{
-						minify,
-						scriptUser: true,
-						scriptName,
-						uniqueID,
-						filePath,
-						mangleNames
-					}
-				)
-
-				const info: Info = {
-					file: fileName,
-					users: [ ...users ],
-					minLength: countHackmudCharacters(minifiedCode),
-					error: null,
-					srcLength
-				}
-
-				await forEachParallel(users, user =>
-					writeFilePersistent(
-						resolvePath(
-							hackmudDirectory,
-							user,
-							`scripts/${scriptName}.js`
-						),
-						minifiedCode
-							.replace(new RegExp(`$${uniqueID}$SCRIPT_USER`, "g"), user)
-							.replace(new RegExp(`$${uniqueID}$FULL_SCRIPT_NAME`, "g"), `${user}.${scriptName}`)
-					)
-				)
-
-				allInfo.push(info)
-				onPush(info)
-			}
-		})
-	}
-
-	return allInfo
-}
-
-export default push
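
A hedged sketch of calling the push API defined above, again assuming a root re-export; the directories and script pattern are placeholders, and onPush receives the same Info objects push() returns:

import { push } from "hackmud-script-manager"

// push every script belonging to "myuser", minified, logging each write
const infos = await push("./src", "./hackmud", {
	scripts: "myuser.*",
	minify: true,
	mangleNames: false,
	onPush: info => console.log(`pushed ${info.file} to ${info.users.join(", ")} (${info.minLength} chars)`)
})

console.log(`${infos.length} script(s) pushed`)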
package/src/syncMacros.ts
DELETED
@@ -1,52 +0,0 @@
-import fs from "fs"
-import { basename as getBaseName, extname as getFileExtension, resolve as resolvePath } from "path"
-
-const { readFile, readdir: readDirectory, stat: getFileStatus, writeFile } = fs.promises
-
-export async function syncMacros(hackmudPath: string) {
-	const files = await readDirectory(hackmudPath, { withFileTypes: true })
-	const macros = new Map<string, { macro: string, date: Date }>()
-	const users: string[] = []
-
-	for (const file of files) {
-		if (!file.isFile())
-			continue
-
-		switch (getFileExtension(file.name)) {
-			case ".macros": {
-				const lines = (await readFile(resolvePath(hackmudPath, file.name), { encoding: "utf-8" })).split("\n")
-				const date = (await getFileStatus(resolvePath(hackmudPath, file.name))).mtime
-
-				for (let i = 0; i < lines.length / 2 - 1; i++) {
-					const macroName = lines[i * 2]
-					const curMacro = macros.get(macroName)
-
-					if (!curMacro || date > curMacro.date)
-						macros.set(macroName, { date, macro: lines[i * 2 + 1] })
-				}
-			} break
-
-			case ".key": {
-				users.push(getBaseName(file.name, ".key"))
-			} break
-		}
-	}
-
-	let macroFile = ""
-	let macrosSynced = 0
-
-	for (const [ name, { macro } ] of [ ...macros ].sort(([ a ], [ b ]) => (a as any > b as any) - (a as any < b as any))) {
-		if (macro[0] != macro[0].toLowerCase())
-			continue
-
-		macroFile += `${name}\n${macro}\n`
-		macrosSynced++
-	}
-
-	for (const user of users)
-		writeFile(resolvePath(hackmudPath, user + ".macros"), macroFile)
-
-	return { macrosSynced, usersSynced: users.length }
-}
-
-export default syncMacros
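
A minimal sketch of the syncMacros API above, assuming a root re-export; the hackmud path is a placeholder:

import { syncMacros } from "hackmud-script-manager"

const { macrosSynced, usersSynced } = await syncMacros("./hackmud")
console.log(`synced ${macrosSynced} macro(s) across ${usersSynced} user(s)`)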
package/src/test.ts
DELETED
@@ -1,59 +0,0 @@
-import fs from "fs"
-import { extname as getFileExtension, resolve as resolvePath } from "path"
-import { supportedExtensions } from "./constants.json"
-import processScript from "./processScript"
-
-const { readFile, readdir: readDirectory } = fs.promises
-
-export async function test(srcPath: string) {
-	const promises: Promise<any>[] = []
-
-	const errors: {
-		file: string
-		message: string
-		line: number
-	}[] = []
-
-	for (const dirent of await readDirectory(srcPath, { withFileTypes: true })) {
-		if (dirent.isDirectory()) {
-			promises.push(readDirectory(resolvePath(srcPath, dirent.name), { withFileTypes: true }).then(files => {
-				const promises: Promise<any>[] = []
-
-				for (const file of files) {
-					if (!file.isFile() || !supportedExtensions.includes(getFileExtension(file.name)))
-						continue
-
-					promises.push(
-						readFile(resolvePath(srcPath, dirent.name, file.name), { encoding: "utf-8" })
-							.then(processScript)
-							.then(({ warnings }) =>
-								errors.push(...warnings.map(({ message, line }) => ({
-									file: `${dirent.name}/${file.name}`,
-									message, line
-								})))
-							)
-					)
-				}
-
-				return Promise.all(promises)
-			}))
-		} else if (dirent.isFile() && supportedExtensions.includes(getFileExtension(dirent.name))) {
-			promises.push(
-				readFile(resolvePath(srcPath, dirent.name), { encoding: "utf-8" })
-					.then(processScript)
-					.then(({ warnings }) =>
-						errors.push(...warnings.map(({ message, line }) => ({
-							file: dirent.name,
-							message, line
-						})))
-					)
-			)
-		}
-	}
-
-	await Promise.all(promises)
-
-	return errors
-}
-
-export default test
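
A hedged sketch of the test API above, assuming a root re-export; "./src" is a placeholder source directory:

import { test } from "hackmud-script-manager"

// collect processScript warnings for every supported source file
const problems = await test("./src")

for (const { file, line, message } of problems)
	console.warn(`${file}:${line} ${message}`)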
package/src/tsconfig.json
DELETED
@@ -1,20 +0,0 @@
-{
-	"compilerOptions": {
-		"target": "ES2019", /* ES2019 is oldest properly supported ES version in Node 12 which is the oldest currently supported Node version */
-		"module": "ES2020",
-		"strict": true,
-		"esModuleInterop": true,
-		"skipLibCheck": true,
-		"forceConsistentCasingInFileNames": true,
-		"declaration": true,
-		"outDir": "../dist",
-		"useUnknownInCatchVariables": true,
-		"exactOptionalPropertyTypes": true,
-		"noImplicitOverride": true,
-		"moduleResolution": "Node",
-		"resolveJsonModule": true,
-		"emitDeclarationOnly": true
-	},
-	"exclude": [ "bin" ],
-	"references": [ { "path": ".." } ]
-}
package/src/watch.ts
DELETED
@@ -1,156 +0,0 @@
-import { countHackmudCharacters, writeFilePersistent } from "@samual/lib"
-import { watch as watchDirectory } from "chokidar"
-import fs from "fs"
-import { basename as getBaseName, extname as getFileExtension, resolve as resolvePath } from "path"
-import type { Info } from "."
-import { supportedExtensions } from "./constants.json"
-import generateTypings from "./generateTypings"
-import processScript from "./processScript"
-
-const { readFile, readdir: readDirectory } = fs.promises
-
-/**
- * Watches target file or folder for updates and builds and pushes updated file.
- *
- * @param srcDir path to folder containing source files
- * @param hackmudDir path to hackmud directory
- * @param users users to push to (pushes to all if empty)
- * @param scripts scripts to push from (pushes from all if empty)
- * @param onPush function that's called after each script has been built and written
- */
-export function watch(srcDir: string, hackmudDir: string, users: string[], scripts: string[], onPush?: (info: Info) => void, { genTypes }: { genTypes?: string | undefined } = {}) {
-	const watcher = watchDirectory("", { depth: 1, cwd: srcDir, awaitWriteFinish: { stabilityThreshold: 100 } }).on("change", async path => {
-		const extension = getFileExtension(path)
-
-		if (supportedExtensions.includes(extension)) {
-			const name = getBaseName(path, extension)
-			const fileName = getBaseName(path)
-
-			if (path == fileName) {
-				if (!scripts.length || scripts.includes(name)) {
-					const sourceCode = await readFile(resolvePath(srcDir, path), { encoding: "utf-8" })
-					const skips = new Map<string, string[]>()
-					const promisesSkips: Promise<any>[] = []
-
-					for (const dir of await readDirectory(srcDir, { withFileTypes: true })) {
-						if (!dir.isDirectory())
-							continue
-
-						promisesSkips.push(readDirectory(resolvePath(srcDir, dir.name), { withFileTypes: true }).then(files => {
-							for (const file of files) {
-								if (!file.isFile())
-									continue
-
-								const fileExtension = getFileExtension(file.name)
-
-								if (!supportedExtensions.includes(fileExtension))
-									continue
-
-								const name = getBaseName(file.name, fileExtension)
-								const skip = skips.get(name)
-
-								if (skip)
-									skip.push(dir.name)
-								else
-									skips.set(name, [ dir.name ])
-							}
-						}))
-					}
-
-					await Promise.all(promisesSkips)
-
-					let error = null
-
-					const { script, srcLength } = await processScript(sourceCode).catch(reason => {
-						error = reason
-
-						return {
-							script: "",
-							srcLength: 0
-						}
-					})
-
-					const info: Info = {
-						file: path,
-						users: [],
-						minLength: 0,
-						error,
-						srcLength
-					}
-
-					const promises: Promise<any>[] = []
-
-					if (!error) {
-						if (script) {
-							const skip = skips.get(name) || []
-
-							info.minLength = countHackmudCharacters(script)
-
-							if (!users.length) {
-								users = (await readDirectory(hackmudDir, { withFileTypes: true }))
-									.filter(a => a.isFile() && getFileExtension(a.name) == ".key")
-									.map(a => getBaseName(a.name, ".key"))
-							}
-
-							for (const user of users) {
-								if (skip.includes(user))
-									continue
-
-								info.users.push(user)
-								promises.push(writeFilePersistent(resolvePath(hackmudDir, user, "scripts", `${name}.js`), script))
-							}
-						} else
-							info.error = new Error("processed script was empty")
-					}
-
-					if (onPush) {
-						await Promise.all(promises)
-						onPush(info)
-					}
-				}
-			} else {
-				const user = getBaseName(resolvePath(path, ".."))
-
-				if ((!users.length || users.includes(user)) && (!scripts.length || scripts.includes(name))) {
-					const sourceCode = await readFile(resolvePath(srcDir, path), { encoding: "utf-8" })
-					let error = null
-
-					const { script, srcLength } = await processScript(sourceCode).catch(reason => {
-						error = reason
-
-						return {
-							script: "",
-							srcLength: 0
-						}
-					})
-
-					const info: Info = {
-						file: path,
-						users: [ user ],
-						minLength: 0,
-						error,
-						srcLength
-					}
-
-					if (!error) {
-						if (script) {
-							info.minLength = countHackmudCharacters(script)
-							await writeFilePersistent(resolvePath(hackmudDir, user, "scripts", `${name}.js`), script)
-						} else
-							info.error = new Error("processed script was empty")
-					}
-
-					onPush?.(info)
-				}
-			}
-		}
-	})
-
-	if (genTypes) {
-		generateTypings(srcDir, resolvePath(srcDir, genTypes), hackmudDir)
-		watcher.on("add", () => generateTypings(srcDir, resolvePath(srcDir, genTypes), hackmudDir))
-		watcher.on("unlink", () => generateTypings(srcDir, resolvePath(srcDir, genTypes), hackmudDir))
-	}
-}
-
-export default watch
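
A hedged sketch of the watch API above, assuming a root re-export; the paths and the genTypes file name are placeholders. Per the JSDoc, passing empty users and scripts arrays pushes to all users and from all scripts:

import { watch } from "hackmud-script-manager"

watch("./src", "./hackmud", [], [], info => {
	console.log(info.error ? `error in ${info.file}: ${info.error}` : `pushed ${info.file} (${info.minLength} chars)`)
}, { genTypes: "players.d.ts" })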
package/tsconfig.json
DELETED