@gesslar/muddy 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +89 -0
- package/UNLICENSE.txt +24 -0
- package/package.json +69 -0
- package/src/Muddy.js +738 -0
- package/src/Type.js +113 -0
- package/src/Watch.js +139 -0
- package/src/cli.js +88 -0
- package/src/modules/Action.js +158 -0
- package/src/modules/Alias.js +45 -0
- package/src/modules/Key.js +61 -0
- package/src/modules/Mfile.js +16 -0
- package/src/modules/MudletModule.js +188 -0
- package/src/modules/Script.js +59 -0
- package/src/modules/Timer.js +76 -0
- package/src/modules/Trigger.js +259 -0
- package/src/modules/Variable.js +82 -0
package/src/Muddy.js
ADDED
|
@@ -0,0 +1,738 @@
|
|
|
1
|
+
import {ActionBuilder as AB, ACTIVITY, ActionRunner as AR} from "@gesslar/actioneer"
|
|
2
|
+
import c from "@gesslar/colours"
|
|
3
|
+
import {Data, DirectoryObject, FileObject, FileSystem, Promised, Sass, Valid} from "@gesslar/toolkit"
|
|
4
|
+
import AdmZip from "adm-zip"
|
|
5
|
+
import {mkdtempSync} from "node:fs"
|
|
6
|
+
import os from "node:os"
|
|
7
|
+
import path from "node:path"
|
|
8
|
+
import {create, fragment} from "xmlbuilder2"
|
|
9
|
+
|
|
10
|
+
import Type from "./Type.js"
|
|
11
|
+
import Mfile from "./modules/Mfile.js"
|
|
12
|
+
|
|
13
|
+
/**
|
|
14
|
+
* Type imports.
|
|
15
|
+
*
|
|
16
|
+
* @import {Glog} from "@gesslar/toolkit"
|
|
17
|
+
* @import {XMLBuilder} from "xmlbuilder2"
|
|
18
|
+
* @import {MfileData, BaseContext, SrcContext} from "./Muddy.d.ts"
|
|
19
|
+
* @import {ModuleTypeContext, JsonFilesContext} from "./Muddy.d.ts"
|
|
20
|
+
* @import {JsonDefinition, JsonModule, JsonModulesContext} from "./Muddy.d.ts"
|
|
21
|
+
* @import {PackageNode, PackageContext, WorkContext} from "./Muddy.d.ts"
|
|
22
|
+
* @import {GeneratedContext, MfileResult} from "./Muddy.d.ts"
|
|
23
|
+
*/
|
|
24
|
+
|
|
25
|
+
let /** @type {Glog} */ glog
|
|
26
|
+
let /** @type {string} */ indent
|
|
27
|
+
|
|
28
|
+
const {IF, SPLIT} = ACTIVITY
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* Main Muddy package builder class.
|
|
32
|
+
*
|
|
33
|
+
* Orchestrates the process of converting a source directory structure into
|
|
34
|
+
* a Mudlet package (.mpackage) file by:
|
|
35
|
+
* - Reading package metadata from mfile
|
|
36
|
+
* - Discovering and processing module definitions (scripts, aliases, triggers,
|
|
37
|
+
* etc.)
|
|
38
|
+
* - Building an XML document representation
|
|
39
|
+
* - Packaging everything into a compressed .mpackage file
|
|
40
|
+
*/
|
|
41
|
+
export default class Muddy {
|
|
42
|
+
#projectDirectory
|
|
43
|
+
#srcDirectory
|
|
44
|
+
#temp
|
|
45
|
+
|
|
46
|
+
/**
|
|
47
|
+
* Main entry point for the Muddy package builder.
|
|
48
|
+
*
|
|
49
|
+
* @param {DirectoryObject} projectDirectory - The root directory of the project to build
|
|
50
|
+
* @param {Glog} log - Logger instance for output
|
|
51
|
+
* @returns {Promise<unknown>} The result of the build process
|
|
52
|
+
* @throws {Error} If execution fails at any step
|
|
53
|
+
*/
|
|
54
|
+
async run(projectDirectory, log) {
|
|
55
|
+
Valid.type(projectDirectory, "DirectoryObject")
|
|
56
|
+
Valid.type(log, "Glog")
|
|
57
|
+
|
|
58
|
+
this.#projectDirectory = projectDirectory
|
|
59
|
+
this.#srcDirectory = projectDirectory.getDirectory("src")
|
|
60
|
+
|
|
61
|
+
const temp = mkdtempSync(path.join(os.tmpdir(), "muddy-"))
|
|
62
|
+
this.#temp = new DirectoryObject(temp)
|
|
63
|
+
|
|
64
|
+
glog = log
|
|
65
|
+
indent = c`{OK}•{/} `
|
|
66
|
+
|
|
67
|
+
const builder = new AB(this)
|
|
68
|
+
const runner = new AR(builder)
|
|
69
|
+
|
|
70
|
+
try {
|
|
71
|
+
return await runner.run({
|
|
72
|
+
projectDirectory: this.#projectDirectory,
|
|
73
|
+
srcDirectory: this.#srcDirectory,
|
|
74
|
+
})
|
|
75
|
+
} catch(error) {
|
|
76
|
+
throw Sass.new("Executing Muddy.", error)
|
|
77
|
+
}
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
/**
 * Configures the action builder with the processing pipeline steps.
 *
 * The "Process modules" step uses SPLIT: #splitPackageDirs fans the context
 * out into one sub-context per module kind, the nested AB pipeline runs for
 * each of them, and #rejoinPackageDirs folds the settled results back into
 * the single main context. The "Write .output" step uses IF, so it only
 * runs when the mfile declares `outputFile`.
 *
 * @param {AB} builder - Builder instance
 * @throws {Error} Wrapped via Sass if assembling the pipeline fails
 */
async setup(builder) {
  try {
    builder
      .do("Read mfile", this.#readMfile)
      // Fan out per module kind; each branch runs the nested pipeline.
      .do("Process modules", SPLIT,
        this.#splitPackageDirs,
        this.#rejoinPackageDirs,
        new AB()
          .do("Scan for Package JSON files", this.#scanForPackageJsonFiles)
          .do("Load the discovered JSONs", this.#loadJsonDatums)
          .do("Determine the shape of package branch", this.#mapPackage)
          .do("Discover and load Lua", this.#loadLua)
          .do("Create module from Lua", this.#createModule)
          .do("Generate XML fragment", this.#buildXML)
      )
      .do("Setup temporary workspace", this.#setupTemporaryWorkspace)
      .do("Generate XML Document", this.#generateXMLDocument)
      .do("Generate config.lua", this.#generateConfigLua)
      .do("Process resources", this.#processResources)
      // Zip the staged work directory into the .mpackage.
      .do("Zzzzzzzzzzip", this.#closeTheBarnDoor)
      // Conditional: only when mfile.outputFile is truthy.
      .do("Write .output", IF, ctx => ctx.mfile.outputFile, this.#writeOutputFile)
      // Runs at the end regardless, to tear down the temp directory.
      .done(this.#cleanUp)
  } catch(error) {
    throw Sass.new("Building the action.", error)
  }
}
|
|
111
|
+
|
|
112
|
+
/**
|
|
113
|
+
* Reads and validates the mfile metadata file from the project root.
|
|
114
|
+
*
|
|
115
|
+
* @private
|
|
116
|
+
* @param {DirectoryObject} projectDirectory - The project root directory
|
|
117
|
+
* @returns {Promise<MfileResult>}
|
|
118
|
+
* The context with loaded mfile data, or failure object if mfile doesn't exist
|
|
119
|
+
*/
|
|
120
|
+
#readMfile = async ctx => {
|
|
121
|
+
const {projectDirectory} = ctx
|
|
122
|
+
const mfileObject = projectDirectory.getFile("mfile")
|
|
123
|
+
|
|
124
|
+
if(!await mfileObject.exists)
|
|
125
|
+
throw Sass.new(`No such file ${mfileObject.url}`)
|
|
126
|
+
|
|
127
|
+
glog.info(c`Pulling metadata from {other}mfile{/}.`)
|
|
128
|
+
const mfile = await mfileObject.loadData()
|
|
129
|
+
|
|
130
|
+
glog.table(mfile)
|
|
131
|
+
if(mfile.outputFile === true)
|
|
132
|
+
glog.info(
|
|
133
|
+
c`Will write {other}.output{/} file at root of project with json `+
|
|
134
|
+
`object containing package name and file location at build end.`
|
|
135
|
+
)
|
|
136
|
+
|
|
137
|
+
return Object.assign(ctx, {mfile})
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
/**
|
|
141
|
+
* Splits processing into parallel tasks for each module type (aliases, scripts, triggers, etc.).
|
|
142
|
+
*
|
|
143
|
+
* @private
|
|
144
|
+
* @param {SrcContext} ctx - The context object
|
|
145
|
+
* @returns {Promise<Array<ModuleTypeContext>>} Array of contexts for each module type
|
|
146
|
+
*/
|
|
147
|
+
#splitPackageDirs = async ctx => {
|
|
148
|
+
const {srcDirectory} = ctx
|
|
149
|
+
|
|
150
|
+
return Type.PLURAL.map(e => ({kind: e, srcDirectory}))
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
/**
|
|
154
|
+
* Rejoins the split parallel processing results back into the main context.
|
|
155
|
+
*
|
|
156
|
+
* @private
|
|
157
|
+
* @param {SrcContext} orig - The original context object
|
|
158
|
+
* @param {Array<Promise>} settled - The settled promises from parallel processing
|
|
159
|
+
* @returns {Promise<SrcContext & {packages: Array<XMLBuilder>}>} Context with packages array containing all processed modules
|
|
160
|
+
* @throws {Error} If any of the parallel tasks rejected
|
|
161
|
+
*/
|
|
162
|
+
#rejoinPackageDirs = async(orig, settled) => {
|
|
163
|
+
if(Promised.hasRejected(settled))
|
|
164
|
+
Promised.throw(`Processing package JSON files.`, settled)
|
|
165
|
+
|
|
166
|
+
const values = Promised.values(settled)
|
|
167
|
+
|
|
168
|
+
return Object.assign(orig, {packages: values})
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
/**
|
|
172
|
+
* Scans for JSON definition files matching the module type pattern.
|
|
173
|
+
*
|
|
174
|
+
* @private
|
|
175
|
+
* @param {ModuleTypeContext} ctx - The context object
|
|
176
|
+
* @returns {Promise<JsonFilesContext>} Context with jsonFiles array
|
|
177
|
+
*/
|
|
178
|
+
#scanForPackageJsonFiles = async ctx => {
|
|
179
|
+
const {kind, srcDirectory} = ctx
|
|
180
|
+
|
|
181
|
+
glog.info(c`Scanning for {${kind}}${kind}{/}`)
|
|
182
|
+
|
|
183
|
+
const pattern = `**/${kind}.json`
|
|
184
|
+
const found = await srcDirectory.glob(pattern)
|
|
185
|
+
const jsonFiles = found.files
|
|
186
|
+
|
|
187
|
+
jsonFiles.forEach(e =>
|
|
188
|
+
glog
|
|
189
|
+
.use(indent)
|
|
190
|
+
.success(c`Found {${kind}}${e.relativeTo(srcDirectory)}{/}`)
|
|
191
|
+
)
|
|
192
|
+
|
|
193
|
+
return {srcDirectory, kind, jsonFiles}
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
/**
|
|
197
|
+
* Loads JSON definition files and normalizes their boolean values.
|
|
198
|
+
*
|
|
199
|
+
* @private
|
|
200
|
+
* @param {JsonFilesContext} ctx - The context object
|
|
201
|
+
* @returns {Promise<JsonModulesContext>} Context with jsonModules array
|
|
202
|
+
*/
|
|
203
|
+
#loadJsonDatums = async ctx => {
|
|
204
|
+
const {jsonFiles} = ctx
|
|
205
|
+
|
|
206
|
+
const jsonModules = []
|
|
207
|
+
|
|
208
|
+
for(const jsonFile of jsonFiles) {
|
|
209
|
+
const defs = await jsonFile.loadData()
|
|
210
|
+
|
|
211
|
+
this.#normalizeBooleanValues(defs)
|
|
212
|
+
|
|
213
|
+
jsonModules.push({jsonFile, jsonDefinitions: defs})
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
return Object.assign(ctx, {jsonModules})
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
/**
|
|
220
|
+
* Maps JSON modules into a hierarchical package tree structure that mirrors the directory layout.
|
|
221
|
+
*
|
|
222
|
+
* Creates a nested tree where each node represents a directory level, with definitions
|
|
223
|
+
* attached to the appropriate nodes based on their file system location.
|
|
224
|
+
*
|
|
225
|
+
* @private
|
|
226
|
+
* @param {JsonModulesContext} ctx - The context object
|
|
227
|
+
* @returns {Promise<PackageContext>} Context with pkg (package tree) property
|
|
228
|
+
*/
|
|
229
|
+
#mapPackage = async ctx => {
|
|
230
|
+
const {jsonModules, srcDirectory, kind} = ctx
|
|
231
|
+
const top = srcDirectory.trail.length
|
|
232
|
+
const maptory = () => new Map([
|
|
233
|
+
["name", ""],
|
|
234
|
+
// Ordered set of child package nodes
|
|
235
|
+
["children", new Set()],
|
|
236
|
+
// Ordered array of JSON definition objects that live at this node
|
|
237
|
+
["definitions", []],
|
|
238
|
+
// Ordered array of module instances created from definitions in this subtree
|
|
239
|
+
["modules", []],
|
|
240
|
+
["parent", null],
|
|
241
|
+
["jsonFile", null],
|
|
242
|
+
])
|
|
243
|
+
|
|
244
|
+
const pkg = maptory().set("name", "root")
|
|
245
|
+
|
|
246
|
+
// when we go in, we need to:
|
|
247
|
+
// 1. for each item in the trail, create a nested object, using the file
|
|
248
|
+
// as the key (cos we can find it again!) -> GUI
|
|
249
|
+
// 2. every step that ISN'T the last one, we create a new nest that is a
|
|
250
|
+
// folder, and we, on the LAST one, do the full one showing the things
|
|
251
|
+
// like GUI, but also for ThresholdUI (the nested test one)
|
|
252
|
+
for(const {jsonFile, jsonDefinitions} of jsonModules) {
|
|
253
|
+
let trail = jsonFile.parent.trail.slice(top + 1)
|
|
254
|
+
|
|
255
|
+
// The first element in the trail is the kind directory (e.g. "scripts")
|
|
256
|
+
// which Mudlet does not represent as a folder node. Strip it so that
|
|
257
|
+
// directories under the kind (e.g. "GUI", "Test Level 1") become the
|
|
258
|
+
// first-level children in the package tree.
|
|
259
|
+
if(trail.length > 0 && trail[0] === kind)
|
|
260
|
+
trail = trail.slice(1)
|
|
261
|
+
|
|
262
|
+
// Start from the root package node and walk/create children for each
|
|
263
|
+
// element of the trail so the in-memory tree mirrors the directory tree.
|
|
264
|
+
let node = pkg
|
|
265
|
+
if(trail.length > 0) {
|
|
266
|
+
for(const pieceOfBranch of trail) {
|
|
267
|
+
const children = node.get("children")
|
|
268
|
+
|
|
269
|
+
// Reuse an existing child node with the same name if present to
|
|
270
|
+
// preserve both structure and discovery order.
|
|
271
|
+
let child = [...children].find(c => c.get("name") === pieceOfBranch)
|
|
272
|
+
if(!child) {
|
|
273
|
+
child = maptory().set("name", pieceOfBranch)
|
|
274
|
+
children.add(child)
|
|
275
|
+
}
|
|
276
|
+
|
|
277
|
+
node = child
|
|
278
|
+
}
|
|
279
|
+
} else {
|
|
280
|
+
// JSON file lives directly under src/, attach it to a child node with
|
|
281
|
+
// an empty name to distinguish it from the virtual "root".
|
|
282
|
+
const children = node.get("children")
|
|
283
|
+
let child = [...children].find(c => c.get("name") === "")
|
|
284
|
+
if(!child) {
|
|
285
|
+
child = maptory().set("name", "")
|
|
286
|
+
children.add(child)
|
|
287
|
+
}
|
|
288
|
+
|
|
289
|
+
node = child
|
|
290
|
+
}
|
|
291
|
+
|
|
292
|
+
// Attach JSON file and append its definitions at this leaf node.
|
|
293
|
+
node.set("jsonFile", jsonFile)
|
|
294
|
+
const defs = node.get("definitions")
|
|
295
|
+
defs.push(...jsonDefinitions)
|
|
296
|
+
}
|
|
297
|
+
|
|
298
|
+
return Object.assign(ctx, {pkg})
|
|
299
|
+
}
|
|
300
|
+
|
|
301
|
+
/**
|
|
302
|
+
* Loads Lua script files referenced in JSON definitions.
|
|
303
|
+
*
|
|
304
|
+
* @private
|
|
305
|
+
* @param {PackageContext} ctx - The context object
|
|
306
|
+
* @returns {Promise<PackageContext>} The context object
|
|
307
|
+
*/
|
|
308
|
+
#loadLua = async ctx => {
|
|
309
|
+
const {kind, pkg, srcDirectory} = ctx
|
|
310
|
+
|
|
311
|
+
await this.#_loadLua(kind, pkg, srcDirectory)
|
|
312
|
+
|
|
313
|
+
return ctx
|
|
314
|
+
}
|
|
315
|
+
|
|
316
|
+
/**
|
|
317
|
+
* Recursively loads Lua scripts for a package tree node and its children.
|
|
318
|
+
*
|
|
319
|
+
* For each definition that references a script file, this loads the .lua file
|
|
320
|
+
* from disk and attaches it to the definition.
|
|
321
|
+
*
|
|
322
|
+
* @private
|
|
323
|
+
* @param {string} kind - The module type
|
|
324
|
+
* @param {PackageNode} node - The current package tree node
|
|
325
|
+
* @param {DirectoryObject} srcDirectory - The src directory for relative path resolution
|
|
326
|
+
* @returns {Promise<void>}
|
|
327
|
+
*/
|
|
328
|
+
#_loadLua = async(kind, node, srcDirectory) => {
|
|
329
|
+
// First recurse into children so we always walk the whole tree.
|
|
330
|
+
const children = node.get("children")
|
|
331
|
+
for(const child of children)
|
|
332
|
+
await this.#_loadLua(kind, child, srcDirectory)
|
|
333
|
+
|
|
334
|
+
const jsonFile = node.get("jsonFile")
|
|
335
|
+
const definitions = node.get("definitions")
|
|
336
|
+
|
|
337
|
+
if(!jsonFile || !definitions || definitions.length === 0)
|
|
338
|
+
return
|
|
339
|
+
|
|
340
|
+
for(const jsonDefinition of definitions) {
|
|
341
|
+
const {name: scriptName = "", script = ""} = jsonDefinition
|
|
342
|
+
|
|
343
|
+
if(!scriptName || script)
|
|
344
|
+
continue
|
|
345
|
+
|
|
346
|
+
const expected = `${scriptName.replaceAll(/\s/g, "_")}.lua`
|
|
347
|
+
const scriptFile = jsonFile.parent.getFile(expected)
|
|
348
|
+
if(!await scriptFile.exists) {
|
|
349
|
+
glog.warn(c`{${kind}}${scriptFile.relativeTo(srcDirectory)}{/} does not exist`)
|
|
350
|
+
jsonDefinition.script = ""
|
|
351
|
+
} else {
|
|
352
|
+
const relative = scriptFile.relativeTo(this.#projectDirectory)
|
|
353
|
+
|
|
354
|
+
glog.success("Using script from", relative, "for", Type.TO_SINGLE[kind], scriptName)
|
|
355
|
+
|
|
356
|
+
const loaded = await scriptFile.read()
|
|
357
|
+
|
|
358
|
+
jsonDefinition.script = loaded
|
|
359
|
+
}
|
|
360
|
+
}
|
|
361
|
+
}
|
|
362
|
+
|
|
363
|
+
/**
|
|
364
|
+
* Creates module instances from the package tree definitions.
|
|
365
|
+
*
|
|
366
|
+
* @private
|
|
367
|
+
* @param {PackageContext} ctx - The context object
|
|
368
|
+
* @returns {PackageContext} The context object
|
|
369
|
+
*/
|
|
370
|
+
#createModule = ctx => {
|
|
371
|
+
const {pkg, kind} = ctx
|
|
372
|
+
const cl = Type.CLASS[kind]
|
|
373
|
+
|
|
374
|
+
const modules = this.#_createModule(cl, pkg, true)
|
|
375
|
+
|
|
376
|
+
pkg.set("modules", modules)
|
|
377
|
+
|
|
378
|
+
return ctx
|
|
379
|
+
}
|
|
380
|
+
|
|
381
|
+
/**
|
|
382
|
+
* Recursively creates module instances for a package tree node.
|
|
383
|
+
*
|
|
384
|
+
* Creates module instances for all definitions in the tree, wrapping directory
|
|
385
|
+
* structures in folder modules to preserve the hierarchy.
|
|
386
|
+
*
|
|
387
|
+
* @private
|
|
388
|
+
* @param {Function} cl - The module class constructor
|
|
389
|
+
* @param {PackageNode} node - The current package tree node
|
|
390
|
+
* @param {boolean} [isRoot=false] - Whether this is the root node
|
|
391
|
+
* @returns {Array<unknown>} Array of module instances
|
|
392
|
+
*/
|
|
393
|
+
#_createModule = (cl, node, isRoot=false) => {
|
|
394
|
+
const modules = []
|
|
395
|
+
|
|
396
|
+
// Recursively build modules for all children of this node and collect
|
|
397
|
+
// their resulting modules.
|
|
398
|
+
const children = node.get("children")
|
|
399
|
+
for(const child of children) {
|
|
400
|
+
const childModules = this.#_createModule(cl, child, false)
|
|
401
|
+
modules.push(...childModules)
|
|
402
|
+
}
|
|
403
|
+
|
|
404
|
+
// Then this node's own definitions become leaf modules.
|
|
405
|
+
const definitions = node.get("definitions")
|
|
406
|
+
if(definitions && definitions.length > 0) {
|
|
407
|
+
for(const def of definitions)
|
|
408
|
+
modules.push(new cl(def))
|
|
409
|
+
}
|
|
410
|
+
|
|
411
|
+
// For the root package node we never create a folder wrapper; its modules
|
|
412
|
+
// are the concatenation of its direct children and any nameless/top-level
|
|
413
|
+
// nodes. For any other node with a non-empty name, we wrap its modules in
|
|
414
|
+
// a folder Script/ScriptGroup so that directory structure is preserved.
|
|
415
|
+
if(isRoot) {
|
|
416
|
+
node.set("modules", modules)
|
|
417
|
+
|
|
418
|
+
return modules
|
|
419
|
+
}
|
|
420
|
+
|
|
421
|
+
const name = node.get("name") ?? ""
|
|
422
|
+
|
|
423
|
+
if(name.length > 0 && modules.length > 0) {
|
|
424
|
+
const folder = new cl({
|
|
425
|
+
name,
|
|
426
|
+
isFolder: "yes",
|
|
427
|
+
isActive: "yes",
|
|
428
|
+
script: "",
|
|
429
|
+
})
|
|
430
|
+
|
|
431
|
+
modules.forEach(m => folder.addChild(m))
|
|
432
|
+
|
|
433
|
+
node.set("modules", [folder])
|
|
434
|
+
|
|
435
|
+
return [folder]
|
|
436
|
+
}
|
|
437
|
+
|
|
438
|
+
node.set("modules", modules)
|
|
439
|
+
|
|
440
|
+
return modules
|
|
441
|
+
}
|
|
442
|
+
|
|
443
|
+
/**
|
|
444
|
+
* Builds XML fragments for a package type.
|
|
445
|
+
*
|
|
446
|
+
* @private
|
|
447
|
+
* @param {PackageContext} ctx - The context object
|
|
448
|
+
* @returns {XMLBuilder} XML fragment for the package
|
|
449
|
+
*/
|
|
450
|
+
#buildXML = ctx => {
|
|
451
|
+
const {kind, pkg} = ctx
|
|
452
|
+
const packageTag = Type.PACKAGES[kind]
|
|
453
|
+
const modules = pkg.get("modules") ?? []
|
|
454
|
+
const packageXml = this.#_buildXML(modules, kind)
|
|
455
|
+
|
|
456
|
+
const frag = fragment().ele(packageTag).import(packageXml)
|
|
457
|
+
|
|
458
|
+
return frag
|
|
459
|
+
}
|
|
460
|
+
|
|
461
|
+
/**
|
|
462
|
+
* Builds XML fragments from an array of module instances.
|
|
463
|
+
*
|
|
464
|
+
* @private
|
|
465
|
+
* @param {Array<unknown>} src - An ordered array of modules
|
|
466
|
+
* @returns {XMLBuilder} The XML fragment
|
|
467
|
+
*/
|
|
468
|
+
#_buildXML = src => {
|
|
469
|
+
const frag = fragment()
|
|
470
|
+
|
|
471
|
+
if(!src || src.length === 0)
|
|
472
|
+
return frag
|
|
473
|
+
|
|
474
|
+
for(const module of src)
|
|
475
|
+
frag.import(module.toXMLFragment())
|
|
476
|
+
|
|
477
|
+
return frag
|
|
478
|
+
}
|
|
479
|
+
|
|
480
|
+
/**
|
|
481
|
+
* Creates a temporary work directory for staging package files.
|
|
482
|
+
*
|
|
483
|
+
* @private
|
|
484
|
+
* @param {SrcContext & {packages: Array<XMLBuilder>}} ctx - The context object
|
|
485
|
+
* @returns {Promise<WorkContext>} Context with workDirectory property
|
|
486
|
+
*/
|
|
487
|
+
#setupTemporaryWorkspace = async ctx => {
|
|
488
|
+
ctx.workDirectory = this.#temp.getDirectory("work")
|
|
489
|
+
await ctx.workDirectory.assureExists()
|
|
490
|
+
|
|
491
|
+
return ctx
|
|
492
|
+
}
|
|
493
|
+
|
|
494
|
+
/**
|
|
495
|
+
* Generates the complete Mudlet package XML document.
|
|
496
|
+
*
|
|
497
|
+
* Combines all module XML fragments into a single MudletPackage XML document
|
|
498
|
+
* with proper DTD declaration.
|
|
499
|
+
*
|
|
500
|
+
* @private
|
|
501
|
+
* @param {WorkContext} ctx - The context object
|
|
502
|
+
* @returns {Promise<WorkContext & {xmlFile: FileObject}>} Context with xmlFile property
|
|
503
|
+
*/
|
|
504
|
+
#generateXMLDocument = async ctx => {
|
|
505
|
+
glog.info(`Converting scanned data to Mudlet package XML now`)
|
|
506
|
+
|
|
507
|
+
const {packages: xmlFragments, mfile, workDirectory} = ctx
|
|
508
|
+
|
|
509
|
+
const root = create({version: "1.0", encoding: "UTF-8"})
|
|
510
|
+
.ele("MudletPackage", {version: "1.001"})
|
|
511
|
+
.dtd("MudletPackage")
|
|
512
|
+
|
|
513
|
+
xmlFragments.forEach(e => root.import(e))
|
|
514
|
+
const output = root.end({prettyPrint: true})
|
|
515
|
+
const outputFile = workDirectory.getFile(`${mfile.package}.xml`)
|
|
516
|
+
|
|
517
|
+
glog.info(`XML created successfully, writing it to disk`)
|
|
518
|
+
|
|
519
|
+
await outputFile.write(output)
|
|
520
|
+
|
|
521
|
+
Object.assign(ctx, {xmlFile: outputFile})
|
|
522
|
+
|
|
523
|
+
return ctx
|
|
524
|
+
}
|
|
525
|
+
|
|
526
|
+
/**
|
|
527
|
+
* Generates the config.lua file with package metadata.
|
|
528
|
+
*
|
|
529
|
+
* @private
|
|
530
|
+
* @param {WorkContext} ctx - The context object
|
|
531
|
+
* @returns {Promise<WorkContext & {configFile: FileObject}>} Context with configFile property
|
|
532
|
+
*/
|
|
533
|
+
#generateConfigLua = async ctx => {
|
|
534
|
+
const {mfile, workDirectory} = ctx
|
|
535
|
+
|
|
536
|
+
const out = []
|
|
537
|
+
for(const [k,v] of Mfile.MFILE_TO_CONFIG.entries())
|
|
538
|
+
out.push(`${v} = [[${mfile[k]}]]`)
|
|
539
|
+
|
|
540
|
+
// This isn't sourced anywhere, so we just make it up.
|
|
541
|
+
out.push(`created = [[${this.#iso()}]]`)
|
|
542
|
+
const configFile = workDirectory.getFile("config.lua")
|
|
543
|
+
await configFile.write(out.join("\n"))
|
|
544
|
+
|
|
545
|
+
ctx.configFile = configFile
|
|
546
|
+
|
|
547
|
+
return ctx
|
|
548
|
+
}
|
|
549
|
+
|
|
550
|
+
/**
|
|
551
|
+
* Processes resource files (icon, additional files) from src/resources.
|
|
552
|
+
*
|
|
553
|
+
* Copies icon to .mudlet/Icon/ directory and recursively copies all other
|
|
554
|
+
* resources to the work directory.
|
|
555
|
+
*
|
|
556
|
+
* @private
|
|
557
|
+
* @param {GeneratedContext} ctx - The context object
|
|
558
|
+
* @returns {Promise<GeneratedContext>} The context object
|
|
559
|
+
*/
|
|
560
|
+
#processResources = async ctx => {
|
|
561
|
+
const {mfile, workDirectory, srcDirectory} = ctx
|
|
562
|
+
|
|
563
|
+
const resourcesDirectory = srcDirectory.getDirectory("resources")
|
|
564
|
+
if(!await resourcesDirectory.exists) {
|
|
565
|
+
glog.warn(
|
|
566
|
+
c`No such directory '${resourcesDirectory.relativeTo(srcDirectory)}'`
|
|
567
|
+
)
|
|
568
|
+
|
|
569
|
+
return ctx
|
|
570
|
+
}
|
|
571
|
+
|
|
572
|
+
if(mfile.icon) {
|
|
573
|
+
const srcIcon = resourcesDirectory.getFile(mfile.icon)
|
|
574
|
+
if(!await srcIcon.exists) {
|
|
575
|
+
glog.warn(
|
|
576
|
+
c`No such icon file '${srcIcon.relativeTo(srcIcon.parent)}'`
|
|
577
|
+
)
|
|
578
|
+
} else {
|
|
579
|
+
const iconData = await srcIcon.readBinary()
|
|
580
|
+
const destIconDir = workDirectory.getDirectory(".mudlet").getDirectory("Icon")
|
|
581
|
+
await destIconDir.assureExists({recursive: true})
|
|
582
|
+
|
|
583
|
+
const destIcon = destIconDir.getFile(srcIcon.name)
|
|
584
|
+
await destIcon.writeBinary(iconData)
|
|
585
|
+
}
|
|
586
|
+
}
|
|
587
|
+
|
|
588
|
+
// Now we just literally copy everything from resources into the main work
|
|
589
|
+
// directory. Mudlet will do the same thing, allowing the entire structure
|
|
590
|
+
// to be replicated in a predicktable fashion.
|
|
591
|
+
await this.#recursiveResourcesCopy(resourcesDirectory, workDirectory)
|
|
592
|
+
|
|
593
|
+
return ctx
|
|
594
|
+
}
|
|
595
|
+
|
|
596
|
+
/**
|
|
597
|
+
* Recursively copies files and directories from resources to work directory.
|
|
598
|
+
*
|
|
599
|
+
* @private
|
|
600
|
+
* @param {DirectoryObject} res - The source resources directory
|
|
601
|
+
* @param {DirectoryObject} work - The destination work directory
|
|
602
|
+
* @returns {Promise<void>}
|
|
603
|
+
*/
|
|
604
|
+
#recursiveResourcesCopy = async(res, work) => {
|
|
605
|
+
const {files, directories} = await res.read()
|
|
606
|
+
|
|
607
|
+
await work.assureExists({recursive: true})
|
|
608
|
+
|
|
609
|
+
// Witchcraft. I will not be taking questions at this time.
|
|
610
|
+
await Promised.settle(
|
|
611
|
+
[files, directories].flat().map(async e => {
|
|
612
|
+
if(e.isFile) {
|
|
613
|
+
await e.copy(work.getFile(e.name).path)
|
|
614
|
+
} else if(e.isDirectory) {
|
|
615
|
+
await this.#recursiveResourcesCopy(e, work.getDirectory(e.name))
|
|
616
|
+
}
|
|
617
|
+
})
|
|
618
|
+
)
|
|
619
|
+
}
|
|
620
|
+
|
|
621
|
+
/**
|
|
622
|
+
* Creates the final .mpackage zip file from the work directory.
|
|
623
|
+
*
|
|
624
|
+
* @private
|
|
625
|
+
* @param {GeneratedContext} ctx - The context object
|
|
626
|
+
* @returns {Promise<GeneratedContext>} The context object
|
|
627
|
+
*/
|
|
628
|
+
#closeTheBarnDoor = async ctx => {
|
|
629
|
+
const {mfile, projectDirectory, workDirectory} = ctx
|
|
630
|
+
|
|
631
|
+
const mpackage = new AdmZip()
|
|
632
|
+
|
|
633
|
+
glog.info(`Adding contents of '${workDirectory.path}'`)
|
|
634
|
+
mpackage.addLocalFolder(workDirectory.path)
|
|
635
|
+
|
|
636
|
+
const mpackageFile = projectDirectory.getFile(`${mfile.package}.mpackage`)
|
|
637
|
+
if(await mpackageFile.exists)
|
|
638
|
+
await mpackageFile.delete()
|
|
639
|
+
|
|
640
|
+
mpackage.writeZip(mpackageFile.path)
|
|
641
|
+
const size = await mpackageFile.size()
|
|
642
|
+
|
|
643
|
+
glog.info(c`{<B}${mpackageFile.path}{B>} written to disk (${size.toLocaleString()} bytes)`)
|
|
644
|
+
|
|
645
|
+
return Object.assign(ctx, {mpackageFile})
|
|
646
|
+
}
|
|
647
|
+
|
|
648
|
+
/**
 * Writes a .output file at the project root describing the built package.
 *
 * The file contains a single JSON object with the package name and the
 * package file's path (relative to the project when possible, prefixed
 * with "/"), followed by a trailing newline. Any existing .output file is
 * replaced. Only runs when mfile.outputFile is truthy (gated by IF in
 * setup's pipeline).
 *
 * @private
 * @param {GeneratedContext} ctx - The context object (needs mfile, projectDirectory, mpackageFile)
 * @returns {Promise<GeneratedContext>} The context object
 */
#writeOutputFile = async ctx => {
  const {mfile, projectDirectory, mpackageFile} = ctx
  const outputFile = projectDirectory.getFile(".output")
  const output = JSON.stringify({
    name: mfile.package,
    path: Data.prepend(FileSystem.relativeOrAbsolute(projectDirectory, mpackageFile), "/")
  })

  // Replace rather than append to any previous .output.
  if(await outputFile.exists)
    await outputFile.delete()

  await outputFile.write(Data.append(output, "\n"))

  const size = await outputFile.size()
  glog.info(c`{<B}${outputFile.path}{B>} written to disk (${size.toLocaleString()} bytes)`)

  return ctx
}
|
|
666
|
+
|
|
667
|
+
/**
|
|
668
|
+
* Cleans up temporary directory after package creation.
|
|
669
|
+
*
|
|
670
|
+
* @private
|
|
671
|
+
* @param {GeneratedContext} ctx - The context object
|
|
672
|
+
* @returns {Promise<GeneratedContext>} The context object
|
|
673
|
+
*/
|
|
674
|
+
#cleanUp = async ctx => {
|
|
675
|
+
await this.#recursiveDelete(this.#temp, true)
|
|
676
|
+
|
|
677
|
+
return ctx
|
|
678
|
+
}
|
|
679
|
+
|
|
680
|
+
/* Utility methods */
|
|
681
|
+
|
|
682
|
+
/**
|
|
683
|
+
* Normalizes boolean values in object entries to 'yes'/'no' strings.
|
|
684
|
+
*
|
|
685
|
+
* Mudlet XML format expects 'yes'/'no' strings rather than true/false booleans.
|
|
686
|
+
*
|
|
687
|
+
* @private
|
|
688
|
+
* @param {Array<JsonDefinition>} object - Array of objects to normalize
|
|
689
|
+
* @returns {Array<JsonDefinition>} The normalized object array
|
|
690
|
+
*/
|
|
691
|
+
#normalizeBooleanValues = object => {
|
|
692
|
+
for(const entry of object) {
|
|
693
|
+
Object.entries(entry).forEach(([k,v]) => {
|
|
694
|
+
if(v === true)
|
|
695
|
+
entry[k] = "yes"
|
|
696
|
+
else if(v === false)
|
|
697
|
+
entry[k] = "no"
|
|
698
|
+
})
|
|
699
|
+
|
|
700
|
+
}
|
|
701
|
+
|
|
702
|
+
return object
|
|
703
|
+
}
|
|
704
|
+
|
|
705
|
+
/**
|
|
706
|
+
* Recursively deletes a directory and its contents.
|
|
707
|
+
*
|
|
708
|
+
* @private
|
|
709
|
+
* @param {DirectoryObject} dir - The directory to delete
|
|
710
|
+
* @param {boolean} [includeSelf=false] - Whether to delete the directory itself
|
|
711
|
+
* @returns {Promise<void>}
|
|
712
|
+
*/
|
|
713
|
+
#recursiveDelete = async(dir, includeSelf=false) => {
|
|
714
|
+
const {files, directories} = await dir.read()
|
|
715
|
+
|
|
716
|
+
await Promised.settle(
|
|
717
|
+
[files, directories].flat().map(async e => {
|
|
718
|
+
if(e.isFile) {
|
|
719
|
+
await e.delete()
|
|
720
|
+
} else if(e.isDirectory) {
|
|
721
|
+
await this.#recursiveDelete(e)
|
|
722
|
+
|
|
723
|
+
await e.delete()
|
|
724
|
+
}
|
|
725
|
+
})
|
|
726
|
+
)
|
|
727
|
+
|
|
728
|
+
includeSelf && await dir.delete()
|
|
729
|
+
}
|
|
730
|
+
|
|
731
|
+
/**
|
|
732
|
+
* Generates an ISO 8601 formatted timestamp with timezone.
|
|
733
|
+
*
|
|
734
|
+
* @private
|
|
735
|
+
* @returns {string} ISO timestamp in format YYYY-MM-DDTHH:mm:ss+0000
|
|
736
|
+
*/
|
|
737
|
+
#iso = () => new Date().toISOString().replace(/\.\d{3}Z$/, "+0000")
|
|
738
|
+
}
|