sandstone-cli 1.1.4 → 1.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/commands/build.d.ts +2 -1
- package/lib/commands/build.js +26 -8
- package/lib/commands/create.js +1 -1
- package/lib/commands/dependency.d.ts +0 -1
- package/lib/commands/dependency.js +2 -5
- package/lib/commands/index.d.ts +1 -1
- package/lib/commands/index.js +1 -1
- package/lib/commands/watch.js +3 -9
- package/package.json +4 -13
- package/src/commands/build.ts +29 -11
- package/src/commands/create.ts +1 -1
- package/src/commands/dependency.ts +2 -6
- package/src/commands/index.ts +1 -1
- package/src/commands/watch.ts +4 -11
- package/src/index.ts +1 -1
- package/bin/.env.sand +0 -1
- package/bin/run.cjs +0 -25
- package/lib/build/index.d.ts +0 -27
- package/lib/build/index.js +0 -454
- package/src/build/index.ts +0 -584
package/lib/build/index.js
DELETED
|
@@ -1,454 +0,0 @@
|
|
|
1
|
-
import path from 'path';
|
|
2
|
-
import * as os from 'os';
|
|
3
|
-
import crypto from 'crypto';
|
|
4
|
-
import { pathToFileURL } from 'url';
|
|
5
|
-
import fs from 'fs-extra';
|
|
6
|
-
import PrettyError from 'pretty-error';
|
|
7
|
-
import walk from 'klaw';
|
|
8
|
-
import chalk from 'chalk';
|
|
9
|
-
import AdmZip from 'adm-zip';
|
|
10
|
-
import deleteEmpty from 'delete-empty';
|
|
11
|
-
// Shared pretty-printer for build errors; _buildProject further configures it
// (skipNodeFiles) and logError renders through it.
const pe = new PrettyError();
|
|
12
|
-
/**
 * Compute the MD5 hex digest of a string.
 * Used as a cheap content fingerprint for the build cache, not for security.
 *
 * @param stringToHash The text to fingerprint.
 * @returns The 32-character lowercase hex MD5 digest.
 */
function hash(stringToHash) {
    const md5 = crypto.createHash('md5');
    md5.update(stringToHash);
    return md5.digest('hex');
}
|
|
16
|
-
/**
 * Recursively create a directory, without failing if it already exists.
 * Best-effort: any creation error is swallowed (with `recursive: true`,
 * an already-existing directory does not error in the first place).
 *
 * @param dirPath The directory path to create.
 */
async function mkDir(dirPath) {
    try {
        await new Promise((resolve, reject) => {
            fs.mkdir(dirPath, { recursive: true }, (err) => {
                // Fix: the original fell through to resolve() after reject(err);
                // harmless only because a promise settles once, but clearly wrong.
                if (err) {
                    reject(err);
                    return;
                }
                resolve();
            });
        });
    }
    catch (error) {
        // Directory already exists (or could not be created) — ignore.
    }
}
|
|
31
|
-
// In-memory copy of .sandstone/cache.json: maps relative output paths to
// content hashes so unchanged files are not rewritten between builds.
// Stays undefined until the first build loads (or initializes) it.
let cache;
|
|
32
|
-
/**
 * Resolve the on-disk path of a Minecraft world inside the client's saves folder.
 *
 * @param worldName The name of the world.
 * @param minecraftPath The optional location of the .minecraft folder.
 * If left unspecified, the .minecraft will be found automatically.
 * @returns The absolute path to the world folder.
 * @throws If the world folder does not exist (the message lists existing worlds).
 */
async function getClientWorldPath(worldName, minecraftPath = undefined) {
    // Explicit argument wins; otherwise auto-detect the .minecraft folder.
    let mcPath;
    if (minecraftPath) {
        mcPath = minecraftPath;
    }
    else {
        mcPath = (await getClientPath());
    }
    const savesPath = path.join(mcPath, 'saves');
    const worldPath = path.join(savesPath, worldName);
    if (!fs.existsSync(worldPath)) {
        // Fix: isDirectory is a method — the original passed the function itself,
        // which is always truthy, so plain files were wrongly listed as worlds.
        const existingWorlds = (await fs.readdir(savesPath, { withFileTypes: true })).filter((f) => f.isDirectory()).map((f) => f.name);
        // Fix: message typos ("Word", "does not exists").
        throw new Error(`Unable to locate the "${worldPath}" folder. World ${worldName} does not exist. List of existing worlds: ${JSON.stringify(existingWorlds, null, 2)}`);
    }
    return worldPath;
}
|
|
54
|
-
/**
 * Get the .minecraft path.
 *
 * Picks the conventional client location for the current platform and checks
 * that it actually exists on disk.
 *
 * @returns The .minecraft folder path, or undefined when it cannot be found.
 */
async function getClientPath() {
    const platform = os.platform();
    const home = os.homedir();
    let mcPath;
    if (platform === 'win32') {
        mcPath = path.join(home, 'AppData/Roaming/.minecraft');
    }
    else if (platform === 'darwin') {
        mcPath = path.join(home, 'Library/Application Support/minecraft');
    }
    else {
        // linux and anything else share the same default location.
        mcPath = path.join(home, '.minecraft');
    }
    try {
        await fs.stat(mcPath);
    }
    catch (e) {
        console.warn('Unable to locate the .minecraft folder. Will not be able to export to client.');
        return undefined;
    }
    return mcPath;
}
|
|
79
|
-
/**
 * Build the project, but might throw errors.
 *
 * Loads sandstone.config.ts, prepares environment variables, imports the
 * user's index.ts, saves all packs through a cached file writer, copies
 * hand-written resources, and exports the result to the client/server.
 *
 * @param cliOptions The options to build the project with.
 * @param projectFolder The folder of the project. It needs a sandstone.config.ts, and it or one of its parent needs a package.json.
 */
async function _buildProject(cliOptions, { absProjectFolder, projectFolder, rootFolder, sandstoneConfigFolder }) {
    var _a, _b, _c, _d, _e, _f, _g;
    // First, read sandstone.config.ts to get all properties
    const sandstoneConfig = (await import(pathToFileURL(path.join(sandstoneConfigFolder, 'sandstone.config.ts')).toString())).default;
    const { scripts } = sandstoneConfig;
    let { saveOptions } = sandstoneConfig;
    if (saveOptions === undefined)
        saveOptions = {};
    const outputFolder = path.join(rootFolder, '.sandstone', 'output');
    /// OPTIONS ///
    // Client export is only relevant in development builds.
    const clientPath = !cliOptions.production ? (cliOptions.clientPath || saveOptions.clientPath || await getClientPath()) : undefined;
    const server = !cliOptions.production && (cliOptions.serverPath || saveOptions.serverPath || cliOptions.ssh || saveOptions.ssh) ? await (async () => {
        if (cliOptions.ssh || saveOptions.ssh) {
            // Fix: the ssh options file contains JSON, so parse it (the original
            // called JSON.stringify on the raw file text, which only re-quotes it).
            const sshOptions = JSON.parse(await fs.readFile(cliOptions.ssh || saveOptions.ssh, 'utf8'));
            // TODO: implement SFTP
            return {
                readFile: async (relativePath, encoding = 'utf8') => { },
                writeFile: async (relativePath, contents) => { },
                remove: async (relativePath) => { },
            };
        }
        const serverPath = cliOptions.serverPath || saveOptions.serverPath;
        return {
            readFile: async (relativePath, encoding = 'utf8') => await fs.readFile(path.join(serverPath, relativePath), encoding),
            writeFile: async (relativePath, contents) => {
                // `undefined` contents means "delete the file".
                if (contents === undefined) {
                    await fs.unlink(path.join(serverPath, relativePath));
                }
                else {
                    await fs.writeFile(path.join(serverPath, relativePath), contents);
                }
            },
            remove: async (relativePath) => await fs.remove(path.join(serverPath, relativePath))
        };
    })() : undefined;
    let worldName = cliOptions.world || saveOptions.world;
    // Make sure the world exists
    if (worldName && !cliOptions.production) {
        await getClientWorldPath(worldName, clientPath);
    }
    const root = cliOptions.root !== undefined ? cliOptions.root : saveOptions.root;
    const packName = (_a = cliOptions.name) !== null && _a !== void 0 ? _a : sandstoneConfig.name;
    if (worldName && root) {
        throw new Error(`Expected only 'world' or 'root'. Got both.`);
    }
    // Important /!\: The below if statements, which set environment variables, must run before importing any Sandstone file.
    // Set the pack ID environment variable
    // Set production/development mode
    if (cliOptions.production) {
        process.env.SANDSTONE_ENV = 'production';
    }
    else {
        process.env.SANDSTONE_ENV = 'development';
    }
    process.env.WORKING_DIR = absProjectFolder;
    if (sandstoneConfig.packUid) {
        process.env.PACK_UID = sandstoneConfig.packUid;
    }
    // Set the namespace
    const namespace = cliOptions.namespace || sandstoneConfig.namespace;
    if (namespace) {
        process.env.NAMESPACE = namespace;
    }
    // Expose each pack's conflict strategies to Sandstone through env vars.
    for (const [k, pack] of Object.entries(sandstoneConfig.packs)) {
        if (pack.onConflict) {
            for (const resource of Object.entries(pack.onConflict)) {
                process.env[`${resource[0].toUpperCase()}_CONFLICT_STRATEGY`] = resource[1];
            }
        }
    }
    // JSON indentation
    process.env.INDENTATION = saveOptions.indentation;
    // Pack mcmeta
    process.env.PACK_OPTIONS = JSON.stringify(sandstoneConfig.packs);
    // Configure error display
    if (!cliOptions.fullTrace) {
        pe.skipNodeFiles();
    }
    /// IMPORTING USER CODE ///
    // The configuration is ready.
    // Now, let's run the beforeAll script
    await ((_b = scripts === null || scripts === void 0 ? void 0 : scripts.beforeAll) === null || _b === void 0 ? void 0 : _b.call(scripts));
    // Finally, let's import from the index.
    let error = false;
    let sandstonePack;
    const filePath = path.join(projectFolder, 'index.ts');
    try {
        // Sometimes, a file might not exist because it has been deleted.
        if (await fs.pathExists(filePath)) {
            sandstonePack = (await import(pathToFileURL(filePath).toString())).default;
        }
    }
    catch (e) {
        logError(e, absProjectFolder);
        error = true;
    }
    if (error) {
        return;
    }
    /// Add new dependencies ///
    if (cliOptions.dependencies) {
        for (const dependency of cliOptions.dependencies) {
            sandstonePack.core.depend(...dependency);
        }
    }
    /// SAVING RESULTS ///
    // Setup the cache if it doesn't exist.
    // This cache is here to avoid writing files on disk when they did not change.
    const newCache = {};
    const cacheFile = path.join(rootFolder, '.sandstone', 'cache.json');
    if (cache === undefined) {
        let oldCache;
        try {
            const fileRead = await fs.readFile(cacheFile, 'utf8');
            if (fileRead) {
                oldCache = JSON.parse(fileRead);
            }
        }
        catch { }
        if (oldCache) {
            cache = oldCache;
        }
        else {
            cache = {};
        }
    }
    // Save the pack
    // Run the beforeSave script (TODO: This is where sandstone-server will remove restart env vars)
    await ((_c = scripts === null || scripts === void 0 ? void 0 : scripts.beforeSave) === null || _c === void 0 ? void 0 : _c.call(scripts));
    const excludeOption = (_d = saveOptions.resources) === null || _d === void 0 ? void 0 : _d.exclude;
    const fileExclusions = excludeOption ? {
        generated: (excludeOption.generated || excludeOption),
        existing: (excludeOption.existing || excludeOption)
    } : false;
    const fileHandlers = ((_e = saveOptions.resources) === null || _e === void 0 ? void 0 : _e.handle) || false;
    const packTypes = await sandstonePack.save({
        // Additional parameters
        dry: cliOptions.dry,
        verbose: cliOptions.verbose,
        fileHandler: (_f = saveOptions.customFileHandler) !== null && _f !== void 0 ? _f : (async (relativePath, content) => {
            let pathPass = true;
            if (fileExclusions && fileExclusions.generated) {
                for (const exclude of fileExclusions.generated) {
                    if (!Array.isArray(exclude)) {
                        pathPass = !exclude.test(relativePath);
                    }
                }
            }
            if (fileHandlers) {
                for (const handler of fileHandlers) {
                    if (handler.path.test(relativePath)) {
                        content = await handler.callback(content);
                    }
                }
            }
            if (pathPass) {
                // We hash the relative path alongside the content to ensure unique hash.
                const hashValue = hash(content + relativePath);
                // Add to new cache.
                newCache[relativePath] = hashValue;
                if (cache[relativePath] === hashValue) {
                    // Already in cache - skip
                    return;
                }
                // Not in cache: write to disk
                const realPath = path.join(outputFolder, relativePath);
                await mkDir(path.dirname(realPath));
                return await fs.writeFile(realPath, content);
            }
        })
    });
    // Copy hand-written resources (./resources/<packType>) into the output,
    // honoring exclusions/handlers and the content cache.
    async function handleResources(packType) {
        const working = path.join(rootFolder, 'resources', packType);
        let exists = false;
        try {
            await fs.access(working);
            exists = true;
        }
        catch (e) { }
        if (exists) {
            for await (const file of walk(path.join(rootFolder, 'resources', packType), { filter: (_path) => {
                const relativePath = path.join(packType, _path.split(working)[1]);
                let pathPass = true;
                if (fileExclusions && fileExclusions.existing) {
                    for (const exclude of fileExclusions.existing) {
                        pathPass = Array.isArray(exclude) ? !exclude[0].test(relativePath) : !exclude.test(relativePath);
                    }
                }
                return pathPass;
            } })) {
                const relativePath = path.join(packType, file.path.split(working)[1]);
                try {
                    let content = await fs.readFile(file.path);
                    if (fileHandlers) {
                        for (const handler of fileHandlers) {
                            if (handler.path.test(relativePath)) {
                                content = await handler.callback(content);
                            }
                        }
                    }
                    // We hash the relative path alongside the content to ensure unique hash.
                    const hashValue = hash(content + relativePath);
                    // Add to new cache.
                    newCache[relativePath] = hashValue;
                    if (cache[relativePath] !== hashValue) {
                        // Not in cache: write to disk
                        const realPath = path.join(outputFolder, relativePath);
                        await mkDir(path.dirname(realPath));
                        await fs.writeFile(realPath, content);
                    }
                }
                catch (e) { }
            }
        }
    }
    // Zip one pack type's output into .sandstone/output/archives.
    // Returns whether an archive was actually produced (empty outputs are skipped).
    async function archiveOutput(packType) {
        const input = path.join(outputFolder, packType.type);
        if ((await fs.readdir(input)).length !== 0) {
            const archive = new AdmZip();
            await archive.addLocalFolderPromise(input, {});
            await archive.writeZipPromise(`${path.join(outputFolder, 'archives', `${packName}_${packType.type}`)}.zip`, { overwrite: true });
            return true;
        }
        return false;
    }
    // TODO: implement linking to make the cache more useful when not archiving.
    if (!cliOptions.production) {
        for await (const _packType of packTypes) {
            const packType = _packType[1];
            const outputPath = path.join(outputFolder, packType.type);
            await fs.ensureDir(outputPath);
            if (packType.handleOutput) {
                await packType.handleOutput('output', async (relativePath, encoding = 'utf8') => await fs.readFile(path.join(outputPath, relativePath), encoding), async (relativePath, contents) => {
                    if (contents === undefined) {
                        await fs.unlink(path.join(outputPath, relativePath));
                    }
                    else {
                        await fs.writeFile(path.join(outputPath, relativePath), contents);
                    }
                });
            }
            // Fix: await the resource copy — it was fire-and-forget, so the cache
            // could be written to disk before resources were handled.
            await handleResources(packType.type);
            let archivedOutput = false;
            if (packType.archiveOutput) {
                archivedOutput = await archiveOutput(packType);
            }
            // Handle client
            if (!(server && packType.networkSides === 'server') && clientPath) {
                let fullClientPath;
                if (worldName) {
                    fullClientPath = path.join(clientPath, packType.clientPath);
                    try {
                        fullClientPath = fullClientPath.replace('$packName$', packName);
                    }
                    catch { }
                    try {
                        fullClientPath = fullClientPath.replace('$worldName$', worldName);
                    }
                    catch { }
                }
                else {
                    fullClientPath = path.join(clientPath, packType.rootPath);
                    try {
                        fullClientPath = fullClientPath.replace('$packName$', packName);
                    }
                    catch { }
                }
                if (packType.archiveOutput) {
                    if (archivedOutput) {
                        await fs.copyFile(`${path.join(outputFolder, 'archives', `${packName}_${packType.type}`)}.zip`, `${fullClientPath}.zip`);
                    }
                }
                else {
                    await fs.remove(fullClientPath);
                    await fs.copy(outputPath, fullClientPath);
                }
                if (packType.handleOutput) {
                    await packType.handleOutput('client', async (relativePath, encoding = 'utf8') => await fs.readFile(path.join(clientPath, relativePath), encoding), async (relativePath, contents) => {
                        if (contents === undefined) {
                            // Fix: missing await — the deletion could race with later writes.
                            await fs.unlink(path.join(clientPath, relativePath));
                        }
                        else {
                            await fs.writeFile(path.join(clientPath, relativePath), contents);
                        }
                    });
                }
            }
            // Handle server
            if (server && (packType.networkSides === 'server' || packType.networkSides === 'both')) {
                let serverPath = packType.serverPath;
                try {
                    serverPath = serverPath.replace('$packName$', packName);
                }
                catch { }
                if (packType.archiveOutput && archivedOutput) {
                    // Fix: arguments were swapped (writeFile takes (relativePath, contents)),
                    // the archive is written under output/archives (not `${outputPath}.zip`),
                    // and a zip must be read as a binary buffer, not as utf8 text.
                    await server.writeFile(`${serverPath}.zip`, await fs.readFile(`${path.join(outputFolder, 'archives', `${packName}_${packType.type}`)}.zip`));
                }
                else {
                    // Fix: missing await — removal could race with the writes below.
                    await server.remove(serverPath);
                    for await (const file of walk(outputPath)) {
                        await server.writeFile(path.join(serverPath, file.path.split(outputPath)[1]), await fs.readFile(file.path));
                    }
                }
                if (packType.handleOutput) {
                    await packType.handleOutput('server', server.readFile, server.writeFile);
                }
            }
        }
    }
    else {
        // NOTE(review): this branch iterates packTypes directly while the branch
        // above destructures `_packType[1]` — one of the two shapes is presumably
        // wrong; confirm what sandstonePack.save() actually returns.
        for await (const packType of packTypes) {
            const outputPath = path.join(outputFolder, packType.type);
            if (packType.handleOutput) {
                await packType.handleOutput('output', async (relativePath, encoding = 'utf8') => await fs.readFile(path.join(outputPath, relativePath), encoding), async (relativePath, contents) => {
                    if (contents === undefined) {
                        await fs.unlink(path.join(outputPath, relativePath));
                    }
                    else {
                        await fs.writeFile(path.join(outputPath, relativePath), contents);
                    }
                });
            }
            // Fix: both calls below were fire-and-forget; await them so the build
            // does not finish (and write the cache) before they complete.
            await handleResources(packType.type);
            if (packType.archiveOutput) {
                await archiveOutput(packType);
            }
        }
    }
    // Delete old files that aren't cached anymore
    const oldFilesNames = new Set(Object.keys(cache));
    Object.keys(newCache).forEach(name => oldFilesNames.delete(name));
    for await (const name of oldFilesNames) {
        await fs.rm(path.join(outputFolder, name));
    }
    await deleteEmpty(outputFolder);
    // Override old cache
    cache = newCache;
    // Write the cache to disk
    await fs.writeFile(cacheFile, JSON.stringify(cache));
    // Run the afterAll script
    await ((_g = scripts === null || scripts === void 0 ? void 0 : scripts.afterAll) === null || _g === void 0 ? void 0 : _g.call(scripts));
    console.log(`Pack(s) compiled! View output in ./.sandstone/output/`);
}
|
|
429
|
-
/**
 * Build the project. Will log errors and never throw any.
 *
 * @param options The options to build the project with.
 *
 * @param folders The folder of the project. It needs a sandstone.config.ts, and it or one of its parent needs a package.json.
 */
export async function buildProject(options, folders) {
    try {
        await _buildProject(options, folders);
    }
    catch (buildError) {
        // Deliberately swallow after logging: the CLI must keep running
        // (e.g. in watch mode) even when a build fails.
        console.log(buildError);
    }
}
|
|
444
|
-
/**
 * Pretty-print a build error to stderr.
 *
 * @param err The error to render; falsy values are ignored.
 * @param file Optional path of the file whose load triggered the error,
 * shown in a highlighted "BuildError" banner before the rendered trace.
 */
function logError(err, file) {
    // Guard clause: nothing to report.
    if (!err) {
        return;
    }
    if (file) {
        console.error(' ' + chalk.bgRed.white('BuildError') + chalk.gray(':'), `While loading "${file}", the following error happened:\n`);
    }
    // Fix: removed a stray `debugger;` statement left over from development —
    // it pauses execution whenever a debugger is attached.
    console.error(pe.render(err));
}
|
|
453
|
-
// Route any unhandled promise rejection or uncaught exception through the
// pretty-printed error logger instead of Node's default crash output.
process.on('unhandledRejection', logError);
process.on('uncaughtException', logError);
|