sandstone-cli 1.2.5 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bun.lock +490 -0
- package/lib/commands/build.d.ts +20 -6
- package/lib/commands/build.js +617 -24
- package/lib/commands/create.js +23 -37
- package/lib/commands/dependency.js +22 -34
- package/lib/commands/index.d.ts +1 -1
- package/lib/commands/watch.d.ts +5 -15
- package/lib/commands/watch.js +261 -42
- package/lib/index.d.ts +0 -1
- package/lib/index.js +2 -3
- package/lib/shared.js +3 -2
- package/lib/ui/WatchUI.d.ts +9 -0
- package/lib/ui/WatchUI.js +183 -0
- package/lib/ui/logger.d.ts +20 -0
- package/lib/ui/logger.js +189 -0
- package/lib/ui/types.d.ts +26 -0
- package/lib/ui/types.js +1 -0
- package/lib/utils.d.ts +25 -16
- package/lib/utils.js +51 -35
- package/package.json +15 -8
- package/src/commands/build.ts +805 -49
- package/src/commands/create.ts +23 -42
- package/src/commands/dependency.ts +29 -47
- package/src/commands/index.ts +3 -3
- package/src/commands/watch.ts +320 -73
- package/src/create.ts +4 -4
- package/src/index.ts +7 -8
- package/src/shared.ts +3 -2
- package/src/ui/WatchUI.tsx +269 -0
- package/src/ui/logger.ts +210 -0
- package/src/ui/types.ts +32 -0
- package/src/utils.ts +87 -44
- package/tsconfig.json +5 -3
package/lib/commands/build.js
CHANGED
|
@@ -1,30 +1,623 @@
|
|
|
1
|
-
import path from 'path';
|
|
2
|
-
import
|
|
3
|
-
import
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
1
|
+
import path from 'node:path';
|
|
2
|
+
import os from 'node:os';
|
|
3
|
+
import crypto from 'node:crypto';
|
|
4
|
+
import { pathToFileURL } from 'node:url';
|
|
5
|
+
import fs from 'fs-extra';
|
|
6
|
+
import chalk from 'chalk';
|
|
7
|
+
import AdmZip from 'adm-zip';
|
|
8
|
+
import { log, logInfo, logWarn, logError as logErrorFn, logDebug, logTrace, initLoggerNoFile, setSilent } from '../ui/logger.js';
|
|
9
|
+
import { canUseSymlinks } from '../utils.js';
|
|
10
|
+
import { split } from 'obliterator';
|
|
11
|
+
// Console capture for watch mode - wraps console to redirect output to our log file
// Reference to the untouched console object so disableConsoleCapture() can restore it.
const originalConsole = globalThis.console;
// Guards enableConsoleCapture()/disableConsoleCapture() against double (un)wrapping.
let consoleWrapped = false;
|
|
14
|
+
/**
 * Redirects the global console methods to the CLI logger so output produced
 * during watch-mode builds lands in the log file instead of stdout.
 * Idempotent: wrapping happens once until disableConsoleCapture() is called.
 */
export function enableConsoleCapture() {
    if (consoleWrapped) {
        return;
    }
    consoleWrapped = true;
    const con = globalThis.console;
    con.log = (...args) => log(...args);
    con.info = (...args) => logInfo(...args);
    con.warn = (...args) => logWarn(...args);
    // logErrorFn takes a single message string, so the arguments are joined.
    con.error = (...args) => logErrorFn(args.join(' '));
    con.debug = (...args) => logDebug(...args);
    con.trace = (...args) => {
        // Capture a stack without the wrapper frame itself, then strip the
        // "Error" header, hot-hook cache-busting params, and file:// prefixes.
        const holder = { stack: '' };
        Error.captureStackTrace(holder, con.trace);
        const readableStack = holder.stack
            .replace(/^Error\n/, '') // Remove "Error" header line
            .replace(/\?hot-hook=\d+/g, '')
            .replace(/file:\/\/\/?/g, '');
        logTrace(...args, '\n' + readableStack);
    };
}
|
|
33
|
+
/**
 * Undoes enableConsoleCapture() by rebinding the original console methods.
 * No-op when capture is not currently active.
 */
export function disableConsoleCapture() {
    if (!consoleWrapped) {
        return;
    }
    consoleWrapped = false;
    // Restore each captured method from the untouched console instance.
    for (const method of ['log', 'info', 'warn', 'error', 'debug', 'trace']) {
        globalThis.console[method] = originalConsole[method].bind(originalConsole);
    }
}
|
|
44
|
+
/**
 * Returns the hex-encoded MD5 digest of the given string. Used purely as a
 * change-detection fingerprint for the build cache, not for security.
 */
function hash(stringToHash) {
    const digest = crypto.createHash('md5');
    digest.update(stringToHash);
    return digest.digest('hex');
}
|
|
47
|
+
// In-memory build cache ({ files, archives, symlinks, canUseSymlinks }),
// persisted to .sandstone/cache.json and reused across watch-mode rebuilds.
let cache;
// Memoized result of the symlink-capability probe (also stored in the cache).
let symlinksAvailable;
|
|
49
|
+
/**
 * Locates the local Minecraft client directory for the current OS.
 * Returns the path when the folder exists; otherwise logs a notice and
 * returns undefined (client export is then skipped).
 */
async function getClientPath() {
    const home = os.homedir();
    let mcPath;
    switch (os.platform()) {
        case 'win32':
            mcPath = path.join(home, 'AppData/Roaming/.minecraft');
            break;
        case 'darwin':
            mcPath = path.join(home, 'Library/Application Support/minecraft');
            break;
        case 'linux':
        default:
            mcPath = path.join(home, '.minecraft');
            break;
    }
    try {
        await fs.stat(mcPath);
        return mcPath;
    }
    catch {
        log('Unable to locate the .minecraft folder. Will not be able to export to client.');
        return undefined;
    }
}
|
|
71
|
+
/**
 * Resolves the absolute path of a world inside the client's `saves` folder.
 *
 * @param worldName Name of the world folder to locate.
 * @param minecraftPath Optional pre-resolved .minecraft path; auto-detected when omitted.
 * @returns The world folder path.
 * @throws When the .minecraft folder cannot be located, or when the world
 *         does not exist (the error lists the worlds that do).
 */
async function getClientWorldPath(worldName, minecraftPath) {
    const mcPath = minecraftPath ?? (await getClientPath());
    // Robustness fix: getClientPath() returns undefined when .minecraft is
    // missing; the original then crashed inside path.join with an opaque
    // TypeError instead of a descriptive error.
    if (!mcPath) {
        throw new Error(`Unable to locate the .minecraft folder. Cannot resolve world "${worldName}".`);
    }
    const savesPath = path.join(mcPath, 'saves');
    const worldPath = path.join(savesPath, worldName);
    if (!fs.existsSync(worldPath)) {
        const existingWorlds = (await fs.readdir(savesPath, { withFileTypes: true }))
            .filter((f) => f.isDirectory())
            .map((f) => f.name);
        throw new Error(`Unable to locate the "${worldPath}" folder. World ${worldName} does not exist. List of existing worlds: ${JSON.stringify(existingWorlds, null, 2)}`);
    }
    return worldPath;
}
|
|
83
|
+
// Boilerplate resources to exclude from counts
const BOILERPLATE_NAMESPACES = new Set(['load', '__sandstone__']);
const BOILERPLATE_FUNCTIONS = new Set(['__init__']);
const BOILERPLATE_TAG = { namespace: 'minecraft', name: 'load' };
/**
 * True when a resource is Sandstone-generated boilerplate that should not
 * appear in user-facing resource counts: anything in the `load` or
 * `__sandstone__` namespaces, any `__init__` function, and the
 * `minecraft:load` tag.
 */
function isBoilerplateResource(resource) {
    const ns = resource.namespace || '';
    const parts = resource.path || [];
    const leafName = parts[parts.length - 1] || '';
    return (BOILERPLATE_NAMESPACES.has(ns)
        || BOILERPLATE_FUNCTIONS.has(leafName)
        || (ns === BOILERPLATE_TAG.namespace && leafName === BOILERPLATE_TAG.name));
}
|
|
101
|
+
/**
 * Tallies the user-authored resources in a pack, split into mcfunctions and
 * everything else. Boilerplate resources (see isBoilerplateResource) are
 * excluded from both counts.
 */
function countResources(sandstonePack) {
    const counts = { functions: 0, other: 0 };
    for (const node of sandstonePack.core.resourceNodes) {
        const resource = node.resource;
        // Skip Sandstone-generated boilerplate
        if (isBoilerplateResource(resource)) {
            continue;
        }
        // MCFunctions are detected by constructor name since the sandstone
        // class itself is not imported here.
        if (resource.constructor?.name === '_RawMCFunctionClass') {
            counts.functions += 1;
        }
        else {
            counts.other += 1;
        }
    }
    return counts;
}
|
|
119
|
+
/**
 * Creates a symlink from the game directory (client or server) back into the
 * build output, after registering the project's output folder in Minecraft's
 * `allowed_symlinks.txt` so the game will follow the link.
 *
 * @param folder Project folder whose output is being linked.
 * @param packName Pack name, used in the allowed_symlinks.txt comment.
 * @param cache Cache object; the created link is recorded in cache.symlinks.
 * @param minecraftPath Root of the game installation (holds allowed_symlinks.txt).
 * @param targetPath Real folder the link should point at.
 * @param linkPath Location where the symlink is created.
 * @throws When a file already exists at linkPath.
 */
async function handleSymlink(folder, packName, cache, minecraftPath, targetPath, linkPath) {
    const allowPath = `[glob]${path.resolve(folder)}${path.sep}**${path.sep}*`;
    const allowedList = path.join(minecraftPath, 'allowed_symlinks.txt');
    const comment = `# Sandstone Pack: ${packName}\n`;
    try {
        const currentlyAllowed = await fs.readFile(allowedList);
        if (!currentlyAllowed.includes(allowPath)) {
            await fs.writeFile(allowedList, `${currentlyAllowed}\n#\n${comment}${allowPath}`);
        }
    }
    catch {
        // No allowed_symlinks.txt yet - create it with just our entry.
        await fs.writeFile(allowedList, `${comment}${allowPath}`);
    }
    // BUG FIX: the original chained `.then(() => { throw ... }).catch(() => {})`,
    // so the intended "existing file" error was thrown inside the chain and
    // immediately swallowed by the trailing catch (which was meant only for the
    // lstat ENOENT rejection). Probe existence first, then throw outside.
    const linkAlreadyExists = await fs.lstat(linkPath).then(() => true, () => false);
    if (linkAlreadyExists) {
        throw new Error(`Tried to add a symlink at "${linkPath}", encountered an existing file.`);
    }
    await fs.symlink(path.resolve(targetPath), linkPath);
    cache.symlinks ??= [];
    cache.symlinks.push(linkPath);
}
|
|
139
|
+
/**
 * Loads the project's sandstone.config.ts and instantiates a sandstone pack
 * from it. Sandstone is imported from the project's own node_modules so the
 * CLI shares a single module instance with the user's pack code.
 *
 * @param cliOptions CLI flags; `namespace` overrides the config's namespace.
 * @param folder Project root folder.
 * @returns { sandstoneConfig, sandstonePack, resetSandstonePack }
 */
export async function loadBuildContext(cliOptions, folder) {
    // Load sandstone.config.ts
    const configUrl = pathToFileURL(path.join(folder, 'sandstone.config.ts')).toString();
    const sandstoneConfig = (await import(configUrl)).default;
    // CLI flag takes precedence over the config file.
    const namespace = cliOptions.namespace || sandstoneConfig.namespace;
    // Copy per-resource conflict strategies, if any were configured.
    const conflictStrategies = {};
    for (const [resource, strategy] of Object.entries(sandstoneConfig.onConflict ?? {})) {
        conflictStrategies[resource] = strategy;
    }
    // Import sandstone from the project's node_modules, not the CLI's.
    // This ensures we use the same module instance as the user code.
    const sandstoneUrl = pathToFileURL(path.join(folder, 'node_modules', 'sandstone', 'dist', 'index.js'));
    /* @ts-ignore */
    const { createSandstonePack, resetSandstonePack } = (await import(sandstoneUrl));
    // Create the pack with the assembled build context.
    const sandstonePack = createSandstonePack({
        workingDir: folder,
        namespace,
        packUid: sandstoneConfig.packUid,
        packOptions: sandstoneConfig.packs,
        conflictStrategies,
        loadVersion: sandstoneConfig.loadVersion,
    });
    return { sandstoneConfig, sandstonePack, resetSandstonePack };
}
|
|
169
|
+
/**
 * Core build pipeline: loads the project config and entrypoint, saves the
 * sandstone pack to `.sandstone/output`, copies the `resources/` folder in,
 * optionally archives pack types, exports them to the client/server, and
 * maintains an incremental file-hash cache so unchanged files are skipped.
 *
 * @param cliOptions CLI flags (world/root/clientPath/serverPath/name/dry/
 *        verbose/production/fullTrace/dependencies/namespace...).
 * @param folder Project root folder.
 * @param silent When true, suppresses progress logging (errors still logged).
 * @param existingContext Optional pre-loaded { sandstoneConfig, sandstonePack,
 *        resetSandstonePack }, reused across watch-mode rebuilds.
 * @param watching When true (and not running under Bun), the entrypoint is
 *        imported through hot-hook for hot reloading.
 * @returns { resourceCounts, sandstoneConfig, sandstonePack, resetSandstonePack }
 * @throws When package.json lacks a "module" field, when both world and root
 *         are given, or when the entrypoint fails to load.
 */
async function _buildProject(cliOptions, folder, silent = false, existingContext, watching = false) {
    // Read project package.json to get entrypoint
    const packageJsonPath = path.join(folder, 'package.json');
    const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf-8'));
    // Get the entrypoint from the "module" field
    const entrypoint = packageJson.module;
    if (!entrypoint) {
        throw new Error('No "module" field found in package.json. Please specify the entrypoint for your pack code.');
    }
    const entrypointPath = path.join(folder, entrypoint);
    // Load or use existing context
    const { sandstoneConfig, sandstonePack, resetSandstonePack } = existingContext ??
        await loadBuildContext(cliOptions, folder);
    // Reset pack state before each build
    resetSandstonePack();
    const { scripts, resources } = sandstoneConfig;
    const saveOptions = sandstoneConfig.saveOptions || {};
    const outputFolder = path.join(folder, '.sandstone', 'output');
    // Resolve options (CLI flags take precedence over saveOptions).
    const worldName = cliOptions.world || saveOptions.world;
    const root = cliOptions.root !== undefined ? cliOptions.root : saveOptions.root;
    // Only use explicitly configured client path for now
    // We'll auto-detect after save() if there are client-side packs that need exporting
    let clientPath = !cliOptions.production
        ? cliOptions.clientPath || saveOptions.clientPath
        : undefined;
    if (worldName && !cliOptions.production) {
        // Need client path for world export; getClientWorldPath also validates
        // that the target world actually exists.
        clientPath ??= await getClientPath();
        if (clientPath) {
            await getClientWorldPath(worldName, clientPath);
        }
    }
    else if (root && !cliOptions.production) {
        // Need client path for root export
        clientPath ??= await getClientPath();
    }
    const serverPath = !cliOptions.production
        ? cliOptions.serverPath || saveOptions.serverPath
        : undefined;
    const packName = cliOptions.name ?? sandstoneConfig.name;
    if (worldName && root) {
        throw new Error("Expected only 'world' or 'root'. Got both.");
    }
    // Run beforeAll script
    await scripts?.beforeAll?.();
    // Import user code (this executes their pack definitions)
    if (!silent) {
        log('Compiling source...');
    }
    try {
        if (await fs.pathExists(entrypointPath)) {
            const isBun = Object.hasOwn(globalThis, 'Bun');
            const entrypointUrl = pathToFileURL(entrypointPath).toString();
            if (watching && !isBun) {
                // Hot-hook for Node.js - only this should be hot reloaded
                // Bun doesn't support hot-hook, we clear require.cache instead in watch.ts
                await import(entrypointUrl, { with: { hot: 'true' } });
            }
            else {
                await import(entrypointUrl);
            }
        }
    }
    catch (e) {
        const errorMsg = `While loading "${entrypointPath}":\n${cliOptions.fullTrace ? e : (e.message || e)}`;
        if (!silent) {
            console.error(chalk.bgRed.white('BuildError') + chalk.gray(':'), errorMsg);
        }
        log('BuildError:', errorMsg);
        throw e; // Re-throw for buildCommand to handle
    }
    // Add dependencies if specified
    if (cliOptions.dependencies) {
        for (const dependency of cliOptions.dependencies) {
            sandstonePack.core.depend(...dependency);
        }
    }
    // Setup cache
    const newCache = { files: {}, archives: [] };
    const cacheFile = path.join(folder, '.sandstone', 'cache.json');
    // Track which pack types have changed files
    const changedPackTypes = new Set();
    // Track directories containing new files
    const newDirs = new Set();
    // Module-level `cache` survives between watch rebuilds; only hit disk once.
    if (cache === undefined) {
        try {
            const fileRead = await fs.readFile(cacheFile, 'utf8');
            if (fileRead) {
                const parsed = JSON.parse(fileRead);
                // Handle legacy cache format (plain Record<string, string>)
                if (parsed.files) {
                    cache = parsed;
                }
                else {
                    cache = { files: parsed };
                }
            }
        }
        catch {
            cache = { files: {} };
        }
    }
    // Check symlink availability (use cached value if available)
    if (symlinksAvailable === undefined) {
        if (cache.canUseSymlinks !== undefined) {
            symlinksAvailable = cache.canUseSymlinks;
        }
        else {
            symlinksAvailable = await canUseSymlinks();
        }
    }
    newCache.canUseSymlinks = symlinksAvailable;
    // Run beforeSave script
    await scripts?.beforeSave?.();
    // File exclusion setup: `exclude` may either be a list of patterns (applied
    // to both generated and existing files) or a { generated, existing } pair.
    const excludeOption = resources?.exclude;
    const fileExclusions = excludeOption
        ? {
            generated: ('generated' in excludeOption ? excludeOption.generated : excludeOption),
            existing: ('existing' in excludeOption ? excludeOption.existing : excludeOption),
        }
        : false;
    const fileHandlers = resources?.handle || false;
    // Save the pack
    const packTypes = await sandstonePack.save({
        dry: cliOptions.dry ?? false,
        verbose: cliOptions.verbose ?? false,
        fileHandler: saveOptions.customFileHandler ??
            (async (relativePath, content) => {
                // NOTE(review): only the last non-array exclusion pattern decides
                // pathPass here (each iteration overwrites it) - confirm intended.
                let pathPass = true;
                if (fileExclusions && fileExclusions.generated) {
                    for (const exclude of fileExclusions.generated) {
                        if (!Array.isArray(exclude)) {
                            pathPass = !exclude.test(relativePath);
                        }
                    }
                }
                if (fileHandlers) {
                    for (const handler of fileHandlers) {
                        if (handler.path.test(relativePath)) {
                            content = await handler.callback(content);
                        }
                    }
                }
                if (pathPass) {
                    // NOTE(review): `content` may be a Buffer; `content + relativePath`
                    // coerces it via toString() - confirm this is stable for binary files.
                    const hashValue = hash(content + relativePath);
                    newCache.files[relativePath] = hashValue;
                    // Track parent directories
                    for (let dir = path.dirname(relativePath); dir && dir !== '.'; dir = path.dirname(dir)) {
                        newDirs.add(dir);
                    }
                    // Unchanged file: recorded in the new cache but not rewritten.
                    if (cache.files[relativePath] === hashValue) {
                        return;
                    }
                    // Track that this pack type has changed
                    const packTypeDir = relativePath.split(/[/\\]/)[0];
                    changedPackTypes.add(packTypeDir);
                    const realPath = path.join(outputFolder, relativePath);
                    await fs.ensureDir(path.dirname(realPath));
                    return await fs.writeFile(realPath, content);
                }
            }),
    });
    // Handle resources folder: copies resources/<packType>/** into the output,
    // applying exclusions/handlers and the same hash-based change detection.
    async function handleResources(packType) {
        const working = path.join(folder, 'resources', packType);
        if (!(await fs.pathExists(working))) {
            return;
        }
        // Recursively collect all file paths under `dir`.
        const walk = async (dir) => {
            const files = [];
            const entries = await fs.readdir(dir, { withFileTypes: true });
            for (const entry of entries) {
                const fullPath = path.join(dir, entry.name);
                if (entry.isDirectory()) {
                    files.push(...(await walk(fullPath)));
                }
                else {
                    files.push(fullPath);
                }
            }
            return files;
        };
        for (const file of await walk(working)) {
            const relativePath = path.join(packType, file.substring(working.length + 1));
            let pathPass = true;
            if (fileExclusions && fileExclusions.existing) {
                for (const exclude of fileExclusions.existing) {
                    pathPass = Array.isArray(exclude) ? !exclude[0].test(relativePath) : !exclude.test(relativePath);
                }
            }
            if (!pathPass)
                continue;
            try {
                let content = await fs.readFile(file);
                if (fileHandlers) {
                    for (const handler of fileHandlers) {
                        if (handler.path.test(relativePath)) {
                            content = (await handler.callback(content));
                        }
                    }
                }
                const hashValue = hash(content + relativePath);
                newCache.files[relativePath] = hashValue;
                // Record parent dirs, stopping at the first already-known one.
                for (let dir = path.dirname(relativePath); dir && dir !== '.'; dir = path.dirname(dir)) {
                    if (newDirs.has(dir)) {
                        break;
                    }
                    else {
                        newDirs.add(dir);
                    }
                }
                if (cache.files[relativePath] !== hashValue) {
                    // Track that this pack type has changed
                    changedPackTypes.add(packType);
                    const realPath = path.join(outputFolder, relativePath);
                    await fs.ensureDir(path.dirname(realPath));
                    await fs.writeFile(realPath, content);
                }
            }
            catch { } // NOTE(review): read/handler failures are silently skipped - confirm best-effort is intended.
        }
    }
    // Archive output if needed. Returns true when an archive was produced.
    async function archiveOutput(packType) {
        const input = path.join(outputFolder, packType.type);
        const files = await fs.readdir(input).catch(() => []);
        if (files.length === 0)
            return false;
        const archiveName = `${packName}_${packType.type}.zip`;
        newCache.archives.push(archiveName);
        const archive = new AdmZip();
        await archive.addLocalFolderPromise(input, {});
        await fs.ensureDir(path.join(outputFolder, 'archives'));
        await archive.writeZipPromise(path.join(outputFolder, 'archives', archiveName), { overwrite: true });
        return true;
    }
    // Export to client/server
    if (!cliOptions.production) {
        // Check if there are any client-side packs that need exporting
        // If so and clientPath not set, try to find it now (after dependencies resolved)
        const packTypesArray = [...packTypes];
        const hasClientPacks = packTypesArray.some(([, pt]) => pt.networkSides === 'client');
        if (hasClientPacks && !clientPath && (root || worldName)) {
            clientPath = await getClientPath();
        }
        // When no world/root specified, only export client-side packs (resource packs + dependencies)
        const resourcePackOnlyExport = !worldName && !root;
        for (const [, packType] of packTypesArray) {
            const outputPath = path.join(outputFolder, packType.type);
            await fs.ensureDir(outputPath);
            // Give the pack type a chance to post-process its own output files.
            if (packType.handleOutput) {
                await packType.handleOutput('output', async (relativePath, encoding = 'utf8') => await fs.readFile(path.join(outputPath, relativePath), encoding), async (relativePath, contents) => {
                    if (contents === undefined) {
                        await fs.unlink(path.join(outputPath, relativePath));
                    }
                    else {
                        await fs.writeFile(path.join(outputPath, relativePath), contents);
                    }
                });
            }
            await handleResources(packType.type);
            // Skip archive and export if no files in this pack type changed
            if (!changedPackTypes.has(packType.type)) {
                continue;
            }
            let archivedOutput = false;
            if (packType.archiveOutput && saveOptions.exportZips) {
                archivedOutput = await archiveOutput(packType);
            }
            // Handle client export
            // Skip non-client packs (datapacks) when in resource pack only mode (no world/root specified)
            if (clientPath && !(resourcePackOnlyExport && packType.networkSides !== 'client')) {
                let fullClientPath;
                // Only export the resource pack to `$worldName$/resources.zip` if exportZips is on
                if (worldName && (packType.type !== 'resourcepack' || saveOptions.exportZips)) {
                    fullClientPath = path.join(clientPath, packType.clientPath)
                        .replace('$packName$', packName)
                        .replace('$worldName$', worldName);
                }
                else {
                    fullClientPath = path.join(clientPath, packType.rootPath).replace('$packName$', packName);
                }
                if (packType.archiveOutput && archivedOutput && saveOptions.exportZips) {
                    const archivePath = path.join(outputFolder, 'archives', `${packName}_${packType.type}.zip`);
                    await fs.copyFile(archivePath, `${fullClientPath}.zip`);
                }
                else if (symlinksAvailable) {
                    if (cache.symlinks === undefined || !cache.symlinks.includes(fullClientPath)) {
                        // NOTE(review): not awaited - symlink failures surface as
                        // unhandled rejections; confirm fire-and-forget is intended.
                        handleSymlink(folder, packName, newCache, clientPath, outputPath, fullClientPath);
                    }
                }
                else {
                    // Symlinks unavailable: fall back to a full copy.
                    await fs.remove(fullClientPath);
                    await fs.copy(outputPath, fullClientPath);
                }
            }
            // Handle server export (skip client-only packs like resource packs)
            if (serverPath && packType.networkSides === 'server') {
                const fullServerPath = path.join(serverPath, packType.serverPath).replace('$packName$', packName);
                if (packType.archiveOutput && archivedOutput && saveOptions.exportZips) {
                    const archivePath = path.join(outputFolder, 'archives', `${packName}_${packType.type}.zip`);
                    await fs.copyFile(archivePath, `${fullServerPath}.zip`);
                }
                else if (symlinksAvailable) {
                    if (cache.symlinks === undefined || !cache.symlinks.includes(fullServerPath)) {
                        // NOTE(review): not awaited - see client-export note above.
                        handleSymlink(folder, packName, newCache, serverPath, outputPath, fullServerPath);
                    }
                }
                else {
                    await fs.remove(fullServerPath);
                    await fs.copy(outputPath, fullServerPath);
                }
            }
        }
    }
    else {
        // Production mode: write output only; no archives, no client/server export.
        for await (const [, packType] of packTypes) {
            const outputPath = path.join(outputFolder, packType.type);
            if (packType.handleOutput) {
                await packType.handleOutput('output', async (relativePath, encoding = 'utf8') => await fs.readFile(path.join(outputPath, relativePath), encoding), async (relativePath, contents) => {
                    if (contents === undefined) {
                        await fs.unlink(path.join(outputPath, relativePath));
                    }
                    else {
                        await fs.writeFile(path.join(outputPath, relativePath), contents);
                    }
                });
            }
            await handleResources(packType.type);
        }
    }
    // Clean up old files, directories, and symlinks not in new cache
    if (cliOptions.dry !== true) {
        for (const file of Object.keys(cache.files)) {
            if (!(file in newCache.files)) {
                await fs.rm(path.join(outputFolder, file));
                // Walk the file's directory chain root-first and delete the first
                // directory that no current file lives under.
                let dir = undefined;
                // NOTE(review): RegExp.escape is a very recent addition (Node 24+ / Bun);
                // verify the supported runtimes provide it.
                for (const segment of split(new RegExp(RegExp.escape(path.sep)), path.dirname(file))) {
                    dir = dir === undefined ? segment : path.join(dir, segment);
                    if (!newDirs.has(dir)) {
                        await fs.rm(path.join(outputFolder, dir), { force: true, recursive: true });
                        break;
                    }
                }
            }
        }
        // Clean up old archives
        if (cache.archives) {
            const archivesDir = path.join(outputFolder, 'archives');
            if (newCache.archives === undefined || newCache.archives.length === 0) {
                await fs.rm(archivesDir, { force: true, recursive: true });
            }
            for (const archive of cache.archives) {
                if (!newCache.archives.includes(archive)) {
                    await fs.rm(path.join(archivesDir, archive));
                }
            }
        }
        // Remove symlinks that are no longer exported.
        if (cache.symlinks) {
            const newSymlinks = new Set(newCache.symlinks);
            for (const symlink of cache.symlinks) {
                if (!newSymlinks.has(symlink)) {
                    await fs.rm(symlink);
                }
            }
        }
        // Update cache
        cache = newCache;
        await fs.ensureDir(path.dirname(cacheFile));
        await fs.writeFile(cacheFile, JSON.stringify(cache));
    }
    // Run afterAll script
    await scripts?.afterAll?.();
    // Count resources (excluding boilerplate)
    const resourceCounts = countResources(sandstonePack);
    const exports = [clientPath && 'client', serverPath && 'server'].filter(Boolean).join(' & ') || false;
    const countMsg = `${resourceCounts.functions} functions, ${resourceCounts.other} other resources`;
    if (!silent) {
        log(`Pack(s) compiled! (${countMsg})${exports ? ` Exported to ${exports}.` : ''}`);
    }
    return { resourceCounts, sandstoneConfig, sandstonePack, resetSandstonePack };
}
|
|
554
|
+
/**
 * Watch-mode wrapper around _buildProject: never throws; always resolves to a
 * result object ({ success, resourceCounts, timestamp, ... }) so the watch UI
 * can render both successes and failures.
 */
export async function _buildCommand(opts, _folder, existingContext, watching = false) {
    const folder = _folder ?? opts.path;
    try {
        const result = await _buildProject(opts, folder, true, existingContext, watching);
        return {
            success: true,
            resourceCounts: result?.resourceCounts ?? { functions: 0, other: 0 },
            timestamp: Date.now(),
            sandstoneConfig: result?.sandstoneConfig,
            sandstonePack: result?.sandstonePack,
            resetSandstonePack: result?.resetSandstonePack,
        };
    }
    catch (err) {
        const errorMessage = err.message || String(err);
        // Always include the stack for debugging, normalized so paths are
        // clickable in terminals: drop hot-hook cache-busting params and
        // file:// URL prefixes.
        const cleanedStack = (err.stack || '')
            .replace(/\?hot-hook=\d+/g, '') // Remove hot-hook cache busting params
            .replace(/file:\/\/\//g, '') // Convert file:/// URLs to paths (Windows)
            .replace(/file:\/\//g, ''); // Convert file:// URLs to paths (Unix)
        log('Build failed:', errorMessage);
        return {
            success: false,
            error: cleanedStack ? `${errorMessage}\n${cleanedStack}` : errorMessage,
            resourceCounts: { functions: 0, other: 0 },
            timestamp: Date.now(),
        };
    }
}
|
|
586
|
+
/**
 * CLI `build` entry point. When `silent`, resolves to a result object for
 * programmatic callers; otherwise prints progress/errors to the console and
 * resolves to undefined (errors are reported, not rethrown).
 */
export async function buildCommand(opts, _folder, silent = false) {
    // Commander passes Command object as second arg, so check for string explicitly
    const folder = (typeof _folder === 'string') ? _folder : opts.path;
    // Initialize logger without file for build mode
    initLoggerNoFile();
    setSilent(silent);
    try {
        const result = await _buildProject(opts, folder, silent);
        if (!silent) {
            return;
        }
        return {
            success: true,
            resourceCounts: result?.resourceCounts ?? { functions: 0, other: 0 },
            timestamp: Date.now(),
            sandstoneConfig: result?.sandstoneConfig,
            sandstonePack: result?.sandstonePack,
            resetSandstonePack: result?.resetSandstonePack,
        };
    }
    catch (err) {
        const errorMessage = err.message || String(err);
        if (!silent) {
            console.error(chalk.red('Build failed:'), errorMessage);
            if (opts.fullTrace) {
                console.error(err);
            }
        }
        log('Build failed:', errorMessage);
        if (!silent) {
            return;
        }
        return {
            success: false,
            error: opts.fullTrace ? String(err) : errorMessage,
            resourceCounts: { functions: 0, other: 0 },
            timestamp: Date.now(),
        };
    }
}
|