@magentrix-corp/magentrix-cli 1.3.16 → 1.3.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +25 -25
- package/README.md +1166 -1166
- package/actions/autopublish.old.js +293 -293
- package/actions/config.js +182 -182
- package/actions/create.js +466 -466
- package/actions/help.js +164 -164
- package/actions/iris/buildStage.js +874 -874
- package/actions/iris/delete.js +256 -256
- package/actions/iris/dev.js +391 -391
- package/actions/iris/index.js +6 -6
- package/actions/iris/link.js +375 -375
- package/actions/iris/recover.js +268 -268
- package/actions/main.js +80 -80
- package/actions/publish.js +1420 -1420
- package/actions/pull.js +684 -684
- package/actions/setup.js +148 -148
- package/actions/status.js +17 -17
- package/actions/update.js +248 -248
- package/bin/magentrix.js +393 -393
- package/package.json +55 -55
- package/utils/assetPaths.js +158 -158
- package/utils/autopublishLock.js +77 -77
- package/utils/cacher.js +206 -206
- package/utils/cli/checkInstanceUrl.js +76 -74
- package/utils/cli/helpers/compare.js +282 -282
- package/utils/cli/helpers/ensureApiKey.js +63 -63
- package/utils/cli/helpers/ensureCredentials.js +68 -68
- package/utils/cli/helpers/ensureInstanceUrl.js +75 -75
- package/utils/cli/writeRecords.js +262 -262
- package/utils/compare.js +135 -135
- package/utils/compress.js +17 -17
- package/utils/config.js +527 -527
- package/utils/debug.js +144 -144
- package/utils/diagnostics/testPublishLogic.js +96 -96
- package/utils/diff.js +49 -49
- package/utils/downloadAssets.js +291 -291
- package/utils/filetag.js +115 -115
- package/utils/hash.js +14 -14
- package/utils/iris/backup.js +411 -411
- package/utils/iris/builder.js +541 -541
- package/utils/iris/config-reader.js +664 -664
- package/utils/iris/deleteHelper.js +150 -150
- package/utils/iris/errors.js +537 -537
- package/utils/iris/linker.js +601 -601
- package/utils/iris/lock.js +360 -360
- package/utils/iris/validation.js +360 -360
- package/utils/iris/validator.js +281 -281
- package/utils/iris/zipper.js +248 -248
- package/utils/logger.js +291 -291
- package/utils/magentrix/api/assets.js +220 -220
- package/utils/magentrix/api/auth.js +107 -107
- package/utils/magentrix/api/createEntity.js +61 -61
- package/utils/magentrix/api/deleteEntity.js +55 -55
- package/utils/magentrix/api/iris.js +251 -251
- package/utils/magentrix/api/meqlQuery.js +36 -36
- package/utils/magentrix/api/retrieveEntity.js +86 -86
- package/utils/magentrix/api/updateEntity.js +66 -66
- package/utils/magentrix/fetch.js +168 -168
- package/utils/merge.js +22 -22
- package/utils/permissionError.js +70 -70
- package/utils/preferences.js +40 -40
- package/utils/progress.js +469 -469
- package/utils/spinner.js +43 -43
- package/utils/template.js +52 -52
- package/utils/updateFileBase.js +121 -121
- package/utils/workspaces.js +108 -108
- package/vars/config.js +11 -11
- package/vars/global.js +50 -50
package/utils/downloadAssets.js
CHANGED
|
@@ -1,292 +1,292 @@
|
|
|
1
|
-
import { EXPORT_ROOT } from "../vars/global.js";
|
|
2
|
-
import { downloadAssetsZip, listAssets } from "./magentrix/api/assets.js";
|
|
3
|
-
import fs from 'fs';
|
|
4
|
-
import extract from 'extract-zip';
|
|
5
|
-
import { v4 as uuidv4 } from 'uuid';
|
|
6
|
-
import fspath from 'path';
|
|
7
|
-
import { setFileTag } from "./filetag.js";
|
|
8
|
-
import { toLocalPath } from "./assetPaths.js";
|
|
9
|
-
import chalk from 'chalk';
|
|
10
|
-
import Config from "./config.js";
|
|
11
|
-
import { sha256 } from "./hash.js";
|
|
12
|
-
|
|
13
|
-
const config = new Config();
|
|
14
|
-
|
|
15
|
-
/**
 * Recursively walk the remote static-asset tree rooted at `assetPath`.
 *
 * Each entry returned by `listAssets` is annotated with both its server-side
 * API path and its local-file-system equivalent, so downstream code can use
 * the right form for API calls vs. disk writes. Folders additionally carry a
 * `Children` array produced by a sequential recursive walk.
 *
 * @param {string} instanceUrl - Base URL of the Magentrix instance.
 * @param {string} token - API token used by `listAssets`.
 * @param {string} assetPath - Server-side path to list from.
 * @returns {Promise<Array<object>>} Flat list of annotated assets at this level.
 */
export const walkAssets = async (instanceUrl, token, assetPath) => {
    const listing = await listAssets(instanceUrl, token, assetPath);
    const collected = [];

    for (const asset of listing.Assets) {
        if (asset.Type === 'Folder') {
            // Folders recurse before being pushed so Children is fully built.
            collected.push({
                ...asset,
                ApiPath: asset.Path, // original server-side path
                Path: toLocalPath(asset.Path), // local file-system form
                Children: await walkAssets(instanceUrl, token, asset.Path),
                ParentFolder: toLocalPath(listing.CurrentPath),
                ParentApiPath: listing.CurrentPath, // kept for download calls
            });
        } else {
            collected.push({
                ...asset,
                ApiPath: asset.Path, // original server-side path
                Path: toLocalPath(asset.Path), // local file-system form
                ParentFolder: toLocalPath(listing.CurrentPath),
                ParentApiPath: listing.CurrentPath, // kept for download calls
            });
        }
    }

    return collected;
}
|
|
42
|
-
|
|
43
|
-
/**
 * Download every asset under `path` from a Magentrix instance into EXPORT_ROOT,
 * mirroring the server's folder tree on the local file system.
 *
 * Strategy: walk the full remote tree first (walkAssets), then depth-first
 * create folders and fetch files in zip batches sized to stay under the
 * server's URL-length limit. Per-file/folder metadata ("base updates") is
 * accumulated in memory and written to base.json once at the end.
 *
 * @param {string} instanceUrl - Base URL of the Magentrix instance.
 * @param {string} token - API token ("Bearer" prefix added downstream).
 * @param {string} path - Server-side asset path to start from.
 * @param {?function(number, number, string): void} progressCallback - Optional
 *        hook invoked as (downloaded, total, message) after each batch.
 * @param {?object} logger - Optional logger with info()/warning() methods.
 * @returns {Promise<{tree: Array<object>}>} The walked asset tree.
 * @throws Re-throws any non-404 download error; 404s are skipped with a note.
 */
export const downloadAssets = async (instanceUrl, token, path, progressCallback = null, logger = null) => {
    const allAssets = await walkAssets(instanceUrl, token, path);

    // Count total files for progress tracking
    let totalFiles = 0;
    let downloadedFiles = 0;

    // Recursively count only File entries; folders contribute via Children.
    const countFiles = (assets) => {
        for (const asset of assets) {
            if (asset.Type === 'File') {
                totalFiles++;
            }
            if (asset.Type === 'Folder' && asset.Children) {
                countFiles(asset.Children);
            }
        }
    };
    countFiles(allAssets);

    // Collect all base updates to write at the end (batch operation)
    const baseUpdates = {};

    // Downloads one tree level: creates folders, recurses, then batch-downloads files.
    const iterateDownload = async (assets) => {
        // Handle empty assets array
        if (!assets || assets.length === 0) {
            return;
        }

        // All siblings share the same parent, so read it off the first entry.
        const parentApiPath = assets?.[0]?.ParentApiPath; // Use API path for API calls
        const parentLocalFolder = assets?.[0]?.ParentFolder; // Use local path for file system
        const folders = assets.filter(asset => asset.Type === 'Folder');
        const files = assets.filter(asset => asset.Type === 'File');

        for (const folder of folders) {
            const folderPath = fspath.join(EXPORT_ROOT, folder.Path);
            fs.mkdirSync(folderPath, { recursive: true });

            // Collect base update for folder
            if (fs.existsSync(folderPath)) {
                const folderStats = fs.statSync(folderPath);
                baseUpdates[folder.Path] = {
                    lastModified: folderStats.mtimeMs,
                    contentHash: '', // folders have no content to hash
                    compressedContent: '',
                    recordId: folder.Path,
                    type: folder.Type,
                    filePath: folder.Path,
                    lastKnownActualPath: folderPath,
                    // NOTE(review): resolved against process.cwd(), not EXPORT_ROOT
                    // (unlike lastKnownActualPath above) — confirm this asymmetry is intended.
                    lastKnownPath: fspath.resolve(folder.Path)
                };
            }

            await iterateDownload(folder?.Children || []);
        }

        if (files.length > 0) {
            // Batch files dynamically to avoid URL length limits
            // Testing shows the API fails at ~2150 chars (likely 2048 limit on the server)
            // We use 2000 as a safe limit with buffer
            const MAX_URL_LENGTH = 2000;
            const fileBatches = [];

            // Calculate base URL length once (everything except the file names)
            const baseUrl = new URL("/api/3.0/staticassets/download", instanceUrl);
            baseUrl.searchParams.set("path", parentApiPath);
            baseUrl.searchParams.set('download-format', 'zip');
            // The names will be added as: &names=encoded_comma_separated_list
            // Calculate the base without names parameter
            const baseUrlStr = baseUrl.toString();
            const baseLength = baseUrlStr.length + '&names='.length;

            let currentBatch = [];
            let currentNames = '';

            for (const file of files) {
                // Calculate what the names parameter would be if we add this file
                const testNames = currentNames
                    ? `${currentNames},${file.Name}`
                    : file.Name;

                // Calculate the URL length with this names string encoded
                const encodedTestNames = encodeURIComponent(testNames);
                const testLength = baseLength + encodedTestNames.length;

                // Check if adding this file would exceed the URL limit
                if (testLength > MAX_URL_LENGTH && currentBatch.length > 0) {
                    // Start a new batch - current batch is full
                    fileBatches.push([...currentBatch]);
                    currentBatch = [file];
                    currentNames = file.Name;
                } else {
                    // Add to current batch (a single over-long name still goes alone)
                    currentBatch.push(file);
                    currentNames = testNames;
                }
            }

            // Add the last batch if it has files
            if (currentBatch.length > 0) {
                fileBatches.push(currentBatch);
            }

            for (let batchIndex = 0; batchIndex < fileBatches.length; batchIndex++) {
                const batch = fileBatches[batchIndex];

                // Skip empty batches
                if (!batch || batch.length === 0) {
                    continue;
                }

                // Unique zip name per batch so parallel batches in one folder don't collide.
                const batchZipName = fileBatches.length > 1
                    ? `assets-batch-${batchIndex + 1}.zip`
                    : 'assets.zip';

                try {
                    // Validate that we have the required data
                    if (!parentApiPath) {
                        console.warn(`Warning: Skipping batch because parentApiPath is undefined. Files: ${batch.map(f => f.Name).join(', ')}`);
                        continue;
                    }

                    if (!parentLocalFolder) {
                        console.warn(`Warning: Skipping batch because parentLocalFolder is undefined. Path: ${parentApiPath}`);
                        continue;
                    }

                    // Debug logging for problematic paths
                    if (logger && parentApiPath.includes('@')) {
                        logger.info('Downloading batch with special characters', {
                            path: parentApiPath,
                            fileCount: batch.length,
                            firstFile: batch[0]?.Name
                        });
                    }

                    const savedAs = await downloadAssetsZip({
                        baseUrl: instanceUrl,
                        token: token, // "Bearer" prefix added in code
                        path: parentApiPath, // Use API path for API call
                        names: batch.map(file => file.Name),
                        outFile: fspath.join(EXPORT_ROOT, parentLocalFolder, batchZipName), // Use local path for file system
                    });

                    await extract(savedAs, {
                        dir: fspath.resolve(fspath.join(EXPORT_ROOT, parentLocalFolder)) // Use local path for extraction
                    });

                    // Zip is only a transport container; remove it after extraction.
                    fs.rmSync(savedAs);

                    // Collect base updates for all files in this batch
                    // We'll write them all at once at the end for performance
                    for (const file of batch) {
                        try {
                            const filePath = fspath.join(EXPORT_ROOT, file.Path);
                            if (fs.existsSync(filePath)) {
                                const fileStats = fs.statSync(filePath);
                                // NOTE(review): reads as UTF-8 before hashing — binary assets
                                // get decoded lossily, so their hash may differ from a
                                // byte-level hash; confirm sha256()'s expected input.
                                const fileContent = fs.readFileSync(filePath, "utf-8");
                                const contentHash = sha256(fileContent);

                                baseUpdates[file.Path] = {
                                    lastModified: fileStats.mtimeMs,
                                    contentHash,
                                    compressedContent: '', // Assets don't store content
                                    recordId: file.Path,
                                    type: file.Type,
                                    filePath: file.Path,
                                    lastKnownActualPath: filePath,
                                    // NOTE(review): cwd-relative resolve — see folder note above.
                                    lastKnownPath: fspath.resolve(file.Path)
                                };
                            }
                        } catch (err) {
                            // Log but don't fail the download
                            if (logger) {
                                logger.warning(`Failed to prepare base update for ${file.Path}`, { error: err.message });
                            }
                        }
                    }

                    // Update progress
                    downloadedFiles += batch.length;
                    if (progressCallback) {
                        progressCallback(downloadedFiles, totalFiles, `Downloaded ${downloadedFiles}/${totalFiles} files`);
                    }
                } catch (error) {
                    // Check if this is a 404 error (files don't exist on server)
                    const is404 = error.message && error.message.includes('404');

                    if (is404) {
                        // Files don't exist on server - log info but continue
                        // This is expected behavior for files that were deleted on the server
                        const fileNames = batch.map(f => f.Name).join(', ');
                        const infoMessage = `Skipped ${batch.length} missing files from ${parentApiPath || '(undefined path)'}`;

                        console.warn(chalk.gray(`\n ℹ️ ${infoMessage}`));
                        console.warn(chalk.gray(` These files don't exist on the server anymore.`));

                        // Log to file if logger is available (as INFO, not WARNING)
                        if (logger) {
                            logger.info(infoMessage, {
                                path: parentApiPath,
                                fileCount: batch.length,
                                firstFewFiles: fileNames.substring(0, 200) + (fileNames.length > 200 ? '...' : '')
                            });
                        }

                        // Still update progress as if we "downloaded" them (they don't exist)
                        downloadedFiles += batch.length;
                        if (progressCallback) {
                            progressCallback(downloadedFiles, totalFiles, `Skipped ${batch.length} missing files`);
                        }
                    } else {
                        // Other errors should still fail
                        const fileNames = batch.map(f => f.Name).join(', ');
                        console.error(`\nFailed to download batch from ${parentApiPath || '(undefined path)'}`);
                        console.error(`Files: ${fileNames}`);
                        console.error(`Error: ${error.message}\n`);
                        throw error;
                    }
                }
            }
        }
    }

    await iterateDownload(allAssets);

    // Write all base updates in ONE operation at the end (super fast!)
    if (Object.keys(baseUpdates).length > 0) {
        if (logger) {
            logger.info(`Writing ${Object.keys(baseUpdates).length} asset base updates to base.json`);
        }

        // Read current base.json once (pass null as key to get entire config)
        const baseConfig = config.read(null, { filename: "base.json" }) || {};

        // Merge all updates
        Object.assign(baseConfig, baseUpdates);

        // Write once (0o600: metadata may reference private instance paths)
        const baseJsonPath = fspath.join(fspath.dirname(config.projectConfigPath), "base.json");
        fs.writeFileSync(baseJsonPath, JSON.stringify(baseConfig, null, 2), { mode: 0o600 });

        if (logger) {
            logger.info(`Successfully wrote all asset base updates`);
        }
    }

    return {
        tree: allAssets
    };
}
|
|
1
|
+
import { EXPORT_ROOT } from "../vars/global.js";
|
|
2
|
+
import { downloadAssetsZip, listAssets } from "./magentrix/api/assets.js";
|
|
3
|
+
import fs from 'fs';
|
|
4
|
+
import extract from 'extract-zip';
|
|
5
|
+
import { v4 as uuidv4 } from 'uuid';
|
|
6
|
+
import fspath from 'path';
|
|
7
|
+
import { setFileTag } from "./filetag.js";
|
|
8
|
+
import { toLocalPath } from "./assetPaths.js";
|
|
9
|
+
import chalk from 'chalk';
|
|
10
|
+
import Config from "./config.js";
|
|
11
|
+
import { sha256 } from "./hash.js";
|
|
12
|
+
|
|
13
|
+
const config = new Config();
|
|
14
|
+
|
|
15
|
+
/**
 * Recursively walk the remote static-asset tree rooted at `assetPath`.
 *
 * Every asset coming back from `listAssets` is enriched with two path forms:
 * the server-side API path (for subsequent API calls) and a local-file-system
 * path (for writes under EXPORT_ROOT). Folder entries also receive a
 * `Children` array, filled by awaiting a recursive walk one level deeper.
 *
 * @param {string} instanceUrl - Base URL of the Magentrix instance.
 * @param {string} token - API token passed through to `listAssets`.
 * @param {string} assetPath - Server-side path to list from.
 * @returns {Promise<Array<object>>} Annotated assets at this level.
 */
export const walkAssets = async (instanceUrl, token, assetPath) => {
    const listing = await listAssets(instanceUrl, token, assetPath);
    const annotated = [];

    for (const asset of listing.Assets) {
        if (asset.Type === 'Folder') {
            // Recurse sequentially so the server sees one listing at a time.
            annotated.push({
                ...asset,
                ApiPath: asset.Path, // untouched server-side path
                Path: toLocalPath(asset.Path), // converted for local disk
                Children: await walkAssets(instanceUrl, token, asset.Path),
                ParentFolder: toLocalPath(listing.CurrentPath),
                ParentApiPath: listing.CurrentPath, // needed by download calls
            });
        } else {
            annotated.push({
                ...asset,
                ApiPath: asset.Path, // untouched server-side path
                Path: toLocalPath(asset.Path), // converted for local disk
                ParentFolder: toLocalPath(listing.CurrentPath),
                ParentApiPath: listing.CurrentPath, // needed by download calls
            });
        }
    }

    return annotated;
}
|
|
42
|
+
|
|
43
|
+
/**
 * Download every asset under `path` from a Magentrix instance into EXPORT_ROOT,
 * mirroring the server's folder tree on the local file system.
 *
 * Strategy: walk the full remote tree first (walkAssets), then depth-first
 * create folders and fetch files in zip batches sized to stay under the
 * server's URL-length limit. Per-file/folder metadata ("base updates") is
 * accumulated in memory and written to base.json once at the end.
 *
 * @param {string} instanceUrl - Base URL of the Magentrix instance.
 * @param {string} token - API token ("Bearer" prefix added downstream).
 * @param {string} path - Server-side asset path to start from.
 * @param {?function(number, number, string): void} progressCallback - Optional
 *        hook invoked as (downloaded, total, message) after each batch.
 * @param {?object} logger - Optional logger with info()/warning() methods.
 * @returns {Promise<{tree: Array<object>}>} The walked asset tree.
 * @throws Re-throws any non-404 download error; 404s are skipped with a note.
 */
export const downloadAssets = async (instanceUrl, token, path, progressCallback = null, logger = null) => {
    const allAssets = await walkAssets(instanceUrl, token, path);

    // Count total files for progress tracking
    let totalFiles = 0;
    let downloadedFiles = 0;

    // Recursively count only File entries; folders contribute via Children.
    const countFiles = (assets) => {
        for (const asset of assets) {
            if (asset.Type === 'File') {
                totalFiles++;
            }
            if (asset.Type === 'Folder' && asset.Children) {
                countFiles(asset.Children);
            }
        }
    };
    countFiles(allAssets);

    // Collect all base updates to write at the end (batch operation)
    const baseUpdates = {};

    // Downloads one tree level: creates folders, recurses, then batch-downloads files.
    const iterateDownload = async (assets) => {
        // Handle empty assets array
        if (!assets || assets.length === 0) {
            return;
        }

        // All siblings share the same parent, so read it off the first entry.
        const parentApiPath = assets?.[0]?.ParentApiPath; // Use API path for API calls
        const parentLocalFolder = assets?.[0]?.ParentFolder; // Use local path for file system
        const folders = assets.filter(asset => asset.Type === 'Folder');
        const files = assets.filter(asset => asset.Type === 'File');

        for (const folder of folders) {
            const folderPath = fspath.join(EXPORT_ROOT, folder.Path);
            fs.mkdirSync(folderPath, { recursive: true });

            // Collect base update for folder
            if (fs.existsSync(folderPath)) {
                const folderStats = fs.statSync(folderPath);
                baseUpdates[folder.Path] = {
                    lastModified: folderStats.mtimeMs,
                    contentHash: '', // folders have no content to hash
                    compressedContent: '',
                    recordId: folder.Path,
                    type: folder.Type,
                    filePath: folder.Path,
                    lastKnownActualPath: folderPath,
                    // NOTE(review): resolved against process.cwd(), not EXPORT_ROOT
                    // (unlike lastKnownActualPath above) — confirm this asymmetry is intended.
                    lastKnownPath: fspath.resolve(folder.Path)
                };
            }

            await iterateDownload(folder?.Children || []);
        }

        if (files.length > 0) {
            // Batch files dynamically to avoid URL length limits
            // Testing shows the API fails at ~2150 chars (likely 2048 limit on the server)
            // We use 2000 as a safe limit with buffer
            const MAX_URL_LENGTH = 2000;
            const fileBatches = [];

            // Calculate base URL length once (everything except the file names)
            const baseUrl = new URL("/api/3.0/staticassets/download", instanceUrl);
            baseUrl.searchParams.set("path", parentApiPath);
            baseUrl.searchParams.set('download-format', 'zip');
            // The names will be added as: &names=encoded_comma_separated_list
            // Calculate the base without names parameter
            const baseUrlStr = baseUrl.toString();
            const baseLength = baseUrlStr.length + '&names='.length;

            let currentBatch = [];
            let currentNames = '';

            for (const file of files) {
                // Calculate what the names parameter would be if we add this file
                const testNames = currentNames
                    ? `${currentNames},${file.Name}`
                    : file.Name;

                // Calculate the URL length with this names string encoded
                const encodedTestNames = encodeURIComponent(testNames);
                const testLength = baseLength + encodedTestNames.length;

                // Check if adding this file would exceed the URL limit
                if (testLength > MAX_URL_LENGTH && currentBatch.length > 0) {
                    // Start a new batch - current batch is full
                    fileBatches.push([...currentBatch]);
                    currentBatch = [file];
                    currentNames = file.Name;
                } else {
                    // Add to current batch (a single over-long name still goes alone)
                    currentBatch.push(file);
                    currentNames = testNames;
                }
            }

            // Add the last batch if it has files
            if (currentBatch.length > 0) {
                fileBatches.push(currentBatch);
            }

            for (let batchIndex = 0; batchIndex < fileBatches.length; batchIndex++) {
                const batch = fileBatches[batchIndex];

                // Skip empty batches
                if (!batch || batch.length === 0) {
                    continue;
                }

                // Unique zip name per batch so batches in one folder don't collide.
                const batchZipName = fileBatches.length > 1
                    ? `assets-batch-${batchIndex + 1}.zip`
                    : 'assets.zip';

                try {
                    // Validate that we have the required data
                    if (!parentApiPath) {
                        console.warn(`Warning: Skipping batch because parentApiPath is undefined. Files: ${batch.map(f => f.Name).join(', ')}`);
                        continue;
                    }

                    if (!parentLocalFolder) {
                        console.warn(`Warning: Skipping batch because parentLocalFolder is undefined. Path: ${parentApiPath}`);
                        continue;
                    }

                    // Debug logging for problematic paths
                    if (logger && parentApiPath.includes('@')) {
                        logger.info('Downloading batch with special characters', {
                            path: parentApiPath,
                            fileCount: batch.length,
                            firstFile: batch[0]?.Name
                        });
                    }

                    const savedAs = await downloadAssetsZip({
                        baseUrl: instanceUrl,
                        token: token, // "Bearer" prefix added in code
                        path: parentApiPath, // Use API path for API call
                        names: batch.map(file => file.Name),
                        outFile: fspath.join(EXPORT_ROOT, parentLocalFolder, batchZipName), // Use local path for file system
                    });

                    await extract(savedAs, {
                        dir: fspath.resolve(fspath.join(EXPORT_ROOT, parentLocalFolder)) // Use local path for extraction
                    });

                    // Zip is only a transport container; remove it after extraction.
                    fs.rmSync(savedAs);

                    // Collect base updates for all files in this batch
                    // We'll write them all at once at the end for performance
                    for (const file of batch) {
                        try {
                            const filePath = fspath.join(EXPORT_ROOT, file.Path);
                            if (fs.existsSync(filePath)) {
                                const fileStats = fs.statSync(filePath);
                                // NOTE(review): reads as UTF-8 before hashing — binary assets
                                // get decoded lossily, so their hash may differ from a
                                // byte-level hash; confirm sha256()'s expected input.
                                const fileContent = fs.readFileSync(filePath, "utf-8");
                                const contentHash = sha256(fileContent);

                                baseUpdates[file.Path] = {
                                    lastModified: fileStats.mtimeMs,
                                    contentHash,
                                    compressedContent: '', // Assets don't store content
                                    recordId: file.Path,
                                    type: file.Type,
                                    filePath: file.Path,
                                    lastKnownActualPath: filePath,
                                    // NOTE(review): cwd-relative resolve — see folder note above.
                                    lastKnownPath: fspath.resolve(file.Path)
                                };
                            }
                        } catch (err) {
                            // Log but don't fail the download
                            if (logger) {
                                logger.warning(`Failed to prepare base update for ${file.Path}`, { error: err.message });
                            }
                        }
                    }

                    // Update progress
                    downloadedFiles += batch.length;
                    if (progressCallback) {
                        progressCallback(downloadedFiles, totalFiles, `Downloaded ${downloadedFiles}/${totalFiles} files`);
                    }
                } catch (error) {
                    // Check if this is a 404 error (files don't exist on server)
                    const is404 = error.message && error.message.includes('404');

                    if (is404) {
                        // Files don't exist on server - log info but continue
                        // This is expected behavior for files that were deleted on the server
                        const fileNames = batch.map(f => f.Name).join(', ');
                        const infoMessage = `Skipped ${batch.length} missing files from ${parentApiPath || '(undefined path)'}`;

                        console.warn(chalk.gray(`\n ℹ️ ${infoMessage}`));
                        console.warn(chalk.gray(` These files don't exist on the server anymore.`));

                        // Log to file if logger is available (as INFO, not WARNING)
                        if (logger) {
                            logger.info(infoMessage, {
                                path: parentApiPath,
                                fileCount: batch.length,
                                firstFewFiles: fileNames.substring(0, 200) + (fileNames.length > 200 ? '...' : '')
                            });
                        }

                        // Still update progress as if we "downloaded" them (they don't exist)
                        downloadedFiles += batch.length;
                        if (progressCallback) {
                            progressCallback(downloadedFiles, totalFiles, `Skipped ${batch.length} missing files`);
                        }
                    } else {
                        // Other errors should still fail
                        const fileNames = batch.map(f => f.Name).join(', ');
                        console.error(`\nFailed to download batch from ${parentApiPath || '(undefined path)'}`);
                        console.error(`Files: ${fileNames}`);
                        console.error(`Error: ${error.message}\n`);
                        throw error;
                    }
                }
            }
        }
    }

    await iterateDownload(allAssets);

    // Write all base updates in ONE operation at the end (super fast!)
    if (Object.keys(baseUpdates).length > 0) {
        if (logger) {
            logger.info(`Writing ${Object.keys(baseUpdates).length} asset base updates to base.json`);
        }

        // Read current base.json once (pass null as key to get entire config)
        const baseConfig = config.read(null, { filename: "base.json" }) || {};

        // Merge all updates
        Object.assign(baseConfig, baseUpdates);

        // Write once (0o600: metadata may reference private instance paths)
        const baseJsonPath = fspath.join(fspath.dirname(config.projectConfigPath), "base.json");
        fs.writeFileSync(baseJsonPath, JSON.stringify(baseConfig, null, 2), { mode: 0o600 });

        if (logger) {
            logger.info(`Successfully wrote all asset base updates`);
        }
    }

    return {
        tree: allAssets
    };
}