@salesforce/storefront-next-dev 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.txt +181 -0
- package/README.md +302 -0
- package/dist/cartridge-services/index.d.ts +60 -0
- package/dist/cartridge-services/index.d.ts.map +1 -0
- package/dist/cartridge-services/index.js +954 -0
- package/dist/cartridge-services/index.js.map +1 -0
- package/dist/cli.js +3373 -0
- package/dist/configs/react-router.config.d.ts +13 -0
- package/dist/configs/react-router.config.d.ts.map +1 -0
- package/dist/configs/react-router.config.js +36 -0
- package/dist/configs/react-router.config.js.map +1 -0
- package/dist/extensibility/templates/install-instructions.mdc.hbs +192 -0
- package/dist/extensibility/templates/uninstall-instructions.mdc.hbs +137 -0
- package/dist/index.d.ts +327 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +2606 -0
- package/dist/index.js.map +1 -0
- package/dist/mrt/sfnext-server-chunk-DUt5XHAg.mjs +1 -0
- package/dist/mrt/sfnext-server-jiti-DjnmHo-6.mjs +10 -0
- package/dist/mrt/sfnext-server-jiti-DjnmHo-6.mjs.map +1 -0
- package/dist/mrt/ssr.d.ts +19 -0
- package/dist/mrt/ssr.d.ts.map +1 -0
- package/dist/mrt/ssr.mjs +246 -0
- package/dist/mrt/ssr.mjs.map +1 -0
- package/dist/mrt/streamingHandler.d.ts +11 -0
- package/dist/mrt/streamingHandler.d.ts.map +1 -0
- package/dist/mrt/streamingHandler.mjs +255 -0
- package/dist/mrt/streamingHandler.mjs.map +1 -0
- package/dist/react-router/Scripts.d.ts +36 -0
- package/dist/react-router/Scripts.d.ts.map +1 -0
- package/dist/react-router/Scripts.js +68 -0
- package/dist/react-router/Scripts.js.map +1 -0
- package/package.json +157 -0
|
@@ -0,0 +1,954 @@
|
|
|
1
|
+
import fs from "fs";
|
|
2
|
+
import path, { extname } from "path";
|
|
3
|
+
import archiver from "archiver";
|
|
4
|
+
import { access, mkdir, readFile, readdir, rm, writeFile } from "node:fs/promises";
|
|
5
|
+
import { basename, extname as extname$1, join, resolve } from "node:path";
|
|
6
|
+
import { execSync } from "node:child_process";
|
|
7
|
+
import { Node, Project } from "ts-morph";
|
|
8
|
+
import { existsSync, readFileSync, unlinkSync } from "node:fs";
|
|
9
|
+
import { tmpdir } from "node:os";
|
|
10
|
+
import { randomUUID } from "node:crypto";
|
|
11
|
+
import { npmRunPathEnv } from "npm-run-path";
|
|
12
|
+
|
|
13
|
+
//#region src/cartridge-services/types.ts
|
|
14
|
+
// Base WebDAV path on a Commerce Cloud instance; all upload/unzip/delete
// endpoints are built by appending to this prefix.
const WEBDAV_BASE = "/on/demandware.servlet/webdav/Sites";
// WebDAV folder that holds code versions (each version is a subfolder).
const CARTRIDGES_PATH = "Cartridges";
// HTTP verbs used by the WebDAV client below.
const HTTP_METHODS = {
	PUT: "PUT",
	POST: "POST",
	DELETE: "DELETE"
};
// Content-Type values used for uploads (zip), WebDAV form commands
// (urlencoded) and response-body sniffing (json).
const CONTENT_TYPES = {
	APPLICATION_ZIP: "application/zip",
	APPLICATION_FORM_URLENCODED: "application/x-www-form-urlencoded",
	APPLICATION_JSON: "application/json"
};
// Form values for the server-side WebDAV "method=UNZIP" command.
const WEBDAV_OPERATIONS = {
	UNZIP: "UNZIP",
	TARGET_CARTRIDGES: "cartridges"
};
|
|
30
|
+
|
|
31
|
+
//#endregion
|
|
32
|
+
//#region src/cartridge-services/sfcc-client.ts
|
|
33
|
+
/**
 * Build the request-options object used by `makeRequest` for WebDAV calls.
 *
 * @param instance - Commerce Cloud instance hostname
 * @param path$1 - WebDAV path appended to WEBDAV_BASE (e.g. 'Cartridges/v1')
 * @param basicAuth - Base64 encoded basic auth credentials
 * @param method - HTTP verb (PUT, POST, DELETE, ...)
 * @param formData - Optional form fields; only attached when truthy
 * @returns Options object with baseUrl, uri, auth, method and optional form
 */
function getWebdavOptions(instance, path$1, basicAuth, method, formData) {
	const options = {
		baseUrl: `https://${instance}`,
		uri: `${WEBDAV_BASE}/${path$1}`,
		auth: { basic: basicAuth },
		method
	};
	// Only attach `form` when form data was supplied (mirrors conditional spread).
	if (formData) options.form = formData;
	return options;
}
|
|
53
|
+
/**
 * Throw when an HTTP response signals failed authentication.
 *
 * @param response - Response summary with a numeric `statusCode`
 * @throws Error when the status code is 401 Unauthorized
 */
function checkAuthenticationError(response) {
	const UNAUTHORIZED = 401;
	if (response.statusCode !== UNAUTHORIZED) return;
	throw new Error("Authentication failed. Please login again.");
}
|
|
62
|
+
/**
 * Execute an HTTP request with the native fetch API.
 *
 * Adds a Basic Authorization header (caller-supplied headers may override it).
 * When `opts.form` is present the body is sent as application/x-www-form-urlencoded.
 * Otherwise callers must set their own Content-Type for any body they attach.
 *
 * @param opts - Request config: `uri`, `auth.basic`, plus fetch options
 *   (method, headers, body, duplex, ...) and an optional `form` object
 * @returns Promise of `{ response: { statusCode, statusMessage, headers }, body }`;
 *   body is parsed as JSON when the response Content-Type says so, else text
 * @throws Error wrapping any fetch/parse failure
 */
async function makeRequest(opts) {
	// Forward every caller option (method, body, duplex, ...) to fetch;
	// inject Basic auth but let explicit headers win.
	const requestInit = {
		...opts,
		headers: {
			Authorization: `Basic ${opts.auth.basic}`,
			...opts.headers
		}
	};
	if (opts.form) {
		const params = new URLSearchParams();
		for (const [key, value] of Object.entries(opts.form)) {
			params.append(key, String(value));
		}
		requestInit.body = params;
		requestInit.headers["Content-Type"] = CONTENT_TYPES.APPLICATION_FORM_URLENCODED;
	}
	try {
		const response = await fetch(opts.uri, requestInit);
		const contentType = response.headers.get("content-type");
		const body = contentType?.includes(CONTENT_TYPES.APPLICATION_JSON)
			? await response.json()
			: await response.text();
		// Flatten the Headers object into a plain record for callers.
		const headers = Object.fromEntries(response.headers);
		return {
			response: {
				statusCode: response.status,
				statusMessage: response.statusText,
				headers
			},
			body
		};
	} catch (error) {
		throw new Error(`HTTP request failed: ${error instanceof Error ? error.message : String(error)}`);
	}
}
|
|
111
|
+
|
|
112
|
+
//#endregion
|
|
113
|
+
//#region src/cartridge-services/validation.ts
|
|
114
|
+
/**
 * Error subtype raised by the parameter validators below so callers can
 * distinguish bad input from transport/server failures.
 */
var ValidationError = class extends Error {
	// Distinguishes this error type by name in logs and instanceof-free checks.
	name = "ValidationError";
	constructor(message) {
		super(message);
	}
};
|
|
123
|
+
/**
 * Validate a Commerce Cloud instance hostname.
 *
 * @param instance - Hostname to validate
 * @throws ValidationError when missing, not a string, blank, or not a domain
 */
function validateInstance(instance) {
	if (typeof instance !== "string" || instance === "") throw new ValidationError("Instance parameter is required and must be a string");
	if (instance.trim() === "") throw new ValidationError("Instance parameter cannot be empty");
	// A hostname must contain at least one dot to look like a domain.
	if (!instance.includes(".")) throw new ValidationError("Parameter instance must be a valid domain name");
}
|
|
134
|
+
/**
 * Validate a cartridge source path. Despite historical naming, the code below
 * requires a path with NO file extension (i.e. a directory-like path) — the
 * ZIP archive is created later by the deploy workflow, not passed in here.
 *
 * @param cartridgePath - The cartridge directory path to validate
 * @throws ValidationError if the path is missing, blank, or has a file extension
 */
function validateCartridgePath(cartridgePath) {
	if (!cartridgePath || typeof cartridgePath !== "string") throw new ValidationError("cartridge parameter is required and must be a string");
	if (cartridgePath.trim().length === 0) throw new ValidationError("cartridge parameter cannot be empty");
	// Reject anything that looks like a file: directories have no extension.
	const ext = extname(cartridgePath).toLowerCase();
	if (ext !== "") throw new ValidationError(`cartridge must be a directory, got: ${ext}`);
}
|
|
146
|
+
/**
 * Validate Base64-encoded Basic Auth credentials.
 *
 * @param basicAuth - Credentials string to validate
 * @throws ValidationError when missing, blank, or implausibly short
 */
function validateBasicAuth(basicAuth) {
	// Shortest plausible base64 "user:pass" credential; a heuristic, not a parse.
	const MIN_CREDENTIAL_LENGTH = 10;
	if (typeof basicAuth !== "string" || basicAuth === "") throw new ValidationError("Basic auth credentials parameter is required and must be a string");
	if (basicAuth.trim() === "") throw new ValidationError("Basic auth credentials parameter cannot be empty");
	if (basicAuth.length < MIN_CREDENTIAL_LENGTH) throw new ValidationError("Basic auth credentials appear to be too short to be valid");
}
|
|
157
|
+
/**
 * Validate a code version name.
 *
 * @param version - Code version name to validate
 * @throws ValidationError when missing, blank, or containing characters
 *   outside [a-zA-Z0-9._-]
 */
function validateVersion(version) {
	if (!version || typeof version !== "string") throw new ValidationError("Version parameter is required and must be a string");
	if (version.trim() === "") throw new ValidationError("Version parameter cannot be empty");
	const allowedPattern = /^[a-zA-Z0-9._-]+$/;
	if (!allowedPattern.test(version)) throw new ValidationError("Version parameter contains invalid characters. Only alphanumeric, dots, hyphens, and underscores are allowed");
}
|
|
168
|
+
/**
 * Validate a WebDAV path.
 *
 * @param webdavPath - Path to validate
 * @throws ValidationError when missing, not a string, or not starting with '/'
 */
function validateWebdavPath(webdavPath) {
	if (typeof webdavPath !== "string" || webdavPath === "") throw new ValidationError("WebDAV path parameter is required and must be a string");
	if (webdavPath.charAt(0) !== "/") throw new ValidationError("WebDAV path must start with a forward slash");
}
|
|
178
|
+
/**
 * Validate every parameter of the deployCode workflow in one place.
 * The individual validators throw on the FIRST invalid parameter, so the
 * call order below determines which error a caller sees.
 *
 * @param instance - Commerce Cloud instance hostname
 * @param codeVersionName - Target code version name
 * @param cartridgeDirectoryPath - Path to the source directory (no extension)
 * @param basicAuth - Base64 encoded basic auth credentials
 * @param cartridgeWebDevPath - WebDAV path for cartridge deployment (leading '/')
 * @throws ValidationError if any parameter is invalid
 */
function validateDeployCodeParams(instance, codeVersionName, cartridgeDirectoryPath, basicAuth, cartridgeWebDevPath) {
	validateInstance(instance);
	validateVersion(codeVersionName);
	validateCartridgePath(cartridgeDirectoryPath);
	validateBasicAuth(basicAuth);
	validateWebdavPath(cartridgeWebDevPath);
}
|
|
195
|
+
|
|
196
|
+
//#endregion
|
|
197
|
+
//#region src/cartridge-services/deploy-cartridge.ts
|
|
198
|
+
/**
 * Return the final path component (name + extension) of a file path.
 *
 * @param filePath - Full path to the file
 * @returns Filename portion, e.g. 'archive.zip' from '/path/to/archive.zip'
 */
function getFilename(filePath) {
	const { base } = path.parse(filePath);
	return base;
}
|
|
207
|
+
/**
 * Create a ZIP archive from a directory.
 *
 * Fix: the previous implementation only awaited `archive.finalize()`, which
 * resolves before the output file stream has flushed to disk, and it attached
 * no 'error' listeners — a stream/archiver error would crash the process as
 * an unhandled 'error' event. We now resolve only after the write stream
 * emits 'close' and reject on either error source.
 *
 * @param sourceDir - The directory to zip (contents placed at archive root)
 * @param outputPath - The output ZIP file path
 * @returns Promise resolving once the ZIP file is fully written
 * @throws Error if archiving or writing fails
 */
async function zipCartridge(sourceDir, outputPath) {
	const archive = archiver("zip", { zlib: { level: 9 } });
	const output = fs.createWriteStream(outputPath);
	// 'close' fires only after all bytes are flushed; 'error' on either side rejects.
	const written = new Promise((resolvePromise, reject) => {
		output.on("close", resolvePromise);
		output.on("error", reject);
		archive.on("error", reject);
	});
	archive.pipe(output);
	archive.directory(sourceDir, false);
	await archive.finalize();
	await written;
}
|
|
221
|
+
/**
 * Compose the absolute WebDAV URL for a file on an instance.
 *
 * @param instance - Commerce Cloud instance hostname
 * @param webdavPath - WebDAV path segment (e.g. 'Cartridges/local_metadata')
 * @param file - Local file path; only its basename is used remotely
 * @returns Complete https WebDAV endpoint URL
 */
function buildWebdavEndpoint(instance, webdavPath, file) {
	const remoteName = getFilename(file);
	return `https://${instance}${WEBDAV_BASE}/${webdavPath}/${remoteName}`;
}
|
|
232
|
+
/**
 * Ask the Commerce Cloud WebDAV server to unzip an already-uploaded archive.
 * Sends `method=UNZIP&target=cartridges` as a POST form to the file's URL.
 *
 * @param instance - Commerce Cloud instance hostname
 * @param webdavPath - WebDAV path where the archive was uploaded
 * @param file - Local file path; its basename identifies the remote archive
 * @param basicAuth - Base64 encoded basic auth credentials
 * @returns Promise of the `{ response, body }` pair from the unzip request
 * @throws Error when the response is 401 Unauthorized
 */
async function unzip(instance, webdavPath, file, basicAuth) {
	const form = {
		method: WEBDAV_OPERATIONS.UNZIP,
		target: WEBDAV_OPERATIONS.TARGET_CARTRIDGES
	};
	const opts = getWebdavOptions(instance, webdavPath, basicAuth, HTTP_METHODS.POST, form);
	// Target the uploaded file itself, not the directory.
	opts.uri = buildWebdavEndpoint(instance, webdavPath, file);
	const result = await makeRequest(opts);
	checkAuthenticationError(result.response);
	return result;
}
|
|
252
|
+
/**
 * Delete a remote file on Commerce Cloud via a WebDAV DELETE request.
 *
 * @param instance - Commerce Cloud instance hostname
 * @param webdavPath - WebDAV path containing the file
 * @param file - Local file path; its basename identifies the remote file
 * @param basicAuth - Base64 encoded basic auth credentials
 * @returns Promise of the `{ response, body }` pair from the delete request
 * @throws Error when the response is 401 Unauthorized
 */
async function deleteFile(instance, webdavPath, file, basicAuth) {
	const opts = getWebdavOptions(instance, webdavPath, basicAuth, HTTP_METHODS.DELETE);
	// Point the request at the specific file rather than the directory.
	opts.uri = buildWebdavEndpoint(instance, webdavPath, file);
	const result = await makeRequest(opts);
	checkAuthenticationError(result.response);
	return result;
}
|
|
269
|
+
/**
 * Upload a file into a code version on Commerce Cloud via WebDAV PUT.
 *
 * Fix: the status-code failure used to be thrown INSIDE the try block and
 * then re-wrapped by the catch, yielding a doubled message
 * (`Post file "x" failed: Post file "x" failed: ...`). The status check now
 * runs after the try/catch so each failure is reported exactly once.
 * A 401 raised by checkAuthenticationError is still wrapped with the
 * "Post file ... failed:" prefix, as before.
 *
 * @param instance - Commerce Cloud instance hostname
 * @param codeVersionName - Target code version name
 * @param filePath - Local file to upload (streamed as application/zip)
 * @param basicAuth - Base64 encoded basic auth credentials
 * @returns Promise of the `{ response, body }` pair from the upload
 * @throws Error on transport failure, 401, or non-2xx upload status
 */
async function postFile(instance, codeVersionName, filePath, basicAuth) {
	const targetPath = `${CARTRIDGES_PATH}/${codeVersionName}`;
	let result;
	try {
		const opts = getWebdavOptions(instance, targetPath, basicAuth, HTTP_METHODS.PUT);
		opts.uri = buildWebdavEndpoint(instance, targetPath, filePath);
		// Stream the file; fetch requires duplex:"half" for streamed request bodies.
		opts.body = fs.createReadStream(filePath);
		opts.duplex = "half";
		opts.headers = {
			...opts.headers,
			"Content-Type": CONTENT_TYPES.APPLICATION_ZIP
		};
		result = await makeRequest(opts);
		checkAuthenticationError(result.response);
	} catch (error) {
		throw new Error(`Post file "${filePath}" failed: ${error instanceof Error ? error.message : String(error)}`);
	}
	// Accept the usual WebDAV PUT success codes.
	const SUCCESS_CODES = [200, 201, 204];
	if (!SUCCESS_CODES.includes(result.response.statusCode)) throw new Error(`Post file "${filePath}" failed: ${result.response.statusCode} (${result.response.statusMessage})`);
	return result;
}
|
|
302
|
+
/**
 * Deploy code to Commerce Cloud: zip, upload, server-side unzip, clean up.
 *
 * Workflow:
 * 1. Validate all parameters (throws ValidationError before any I/O).
 * 2. Zip `sourceDir` into a temp file beside it (`metadata-<timestamp>.zip`).
 * 3. PUT the zip into the code version via WebDAV.
 * 4. Trigger a server-side UNZIP; accept 200/201/202.
 * 5. DELETE the remote zip; accept 200/204.
 * 6. Return the deployed version name (the zip basename minus '.zip').
 * The local temp zip is removed in `finally` whether or not deployment succeeds.
 *
 * @param instance - Commerce Cloud instance hostname
 * @param codeVersionName - Target code version name
 * @param sourceDir - Local directory containing the files to deploy
 * @param basicAuth - Base64 encoded basic auth credentials
 * @returns Promise resolving to `{ version }` with the deployed version name
 * @throws Error if any step of the deployment fails
 */
async function deployCode(instance, codeVersionName, sourceDir, basicAuth) {
	validateDeployCodeParams(instance, codeVersionName, sourceDir, basicAuth, `/${CARTRIDGES_PATH}/${codeVersionName}/cartridges`);
	// Timestamped name avoids collisions between concurrent deploys from the same parent dir.
	const tempZipPath = path.join(path.dirname(sourceDir), `metadata-${Date.now()}.zip`);
	try {
		await zipCartridge(sourceDir, tempZipPath);
		const file = path.basename(tempZipPath);
		await postFile(instance, codeVersionName, tempZipPath, basicAuth);
		const unzipResult = await unzip(instance, `${CARTRIDGES_PATH}/${codeVersionName}`, file, basicAuth);
		// 202 Accepted can be returned for asynchronous server-side unzips.
		if (![200, 201, 202].includes(unzipResult.response.statusCode)) throw new Error(`Deploy code ${file} failed (unzip step): ${unzipResult.response.statusCode} (${unzipResult.response.statusMessage})`);
		const deleteResult = await deleteFile(instance, `${CARTRIDGES_PATH}/${codeVersionName}`, file, basicAuth);
		if (![200, 204].includes(deleteResult.response.statusCode)) throw new Error(`Delete ZIP file ${file} after deployment failed (deleteFile step): ${deleteResult.response.statusCode} (${deleteResult.response.statusMessage})`);
		// `file` is already a basename; strip the extension to get the version label.
		return { version: getFilename(file).replace(".zip", "") };
	} catch (error) {
		// Preserve original Error instances (and their messages/stacks) untouched.
		if (error instanceof Error) throw error;
		throw new Error(`Deploy code ${sourceDir} failed: ${String(error)}`);
	} finally {
		// Best-effort local cleanup of the temp archive.
		if (fs.existsSync(tempZipPath)) fs.unlinkSync(tempZipPath);
	}
}
|
|
341
|
+
|
|
342
|
+
//#endregion
|
|
343
|
+
//#region src/cartridge-services/react-router-config.ts
|
|
344
|
+
// Process-wide cache of the CLI availability probe: null = not yet checked.
// NOTE(review): the cache ignores `projectDirectory` — the first call's result
// is reused for every later directory in the same process; confirm intended.
let isCliAvailable = null;
/**
 * Probe whether the `react-router` CLI can be executed from the given
 * project directory (using the project's npm bin PATH). Result is cached
 * for the lifetime of the process.
 *
 * @param projectDirectory - Directory used as cwd for the probe
 * @returns true when `react-router --version` exits successfully
 */
function checkReactRouterCli(projectDirectory) {
	if (isCliAvailable !== null) return isCliAvailable;
	try {
		execSync("react-router --version", {
			cwd: projectDirectory,
			// npmRunPathEnv() prepends node_modules/.bin so local CLIs resolve.
			env: npmRunPathEnv(),
			// Swallow stdout/stderr; only the exit code matters here.
			stdio: "pipe"
		});
		isCliAvailable = true;
	} catch {
		// Any spawn/exit failure means the CLI is unavailable.
		isCliAvailable = false;
	}
	return isCliAvailable;
}
|
|
359
|
+
/**
 * Get the fully resolved routes from React Router by invoking its CLI.
 * This ensures we get the exact same route resolution as React Router uses internally,
 * including all presets, file-system routes, and custom route configurations.
 * @param projectDirectory - The project root directory
 * @returns Array of resolved route config entries
 * @example
 * const routes = getReactRouterRoutes('/path/to/project');
 * // Returns the same structure as `react-router routes --json`
 */
function getReactRouterRoutes(projectDirectory) {
	if (!checkReactRouterCli(projectDirectory)) throw new Error("React Router CLI is not available. Please make sure @react-router/dev is installed and accessible.");
	// Random temp filename avoids collisions between concurrent invocations.
	const tempFile = join(tmpdir(), `react-router-routes-${randomUUID()}.json`);
	try {
		// Output is redirected to a file via the shell (`>`), so the CLI's own
		// stdout noise cannot corrupt the JSON we read back.
		execSync(`react-router routes --json > "${tempFile}"`, {
			cwd: projectDirectory,
			env: npmRunPathEnv(),
			encoding: "utf-8",
			stdio: [
				"pipe",
				"pipe",
				"pipe"
			]
		});
		const output = readFileSync(tempFile, "utf-8");
		return JSON.parse(output);
	} catch (error) {
		throw new Error(`Failed to get routes from React Router CLI: ${error.message}`);
	} finally {
		// Best-effort cleanup of the temp file; ignore deletion failures.
		try {
			if (existsSync(tempFile)) unlinkSync(tempFile);
		} catch {}
	}
}
|
|
393
|
+
/**
 * Convert a file path to its corresponding route path using React Router's CLI.
 * This ensures we get the exact same route resolution as React Router uses internally.
 * @param filePath - Absolute path to the route file
 * @param projectRoot - The project root directory
 * @returns The route path (e.g., '/cart', '/product/:productId'), or
 *   '/unknown' (with a console warning) when no route file matches
 * @example
 * const route = filePathToRoute('/path/to/project/src/routes/_app.cart.tsx', '/path/to/project');
 * // Returns: '/cart'
 */
function filePathToRoute(filePath, projectRoot) {
	// Normalize Windows separators so suffix comparison works cross-platform.
	const filePathPosix = filePath.replace(/\\/g, "/");
	const flatRoutes = flattenRoutes(getReactRouterRoutes(projectRoot));
	for (const route of flatRoutes) {
		const routeFilePosix = route.file.replace(/\\/g, "/");
		// NOTE(review): plain endsWith can match a partial filename suffix
		// (e.g. 'myroute.tsx' ends with 'route.tsx'); the '/'-prefixed variant
		// is the safe check — confirm the looser match is intentional.
		if (filePathPosix.endsWith(routeFilePosix) || filePathPosix.endsWith(`/${routeFilePosix}`)) return route.path;
		// Also try with any leading './' stripped from the configured route file.
		const routeFileNormalized = routeFilePosix.replace(/^\.\//, "");
		if (filePathPosix.endsWith(routeFileNormalized) || filePathPosix.endsWith(`/${routeFileNormalized}`)) return route.path;
	}
	console.warn(`Warning: Could not find route for file: ${filePath}`);
	return "/unknown";
}
|
|
415
|
+
/**
 * Flatten a nested route tree into a flat array with computed full paths.
 * Index routes and pathless (layout) routes inherit the parent path; only
 * entries that carry an `id` are emitted, but children are always recursed.
 *
 * @param routes - Nested route config entries
 * @param parentPath - Parent path prefix (internal recursion parameter)
 * @returns Flat array of `{ id, path, file, index }` records
 */
function flattenRoutes(routes, parentPath = "") {
	const flattened = [];
	// Join parent + segment, collapsing any duplicate slashes.
	const joinPaths = (parent, segment) => parent ? `${parent}${segment}`.replace(/\/+/g, "/") : segment;
	for (const route of routes) {
		let fullPath;
		if (route.index) {
			fullPath = parentPath || "/";
		} else if (route.path) {
			const segment = route.path.startsWith("/") ? route.path : `/${route.path}`;
			fullPath = joinPaths(parentPath, segment);
		} else {
			// Pathless layout route: inherits the parent path.
			fullPath = parentPath || "/";
		}
		if (route.id) {
			flattened.push({
				id: route.id,
				path: fullPath,
				file: route.file,
				index: route.index
			});
		}
		if (route.children && route.children.length > 0) {
			// Pathless parents pass their own parentPath through unchanged.
			const childParent = route.path ? fullPath : parentPath;
			flattened.push(...flattenRoutes(route.children, childParent));
		}
	}
	return flattened;
}
|
|
444
|
+
|
|
445
|
+
//#endregion
|
|
446
|
+
//#region src/cartridge-services/generate-cartridge.ts
|
|
447
|
+
// Directory names excluded when scanning a project for component sources.
// NOTE(review): not referenced in this visible chunk — presumably used by the
// file-walking code further down; confirm.
const SKIP_DIRECTORIES = [
	"build",
	"dist",
	"node_modules",
	".git",
	".next",
	"coverage"
];
// Default component group id (not referenced in this chunk — TODO confirm usage).
const DEFAULT_COMPONENT_GROUP = "odyssey_base";
// Architecture-type marker (not referenced in this chunk — TODO confirm usage).
const ARCH_TYPE_HEADLESS = "headless";
// Attribute types accepted by @AttributeDefinition; resolveAttributeType
// exits the process when a decorator declares anything else.
const VALID_ATTRIBUTE_TYPES = [
	"string",
	"text",
	"markup",
	"integer",
	"boolean",
	"product",
	"category",
	"file",
	"page",
	"image",
	"url",
	"enum",
	"custom",
	"cms_record"
];
// Maps TypeScript type-annotation text to attribute types; unmapped types
// fall back to "string" in resolveAttributeType.
const TYPE_MAPPING = {
	String: "string",
	string: "string",
	Number: "integer",
	number: "integer",
	Boolean: "boolean",
	boolean: "boolean",
	Date: "string",
	URL: "url",
	CMSRecord: "cms_record"
};
|
|
484
|
+
/**
 * Resolve the attribute type for a decorated field.
 * Precedence: explicit decorator type (validated) > mapped TypeScript
 * annotation text > "string" fallback.
 *
 * @param decoratorType - Type string from the @AttributeDefinition decorator, if any
 * @param tsMorphType - Type text inferred from the property's annotation
 * @param fieldName - Field name, used only for the error message
 * @returns One of VALID_ATTRIBUTE_TYPES
 */
function resolveAttributeType(decoratorType, tsMorphType, fieldName) {
	if (decoratorType) {
		if (!VALID_ATTRIBUTE_TYPES.includes(decoratorType)) {
			console.error(`Error: Invalid attribute type '${decoratorType}' for field '${fieldName || "unknown"}'. Valid types are: ${VALID_ATTRIBUTE_TYPES.join(", ")}`);
			// NOTE(review): terminates the whole process (CLI-style handling);
			// any embedding caller cannot catch this — confirm intended.
			process.exit(1);
		}
		return decoratorType;
	}
	if (tsMorphType && TYPE_MAPPING[tsMorphType]) return TYPE_MAPPING[tsMorphType];
	return "string";
}
|
|
495
|
+
/**
 * Turn a camelCase field name into a spaced, capitalized label
 * (e.g. 'productId' -> 'Product Id').
 *
 * @param fieldName - camelCase identifier
 * @returns Human-readable label
 */
function toHumanReadableName(fieldName) {
	// Insert a space before every uppercase letter, capitalize the first
	// character, then trim any leading space the insertion produced.
	const spaced = fieldName.replace(/([A-Z])/g, " $1");
	const capitalized = spaced.charAt(0).toUpperCase() + spaced.slice(1);
	return capitalized.trim();
}
|
|
498
|
+
/**
 * Convert a space- or hyphen-separated name to camelCase.
 * Names with no separators are returned unchanged (case preserved).
 *
 * @param name - Name possibly containing spaces or hyphens
 * @returns camelCase name
 */
function toCamelCaseFileName(name) {
	const hasSeparator = /[\s-]/.test(name);
	if (!hasSeparator) return name;
	const words = name.split(/[\s-]+/);
	return words.reduce((acc, word, index) => {
		if (index === 0) return word.toLowerCase();
		return acc + word.charAt(0).toUpperCase() + word.slice(1).toLowerCase();
	}, "");
}
|
|
505
|
+
/**
 * Extract the first type name from a property's type annotation text.
 * Union/intersection annotations are reduced to their first member;
 * missing annotations or ts-morph failures fall back to 'string'.
 *
 * @param property - ts-morph PropertyDeclaration (only getTypeNode is used)
 * @param _sourceFile - Unused; kept for call-site compatibility
 * @returns First type name, trimmed, or 'string'
 */
function getTypeFromTsMorph(property, _sourceFile) {
	const FALLBACK = "string";
	try {
		const typeNode = property.getTypeNode();
		if (!typeNode) return FALLBACK;
		const [firstUnionMember] = typeNode.getText().split("|");
		const [firstIntersectionMember] = firstUnionMember.split("&");
		return firstIntersectionMember.trim();
	} catch {
		return FALLBACK;
	}
}
|
|
512
|
+
/**
 * Convert a ts-morph AST expression into a plain JavaScript value.
 * Literals become their values, object/array literals recurse, and
 * anything else falls back to its source text.
 *
 * @param expression - ts-morph Expression node
 * @returns Plain value (string | number | boolean | object | array | source text)
 */
function parseExpression(expression) {
	if (Node.isStringLiteral(expression)) return expression.getLiteralValue();
	if (Node.isNumericLiteral(expression)) return expression.getLiteralValue();
	if (Node.isTrueLiteral(expression)) return true;
	if (Node.isFalseLiteral(expression)) return false;
	if (Node.isObjectLiteralExpression(expression)) return parseNestedObject(expression);
	if (Node.isArrayLiteralExpression(expression)) return parseArrayLiteral(expression);
	// Unknown node kinds: surface the raw source text.
	return expression.getText();
}
|
|
521
|
+
/**
 * Convert an object-literal AST node to a plain object.
 * Only `key: value` property assignments are considered; shorthand/spread
 * members are skipped. On failure a warning is logged and whatever was
 * parsed so far is returned.
 *
 * @param objectLiteral - ts-morph ObjectLiteralExpression
 * @returns Plain object of parsed property values
 */
function parseNestedObject(objectLiteral) {
	const parsed = {};
	try {
		for (const property of objectLiteral.getProperties()) {
			if (!Node.isPropertyAssignment(property)) continue;
			const initializer = property.getInitializer();
			if (initializer) parsed[property.getName()] = parseExpression(initializer);
		}
	} catch (error) {
		console.warn(`Warning: Could not parse nested object: ${error.message}`);
	}
	return parsed;
}
|
|
536
|
+
/**
 * Convert an array-literal AST node to a plain array.
 * On failure a warning is logged and the partially parsed array is returned.
 *
 * @param arrayLiteral - ts-morph ArrayLiteralExpression
 * @returns Plain array of parsed element values
 */
function parseArrayLiteral(arrayLiteral) {
	const parsed = [];
	try {
		for (const element of arrayLiteral.getElements()) {
			parsed.push(parseExpression(element));
		}
	} catch (error) {
		console.warn(`Warning: Could not parse array literal: ${error.message}`);
	}
	return parsed;
}
|
|
546
|
+
/**
 * Parse a decorator's arguments into a plain config object.
 *
 * Supported shapes:
 * - `@Decorator({ ...config })`            -> config copied into the result
 * - `@Decorator("id")`                     -> `{ id: "id" }`
 * - `@Decorator("id", { ...config })`      -> id plus config properties
 *
 * Fix: the property-copy loop over an object literal was duplicated verbatim
 * for both the first- and second-argument branches; it is now a single local
 * helper, so the two branches cannot drift apart.
 *
 * @param decorator - ts-morph Decorator (only getArguments is used)
 * @returns Plain config object; partial (with a warning) on parse failure
 */
function parseDecoratorArgs(decorator) {
	const result = {};
	// Copy every `key: value` property assignment of an object literal into `result`.
	const collectProps = (objectLiteral) => {
		for (const property of objectLiteral.getProperties()) {
			if (!Node.isPropertyAssignment(property)) continue;
			const initializer = property.getInitializer();
			if (initializer) result[property.getName()] = parseExpression(initializer);
		}
	};
	try {
		const args = decorator.getArguments();
		if (args.length === 0) return result;
		const [firstArg, secondArg] = args;
		if (Node.isObjectLiteralExpression(firstArg)) {
			collectProps(firstArg);
		} else if (Node.isStringLiteral(firstArg)) {
			result.id = parseExpression(firstArg);
			if (secondArg && Node.isObjectLiteralExpression(secondArg)) collectProps(secondArg);
		}
		return result;
	} catch (error) {
		console.warn(`Warning: Could not parse decorator arguments: ${error.message}`);
		return result;
	}
}
|
|
579
|
+
/**
 * Extract attribute definitions from a class's @AttributeDefinition-decorated
 * properties. For each decorated property, decorator config takes precedence
 * and the TypeScript annotation / field name provide fallbacks.
 *
 * @param sourceFile - ts-morph SourceFile containing the class
 * @param className - Name of the class to inspect
 * @returns Array of attribute objects ({ id, name, type, required,
 *   description } plus optional values / default_value); empty when the
 *   class is missing or parsing fails (a warning is logged)
 */
function extractAttributesFromSource(sourceFile, className) {
	const attributes = [];
	try {
		const classDeclaration = sourceFile.getClass(className);
		if (!classDeclaration) return attributes;
		const properties = classDeclaration.getProperties();
		for (const property of properties) {
			const attributeDecorator = property.getDecorator("AttributeDefinition");
			// Undecorated properties are not attributes.
			if (!attributeDecorator) continue;
			const fieldName = property.getName();
			const config = parseDecoratorArgs(attributeDecorator);
			// Optional properties (`foo?: ...`) default to not-required.
			const isRequired = !property.hasQuestionToken();
			const inferredType = config.type || getTypeFromTsMorph(property, sourceFile);
			const attribute = {
				id: config.id || fieldName,
				name: config.name || toHumanReadableName(fieldName),
				type: resolveAttributeType(config.type, inferredType, fieldName),
				// Explicit decorator `required` wins over the question-token inference.
				required: config.required !== void 0 ? config.required : isRequired,
				description: config.description || `Field: ${fieldName}`
			};
			if (config.values) attribute.values = config.values;
			// `!== void 0` keeps falsy-but-defined defaults (0, false, "").
			if (config.defaultValue !== void 0) attribute.default_value = config.defaultValue;
			attributes.push(attribute);
		}
	} catch (error) {
		console.warn(`Warning: Could not extract attributes from class ${className}: ${error.message}`);
	}
	return attributes;
}
|
|
608
|
+
/**
 * Extract region definitions from a class-level @RegionDefinition decorator.
 * Expects the decorator's first argument to be an array of object literals;
 * each object is converted to a snake_case region definition record.
 *
 * @param sourceFile - ts-morph SourceFile containing the class
 * @param className - Name of the class to inspect
 * @returns Array of region definition objects; empty when the class or
 *   decorator is missing, or on parse failure (a warning is logged)
 */
function extractRegionDefinitionsFromSource(sourceFile, className) {
	const regionDefinitions = [];
	try {
		const classDeclaration = sourceFile.getClass(className);
		if (!classDeclaration) return regionDefinitions;
		const classRegionDecorator = classDeclaration.getDecorator("RegionDefinition");
		if (classRegionDecorator) {
			const args = classRegionDecorator.getArguments();
			if (args.length > 0) {
				const firstArg = args[0];
				if (Node.isArrayLiteralExpression(firstArg)) {
					const elements = firstArg.getElements();
					for (const element of elements) if (Node.isObjectLiteralExpression(element)) {
						// Reuse the decorator-arg parser by wrapping the element
						// in a minimal decorator-shaped object.
						const regionConfig = parseDecoratorArgs({ getArguments: () => [element] });
						const regionDefinition = {
							id: regionConfig.id || "region",
							name: regionConfig.name || "Region"
						};
						// Optional camelCase config keys map to snake_case output keys.
						if (regionConfig.componentTypes) regionDefinition.component_types = regionConfig.componentTypes;
						if (Array.isArray(regionConfig.componentTypeInclusions)) regionDefinition.component_type_inclusions = regionConfig.componentTypeInclusions.map((incl) => ({ type_id: incl }));
						if (Array.isArray(regionConfig.componentTypeExclusions)) regionDefinition.component_type_exclusions = regionConfig.componentTypeExclusions.map((excl) => ({ type_id: excl }));
						// `!== void 0` keeps falsy-but-defined values such as 0 / false.
						if (regionConfig.maxComponents !== void 0) regionDefinition.max_components = regionConfig.maxComponents;
						if (regionConfig.minComponents !== void 0) regionDefinition.min_components = regionConfig.minComponents;
						if (regionConfig.allowMultiple !== void 0) regionDefinition.allow_multiple = regionConfig.allowMultiple;
						if (regionConfig.defaultComponentConstructors) regionDefinition.default_component_constructors = regionConfig.defaultComponentConstructors;
						regionDefinitions.push(regionDefinition);
					}
				}
			}
		}
	} catch (error) {
		console.warn(`Warning: Could not extract region definitions from class ${className}: ${error.message}`);
	}
	return regionDefinitions;
}
|
|
643
|
+
/**
 * Scans a single source file for classes decorated with `@Component` and
 * builds component metadata for each one found.
 *
 * @param {string} filePath - Absolute path of the file to inspect.
 * @param {string} _projectRoot - Unused; kept for signature parity with the
 *   other process*File helpers.
 * @returns {Promise<object[]>} Component metadata objects; empty when the
 *   file has no `@Component` marker or cannot be read/parsed.
 */
async function processComponentFile(filePath, _projectRoot) {
  let source;
  try {
    source = await readFile(filePath, "utf-8");
  } catch (readError) {
    console.warn(`Warning: Could not read file ${filePath}:`, readError.message);
    return [];
  }
  const discovered = [];
  // Cheap substring pre-filter before paying for a ts-morph parse.
  if (!source.includes("@Component")) return discovered;
  try {
    const parsed = new Project({
      useInMemoryFileSystem: true,
      skipAddingFilesFromTsConfig: true
    }).createSourceFile(filePath, source);
    for (const cls of parsed.getClasses()) {
      const decorator = cls.getDecorator("Component");
      if (!decorator) continue;
      const name = cls.getName();
      if (!name) continue;
      const config = parseDecoratorArgs(decorator);
      const attrs = extractAttributesFromSource(parsed, name);
      const regions = extractRegionDefinitionsFromSource(parsed, name);
      // Decorator config wins; sensible defaults are derived from the class name.
      discovered.push({
        typeId: config.id || name.toLowerCase(),
        name: config.name || toHumanReadableName(name),
        group: config.group || DEFAULT_COMPONENT_GROUP,
        description: config.description || `Custom component: ${name}`,
        regionDefinitions: regions,
        attributes: attrs
      });
    }
  } catch (parseError) {
    console.warn(`Warning: Could not process file ${filePath}:`, parseError.message);
  }
  return discovered;
}
|
|
681
|
+
/**
 * Scans a single source file for classes decorated with `@PageType` and
 * builds page-type metadata for each match.
 *
 * @param {string} filePath - Absolute path of the file to inspect.
 * @param {string} projectRoot - Project root, used to derive the page route.
 * @returns {Promise<object[]>} Page-type metadata objects; empty when the
 *   file has no `@PageType` marker or cannot be read/parsed.
 */
async function processPageTypeFile(filePath, projectRoot) {
  let source;
  try {
    source = await readFile(filePath, "utf-8");
  } catch (readError) {
    console.warn(`Warning: Could not read file ${filePath}:`, readError.message);
    return [];
  }
  const found = [];
  // Cheap substring pre-filter before paying for a ts-morph parse.
  if (!source.includes("@PageType")) return found;
  try {
    const parsed = new Project({
      useInMemoryFileSystem: true,
      skipAddingFilesFromTsConfig: true
    }).createSourceFile(filePath, source);
    for (const cls of parsed.getClasses()) {
      const decorator = cls.getDecorator("PageType");
      if (!decorator) continue;
      const name = cls.getName();
      if (!name) continue;
      const config = parseDecoratorArgs(decorator);
      const attrs = extractAttributesFromSource(parsed, name);
      const regions = extractRegionDefinitionsFromSource(parsed, name);
      // Decorator config wins; defaults are derived from the class name.
      found.push({
        typeId: config.id || name.toLowerCase(),
        name: config.name || toHumanReadableName(name),
        description: config.description || `Custom page type: ${name}`,
        regionDefinitions: regions,
        supportedAspectTypes: config.supportedAspectTypes || [],
        attributes: attrs,
        route: filePathToRoute(filePath, projectRoot)
      });
    }
  } catch (parseError) {
    console.warn(`Warning: Could not process file ${filePath}:`, parseError.message);
  }
  return found;
}
|
|
721
|
+
/**
 * Reads a candidate aspect definition file and converts it into aspect
 * metadata. Only `.json` files that live under an `aspects/` directory and
 * start with `{` are considered; the JSON must declare both `name` and
 * `attribute_definitions` to be accepted.
 *
 * @param {string} filePath - Absolute path of the file to inspect.
 * @param {string} _projectRoot - Unused; kept for signature parity with the
 *   other process*File helpers.
 * @returns {Promise<object[]>} Zero or one aspect metadata objects.
 */
async function processAspectFile(filePath, _projectRoot) {
  let raw;
  try {
    raw = await readFile(filePath, "utf-8");
  } catch (readError) {
    console.warn(`Warning: Could not read file ${filePath}:`, readError.message);
    return [];
  }
  const results = [];
  // Must look like a JSON object document.
  const looksLikeJsonObject = filePath.endsWith(".json") && raw.trim().startsWith("{");
  if (!looksLikeJsonObject) return results;
  // Must live under an aspects/ directory (POSIX or Windows separators).
  const inAspectsDir = filePath.includes("/aspects/") || filePath.includes("\\aspects\\");
  if (!inAspectsDir) return results;
  try {
    const data = JSON.parse(raw);
    // Both fields are mandatory; anything else is silently skipped.
    if (data.name && data.attribute_definitions) {
      results.push({
        // The file name (sans extension) doubles as the aspect id.
        id: basename(filePath, ".json"),
        name: data.name,
        description: data.description || `Aspect type: ${data.name}`,
        attributeDefinitions: data.attribute_definitions || [],
        supportedObjectTypes: data.supported_object_types || []
      });
    }
  } catch (parseError) {
    console.warn(`Warning: Could not parse JSON in file ${filePath}:`, parseError.message);
  }
  return results;
}
|
|
748
|
+
/**
 * Writes one component cartridge JSON file into a per-group subdirectory of
 * `outputDir` and logs a one-line summary. In dry-run mode nothing is written
 * but the summary is still logged with a [DRY RUN] prefix.
 *
 * @param {object} component - Component metadata (typeId, name, group,
 *   description, regionDefinitions, attributes).
 * @param {string} outputDir - Root directory for component cartridges.
 * @param {boolean} [dryRun=false] - When true, skip all filesystem writes.
 */
async function generateComponentCartridge(component, outputDir, dryRun = false) {
  const fileName = toCamelCaseFileName(component.typeId);
  const groupDir = join(outputDir, component.group);
  const targetPath = join(groupDir, `${fileName}.json`);
  if (!dryRun) {
    // Best-effort mkdir: a real failure will surface via writeFile below.
    try {
      await mkdir(groupDir, { recursive: true });
    } catch {}
    const payload = {
      name: component.name,
      description: component.description,
      group: component.group,
      arch_type: ARCH_TYPE_HEADLESS,
      region_definitions: component.regionDefinitions || [],
      // A single attribute group carrying the component's own identity.
      attribute_definition_groups: [{
        id: component.typeId,
        name: component.name,
        description: component.description,
        attribute_definitions: component.attributes
      }]
    };
    await writeFile(targetPath, JSON.stringify(payload, null, 2));
  }
  const prefix = dryRun ? " - [DRY RUN]" : " -";
  console.log(`${prefix} ${String(component.typeId)}: ${String(component.name)} (${String(component.attributes.length)} attributes) → ${fileName}.json`);
}
|
|
775
|
+
/**
 * Writes one page-type cartridge JSON file into `outputDir` and logs a
 * one-line summary. In dry-run mode nothing is written but the summary is
 * still logged with a [DRY RUN] prefix.
 *
 * Fix: the summary line previously dereferenced `pageType.attributes.length`
 * unconditionally, although the rest of this function treats `attributes` as
 * optional (`pageType.attributes && pageType.attributes.length > 0`). A page
 * type without attributes now logs "0 attributes" instead of throwing.
 *
 * @param {object} pageType - Page-type metadata (typeId, name, description,
 *   regionDefinitions, supportedAspectTypes, attributes, route).
 * @param {string} outputDir - Directory for page-type cartridges.
 * @param {boolean} [dryRun=false] - When true, skip all filesystem writes.
 */
async function generatePageTypeCartridge(pageType, outputDir, dryRun = false) {
  const fileName = toCamelCaseFileName(pageType.name);
  const outputPath = join(outputDir, `${fileName}.json`);
  if (!dryRun) {
    const cartridgeData = {
      name: pageType.name,
      description: pageType.description,
      arch_type: ARCH_TYPE_HEADLESS,
      region_definitions: pageType.regionDefinitions || []
    };
    // Attribute groups are emitted only when the page type declares attributes.
    if (pageType.attributes && pageType.attributes.length > 0) cartridgeData.attribute_definition_groups = [{
      id: pageType.typeId || fileName,
      name: pageType.name,
      description: pageType.description,
      attribute_definitions: pageType.attributes
    }];
    if (pageType.supportedAspectTypes) cartridgeData.supported_aspect_types = pageType.supportedAspectTypes;
    if (pageType.route) cartridgeData.route = pageType.route;
    await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));
  }
  const prefix = dryRun ? " - [DRY RUN]" : " -";
  // Guarded access: `attributes` may legitimately be absent (see above).
  const attributeCount = pageType.attributes?.length ?? 0;
  console.log(`${prefix} ${String(pageType.name)}: ${String(pageType.description)} (${String(attributeCount)} attributes) → ${fileName}.json`);
}
|
|
798
|
+
/**
 * Writes one aspect cartridge JSON file into `outputDir` and logs a one-line
 * summary. In dry-run mode nothing is written but the summary is still logged
 * with a [DRY RUN] prefix.
 *
 * Fix: the summary line previously dereferenced
 * `aspect.attributeDefinitions.length` unconditionally, although the body
 * defensively uses `aspect.attributeDefinitions || []`. An aspect without
 * attribute definitions now logs "0 attributes" instead of throwing.
 *
 * @param {object} aspect - Aspect metadata (id, name, description,
 *   attributeDefinitions, supportedObjectTypes).
 * @param {string} outputDir - Directory for aspect cartridges.
 * @param {boolean} [dryRun=false] - When true, skip all filesystem writes.
 */
async function generateAspectCartridge(aspect, outputDir, dryRun = false) {
  const fileName = toCamelCaseFileName(aspect.id);
  const outputPath = join(outputDir, `${fileName}.json`);
  if (!dryRun) {
    const cartridgeData = {
      name: aspect.name,
      description: aspect.description,
      arch_type: ARCH_TYPE_HEADLESS,
      attribute_definitions: aspect.attributeDefinitions || []
    };
    if (aspect.supportedObjectTypes) cartridgeData.supported_object_types = aspect.supportedObjectTypes;
    await writeFile(outputPath, JSON.stringify(cartridgeData, null, 2));
  }
  const prefix = dryRun ? " - [DRY RUN]" : " -";
  // Guarded access: `attributeDefinitions` may be absent (see above).
  const attributeCount = aspect.attributeDefinitions?.length ?? 0;
  console.log(`${prefix} ${String(aspect.name)}: ${String(aspect.description)} (${String(attributeCount)} attributes) → ${fileName}.json`);
}
|
|
814
|
+
/**
 * Runs ESLint with --fix on the specified directory to format JSON files.
 * This ensures generated JSON files match the project's Prettier/ESLint
 * configuration.
 *
 * @param {string} metadataDir - Directory containing the generated JSON files.
 * @param {string} projectRoot - Working directory for the ESLint invocation
 *   (so the project's own ESLint config is picked up).
 */
function lintGeneratedFiles(metadataDir, projectRoot) {
  try {
    console.log("🔧 Running ESLint --fix on generated JSON files...");
    execSync(`npx eslint "${metadataDir}/**/*.json" --fix --no-error-on-unmatched-pattern`, {
      cwd: projectRoot,
      stdio: "pipe",
      encoding: "utf-8"
    });
    console.log("✅ JSON files formatted successfully");
  } catch (err) {
    // Exit status 2 means ESLint itself could not run (config/usage error).
    if (err.status === 2) {
      const errMsg = err.stderr || err.stdout || "Unknown error";
      console.warn(`⚠️ Warning: Could not run ESLint --fix: ${errMsg}`);
      return;
    }
    // Non-zero exit with "error" in stderr: some issues were not auto-fixable.
    if (err.stderr && err.stderr.includes("error")) {
      console.warn(`⚠️ Warning: Some linting issues could not be auto-fixed. Run ESLint manually to review.`);
      return;
    }
    // Otherwise (e.g. exit 1 with only fixable issues) files were still fixed.
    console.log("✅ JSON files formatted successfully");
  }
}
|
|
836
|
+
/**
 * Orchestrates cartridge metadata generation for a project: scans source
 * files for `@Component` / `@PageType` decorators and aspect JSON files,
 * then writes cartridge JSON into `<metadataDir>/{components,pages,aspects}`.
 *
 * @param {string} projectDirectory - Project root; sources are scanned under
 *   its `src/` directory unless explicit file paths are given.
 * @param {string} metadataDirectory - Root output directory for cartridges.
 * @param {object} [options]
 * @param {string[]} [options.filePaths] - When non-empty, only these files
 *   are processed (incremental mode) and existing output is preserved.
 * @param {boolean} [options.dryRun] - Log what would happen without writing.
 * @param {boolean} [options.lintFix] - Run ESLint --fix on output unless
 *   explicitly set to false.
 * @returns {Promise<object>} Counts of generated components/pageTypes/aspects.
 *   NOTE: on any error this calls process.exit(1) instead of throwing.
 */
async function generateMetadata(projectDirectory, metadataDirectory, options) {
  try {
    const filePaths = options?.filePaths;
    // Incremental mode: only the caller-supplied files are (re)processed.
    const isIncrementalMode = filePaths && filePaths.length > 0;
    const dryRun = options?.dryRun || false;
    if (dryRun) console.log("🔍 [DRY RUN] Scanning for decorated components and page types...");
    else if (isIncrementalMode) console.log(`🔍 Generating metadata for ${filePaths.length} specified file(s)...`);
    else console.log("🔍 Generating metadata for decorated components and page types...");
    const projectRoot = resolve(projectDirectory);
    const srcDir = join(projectRoot, "src");
    const metadataDir = resolve(metadataDirectory);
    // One output subdirectory per artifact kind.
    const componentsOutputDir = join(metadataDir, "components");
    const pagesOutputDir = join(metadataDir, "pages");
    const aspectsOutputDir = join(metadataDir, "aspects");
    if (!dryRun) {
      if (!isIncrementalMode) {
        // Full regeneration: wipe previous output so stale files don't linger.
        console.log("🗑️ Cleaning existing output directories...");
        for (const outputDir of [
          componentsOutputDir,
          pagesOutputDir,
          aspectsOutputDir
        ]) try {
          await rm(outputDir, {
            recursive: true,
            force: true
          });
          console.log(` - Deleted: ${outputDir}`);
        } catch {
          console.log(` - Directory not found (skipping): ${outputDir}`);
        }
      } else console.log("📝 Incremental mode: existing cartridge files will be preserved/overwritten");
      console.log("📁 Creating output directories...");
      for (const outputDir of [
        componentsOutputDir,
        pagesOutputDir,
        aspectsOutputDir
      ]) try {
        await mkdir(outputDir, { recursive: true });
      } catch (error) {
        // mkdir failed: tolerate "already exists" (access succeeds),
        // otherwise abort — nothing can be written without the directory.
        try {
          await access(outputDir);
        } catch {
          console.error(`❌ Error: Failed to create output directory ${outputDir}: ${error.message}`);
          process.exit(1);
        }
      }
    } else if (isIncrementalMode) console.log(`📝 [DRY RUN] Would process ${filePaths.length} specific file(s)`);
    else console.log("📝 [DRY RUN] Would clean and regenerate all metadata files");
    let files = [];
    if (isIncrementalMode && filePaths) {
      // Caller-supplied paths are resolved relative to the project root.
      files = filePaths.map((fp) => resolve(projectRoot, fp));
      console.log(`📂 Processing ${files.length} specified file(s)...`);
    } else {
      // Recursively collect .ts/.tsx/.json files under src/, skipping
      // directories listed in SKIP_DIRECTORIES (e.g. node_modules, build output).
      const scanDirectory = async (dir) => {
        const entries = await readdir(dir, { withFileTypes: true });
        for (const entry of entries) {
          const fullPath = join(dir, entry.name);
          if (entry.isDirectory()) {
            if (!SKIP_DIRECTORIES.includes(entry.name)) await scanDirectory(fullPath);
          } else if (entry.isFile() && (extname$1(entry.name) === ".ts" || extname$1(entry.name) === ".tsx" || extname$1(entry.name) === ".json")) files.push(fullPath);
        }
      };
      await scanDirectory(srcDir);
    }
    const allComponents = [];
    const allPageTypes = [];
    const allAspects = [];
    // Each file is offered to all three extractors; the extractors themselves
    // decide (by marker/extension) whether the file is relevant to them.
    for (const file of files) {
      const components = await processComponentFile(file, projectRoot);
      allComponents.push(...components);
      const pageTypes = await processPageTypeFile(file, projectRoot);
      allPageTypes.push(...pageTypes);
      const aspects = await processAspectFile(file, projectRoot);
      allAspects.push(...aspects);
    }
    if (allComponents.length === 0 && allPageTypes.length === 0 && allAspects.length === 0) {
      console.log("⚠️ No decorated components, page types, or aspect files found.");
      return {
        componentsGenerated: 0,
        pageTypesGenerated: 0,
        aspectsGenerated: 0,
        totalFiles: 0
      };
    }
    if (allComponents.length > 0) {
      console.log(`✅ Found ${allComponents.length} decorated component(s):`);
      for (const component of allComponents) await generateComponentCartridge(component, componentsOutputDir, dryRun);
      if (dryRun) console.log(`📄 [DRY RUN] Would generate ${allComponents.length} component metadata file(s) in: ${componentsOutputDir}`);
      else console.log(`📄 Generated ${allComponents.length} component metadata file(s) in: ${componentsOutputDir}`);
    }
    if (allPageTypes.length > 0) {
      console.log(`✅ Found ${allPageTypes.length} decorated page type(s):`);
      for (const pageType of allPageTypes) await generatePageTypeCartridge(pageType, pagesOutputDir, dryRun);
      if (dryRun) console.log(`📄 [DRY RUN] Would generate ${allPageTypes.length} page type metadata file(s) in: ${pagesOutputDir}`);
      else console.log(`📄 Generated ${allPageTypes.length} page type metadata file(s) in: ${pagesOutputDir}`);
    }
    if (allAspects.length > 0) {
      console.log(`✅ Found ${allAspects.length} decorated aspect(s):`);
      for (const aspect of allAspects) await generateAspectCartridge(aspect, aspectsOutputDir, dryRun);
      if (dryRun) console.log(`📄 [DRY RUN] Would generate ${allAspects.length} aspect metadata file(s) in: ${aspectsOutputDir}`);
      else console.log(`📄 Generated ${allAspects.length} aspect metadata file(s) in: ${aspectsOutputDir}`);
    }
    // Lint-fix runs by default; only an explicit `lintFix: false` disables it.
    const shouldLintFix = options?.lintFix !== false;
    if (!dryRun && shouldLintFix && (allComponents.length > 0 || allPageTypes.length > 0 || allAspects.length > 0)) lintGeneratedFiles(metadataDir, projectRoot);
    return {
      componentsGenerated: allComponents.length,
      pageTypesGenerated: allPageTypes.length,
      aspectsGenerated: allAspects.length,
      totalFiles: allComponents.length + allPageTypes.length + allAspects.length
    };
  } catch (error) {
    // CLI-style hard failure: report and exit rather than propagating.
    console.error("❌ Error:", error.message);
    process.exit(1);
  }
}
|
|
951
|
+
|
|
952
|
+
//#endregion
|
|
953
|
+
export { deployCode, generateMetadata };
|
|
954
|
+
//# sourceMappingURL=index.js.map
|